
Andrew Kelley parent eaca8626 13973415 d0c06ca7
Merge pull request #19208 from ziglang/rework-autodoc

Redesign How Autodoc Works

.github/CODEOWNERS
@@ -1,8 +1,3 @@
-# Autodoc
-/src/Autodoc.zig @kristoff-it
-/src/autodoc/* @kristoff-it
-/lib/docs/* @kristoff-it
 
# std.json
/lib/std/json* @thejoshwolfe
 
 
/dev/null (file deleted)
@@ -1,12 +0,0 @@
---
name: Autodoc Issue
about: Issues with automatically generated docs, including stdlib docs.
title: 'Autodoc: {your issue}'
labels: autodoc
assignees: kristoff-it
 
---
 
Autodoc is still work in progress and as such many bugs and missing features are already known.
 
# Please report only <ins>regressions</ins>, i.e. things that worked in a previous build of new Autodoc (orange banner) that now don't work any more.
 
CMakeLists.txt
@@ -907,8 +907,6 @@ else()
endif()
 
# -Dno-langref is currently hardcoded because building the langref takes too damn long
-# -Dno-autodocs is currently hardcoded because the C backend generates a miscompilation
-# that prevents it from working.
# To obtain these two forms of documentation, run zig build against stage3 rather than stage2.
set(ZIG_BUILD_ARGS
--zig-lib-dir "${CMAKE_SOURCE_DIR}/lib"
@@ -918,7 +916,6 @@ set(ZIG_BUILD_ARGS
${ZIG_STATIC_ARG}
${ZIG_NO_LIB_ARG}
"-Dno-langref"
-"-Dno-autodocs"
${ZIG_SINGLE_THREADED_ARG}
${ZIG_PIE_ARG}
"-Dtarget=${ZIG_TARGET_TRIPLE}"
 
build.zig
@@ -31,7 +31,7 @@ pub fn build(b: *std.Build) !void {
const test_step = b.step("test", "Run all the tests");
const skip_install_lib_files = b.option(bool, "no-lib", "skip copying of lib/ files and langref to installation prefix. Useful for development") orelse false;
const skip_install_langref = b.option(bool, "no-langref", "skip copying of langref to the installation prefix") orelse skip_install_lib_files;
-const skip_install_autodocs = b.option(bool, "no-autodocs", "skip copying of standard library autodocs to the installation prefix") orelse skip_install_lib_files;
+const std_docs = b.option(bool, "std-docs", "include standard library autodocs") orelse false;
const no_bin = b.option(bool, "no-bin", "skip emitting compiler binary") orelse false;
 
const docgen_exe = b.addExecutable(.{
@@ -55,17 +55,19 @@ pub fn build(b: *std.Build) !void {
b.getInstallStep().dependOn(&install_langref.step);
}
 
-const autodoc_test = b.addTest(.{
+const autodoc_test = b.addObject(.{
.name = "std",
.root_source_file = .{ .path = "lib/std/std.zig" },
.target = target,
.zig_lib_dir = .{ .path = "lib" },
.optimize = .Debug,
});
const install_std_docs = b.addInstallDirectory(.{
.source_dir = autodoc_test.getEmittedDocs(),
.install_dir = .prefix,
.install_subdir = "doc/std",
});
-if (!skip_install_autodocs) {
+if (std_docs) {
b.getInstallStep().dependOn(&install_std_docs.step);
}
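The hunk above switches std docs generation from a test compilation to an object compilation and gates installation behind the new -Dstd-docs flag. For comparison, a minimal sketch of the same emitted-docs pattern in a downstream project's build.zig; the name "mylib", the path src/root.zig, and the install subdirectory are placeholders, not part of this change:

const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});

    // Build the root module as an object purely so the compiler emits autodocs for it.
    const docs_obj = b.addObject(.{
        .name = "mylib",
        .root_source_file = .{ .path = "src/root.zig" },
        .target = target,
        .optimize = .Debug,
    });

    // getEmittedDocs() exposes the generated docs/ directory as a lazy path,
    // which can then be copied into the install prefix like any other directory.
    const install_docs = b.addInstallDirectory(.{
        .source_dir = docs_obj.getEmittedDocs(),
        .install_dir = .prefix,
        .install_subdir = "doc/mylib",
    });
    b.getInstallStep().dependOn(&install_docs.step);
}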
 
 
(new file)
@@ -0,0 +1,384 @@
const builtin = @import("builtin");
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
 
pub fn main() !void {
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_instance.deinit();
const arena = arena_instance.allocator();
 
var general_purpose_allocator: std.heap.GeneralPurposeAllocator(.{}) = .{};
const gpa = general_purpose_allocator.allocator();
 
const args = try std.process.argsAlloc(arena);
const zig_lib_directory = args[1];
const zig_exe_path = args[2];
const global_cache_path = args[3];
 
var lib_dir = try std.fs.cwd().openDir(zig_lib_directory, .{});
defer lib_dir.close();
 
const listen_port: u16 = 0;
const address = std.net.Address.parseIp("127.0.0.1", listen_port) catch unreachable;
var http_server = try address.listen(.{});
const port = http_server.listen_address.in.getPort();
const url = try std.fmt.allocPrint(arena, "http://127.0.0.1:{d}/\n", .{port});
std.io.getStdOut().writeAll(url) catch {};
openBrowserTab(gpa, url[0 .. url.len - 1 :'\n']) catch |err| {
std.log.err("unable to open browser: {s}", .{@errorName(err)});
};
 
var context: Context = .{
.gpa = gpa,
.zig_exe_path = zig_exe_path,
.global_cache_path = global_cache_path,
.lib_dir = lib_dir,
.zig_lib_directory = zig_lib_directory,
};
 
while (true) {
const connection = try http_server.accept();
_ = std.Thread.spawn(.{}, accept, .{ &context, connection }) catch |err| {
std.log.err("unable to accept connection: {s}", .{@errorName(err)});
connection.stream.close();
continue;
};
}
}
 
fn accept(context: *Context, connection: std.net.Server.Connection) void {
defer connection.stream.close();
 
var read_buffer: [8000]u8 = undefined;
var server = std.http.Server.init(connection, &read_buffer);
while (server.state == .ready) {
var request = server.receiveHead() catch |err| switch (err) {
error.HttpConnectionClosing => return,
else => {
std.log.err("closing http connection: {s}", .{@errorName(err)});
return;
},
};
serveRequest(&request, context) catch |err| {
std.log.err("unable to serve {s}: {s}", .{ request.head.target, @errorName(err) });
return;
};
}
}
 
const Context = struct {
gpa: Allocator,
lib_dir: std.fs.Dir,
zig_lib_directory: []const u8,
zig_exe_path: []const u8,
global_cache_path: []const u8,
};
 
fn serveRequest(request: *std.http.Server.Request, context: *Context) !void {
if (std.mem.eql(u8, request.head.target, "/") or
std.mem.eql(u8, request.head.target, "/debug/"))
{
try serveDocsFile(request, context, "docs/index.html", "text/html");
} else if (std.mem.eql(u8, request.head.target, "/main.js") or
std.mem.eql(u8, request.head.target, "/debug/main.js"))
{
try serveDocsFile(request, context, "docs/main.js", "application/javascript");
} else if (std.mem.eql(u8, request.head.target, "/main.wasm")) {
try serveWasm(request, context, .ReleaseFast);
} else if (std.mem.eql(u8, request.head.target, "/debug/main.wasm")) {
try serveWasm(request, context, .Debug);
} else if (std.mem.eql(u8, request.head.target, "/sources.tar") or
std.mem.eql(u8, request.head.target, "/debug/sources.tar"))
{
try serveSourcesTar(request, context);
} else {
try request.respond("not found", .{
.status = .not_found,
.extra_headers = &.{
.{ .name = "content-type", .value = "text/plain" },
},
});
}
}
 
const cache_control_header: std.http.Header = .{
.name = "cache-control",
.value = "max-age=0, must-revalidate",
};
 
fn serveDocsFile(
request: *std.http.Server.Request,
context: *Context,
name: []const u8,
content_type: []const u8,
) !void {
const gpa = context.gpa;
// The desired API is actually sendfile, which will require enhancing std.http.Server.
// We load the file with every request so that the user can make changes to the file
// and refresh the HTML page without restarting this server.
const file_contents = try context.lib_dir.readFileAlloc(gpa, name, 10 * 1024 * 1024);
defer gpa.free(file_contents);
try request.respond(file_contents, .{
.extra_headers = &.{
.{ .name = "content-type", .value = content_type },
cache_control_header,
},
});
}
 
fn serveSourcesTar(request: *std.http.Server.Request, context: *Context) !void {
const gpa = context.gpa;
 
var send_buffer: [0x4000]u8 = undefined;
var response = request.respondStreaming(.{
.send_buffer = &send_buffer,
.respond_options = .{
.extra_headers = &.{
.{ .name = "content-type", .value = "application/x-tar" },
cache_control_header,
},
},
});
const w = response.writer();
 
var std_dir = try context.lib_dir.openDir("std", .{ .iterate = true });
defer std_dir.close();
 
var walker = try std_dir.walk(gpa);
defer walker.deinit();
 
while (try walker.next()) |entry| {
switch (entry.kind) {
.file => {
if (!std.mem.endsWith(u8, entry.basename, ".zig"))
continue;
if (std.mem.endsWith(u8, entry.basename, "test.zig"))
continue;
},
else => continue,
}
 
var file = try std_dir.openFile(entry.path, .{});
defer file.close();
 
const stat = try file.stat();
const padding = p: {
const remainder = stat.size % 512;
break :p if (remainder > 0) 512 - remainder else 0;
};
 
var file_header = std.tar.output.Header.init();
file_header.typeflag = .regular;
try file_header.setPath("std", entry.path);
try file_header.setSize(stat.size);
try file_header.updateChecksum();
try w.writeAll(std.mem.asBytes(&file_header));
try w.writeFile(file);
try w.writeByteNTimes(0, padding);
}
// intentionally omitting the pointless trailer
//try w.writeByteNTimes(0, 512 * 2);
try response.end();
}
 
fn serveWasm(
request: *std.http.Server.Request,
context: *Context,
optimize_mode: std.builtin.OptimizeMode,
) !void {
const gpa = context.gpa;
 
var arena_instance = std.heap.ArenaAllocator.init(gpa);
defer arena_instance.deinit();
const arena = arena_instance.allocator();
 
// Do the compilation every request, so that the user can edit the files
// and see the changes without restarting the server.
const wasm_binary_path = try buildWasmBinary(arena, context, optimize_mode);
// std.http.Server does not have a sendfile API yet.
const file_contents = try std.fs.cwd().readFileAlloc(gpa, wasm_binary_path, 10 * 1024 * 1024);
defer gpa.free(file_contents);
try request.respond(file_contents, .{
.extra_headers = &.{
.{ .name = "content-type", .value = "application/wasm" },
cache_control_header,
},
});
}
 
fn buildWasmBinary(
arena: Allocator,
context: *Context,
optimize_mode: std.builtin.OptimizeMode,
) ![]const u8 {
const gpa = context.gpa;
 
const main_src_path = try std.fs.path.join(arena, &.{
context.zig_lib_directory, "docs", "wasm", "main.zig",
});
 
var argv: std.ArrayListUnmanaged([]const u8) = .{};
 
try argv.appendSlice(arena, &.{
context.zig_exe_path,
"build-exe",
"-fno-entry",
"-O",
@tagName(optimize_mode),
"-target",
"wasm32-freestanding",
"-mcpu",
"baseline+atomics+bulk_memory+multivalue+mutable_globals+nontrapping_fptoint+reference_types+sign_ext",
"--cache-dir",
context.global_cache_path,
"--global-cache-dir",
context.global_cache_path,
"--name",
"autodoc",
"-rdynamic",
main_src_path,
"--listen=-",
});
 
var child = std.ChildProcess.init(argv.items, gpa);
child.stdin_behavior = .Pipe;
child.stdout_behavior = .Pipe;
child.stderr_behavior = .Pipe;
try child.spawn();
 
var poller = std.io.poll(gpa, enum { stdout, stderr }, .{
.stdout = child.stdout.?,
.stderr = child.stderr.?,
});
defer poller.deinit();
 
try sendMessage(child.stdin.?, .update);
try sendMessage(child.stdin.?, .exit);
 
const Header = std.zig.Server.Message.Header;
var result: ?[]const u8 = null;
var result_error_bundle = std.zig.ErrorBundle.empty;
 
const stdout = poller.fifo(.stdout);
 
poll: while (true) {
while (stdout.readableLength() < @sizeOf(Header)) {
if (!(try poller.poll())) break :poll;
}
const header = stdout.reader().readStruct(Header) catch unreachable;
while (stdout.readableLength() < header.bytes_len) {
if (!(try poller.poll())) break :poll;
}
const body = stdout.readableSliceOfLen(header.bytes_len);
 
switch (header.tag) {
.zig_version => {
if (!std.mem.eql(u8, builtin.zig_version_string, body)) {
return error.ZigProtocolVersionMismatch;
}
},
.error_bundle => {
const EbHdr = std.zig.Server.Message.ErrorBundle;
const eb_hdr = @as(*align(1) const EbHdr, @ptrCast(body));
const extra_bytes =
body[@sizeOf(EbHdr)..][0 .. @sizeOf(u32) * eb_hdr.extra_len];
const string_bytes =
body[@sizeOf(EbHdr) + extra_bytes.len ..][0..eb_hdr.string_bytes_len];
// TODO: use @ptrCast when the compiler supports it
const unaligned_extra = std.mem.bytesAsSlice(u32, extra_bytes);
const extra_array = try arena.alloc(u32, unaligned_extra.len);
@memcpy(extra_array, unaligned_extra);
result_error_bundle = .{
.string_bytes = try arena.dupe(u8, string_bytes),
.extra = extra_array,
};
},
.emit_bin_path => {
const EbpHdr = std.zig.Server.Message.EmitBinPath;
const ebp_hdr = @as(*align(1) const EbpHdr, @ptrCast(body));
if (!ebp_hdr.flags.cache_hit) {
std.log.info("source changes detected; rebuilt wasm component", .{});
}
result = try arena.dupe(u8, body[@sizeOf(EbpHdr)..]);
},
else => {}, // ignore other messages
}
 
stdout.discard(body.len);
}
 
const stderr = poller.fifo(.stderr);
if (stderr.readableLength() > 0) {
const owned_stderr = try stderr.toOwnedSlice();
defer gpa.free(owned_stderr);
std.debug.print("{s}", .{owned_stderr});
}
 
// Send EOF to stdin.
child.stdin.?.close();
child.stdin = null;
 
switch (try child.wait()) {
.Exited => |code| {
if (code != 0) {
std.log.err(
"the following command exited with error code {d}:\n{s}",
.{ code, try std.Build.Step.allocPrintCmd(arena, null, argv.items) },
);
return error.WasmCompilationFailed;
}
},
.Signal, .Stopped, .Unknown => {
std.log.err(
"the following command terminated unexpectedly:\n{s}",
.{try std.Build.Step.allocPrintCmd(arena, null, argv.items)},
);
return error.WasmCompilationFailed;
},
}
 
if (result_error_bundle.errorMessageCount() > 0) {
const color = std.zig.Color.auto;
result_error_bundle.renderToStdErr(color.renderOptions());
std.log.err("the following command failed with {d} compilation errors:\n{s}", .{
result_error_bundle.errorMessageCount(),
try std.Build.Step.allocPrintCmd(arena, null, argv.items),
});
return error.WasmCompilationFailed;
}
 
return result orelse {
std.log.err("child process failed to report result\n{s}", .{
try std.Build.Step.allocPrintCmd(arena, null, argv.items),
});
return error.WasmCompilationFailed;
};
}
 
fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void {
const header: std.zig.Client.Message.Header = .{
.tag = tag,
.bytes_len = 0,
};
try file.writeAll(std.mem.asBytes(&header));
}
 
fn openBrowserTab(gpa: Allocator, url: []const u8) !void {
// Until https://github.com/ziglang/zig/issues/19205 is implemented, we
// spawn a thread for this child process.
_ = try std.Thread.spawn(.{}, openBrowserTabThread, .{ gpa, url });
}
 
fn openBrowserTabThread(gpa: Allocator, url: []const u8) !void {
const main_exe = switch (builtin.os.tag) {
.windows => "explorer",
else => "xdg-open",
};
var child = std.ChildProcess.init(&.{ main_exe, url }, gpa);
child.stdin_behavior = .Ignore;
child.stdout_behavior = .Ignore;
child.stderr_behavior = .Ignore;
try child.spawn();
_ = try child.wait();
}
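main above expects three positional arguments (the Zig lib directory, the zig executable path, and a global cache directory) and prints the server's URL on stdout. A hedged sketch of how a caller might spawn it; the binary name ./docs-server and all paths are placeholders, not taken from this change:

const std = @import("std");

pub fn main() !void {
    var gpa_state: std.heap.GeneralPurposeAllocator(.{}) = .{};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    // Placeholder argv; a real caller supplies its own lib dir, zig exe, and cache dir.
    var child = std.ChildProcess.init(&.{
        "./docs-server", // hypothetical name for the compiled server above
        "/path/to/zig/lib", // args[1]: zig lib directory
        "/path/to/zig", // args[2]: zig executable
        "/path/to/global-cache", // args[3]: global cache directory
    }, gpa);
    child.stdin_behavior = .Ignore;
    child.stdout_behavior = .Inherit; // the server prints its http://127.0.0.1:<port>/ URL here
    child.stderr_behavior = .Inherit;
    try child.spawn();
    _ = try child.wait();
}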
 
/dev/null (file deleted)
@@ -1,10270 +0,0 @@
/* commonmark 0.30.0 https://github.com/commonmark/commonmark.js @license BSD3 */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.commonmark = {}));
}(this, (function (exports) { 'use strict';
 
function isContainer(node) {
switch (node._type) {
case "document":
case "block_quote":
case "list":
case "item":
case "paragraph":
case "heading":
case "emph":
case "strong":
case "link":
case "image":
case "custom_inline":
case "custom_block":
return true;
default:
return false;
}
}
 
var resumeAt = function(node, entering) {
this.current = node;
this.entering = entering === true;
};
 
var next = function() {
var cur = this.current;
var entering = this.entering;
 
if (cur === null) {
return null;
}
 
var container = isContainer(cur);
 
if (entering && container) {
if (cur._firstChild) {
this.current = cur._firstChild;
this.entering = true;
} else {
// stay on node but exit
this.entering = false;
}
} else if (cur === this.root) {
this.current = null;
} else if (cur._next === null) {
this.current = cur._parent;
this.entering = false;
} else {
this.current = cur._next;
this.entering = true;
}
 
return { entering: entering, node: cur };
};
 
var NodeWalker = function(root) {
return {
current: root,
root: root,
entering: true,
next: next,
resumeAt: resumeAt
};
};
 
var Node = function(nodeType, sourcepos) {
this._type = nodeType;
this._parent = null;
this._firstChild = null;
this._lastChild = null;
this._prev = null;
this._next = null;
this._sourcepos = sourcepos;
this._lastLineBlank = false;
this._lastLineChecked = false;
this._open = true;
this._string_content = null;
this._literal = null;
this._listData = {};
this._info = null;
this._destination = null;
this._title = null;
this._isFenced = false;
this._fenceChar = null;
this._fenceLength = 0;
this._fenceOffset = null;
this._level = null;
this._onEnter = null;
this._onExit = null;
};
 
var proto = Node.prototype;
 
Object.defineProperty(proto, "isContainer", {
get: function() {
return isContainer(this);
}
});
 
Object.defineProperty(proto, "type", {
get: function() {
return this._type;
}
});
 
Object.defineProperty(proto, "firstChild", {
get: function() {
return this._firstChild;
}
});
 
Object.defineProperty(proto, "lastChild", {
get: function() {
return this._lastChild;
}
});
 
Object.defineProperty(proto, "next", {
get: function() {
return this._next;
}
});
 
Object.defineProperty(proto, "prev", {
get: function() {
return this._prev;
}
});
 
Object.defineProperty(proto, "parent", {
get: function() {
return this._parent;
}
});
 
Object.defineProperty(proto, "sourcepos", {
get: function() {
return this._sourcepos;
}
});
 
Object.defineProperty(proto, "literal", {
get: function() {
return this._literal;
},
set: function(s) {
this._literal = s;
}
});
 
Object.defineProperty(proto, "destination", {
get: function() {
return this._destination;
},
set: function(s) {
this._destination = s;
}
});
 
Object.defineProperty(proto, "title", {
get: function() {
return this._title;
},
set: function(s) {
this._title = s;
}
});
 
Object.defineProperty(proto, "info", {
get: function() {
return this._info;
},
set: function(s) {
this._info = s;
}
});
 
Object.defineProperty(proto, "level", {
get: function() {
return this._level;
},
set: function(s) {
this._level = s;
}
});
 
Object.defineProperty(proto, "listType", {
get: function() {
return this._listData.type;
},
set: function(t) {
this._listData.type = t;
}
});
 
Object.defineProperty(proto, "listTight", {
get: function() {
return this._listData.tight;
},
set: function(t) {
this._listData.tight = t;
}
});
 
Object.defineProperty(proto, "listStart", {
get: function() {
return this._listData.start;
},
set: function(n) {
this._listData.start = n;
}
});
 
Object.defineProperty(proto, "listDelimiter", {
get: function() {
return this._listData.delimiter;
},
set: function(delim) {
this._listData.delimiter = delim;
}
});
 
Object.defineProperty(proto, "onEnter", {
get: function() {
return this._onEnter;
},
set: function(s) {
this._onEnter = s;
}
});
 
Object.defineProperty(proto, "onExit", {
get: function() {
return this._onExit;
},
set: function(s) {
this._onExit = s;
}
});
 
Node.prototype.appendChild = function(child) {
child.unlink();
child._parent = this;
if (this._lastChild) {
this._lastChild._next = child;
child._prev = this._lastChild;
this._lastChild = child;
} else {
this._firstChild = child;
this._lastChild = child;
}
};
 
Node.prototype.prependChild = function(child) {
child.unlink();
child._parent = this;
if (this._firstChild) {
this._firstChild._prev = child;
child._next = this._firstChild;
this._firstChild = child;
} else {
this._firstChild = child;
this._lastChild = child;
}
};
 
Node.prototype.unlink = function() {
if (this._prev) {
this._prev._next = this._next;
} else if (this._parent) {
this._parent._firstChild = this._next;
}
if (this._next) {
this._next._prev = this._prev;
} else if (this._parent) {
this._parent._lastChild = this._prev;
}
this._parent = null;
this._next = null;
this._prev = null;
};
 
Node.prototype.insertAfter = function(sibling) {
sibling.unlink();
sibling._next = this._next;
if (sibling._next) {
sibling._next._prev = sibling;
}
sibling._prev = this;
this._next = sibling;
sibling._parent = this._parent;
if (!sibling._next) {
sibling._parent._lastChild = sibling;
}
};
 
Node.prototype.insertBefore = function(sibling) {
sibling.unlink();
sibling._prev = this._prev;
if (sibling._prev) {
sibling._prev._next = sibling;
}
sibling._next = this;
this._prev = sibling;
sibling._parent = this._parent;
if (!sibling._prev) {
sibling._parent._firstChild = sibling;
}
};
 
Node.prototype.walker = function() {
var walker = new NodeWalker(this);
return walker;
};
 
/* Example of use of walker:
 
var walker = w.walker();
var event;
 
while (event = walker.next()) {
console.log(event.entering, event.node.type);
}
 
*/
 
var encodeCache = {};
 
 
// Create a lookup array where anything but characters in `chars` string
// and alphanumeric chars is percent-encoded.
//
function getEncodeCache(exclude) {
var i, ch, cache = encodeCache[exclude];
if (cache) { return cache; }
 
cache = encodeCache[exclude] = [];
 
for (i = 0; i < 128; i++) {
ch = String.fromCharCode(i);
 
if (/^[0-9a-z]$/i.test(ch)) {
// always allow unencoded alphanumeric characters
cache.push(ch);
} else {
cache.push('%' + ('0' + i.toString(16).toUpperCase()).slice(-2));
}
}
 
for (i = 0; i < exclude.length; i++) {
cache[exclude.charCodeAt(i)] = exclude[i];
}
 
return cache;
}
 
 
// Encode unsafe characters with percent-encoding, skipping already
// encoded sequences.
//
// - string - string to encode
// - exclude - list of characters to ignore (in addition to a-zA-Z0-9)
// - keepEscaped - don't encode '%' in a correct escape sequence (default: true)
//
function encode(string, exclude, keepEscaped) {
var i, l, code, nextCode, cache,
result = '';
 
if (typeof exclude !== 'string') {
// encode(string, keepEscaped)
keepEscaped = exclude;
exclude = encode.defaultChars;
}
 
if (typeof keepEscaped === 'undefined') {
keepEscaped = true;
}
 
cache = getEncodeCache(exclude);
 
for (i = 0, l = string.length; i < l; i++) {
code = string.charCodeAt(i);
 
if (keepEscaped && code === 0x25 /* % */ && i + 2 < l) {
if (/^[0-9a-f]{2}$/i.test(string.slice(i + 1, i + 3))) {
result += string.slice(i, i + 3);
i += 2;
continue;
}
}
 
if (code < 128) {
result += cache[code];
continue;
}
 
if (code >= 0xD800 && code <= 0xDFFF) {
if (code >= 0xD800 && code <= 0xDBFF && i + 1 < l) {
nextCode = string.charCodeAt(i + 1);
if (nextCode >= 0xDC00 && nextCode <= 0xDFFF) {
result += encodeURIComponent(string[i] + string[i + 1]);
i++;
continue;
}
}
result += '%EF%BF%BD';
continue;
}
 
result += encodeURIComponent(string[i]);
}
 
return result;
}
 
encode.defaultChars = ";/?:@&=+$,-_.!~*'()#";
encode.componentChars = "-_.!~*'()";
 
 
var encode_1 = encode;
 
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
 
function unwrapExports (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
 
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
 
function getCjsExportFromNamespace (n) {
return n && n['default'] || n;
}
 
var Aacute = "Á";
var aacute = "á";
var Abreve = "Ă";
var abreve = "ă";
var ac = "∾";
var acd = "∿";
var acE = "∾̳";
var Acirc = "Â";
var acirc = "â";
var acute = "´";
var Acy = "А";
var acy = "а";
var AElig = "Æ";
var aelig = "æ";
var af = "⁡";
var Afr = "𝔄";
var afr = "𝔞";
var Agrave = "À";
var agrave = "à";
var alefsym = "ℵ";
var aleph = "ℵ";
var Alpha = "Α";
var alpha = "α";
var Amacr = "Ā";
var amacr = "ā";
var amalg = "⨿";
var amp = "&";
var AMP = "&";
var andand = "⩕";
var And = "⩓";
var and = "∧";
var andd = "⩜";
var andslope = "⩘";
var andv = "⩚";
var ang = "∠";
var ange = "⦤";
var angle = "∠";
var angmsdaa = "⦨";
var angmsdab = "⦩";
var angmsdac = "⦪";
var angmsdad = "⦫";
var angmsdae = "⦬";
var angmsdaf = "⦭";
var angmsdag = "⦮";
var angmsdah = "⦯";
var angmsd = "∡";
var angrt = "∟";
var angrtvb = "⊾";
var angrtvbd = "⦝";
var angsph = "∢";
var angst = "Å";
var angzarr = "⍼";
var Aogon = "Ą";
var aogon = "ą";
var Aopf = "𝔸";
var aopf = "𝕒";
var apacir = "⩯";
var ap = "≈";
var apE = "⩰";
var ape = "≊";
var apid = "≋";
var apos = "'";
var ApplyFunction = "⁡";
var approx = "≈";
var approxeq = "≊";
var Aring = "Å";
var aring = "å";
var Ascr = "𝒜";
var ascr = "𝒶";
var Assign = "≔";
var ast = "*";
var asymp = "≈";
var asympeq = "≍";
var Atilde = "Ã";
var atilde = "ã";
var Auml = "Ä";
var auml = "ä";
var awconint = "∳";
var awint = "⨑";
var backcong = "≌";
var backepsilon = "϶";
var backprime = "‵";
var backsim = "∽";
var backsimeq = "⋍";
var Backslash = "∖";
var Barv = "⫧";
var barvee = "⊽";
var barwed = "⌅";
var Barwed = "⌆";
var barwedge = "⌅";
var bbrk = "⎵";
var bbrktbrk = "⎶";
var bcong = "≌";
var Bcy = "Б";
var bcy = "б";
var bdquo = "„";
var becaus = "∵";
var because = "∵";
var Because = "∵";
var bemptyv = "⦰";
var bepsi = "϶";
var bernou = "ℬ";
var Bernoullis = "ℬ";
var Beta = "Β";
var beta = "β";
var beth = "ℶ";
var between = "≬";
var Bfr = "𝔅";
var bfr = "𝔟";
var bigcap = "⋂";
var bigcirc = "◯";
var bigcup = "⋃";
var bigodot = "⨀";
var bigoplus = "⨁";
var bigotimes = "⨂";
var bigsqcup = "⨆";
var bigstar = "★";
var bigtriangledown = "▽";
var bigtriangleup = "△";
var biguplus = "⨄";
var bigvee = "⋁";
var bigwedge = "⋀";
var bkarow = "⤍";
var blacklozenge = "⧫";
var blacksquare = "▪";
var blacktriangle = "▴";
var blacktriangledown = "▾";
var blacktriangleleft = "◂";
var blacktriangleright = "▸";
var blank = "␣";
var blk12 = "▒";
var blk14 = "░";
var blk34 = "▓";
var block = "█";
var bne = "=⃥";
var bnequiv = "≡⃥";
var bNot = "⫭";
var bnot = "⌐";
var Bopf = "𝔹";
var bopf = "𝕓";
var bot = "⊥";
var bottom = "⊥";
var bowtie = "⋈";
var boxbox = "⧉";
var boxdl = "┐";
var boxdL = "╕";
var boxDl = "╖";
var boxDL = "╗";
var boxdr = "┌";
var boxdR = "╒";
var boxDr = "╓";
var boxDR = "╔";
var boxh = "─";
var boxH = "═";
var boxhd = "┬";
var boxHd = "╤";
var boxhD = "╥";
var boxHD = "╦";
var boxhu = "┴";
var boxHu = "╧";
var boxhU = "╨";
var boxHU = "╩";
var boxminus = "⊟";
var boxplus = "⊞";
var boxtimes = "⊠";
var boxul = "┘";
var boxuL = "╛";
var boxUl = "╜";
var boxUL = "╝";
var boxur = "└";
var boxuR = "╘";
var boxUr = "╙";
var boxUR = "╚";
var boxv = "│";
var boxV = "║";
var boxvh = "┼";
var boxvH = "╪";
var boxVh = "╫";
var boxVH = "╬";
var boxvl = "┤";
var boxvL = "╡";
var boxVl = "╢";
var boxVL = "╣";
var boxvr = "├";
var boxvR = "╞";
var boxVr = "╟";
var boxVR = "╠";
var bprime = "‵";
var breve = "˘";
var Breve = "˘";
var brvbar = "¦";
var bscr = "𝒷";
var Bscr = "ℬ";
var bsemi = "⁏";
var bsim = "∽";
var bsime = "⋍";
var bsolb = "⧅";
var bsol = "\\";
var bsolhsub = "⟈";
var bull = "•";
var bullet = "•";
var bump = "≎";
var bumpE = "⪮";
var bumpe = "≏";
var Bumpeq = "≎";
var bumpeq = "≏";
var Cacute = "Ć";
var cacute = "ć";
var capand = "⩄";
var capbrcup = "⩉";
var capcap = "⩋";
var cap = "∩";
var Cap = "⋒";
var capcup = "⩇";
var capdot = "⩀";
var CapitalDifferentialD = "ⅅ";
var caps = "∩︀";
var caret = "⁁";
var caron = "ˇ";
var Cayleys = "ℭ";
var ccaps = "⩍";
var Ccaron = "Č";
var ccaron = "č";
var Ccedil = "Ç";
var ccedil = "ç";
var Ccirc = "Ĉ";
var ccirc = "ĉ";
var Cconint = "∰";
var ccups = "⩌";
var ccupssm = "⩐";
var Cdot = "Ċ";
var cdot = "ċ";
var cedil = "¸";
var Cedilla = "¸";
var cemptyv = "⦲";
var cent = "¢";
var centerdot = "·";
var CenterDot = "·";
var cfr = "𝔠";
var Cfr = "ℭ";
var CHcy = "Ч";
var chcy = "ч";
var check = "✓";
var checkmark = "✓";
var Chi = "Χ";
var chi = "χ";
var circ = "ˆ";
var circeq = "≗";
var circlearrowleft = "↺";
var circlearrowright = "↻";
var circledast = "⊛";
var circledcirc = "⊚";
var circleddash = "⊝";
var CircleDot = "⊙";
var circledR = "®";
var circledS = "Ⓢ";
var CircleMinus = "⊖";
var CirclePlus = "⊕";
var CircleTimes = "⊗";
var cir = "○";
var cirE = "⧃";
var cire = "≗";
var cirfnint = "⨐";
var cirmid = "⫯";
var cirscir = "⧂";
var ClockwiseContourIntegral = "∲";
var CloseCurlyDoubleQuote = "”";
var CloseCurlyQuote = "’";
var clubs = "♣";
var clubsuit = "♣";
var colon = ":";
var Colon = "∷";
var Colone = "⩴";
var colone = "≔";
var coloneq = "≔";
var comma = ",";
var commat = "@";
var comp = "∁";
var compfn = "∘";
var complement = "∁";
var complexes = "ℂ";
var cong = "≅";
var congdot = "⩭";
var Congruent = "≡";
var conint = "∮";
var Conint = "∯";
var ContourIntegral = "∮";
var copf = "𝕔";
var Copf = "ℂ";
var coprod = "∐";
var Coproduct = "∐";
var copy = "©";
var COPY = "©";
var copysr = "℗";
var CounterClockwiseContourIntegral = "∳";
var crarr = "↵";
var cross = "✗";
var Cross = "⨯";
var Cscr = "𝒞";
var cscr = "𝒸";
var csub = "⫏";
var csube = "⫑";
var csup = "⫐";
var csupe = "⫒";
var ctdot = "⋯";
var cudarrl = "⤸";
var cudarrr = "⤵";
var cuepr = "⋞";
var cuesc = "⋟";
var cularr = "↶";
var cularrp = "⤽";
var cupbrcap = "⩈";
var cupcap = "⩆";
var CupCap = "≍";
var cup = "∪";
var Cup = "⋓";
var cupcup = "⩊";
var cupdot = "⊍";
var cupor = "⩅";
var cups = "∪︀";
var curarr = "↷";
var curarrm = "⤼";
var curlyeqprec = "⋞";
var curlyeqsucc = "⋟";
var curlyvee = "⋎";
var curlywedge = "⋏";
var curren = "¤";
var curvearrowleft = "↶";
var curvearrowright = "↷";
var cuvee = "⋎";
var cuwed = "⋏";
var cwconint = "∲";
var cwint = "∱";
var cylcty = "⌭";
var dagger = "†";
var Dagger = "‡";
var daleth = "ℸ";
var darr = "↓";
var Darr = "↡";
var dArr = "⇓";
var dash = "‐";
var Dashv = "⫤";
var dashv = "⊣";
var dbkarow = "⤏";
var dblac = "˝";
var Dcaron = "Ď";
var dcaron = "ď";
var Dcy = "Д";
var dcy = "д";
var ddagger = "‡";
var ddarr = "⇊";
var DD = "ⅅ";
var dd = "ⅆ";
var DDotrahd = "⤑";
var ddotseq = "⩷";
var deg = "°";
var Del = "∇";
var Delta = "Δ";
var delta = "δ";
var demptyv = "⦱";
var dfisht = "⥿";
var Dfr = "𝔇";
var dfr = "𝔡";
var dHar = "⥥";
var dharl = "⇃";
var dharr = "⇂";
var DiacriticalAcute = "´";
var DiacriticalDot = "˙";
var DiacriticalDoubleAcute = "˝";
var DiacriticalGrave = "`";
var DiacriticalTilde = "˜";
var diam = "⋄";
var diamond = "⋄";
var Diamond = "⋄";
var diamondsuit = "♦";
var diams = "♦";
var die = "¨";
var DifferentialD = "ⅆ";
var digamma = "ϝ";
var disin = "⋲";
var div = "÷";
var divide = "÷";
var divideontimes = "⋇";
var divonx = "⋇";
var DJcy = "Ђ";
var djcy = "ђ";
var dlcorn = "⌞";
var dlcrop = "⌍";
var dollar = "$";
var Dopf = "𝔻";
var dopf = "𝕕";
var Dot = "¨";
var dot = "˙";
var DotDot = "⃜";
var doteq = "≐";
var doteqdot = "≑";
var DotEqual = "≐";
var dotminus = "∸";
var dotplus = "∔";
var dotsquare = "⊡";
var doublebarwedge = "⌆";
var DoubleContourIntegral = "∯";
var DoubleDot = "¨";
var DoubleDownArrow = "⇓";
var DoubleLeftArrow = "⇐";
var DoubleLeftRightArrow = "⇔";
var DoubleLeftTee = "⫤";
var DoubleLongLeftArrow = "⟸";
var DoubleLongLeftRightArrow = "⟺";
var DoubleLongRightArrow = "⟹";
var DoubleRightArrow = "⇒";
var DoubleRightTee = "⊨";
var DoubleUpArrow = "⇑";
var DoubleUpDownArrow = "⇕";
var DoubleVerticalBar = "∥";
var DownArrowBar = "⤓";
var downarrow = "↓";
var DownArrow = "↓";
var Downarrow = "⇓";
var DownArrowUpArrow = "⇵";
var DownBreve = "̑";
var downdownarrows = "⇊";
var downharpoonleft = "⇃";
var downharpoonright = "⇂";
var DownLeftRightVector = "⥐";
var DownLeftTeeVector = "⥞";
var DownLeftVectorBar = "⥖";
var DownLeftVector = "↽";
var DownRightTeeVector = "⥟";
var DownRightVectorBar = "⥗";
var DownRightVector = "⇁";
var DownTeeArrow = "↧";
var DownTee = "⊤";
var drbkarow = "⤐";
var drcorn = "⌟";
var drcrop = "⌌";
var Dscr = "𝒟";
var dscr = "𝒹";
var DScy = "Ѕ";
var dscy = "ѕ";
var dsol = "⧶";
var Dstrok = "Đ";
var dstrok = "đ";
var dtdot = "⋱";
var dtri = "▿";
var dtrif = "▾";
var duarr = "⇵";
var duhar = "⥯";
var dwangle = "⦦";
var DZcy = "Џ";
var dzcy = "џ";
var dzigrarr = "⟿";
var Eacute = "É";
var eacute = "é";
var easter = "⩮";
var Ecaron = "Ě";
var ecaron = "ě";
var Ecirc = "Ê";
var ecirc = "ê";
var ecir = "≖";
var ecolon = "≕";
var Ecy = "Э";
var ecy = "э";
var eDDot = "⩷";
var Edot = "Ė";
var edot = "ė";
var eDot = "≑";
var ee = "ⅇ";
var efDot = "≒";
var Efr = "𝔈";
var efr = "𝔢";
var eg = "⪚";
var Egrave = "È";
var egrave = "è";
var egs = "⪖";
var egsdot = "⪘";
var el = "⪙";
var Element = "∈";
var elinters = "⏧";
var ell = "ℓ";
var els = "⪕";
var elsdot = "⪗";
var Emacr = "Ē";
var emacr = "ē";
var empty = "∅";
var emptyset = "∅";
var EmptySmallSquare = "◻";
var emptyv = "∅";
var EmptyVerySmallSquare = "▫";
var emsp13 = " ";
var emsp14 = " ";
var emsp = " ";
var ENG = "Ŋ";
var eng = "ŋ";
var ensp = " ";
var Eogon = "Ę";
var eogon = "ę";
var Eopf = "𝔼";
var eopf = "𝕖";
var epar = "⋕";
var eparsl = "⧣";
var eplus = "⩱";
var epsi = "ε";
var Epsilon = "Ε";
var epsilon = "ε";
var epsiv = "ϵ";
var eqcirc = "≖";
var eqcolon = "≕";
var eqsim = "≂";
var eqslantgtr = "⪖";
var eqslantless = "⪕";
var Equal = "⩵";
var equals = "=";
var EqualTilde = "≂";
var equest = "≟";
var Equilibrium = "⇌";
var equiv = "≡";
var equivDD = "⩸";
var eqvparsl = "⧥";
var erarr = "⥱";
var erDot = "≓";
var escr = "ℯ";
var Escr = "ℰ";
var esdot = "≐";
var Esim = "⩳";
var esim = "≂";
var Eta = "Η";
var eta = "η";
var ETH = "Ð";
var eth = "ð";
var Euml = "Ë";
var euml = "ë";
var euro = "€";
var excl = "!";
var exist = "∃";
var Exists = "∃";
var expectation = "ℰ";
var exponentiale = "ⅇ";
var ExponentialE = "ⅇ";
var fallingdotseq = "≒";
var Fcy = "Ф";
var fcy = "ф";
var female = "♀";
var ffilig = "ﬃ";
var fflig = "ﬀ";
var ffllig = "ﬄ";
var Ffr = "𝔉";
var ffr = "𝔣";
var filig = "ﬁ";
var FilledSmallSquare = "◼";
var FilledVerySmallSquare = "▪";
var fjlig = "fj";
var flat = "♭";
var fllig = "ﬂ";
var fltns = "▱";
var fnof = "ƒ";
var Fopf = "𝔽";
var fopf = "𝕗";
var forall = "∀";
var ForAll = "∀";
var fork = "⋔";
var forkv = "⫙";
var Fouriertrf = "ℱ";
var fpartint = "⨍";
var frac12 = "½";
var frac13 = "⅓";
var frac14 = "¼";
var frac15 = "⅕";
var frac16 = "⅙";
var frac18 = "⅛";
var frac23 = "⅔";
var frac25 = "⅖";
var frac34 = "¾";
var frac35 = "⅗";
var frac38 = "⅜";
var frac45 = "⅘";
var frac56 = "⅚";
var frac58 = "⅝";
var frac78 = "⅞";
var frasl = "⁄";
var frown = "⌢";
var fscr = "𝒻";
var Fscr = "ℱ";
var gacute = "ǵ";
var Gamma = "Γ";
var gamma = "γ";
var Gammad = "Ϝ";
var gammad = "ϝ";
var gap = "⪆";
var Gbreve = "Ğ";
var gbreve = "ğ";
var Gcedil = "Ģ";
var Gcirc = "Ĝ";
var gcirc = "ĝ";
var Gcy = "Г";
var gcy = "г";
var Gdot = "Ġ";
var gdot = "ġ";
var ge = "≥";
var gE = "≧";
var gEl = "⪌";
var gel = "⋛";
var geq = "≥";
var geqq = "≧";
var geqslant = "⩾";
var gescc = "⪩";
var ges = "⩾";
var gesdot = "⪀";
var gesdoto = "⪂";
var gesdotol = "⪄";
var gesl = "⋛︀";
var gesles = "⪔";
var Gfr = "𝔊";
var gfr = "𝔤";
var gg = "≫";
var Gg = "⋙";
var ggg = "⋙";
var gimel = "ℷ";
var GJcy = "Ѓ";
var gjcy = "ѓ";
var gla = "⪥";
var gl = "≷";
var glE = "⪒";
var glj = "⪤";
var gnap = "⪊";
var gnapprox = "⪊";
var gne = "⪈";
var gnE = "≩";
var gneq = "⪈";
var gneqq = "≩";
var gnsim = "⋧";
var Gopf = "𝔾";
var gopf = "𝕘";
var grave = "`";
var GreaterEqual = "≥";
var GreaterEqualLess = "⋛";
var GreaterFullEqual = "≧";
var GreaterGreater = "⪢";
var GreaterLess = "≷";
var GreaterSlantEqual = "⩾";
var GreaterTilde = "≳";
var Gscr = "𝒢";
var gscr = "ℊ";
var gsim = "≳";
var gsime = "⪎";
var gsiml = "⪐";
var gtcc = "⪧";
var gtcir = "⩺";
var gt = ">";
var GT = ">";
var Gt = "≫";
var gtdot = "⋗";
var gtlPar = "⦕";
var gtquest = "⩼";
var gtrapprox = "⪆";
var gtrarr = "⥸";
var gtrdot = "⋗";
var gtreqless = "⋛";
var gtreqqless = "⪌";
var gtrless = "≷";
var gtrsim = "≳";
var gvertneqq = "≩︀";
var gvnE = "≩︀";
var Hacek = "ˇ";
var hairsp = " ";
var half = "½";
var hamilt = "ℋ";
var HARDcy = "Ъ";
var hardcy = "ъ";
var harrcir = "⥈";
var harr = "↔";
var hArr = "⇔";
var harrw = "↭";
var Hat = "^";
var hbar = "ℏ";
var Hcirc = "Ĥ";
var hcirc = "ĥ";
var hearts = "♥";
var heartsuit = "♥";
var hellip = "…";
var hercon = "⊹";
var hfr = "𝔥";
var Hfr = "ℌ";
var HilbertSpace = "ℋ";
var hksearow = "⤥";
var hkswarow = "⤦";
var hoarr = "⇿";
var homtht = "∻";
var hookleftarrow = "↩";
var hookrightarrow = "↪";
var hopf = "𝕙";
var Hopf = "ℍ";
var horbar = "―";
var HorizontalLine = "─";
var hscr = "𝒽";
var Hscr = "ℋ";
var hslash = "ℏ";
var Hstrok = "Ħ";
var hstrok = "ħ";
var HumpDownHump = "≎";
var HumpEqual = "≏";
var hybull = "⁃";
var hyphen = "‐";
var Iacute = "Í";
var iacute = "í";
var ic = "⁣";
var Icirc = "Î";
var icirc = "î";
var Icy = "И";
var icy = "и";
var Idot = "İ";
var IEcy = "Е";
var iecy = "е";
var iexcl = "¡";
var iff = "⇔";
var ifr = "𝔦";
var Ifr = "ℑ";
var Igrave = "Ì";
var igrave = "ì";
var ii = "ⅈ";
var iiiint = "⨌";
var iiint = "∭";
var iinfin = "⧜";
var iiota = "℩";
var IJlig = "Ĳ";
var ijlig = "ĳ";
var Imacr = "Ī";
var imacr = "ī";
var image = "ℑ";
var ImaginaryI = "ⅈ";
var imagline = "ℐ";
var imagpart = "ℑ";
var imath = "ı";
var Im = "ℑ";
var imof = "⊷";
var imped = "Ƶ";
var Implies = "⇒";
var incare = "℅";
var infin = "∞";
var infintie = "⧝";
var inodot = "ı";
var intcal = "⊺";
var int = "∫";
var Int = "∬";
var integers = "ℤ";
var Integral = "∫";
var intercal = "⊺";
var Intersection = "⋂";
var intlarhk = "⨗";
var intprod = "⨼";
var InvisibleComma = "⁣";
var InvisibleTimes = "⁢";
var IOcy = "Ё";
var iocy = "ё";
var Iogon = "Į";
var iogon = "į";
var Iopf = "𝕀";
var iopf = "𝕚";
var Iota = "Ι";
var iota = "ι";
var iprod = "⨼";
var iquest = "¿";
var iscr = "𝒾";
var Iscr = "ℐ";
var isin = "∈";
var isindot = "⋵";
var isinE = "⋹";
var isins = "⋴";
var isinsv = "⋳";
var isinv = "∈";
var it = "⁢";
var Itilde = "Ĩ";
var itilde = "ĩ";
var Iukcy = "І";
var iukcy = "і";
var Iuml = "Ï";
var iuml = "ï";
var Jcirc = "Ĵ";
var jcirc = "ĵ";
var Jcy = "Й";
var jcy = "й";
var Jfr = "𝔍";
var jfr = "𝔧";
var jmath = "ȷ";
var Jopf = "𝕁";
var jopf = "𝕛";
var Jscr = "𝒥";
var jscr = "𝒿";
var Jsercy = "Ј";
var jsercy = "ј";
var Jukcy = "Є";
var jukcy = "є";
var Kappa = "Κ";
var kappa = "κ";
var kappav = "ϰ";
var Kcedil = "Ķ";
var kcedil = "ķ";
var Kcy = "К";
var kcy = "к";
var Kfr = "𝔎";
var kfr = "𝔨";
var kgreen = "ĸ";
var KHcy = "Х";
var khcy = "х";
var KJcy = "Ќ";
var kjcy = "ќ";
var Kopf = "𝕂";
var kopf = "𝕜";
var Kscr = "𝒦";
var kscr = "𝓀";
var lAarr = "⇚";
var Lacute = "Ĺ";
var lacute = "ĺ";
var laemptyv = "⦴";
var lagran = "ℒ";
var Lambda = "Λ";
var lambda = "λ";
var lang = "⟨";
var Lang = "⟪";
var langd = "⦑";
var langle = "⟨";
var lap = "⪅";
var Laplacetrf = "ℒ";
var laquo = "«";
var larrb = "⇤";
var larrbfs = "⤟";
var larr = "←";
var Larr = "↞";
var lArr = "⇐";
var larrfs = "⤝";
var larrhk = "↩";
var larrlp = "↫";
var larrpl = "⤹";
var larrsim = "⥳";
var larrtl = "↢";
var latail = "⤙";
var lAtail = "⤛";
var lat = "⪫";
var late = "⪭";
var lates = "⪭︀";
var lbarr = "⤌";
var lBarr = "⤎";
var lbbrk = "❲";
var lbrace = "{";
var lbrack = "[";
var lbrke = "⦋";
var lbrksld = "⦏";
var lbrkslu = "⦍";
var Lcaron = "Ľ";
var lcaron = "ľ";
var Lcedil = "Ļ";
var lcedil = "ļ";
var lceil = "⌈";
var lcub = "{";
var Lcy = "Л";
var lcy = "л";
var ldca = "⤶";
var ldquo = "“";
var ldquor = "„";
var ldrdhar = "⥧";
var ldrushar = "⥋";
var ldsh = "↲";
var le = "≤";
var lE = "≦";
var LeftAngleBracket = "⟨";
var LeftArrowBar = "⇤";
var leftarrow = "←";
var LeftArrow = "←";
var Leftarrow = "⇐";
var LeftArrowRightArrow = "⇆";
var leftarrowtail = "↢";
var LeftCeiling = "⌈";
var LeftDoubleBracket = "⟦";
var LeftDownTeeVector = "⥡";
var LeftDownVectorBar = "⥙";
var LeftDownVector = "⇃";
var LeftFloor = "⌊";
var leftharpoondown = "↽";
var leftharpoonup = "↼";
var leftleftarrows = "⇇";
var leftrightarrow = "↔";
var LeftRightArrow = "↔";
var Leftrightarrow = "⇔";
var leftrightarrows = "⇆";
var leftrightharpoons = "⇋";
var leftrightsquigarrow = "↭";
var LeftRightVector = "⥎";
var LeftTeeArrow = "↤";
var LeftTee = "⊣";
var LeftTeeVector = "⥚";
var leftthreetimes = "⋋";
var LeftTriangleBar = "⧏";
var LeftTriangle = "⊲";
var LeftTriangleEqual = "⊴";
var LeftUpDownVector = "⥑";
var LeftUpTeeVector = "⥠";
var LeftUpVectorBar = "⥘";
var LeftUpVector = "↿";
var LeftVectorBar = "⥒";
var LeftVector = "↼";
var lEg = "⪋";
var leg = "⋚";
var leq = "≤";
var leqq = "≦";
var leqslant = "⩽";
var lescc = "⪨";
var les = "⩽";
var lesdot = "⩿";
var lesdoto = "⪁";
var lesdotor = "⪃";
var lesg = "⋚︀";
var lesges = "⪓";
var lessapprox = "⪅";
var lessdot = "⋖";
var lesseqgtr = "⋚";
var lesseqqgtr = "⪋";
var LessEqualGreater = "⋚";
var LessFullEqual = "≦";
var LessGreater = "≶";
var lessgtr = "≶";
var LessLess = "⪡";
var lesssim = "≲";
var LessSlantEqual = "⩽";
var LessTilde = "≲";
var lfisht = "⥼";
var lfloor = "⌊";
var Lfr = "𝔏";
var lfr = "𝔩";
var lg = "≶";
var lgE = "⪑";
var lHar = "⥢";
var lhard = "↽";
var lharu = "↼";
var lharul = "⥪";
var lhblk = "▄";
var LJcy = "Љ";
var ljcy = "љ";
var llarr = "⇇";
var ll = "≪";
var Ll = "⋘";
var llcorner = "⌞";
var Lleftarrow = "⇚";
var llhard = "⥫";
var lltri = "◺";
var Lmidot = "Ŀ";
var lmidot = "ŀ";
var lmoustache = "⎰";
var lmoust = "⎰";
var lnap = "⪉";
var lnapprox = "⪉";
var lne = "⪇";
var lnE = "≨";
var lneq = "⪇";
var lneqq = "≨";
var lnsim = "⋦";
var loang = "⟬";
var loarr = "⇽";
var lobrk = "⟦";
var longleftarrow = "⟵";
var LongLeftArrow = "⟵";
var Longleftarrow = "⟸";
var longleftrightarrow = "⟷";
var LongLeftRightArrow = "⟷";
var Longleftrightarrow = "⟺";
var longmapsto = "⟼";
var longrightarrow = "⟶";
var LongRightArrow = "⟶";
var Longrightarrow = "⟹";
var looparrowleft = "↫";
var looparrowright = "↬";
var lopar = "⦅";
var Lopf = "𝕃";
var lopf = "𝕝";
var loplus = "⨭";
var lotimes = "⨴";
var lowast = "∗";
var lowbar = "_";
var LowerLeftArrow = "↙";
var LowerRightArrow = "↘";
var loz = "◊";
var lozenge = "◊";
var lozf = "⧫";
var lpar = "(";
var lparlt = "⦓";
var lrarr = "⇆";
var lrcorner = "⌟";
var lrhar = "⇋";
var lrhard = "⥭";
var lrm = "‎";
var lrtri = "⊿";
var lsaquo = "‹";
var lscr = "𝓁";
var Lscr = "ℒ";
var lsh = "↰";
var Lsh = "↰";
var lsim = "≲";
var lsime = "⪍";
var lsimg = "⪏";
var lsqb = "[";
var lsquo = "‘";
var lsquor = "‚";
var Lstrok = "Ł";
var lstrok = "ł";
var ltcc = "⪦";
var ltcir = "⩹";
var lt = "<";
var LT = "<";
var Lt = "≪";
var ltdot = "⋖";
var lthree = "⋋";
var ltimes = "⋉";
var ltlarr = "⥶";
var ltquest = "⩻";
var ltri = "◃";
var ltrie = "⊴";
var ltrif = "◂";
var ltrPar = "⦖";
var lurdshar = "⥊";
var luruhar = "⥦";
var lvertneqq = "≨︀";
var lvnE = "≨︀";
var macr = "¯";
var male = "♂";
var malt = "✠";
var maltese = "✠";
var map = "↦";
var mapsto = "↦";
var mapstodown = "↧";
var mapstoleft = "↤";
var mapstoup = "↥";
var marker = "▮";
var mcomma = "⨩";
var Mcy = "М";
var mcy = "м";
var mdash = "—";
var mDDot = "∺";
var measuredangle = "∡";
var MediumSpace = " ";
var Mellintrf = "ℳ";
var Mfr = "𝔐";
var mfr = "𝔪";
var mho = "℧";
var micro = "µ";
var midast = "*";
var midcir = "⫰";
var mid = "∣";
var middot = "·";
var minusb = "⊟";
var minus = "−";
var minusd = "∸";
var minusdu = "⨪";
var MinusPlus = "∓";
var mlcp = "⫛";
var mldr = "…";
var mnplus = "∓";
var models = "⊧";
var Mopf = "𝕄";
var mopf = "𝕞";
var mp = "∓";
var mscr = "𝓂";
var Mscr = "ℳ";
var mstpos = "∾";
var Mu = "Μ";
var mu = "μ";
var multimap = "⊸";
var mumap = "⊸";
var nabla = "∇";
var Nacute = "Ń";
var nacute = "ń";
var nang = "∠⃒";
var nap = "≉";
var napE = "⩰̸";
var napid = "≋̸";
var napos = "ʼn";
var napprox = "≉";
var natural = "♮";
var naturals = "ℕ";
var natur = "♮";
var nbsp = " ";
var nbump = "≎̸";
var nbumpe = "≏̸";
var ncap = "⩃";
var Ncaron = "Ň";
var ncaron = "ň";
var Ncedil = "Ņ";
var ncedil = "ņ";
var ncong = "≇";
var ncongdot = "⩭̸";
var ncup = "⩂";
var Ncy = "Н";
var ncy = "н";
var ndash = "–";
var nearhk = "⤤";
var nearr = "↗";
var neArr = "⇗";
var nearrow = "↗";
var ne = "≠";
var nedot = "≐̸";
var NegativeMediumSpace = "​";
var NegativeThickSpace = "​";
var NegativeThinSpace = "​";
var NegativeVeryThinSpace = "​";
var nequiv = "≢";
var nesear = "⤨";
var nesim = "≂̸";
var NestedGreaterGreater = "≫";
var NestedLessLess = "≪";
var NewLine = "\n";
var nexist = "∄";
var nexists = "∄";
var Nfr = "𝔑";
var nfr = "𝔫";
var ngE = "≧̸";
var nge = "≱";
var ngeq = "≱";
var ngeqq = "≧̸";
var ngeqslant = "⩾̸";
var nges = "⩾̸";
var nGg = "⋙̸";
var ngsim = "≵";
var nGt = "≫⃒";
var ngt = "≯";
var ngtr = "≯";
var nGtv = "≫̸";
var nharr = "↮";
var nhArr = "⇎";
var nhpar = "⫲";
var ni = "∋";
var nis = "⋼";
var nisd = "⋺";
var niv = "∋";
var NJcy = "Њ";
var njcy = "њ";
var nlarr = "↚";
var nlArr = "⇍";
var nldr = "‥";
var nlE = "≦̸";
var nle = "≰";
var nleftarrow = "↚";
var nLeftarrow = "⇍";
var nleftrightarrow = "↮";
var nLeftrightarrow = "⇎";
var nleq = "≰";
var nleqq = "≦̸";
var nleqslant = "⩽̸";
var nles = "⩽̸";
var nless = "≮";
var nLl = "⋘̸";
var nlsim = "≴";
var nLt = "≪⃒";
var nlt = "≮";
var nltri = "⋪";
var nltrie = "⋬";
var nLtv = "≪̸";
var nmid = "∤";
var NoBreak = "⁠";
var NonBreakingSpace = " ";
var nopf = "𝕟";
var Nopf = "ℕ";
var Not = "⫬";
var not = "¬";
var NotCongruent = "≢";
var NotCupCap = "≭";
var NotDoubleVerticalBar = "∦";
var NotElement = "∉";
var NotEqual = "≠";
var NotEqualTilde = "≂̸";
var NotExists = "∄";
var NotGreater = "≯";
var NotGreaterEqual = "≱";
var NotGreaterFullEqual = "≧̸";
var NotGreaterGreater = "≫̸";
var NotGreaterLess = "≹";
var NotGreaterSlantEqual = "⩾̸";
var NotGreaterTilde = "≵";
var NotHumpDownHump = "≎̸";
var NotHumpEqual = "≏̸";
var notin = "∉";
var notindot = "⋵̸";
var notinE = "⋹̸";
var notinva = "∉";
var notinvb = "⋷";
var notinvc = "⋶";
var NotLeftTriangleBar = "⧏̸";
var NotLeftTriangle = "⋪";
var NotLeftTriangleEqual = "⋬";
var NotLess = "≮";
var NotLessEqual = "≰";
var NotLessGreater = "≸";
var NotLessLess = "≪̸";
var NotLessSlantEqual = "⩽̸";
var NotLessTilde = "≴";
var NotNestedGreaterGreater = "⪢̸";
var NotNestedLessLess = "⪡̸";
var notni = "∌";
var notniva = "∌";
var notnivb = "⋾";
var notnivc = "⋽";
var NotPrecedes = "⊀";
var NotPrecedesEqual = "⪯̸";
var NotPrecedesSlantEqual = "⋠";
var NotReverseElement = "∌";
var NotRightTriangleBar = "⧐̸";
var NotRightTriangle = "⋫";
var NotRightTriangleEqual = "⋭";
var NotSquareSubset = "⊏̸";
var NotSquareSubsetEqual = "⋢";
var NotSquareSuperset = "⊐̸";
var NotSquareSupersetEqual = "⋣";
var NotSubset = "⊂⃒";
var NotSubsetEqual = "⊈";
var NotSucceeds = "⊁";
var NotSucceedsEqual = "⪰̸";
var NotSucceedsSlantEqual = "⋡";
var NotSucceedsTilde = "≿̸";
var NotSuperset = "⊃⃒";
var NotSupersetEqual = "⊉";
var NotTilde = "≁";
var NotTildeEqual = "≄";
var NotTildeFullEqual = "≇";
var NotTildeTilde = "≉";
var NotVerticalBar = "∤";
var nparallel = "∦";
var npar = "∦";
var nparsl = "⫽⃥";
var npart = "∂̸";
var npolint = "⨔";
var npr = "⊀";
var nprcue = "⋠";
var nprec = "⊀";
var npreceq = "⪯̸";
var npre = "⪯̸";
var nrarrc = "⤳̸";
var nrarr = "↛";
var nrArr = "⇏";
var nrarrw = "↝̸";
var nrightarrow = "↛";
var nRightarrow = "⇏";
var nrtri = "⋫";
var nrtrie = "⋭";
var nsc = "⊁";
var nsccue = "⋡";
var nsce = "⪰̸";
var Nscr = "𝒩";
var nscr = "𝓃";
var nshortmid = "∤";
var nshortparallel = "∦";
var nsim = "≁";
var nsime = "≄";
var nsimeq = "≄";
var nsmid = "∤";
var nspar = "∦";
var nsqsube = "⋢";
var nsqsupe = "⋣";
var nsub = "⊄";
var nsubE = "⫅̸";
var nsube = "⊈";
var nsubset = "⊂⃒";
var nsubseteq = "⊈";
var nsubseteqq = "⫅̸";
var nsucc = "⊁";
var nsucceq = "⪰̸";
var nsup = "⊅";
var nsupE = "⫆̸";
var nsupe = "⊉";
var nsupset = "⊃⃒";
var nsupseteq = "⊉";
var nsupseteqq = "⫆̸";
var ntgl = "≹";
var Ntilde = "Ñ";
var ntilde = "ñ";
var ntlg = "≸";
var ntriangleleft = "⋪";
var ntrianglelefteq = "⋬";
var ntriangleright = "⋫";
var ntrianglerighteq = "⋭";
var Nu = "Ν";
var nu = "ν";
var num = "#";
var numero = "№";
var numsp = " ";
var nvap = "≍⃒";
var nvdash = "⊬";
var nvDash = "⊭";
var nVdash = "⊮";
var nVDash = "⊯";
var nvge = "≥⃒";
var nvgt = ">⃒";
var nvHarr = "⤄";
var nvinfin = "⧞";
var nvlArr = "⤂";
var nvle = "≤⃒";
var nvlt = "<⃒";
var nvltrie = "⊴⃒";
var nvrArr = "⤃";
var nvrtrie = "⊵⃒";
var nvsim = "∼⃒";
var nwarhk = "⤣";
var nwarr = "↖";
var nwArr = "⇖";
var nwarrow = "↖";
var nwnear = "⤧";
var Oacute = "Ó";
var oacute = "ó";
var oast = "⊛";
var Ocirc = "Ô";
var ocirc = "ô";
var ocir = "⊚";
var Ocy = "О";
var ocy = "о";
var odash = "⊝";
var Odblac = "Ő";
var odblac = "ő";
var odiv = "⨸";
var odot = "⊙";
var odsold = "⦼";
var OElig = "Œ";
var oelig = "œ";
var ofcir = "⦿";
var Ofr = "𝔒";
var ofr = "𝔬";
var ogon = "˛";
var Ograve = "Ò";
var ograve = "ò";
var ogt = "⧁";
var ohbar = "⦵";
var ohm = "Ω";
var oint = "∮";
var olarr = "↺";
var olcir = "⦾";
var olcross = "⦻";
var oline = "‾";
var olt = "⧀";
var Omacr = "Ō";
var omacr = "ō";
var Omega = "Ω";
var omega = "ω";
var Omicron = "Ο";
var omicron = "ο";
var omid = "⦶";
var ominus = "⊖";
var Oopf = "𝕆";
var oopf = "𝕠";
var opar = "⦷";
var OpenCurlyDoubleQuote = "“";
var OpenCurlyQuote = "‘";
var operp = "⦹";
var oplus = "⊕";
var orarr = "↻";
var Or = "⩔";
var or = "∨";
var ord = "⩝";
var order = "ℴ";
var orderof = "ℴ";
var ordf = "ª";
var ordm = "º";
var origof = "⊶";
var oror = "⩖";
var orslope = "⩗";
var orv = "⩛";
var oS = "Ⓢ";
var Oscr = "𝒪";
var oscr = "ℴ";
var Oslash = "Ø";
var oslash = "ø";
var osol = "⊘";
var Otilde = "Õ";
var otilde = "õ";
var otimesas = "⨶";
var Otimes = "⨷";
var otimes = "⊗";
var Ouml = "Ö";
var ouml = "ö";
var ovbar = "⌽";
var OverBar = "‾";
var OverBrace = "⏞";
var OverBracket = "⎴";
var OverParenthesis = "⏜";
var para = "¶";
var parallel = "∥";
var par = "∥";
var parsim = "⫳";
var parsl = "⫽";
var part = "∂";
var PartialD = "∂";
var Pcy = "П";
var pcy = "п";
var percnt = "%";
var period = ".";
var permil = "‰";
var perp = "⊥";
var pertenk = "‱";
var Pfr = "𝔓";
var pfr = "𝔭";
var Phi = "Φ";
var phi = "φ";
var phiv = "ϕ";
var phmmat = "ℳ";
var phone = "☎";
var Pi = "Π";
var pi = "π";
var pitchfork = "⋔";
var piv = "ϖ";
var planck = "ℏ";
var planckh = "ℎ";
var plankv = "ℏ";
var plusacir = "⨣";
var plusb = "⊞";
var pluscir = "⨢";
var plus = "+";
var plusdo = "∔";
var plusdu = "⨥";
var pluse = "⩲";
var PlusMinus = "±";
var plusmn = "±";
var plussim = "⨦";
var plustwo = "⨧";
var pm = "±";
var Poincareplane = "ℌ";
var pointint = "⨕";
var popf = "𝕡";
var Popf = "ℙ";
var pound = "£";
var prap = "⪷";
var Pr = "⪻";
var pr = "≺";
var prcue = "≼";
var precapprox = "⪷";
var prec = "≺";
var preccurlyeq = "≼";
var Precedes = "≺";
var PrecedesEqual = "⪯";
var PrecedesSlantEqual = "≼";
var PrecedesTilde = "≾";
var preceq = "⪯";
var precnapprox = "⪹";
var precneqq = "⪵";
var precnsim = "⋨";
var pre = "⪯";
var prE = "⪳";
var precsim = "≾";
var prime = "′";
var Prime = "″";
var primes = "ℙ";
var prnap = "⪹";
var prnE = "⪵";
var prnsim = "⋨";
var prod = "∏";
var Product = "∏";
var profalar = "⌮";
var profline = "⌒";
var profsurf = "⌓";
var prop = "∝";
var Proportional = "∝";
var Proportion = "∷";
var propto = "∝";
var prsim = "≾";
var prurel = "⊰";
var Pscr = "𝒫";
var pscr = "𝓅";
var Psi = "Ψ";
var psi = "ψ";
var puncsp = " ";
var Qfr = "𝔔";
var qfr = "𝔮";
var qint = "⨌";
var qopf = "𝕢";
var Qopf = "ℚ";
var qprime = "⁗";
var Qscr = "𝒬";
var qscr = "𝓆";
var quaternions = "ℍ";
var quatint = "⨖";
var quest = "?";
var questeq = "≟";
var quot = "\"";
var QUOT = "\"";
var rAarr = "⇛";
var race = "∽̱";
var Racute = "Ŕ";
var racute = "ŕ";
var radic = "√";
var raemptyv = "⦳";
var rang = "⟩";
var Rang = "⟫";
var rangd = "⦒";
var range = "⦥";
var rangle = "⟩";
var raquo = "»";
var rarrap = "⥵";
var rarrb = "⇥";
var rarrbfs = "⤠";
var rarrc = "⤳";
var rarr = "→";
var Rarr = "↠";
var rArr = "⇒";
var rarrfs = "⤞";
var rarrhk = "↪";
var rarrlp = "↬";
var rarrpl = "⥅";
var rarrsim = "⥴";
var Rarrtl = "⤖";
var rarrtl = "↣";
var rarrw = "↝";
var ratail = "⤚";
var rAtail = "⤜";
var ratio = "∶";
var rationals = "ℚ";
var rbarr = "⤍";
var rBarr = "⤏";
var RBarr = "⤐";
var rbbrk = "❳";
var rbrace = "}";
var rbrack = "]";
var rbrke = "⦌";
var rbrksld = "⦎";
var rbrkslu = "⦐";
var Rcaron = "Ř";
var rcaron = "ř";
var Rcedil = "Ŗ";
var rcedil = "ŗ";
var rceil = "⌉";
var rcub = "}";
var Rcy = "Р";
var rcy = "р";
var rdca = "⤷";
var rdldhar = "⥩";
var rdquo = "”";
var rdquor = "”";
var rdsh = "↳";
var real = "ℜ";
var realine = "ℛ";
var realpart = "ℜ";
var reals = "ℝ";
var Re = "ℜ";
var rect = "▭";
var reg = "®";
var REG = "®";
var ReverseElement = "∋";
var ReverseEquilibrium = "⇋";
var ReverseUpEquilibrium = "⥯";
var rfisht = "⥽";
var rfloor = "⌋";
var rfr = "𝔯";
var Rfr = "ℜ";
var rHar = "⥤";
var rhard = "⇁";
var rharu = "⇀";
var rharul = "⥬";
var Rho = "Ρ";
var rho = "ρ";
var rhov = "ϱ";
var RightAngleBracket = "⟩";
var RightArrowBar = "⇥";
var rightarrow = "→";
var RightArrow = "→";
var Rightarrow = "⇒";
var RightArrowLeftArrow = "⇄";
var rightarrowtail = "↣";
var RightCeiling = "⌉";
var RightDoubleBracket = "⟧";
var RightDownTeeVector = "⥝";
var RightDownVectorBar = "⥕";
var RightDownVector = "⇂";
var RightFloor = "⌋";
var rightharpoondown = "⇁";
var rightharpoonup = "⇀";
var rightleftarrows = "⇄";
var rightleftharpoons = "⇌";
var rightrightarrows = "⇉";
var rightsquigarrow = "↝";
var RightTeeArrow = "↦";
var RightTee = "⊢";
var RightTeeVector = "⥛";
var rightthreetimes = "⋌";
var RightTriangleBar = "⧐";
var RightTriangle = "⊳";
var RightTriangleEqual = "⊵";
var RightUpDownVector = "⥏";
var RightUpTeeVector = "⥜";
var RightUpVectorBar = "⥔";
var RightUpVector = "↾";
var RightVectorBar = "⥓";
var RightVector = "⇀";
var ring = "˚";
var risingdotseq = "≓";
var rlarr = "⇄";
var rlhar = "⇌";
var rlm = "‏";
var rmoustache = "⎱";
var rmoust = "⎱";
var rnmid = "⫮";
var roang = "⟭";
var roarr = "⇾";
var robrk = "⟧";
var ropar = "⦆";
var ropf = "𝕣";
var Ropf = "ℝ";
var roplus = "⨮";
var rotimes = "⨵";
var RoundImplies = "⥰";
var rpar = ")";
var rpargt = "⦔";
var rppolint = "⨒";
var rrarr = "⇉";
var Rrightarrow = "⇛";
var rsaquo = "›";
var rscr = "𝓇";
var Rscr = "ℛ";
var rsh = "↱";
var Rsh = "↱";
var rsqb = "]";
var rsquo = "’";
var rsquor = "’";
var rthree = "⋌";
var rtimes = "⋊";
var rtri = "▹";
var rtrie = "⊵";
var rtrif = "▸";
var rtriltri = "⧎";
var RuleDelayed = "⧴";
var ruluhar = "⥨";
var rx = "℞";
var Sacute = "Ś";
var sacute = "ś";
var sbquo = "‚";
var scap = "⪸";
var Scaron = "Š";
var scaron = "š";
var Sc = "⪼";
var sc = "≻";
var sccue = "≽";
var sce = "⪰";
var scE = "⪴";
var Scedil = "Ş";
var scedil = "ş";
var Scirc = "Ŝ";
var scirc = "ŝ";
var scnap = "⪺";
var scnE = "⪶";
var scnsim = "⋩";
var scpolint = "⨓";
var scsim = "≿";
var Scy = "С";
var scy = "с";
var sdotb = "⊡";
var sdot = "⋅";
var sdote = "⩦";
var searhk = "⤥";
var searr = "↘";
var seArr = "⇘";
var searrow = "↘";
var sect = "§";
var semi = ";";
var seswar = "⤩";
var setminus = "∖";
var setmn = "∖";
var sext = "✶";
var Sfr = "𝔖";
var sfr = "𝔰";
var sfrown = "⌢";
var sharp = "♯";
var SHCHcy = "Щ";
var shchcy = "щ";
var SHcy = "Ш";
var shcy = "ш";
var ShortDownArrow = "↓";
var ShortLeftArrow = "←";
var shortmid = "∣";
var shortparallel = "∥";
var ShortRightArrow = "→";
var ShortUpArrow = "↑";
var shy = "­";
var Sigma = "Σ";
var sigma = "σ";
var sigmaf = "ς";
var sigmav = "ς";
var sim = "∼";
var simdot = "⩪";
var sime = "≃";
var simeq = "≃";
var simg = "⪞";
var simgE = "⪠";
var siml = "⪝";
var simlE = "⪟";
var simne = "≆";
var simplus = "⨤";
var simrarr = "⥲";
var slarr = "←";
var SmallCircle = "∘";
var smallsetminus = "∖";
var smashp = "⨳";
var smeparsl = "⧤";
var smid = "∣";
var smile = "⌣";
var smt = "⪪";
var smte = "⪬";
var smtes = "⪬︀";
var SOFTcy = "Ь";
var softcy = "ь";
var solbar = "⌿";
var solb = "⧄";
var sol = "/";
var Sopf = "𝕊";
var sopf = "𝕤";
var spades = "♠";
var spadesuit = "♠";
var spar = "∥";
var sqcap = "⊓";
var sqcaps = "⊓︀";
var sqcup = "⊔";
var sqcups = "⊔︀";
var Sqrt = "√";
var sqsub = "⊏";
var sqsube = "⊑";
var sqsubset = "⊏";
var sqsubseteq = "⊑";
var sqsup = "⊐";
var sqsupe = "⊒";
var sqsupset = "⊐";
var sqsupseteq = "⊒";
var square = "□";
var Square = "□";
var SquareIntersection = "⊓";
var SquareSubset = "⊏";
var SquareSubsetEqual = "⊑";
var SquareSuperset = "⊐";
var SquareSupersetEqual = "⊒";
var SquareUnion = "⊔";
var squarf = "▪";
var squ = "□";
var squf = "▪";
var srarr = "→";
var Sscr = "𝒮";
var sscr = "𝓈";
var ssetmn = "∖";
var ssmile = "⌣";
var sstarf = "⋆";
var Star = "⋆";
var star = "☆";
var starf = "★";
var straightepsilon = "ϵ";
var straightphi = "ϕ";
var strns = "¯";
var sub = "⊂";
var Sub = "⋐";
var subdot = "⪽";
var subE = "⫅";
var sube = "⊆";
var subedot = "⫃";
var submult = "⫁";
var subnE = "⫋";
var subne = "⊊";
var subplus = "⪿";
var subrarr = "⥹";
var subset = "⊂";
var Subset = "⋐";
var subseteq = "⊆";
var subseteqq = "⫅";
var SubsetEqual = "⊆";
var subsetneq = "⊊";
var subsetneqq = "⫋";
var subsim = "⫇";
var subsub = "⫕";
var subsup = "⫓";
var succapprox = "⪸";
var succ = "≻";
var succcurlyeq = "≽";
var Succeeds = "≻";
var SucceedsEqual = "⪰";
var SucceedsSlantEqual = "≽";
var SucceedsTilde = "≿";
var succeq = "⪰";
var succnapprox = "⪺";
var succneqq = "⪶";
var succnsim = "⋩";
var succsim = "≿";
var SuchThat = "∋";
var sum = "∑";
var Sum = "∑";
var sung = "♪";
var sup1 = "¹";
var sup2 = "²";
var sup3 = "³";
var sup = "⊃";
var Sup = "⋑";
var supdot = "⪾";
var supdsub = "⫘";
var supE = "⫆";
var supe = "⊇";
var supedot = "⫄";
var Superset = "⊃";
var SupersetEqual = "⊇";
var suphsol = "⟉";
var suphsub = "⫗";
var suplarr = "⥻";
var supmult = "⫂";
var supnE = "⫌";
var supne = "⊋";
var supplus = "⫀";
var supset = "⊃";
var Supset = "⋑";
var supseteq = "⊇";
var supseteqq = "⫆";
var supsetneq = "⊋";
var supsetneqq = "⫌";
var supsim = "⫈";
var supsub = "⫔";
var supsup = "⫖";
var swarhk = "⤦";
var swarr = "↙";
var swArr = "⇙";
var swarrow = "↙";
var swnwar = "⤪";
var szlig = "ß";
var Tab = "\t";
var target = "⌖";
var Tau = "Τ";
var tau = "τ";
var tbrk = "⎴";
var Tcaron = "Ť";
var tcaron = "ť";
var Tcedil = "Ţ";
var tcedil = "ţ";
var Tcy = "Т";
var tcy = "т";
var tdot = "⃛";
var telrec = "⌕";
var Tfr = "𝔗";
var tfr = "𝔱";
var there4 = "∴";
var therefore = "∴";
var Therefore = "∴";
var Theta = "Θ";
var theta = "θ";
var thetasym = "ϑ";
var thetav = "ϑ";
var thickapprox = "≈";
var thicksim = "∼";
var ThickSpace = "  ";
var ThinSpace = " ";
var thinsp = " ";
var thkap = "≈";
var thksim = "∼";
var THORN = "Þ";
var thorn = "þ";
var tilde = "˜";
var Tilde = "∼";
var TildeEqual = "≃";
var TildeFullEqual = "≅";
var TildeTilde = "≈";
var timesbar = "⨱";
var timesb = "⊠";
var times = "×";
var timesd = "⨰";
var tint = "∭";
var toea = "⤨";
var topbot = "⌶";
var topcir = "⫱";
var top = "⊤";
var Topf = "𝕋";
var topf = "𝕥";
var topfork = "⫚";
var tosa = "⤩";
var tprime = "‴";
var trade = "™";
var TRADE = "™";
var triangle = "▵";
var triangledown = "▿";
var triangleleft = "◃";
var trianglelefteq = "⊴";
var triangleq = "≜";
var triangleright = "▹";
var trianglerighteq = "⊵";
var tridot = "◬";
var trie = "≜";
var triminus = "⨺";
var TripleDot = "⃛";
var triplus = "⨹";
var trisb = "⧍";
var tritime = "⨻";
var trpezium = "⏢";
var Tscr = "𝒯";
var tscr = "𝓉";
var TScy = "Ц";
var tscy = "ц";
var TSHcy = "Ћ";
var tshcy = "ћ";
var Tstrok = "Ŧ";
var tstrok = "ŧ";
var twixt = "≬";
var twoheadleftarrow = "↞";
var twoheadrightarrow = "↠";
var Uacute = "Ú";
var uacute = "ú";
var uarr = "↑";
var Uarr = "↟";
var uArr = "⇑";
var Uarrocir = "⥉";
var Ubrcy = "Ў";
var ubrcy = "ў";
var Ubreve = "Ŭ";
var ubreve = "ŭ";
var Ucirc = "Û";
var ucirc = "û";
var Ucy = "У";
var ucy = "у";
var udarr = "⇅";
var Udblac = "Ű";
var udblac = "ű";
var udhar = "⥮";
var ufisht = "⥾";
var Ufr = "𝔘";
var ufr = "𝔲";
var Ugrave = "Ù";
var ugrave = "ù";
var uHar = "⥣";
var uharl = "↿";
var uharr = "↾";
var uhblk = "▀";
var ulcorn = "⌜";
var ulcorner = "⌜";
var ulcrop = "⌏";
var ultri = "◸";
var Umacr = "Ū";
var umacr = "ū";
var uml = "¨";
var UnderBar = "_";
var UnderBrace = "⏟";
var UnderBracket = "⎵";
var UnderParenthesis = "⏝";
var Union = "⋃";
var UnionPlus = "⊎";
var Uogon = "Ų";
var uogon = "ų";
var Uopf = "𝕌";
var uopf = "𝕦";
var UpArrowBar = "⤒";
var uparrow = "↑";
var UpArrow = "↑";
var Uparrow = "⇑";
var UpArrowDownArrow = "⇅";
var updownarrow = "↕";
var UpDownArrow = "↕";
var Updownarrow = "⇕";
var UpEquilibrium = "⥮";
var upharpoonleft = "↿";
var upharpoonright = "↾";
var uplus = "⊎";
var UpperLeftArrow = "↖";
var UpperRightArrow = "↗";
var upsi = "υ";
var Upsi = "ϒ";
var upsih = "ϒ";
var Upsilon = "Υ";
var upsilon = "υ";
var UpTeeArrow = "↥";
var UpTee = "⊥";
var upuparrows = "⇈";
var urcorn = "⌝";
var urcorner = "⌝";
var urcrop = "⌎";
var Uring = "Ů";
var uring = "ů";
var urtri = "◹";
var Uscr = "𝒰";
var uscr = "𝓊";
var utdot = "⋰";
var Utilde = "Ũ";
var utilde = "ũ";
var utri = "▵";
var utrif = "▴";
var uuarr = "⇈";
var Uuml = "Ü";
var uuml = "ü";
var uwangle = "⦧";
var vangrt = "⦜";
var varepsilon = "ϵ";
var varkappa = "ϰ";
var varnothing = "∅";
var varphi = "ϕ";
var varpi = "ϖ";
var varpropto = "∝";
var varr = "↕";
var vArr = "⇕";
var varrho = "ϱ";
var varsigma = "ς";
var varsubsetneq = "⊊︀";
var varsubsetneqq = "⫋︀";
var varsupsetneq = "⊋︀";
var varsupsetneqq = "⫌︀";
var vartheta = "ϑ";
var vartriangleleft = "⊲";
var vartriangleright = "⊳";
var vBar = "⫨";
var Vbar = "⫫";
var vBarv = "⫩";
var Vcy = "В";
var vcy = "в";
var vdash = "⊢";
var vDash = "⊨";
var Vdash = "⊩";
var VDash = "⊫";
var Vdashl = "⫦";
var veebar = "⊻";
var vee = "∨";
var Vee = "⋁";
var veeeq = "≚";
var vellip = "⋮";
var verbar = "|";
var Verbar = "‖";
var vert = "|";
var Vert = "‖";
var VerticalBar = "∣";
var VerticalLine = "|";
var VerticalSeparator = "❘";
var VerticalTilde = "≀";
var VeryThinSpace = " ";
var Vfr = "𝔙";
var vfr = "𝔳";
var vltri = "⊲";
var vnsub = "⊂⃒";
var vnsup = "⊃⃒";
var Vopf = "𝕍";
var vopf = "𝕧";
var vprop = "∝";
var vrtri = "⊳";
var Vscr = "𝒱";
var vscr = "𝓋";
var vsubnE = "⫋︀";
var vsubne = "⊊︀";
var vsupnE = "⫌︀";
var vsupne = "⊋︀";
var Vvdash = "⊪";
var vzigzag = "⦚";
var Wcirc = "Ŵ";
var wcirc = "ŵ";
var wedbar = "⩟";
var wedge = "∧";
var Wedge = "⋀";
var wedgeq = "≙";
var weierp = "℘";
var Wfr = "𝔚";
var wfr = "𝔴";
var Wopf = "𝕎";
var wopf = "𝕨";
var wp = "℘";
var wr = "≀";
var wreath = "≀";
var Wscr = "𝒲";
var wscr = "𝓌";
var xcap = "⋂";
var xcirc = "◯";
var xcup = "⋃";
var xdtri = "▽";
var Xfr = "𝔛";
var xfr = "𝔵";
var xharr = "⟷";
var xhArr = "⟺";
var Xi = "Ξ";
var xi = "ξ";
var xlarr = "⟵";
var xlArr = "⟸";
var xmap = "⟼";
var xnis = "⋻";
var xodot = "⨀";
var Xopf = "𝕏";
var xopf = "𝕩";
var xoplus = "⨁";
var xotime = "⨂";
var xrarr = "⟶";
var xrArr = "⟹";
var Xscr = "𝒳";
var xscr = "𝓍";
var xsqcup = "⨆";
var xuplus = "⨄";
var xutri = "△";
var xvee = "⋁";
var xwedge = "⋀";
var Yacute = "Ý";
var yacute = "ý";
var YAcy = "Я";
var yacy = "я";
var Ycirc = "Ŷ";
var ycirc = "ŷ";
var Ycy = "Ы";
var ycy = "ы";
var yen = "¥";
var Yfr = "𝔜";
var yfr = "𝔶";
var YIcy = "Ї";
var yicy = "ї";
var Yopf = "𝕐";
var yopf = "𝕪";
var Yscr = "𝒴";
var yscr = "𝓎";
var YUcy = "Ю";
var yucy = "ю";
var yuml = "ÿ";
var Yuml = "Ÿ";
var Zacute = "Ź";
var zacute = "ź";
var Zcaron = "Ž";
var zcaron = "ž";
var Zcy = "З";
var zcy = "з";
var Zdot = "Ż";
var zdot = "ż";
var zeetrf = "ℨ";
var ZeroWidthSpace = "​";
var Zeta = "Ζ";
var zeta = "ζ";
var zfr = "𝔷";
var Zfr = "ℨ";
var ZHcy = "Ж";
var zhcy = "ж";
var zigrarr = "⇝";
var zopf = "𝕫";
var Zopf = "ℤ";
var Zscr = "𝒵";
var zscr = "𝓏";
var zwj = "‍";
var zwnj = "‌";
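// Aggregate lookup table: maps each HTML named character reference (the entity
// name without the leading "&" and trailing ";") to its decoded Unicode string,
// reusing the individual `var` declarations above. Names that collide with
// reserved words (e.g. "in", "Map") are quoted and given their values inline.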
var entities = {
Aacute: Aacute,
aacute: aacute,
Abreve: Abreve,
abreve: abreve,
ac: ac,
acd: acd,
acE: acE,
Acirc: Acirc,
acirc: acirc,
acute: acute,
Acy: Acy,
acy: acy,
AElig: AElig,
aelig: aelig,
af: af,
Afr: Afr,
afr: afr,
Agrave: Agrave,
agrave: agrave,
alefsym: alefsym,
aleph: aleph,
Alpha: Alpha,
alpha: alpha,
Amacr: Amacr,
amacr: amacr,
amalg: amalg,
amp: amp,
AMP: AMP,
andand: andand,
And: And,
and: and,
andd: andd,
andslope: andslope,
andv: andv,
ang: ang,
ange: ange,
angle: angle,
angmsdaa: angmsdaa,
angmsdab: angmsdab,
angmsdac: angmsdac,
angmsdad: angmsdad,
angmsdae: angmsdae,
angmsdaf: angmsdaf,
angmsdag: angmsdag,
angmsdah: angmsdah,
angmsd: angmsd,
angrt: angrt,
angrtvb: angrtvb,
angrtvbd: angrtvbd,
angsph: angsph,
angst: angst,
angzarr: angzarr,
Aogon: Aogon,
aogon: aogon,
Aopf: Aopf,
aopf: aopf,
apacir: apacir,
ap: ap,
apE: apE,
ape: ape,
apid: apid,
apos: apos,
ApplyFunction: ApplyFunction,
approx: approx,
approxeq: approxeq,
Aring: Aring,
aring: aring,
Ascr: Ascr,
ascr: ascr,
Assign: Assign,
ast: ast,
asymp: asymp,
asympeq: asympeq,
Atilde: Atilde,
atilde: atilde,
Auml: Auml,
auml: auml,
awconint: awconint,
awint: awint,
backcong: backcong,
backepsilon: backepsilon,
backprime: backprime,
backsim: backsim,
backsimeq: backsimeq,
Backslash: Backslash,
Barv: Barv,
barvee: barvee,
barwed: barwed,
Barwed: Barwed,
barwedge: barwedge,
bbrk: bbrk,
bbrktbrk: bbrktbrk,
bcong: bcong,
Bcy: Bcy,
bcy: bcy,
bdquo: bdquo,
becaus: becaus,
because: because,
Because: Because,
bemptyv: bemptyv,
bepsi: bepsi,
bernou: bernou,
Bernoullis: Bernoullis,
Beta: Beta,
beta: beta,
beth: beth,
between: between,
Bfr: Bfr,
bfr: bfr,
bigcap: bigcap,
bigcirc: bigcirc,
bigcup: bigcup,
bigodot: bigodot,
bigoplus: bigoplus,
bigotimes: bigotimes,
bigsqcup: bigsqcup,
bigstar: bigstar,
bigtriangledown: bigtriangledown,
bigtriangleup: bigtriangleup,
biguplus: biguplus,
bigvee: bigvee,
bigwedge: bigwedge,
bkarow: bkarow,
blacklozenge: blacklozenge,
blacksquare: blacksquare,
blacktriangle: blacktriangle,
blacktriangledown: blacktriangledown,
blacktriangleleft: blacktriangleleft,
blacktriangleright: blacktriangleright,
blank: blank,
blk12: blk12,
blk14: blk14,
blk34: blk34,
block: block,
bne: bne,
bnequiv: bnequiv,
bNot: bNot,
bnot: bnot,
Bopf: Bopf,
bopf: bopf,
bot: bot,
bottom: bottom,
bowtie: bowtie,
boxbox: boxbox,
boxdl: boxdl,
boxdL: boxdL,
boxDl: boxDl,
boxDL: boxDL,
boxdr: boxdr,
boxdR: boxdR,
boxDr: boxDr,
boxDR: boxDR,
boxh: boxh,
boxH: boxH,
boxhd: boxhd,
boxHd: boxHd,
boxhD: boxhD,
boxHD: boxHD,
boxhu: boxhu,
boxHu: boxHu,
boxhU: boxhU,
boxHU: boxHU,
boxminus: boxminus,
boxplus: boxplus,
boxtimes: boxtimes,
boxul: boxul,
boxuL: boxuL,
boxUl: boxUl,
boxUL: boxUL,
boxur: boxur,
boxuR: boxuR,
boxUr: boxUr,
boxUR: boxUR,
boxv: boxv,
boxV: boxV,
boxvh: boxvh,
boxvH: boxvH,
boxVh: boxVh,
boxVH: boxVH,
boxvl: boxvl,
boxvL: boxvL,
boxVl: boxVl,
boxVL: boxVL,
boxvr: boxvr,
boxvR: boxvR,
boxVr: boxVr,
boxVR: boxVR,
bprime: bprime,
breve: breve,
Breve: Breve,
brvbar: brvbar,
bscr: bscr,
Bscr: Bscr,
bsemi: bsemi,
bsim: bsim,
bsime: bsime,
bsolb: bsolb,
bsol: bsol,
bsolhsub: bsolhsub,
bull: bull,
bullet: bullet,
bump: bump,
bumpE: bumpE,
bumpe: bumpe,
Bumpeq: Bumpeq,
bumpeq: bumpeq,
Cacute: Cacute,
cacute: cacute,
capand: capand,
capbrcup: capbrcup,
capcap: capcap,
cap: cap,
Cap: Cap,
capcup: capcup,
capdot: capdot,
CapitalDifferentialD: CapitalDifferentialD,
caps: caps,
caret: caret,
caron: caron,
Cayleys: Cayleys,
ccaps: ccaps,
Ccaron: Ccaron,
ccaron: ccaron,
Ccedil: Ccedil,
ccedil: ccedil,
Ccirc: Ccirc,
ccirc: ccirc,
Cconint: Cconint,
ccups: ccups,
ccupssm: ccupssm,
Cdot: Cdot,
cdot: cdot,
cedil: cedil,
Cedilla: Cedilla,
cemptyv: cemptyv,
cent: cent,
centerdot: centerdot,
CenterDot: CenterDot,
cfr: cfr,
Cfr: Cfr,
CHcy: CHcy,
chcy: chcy,
check: check,
checkmark: checkmark,
Chi: Chi,
chi: chi,
circ: circ,
circeq: circeq,
circlearrowleft: circlearrowleft,
circlearrowright: circlearrowright,
circledast: circledast,
circledcirc: circledcirc,
circleddash: circleddash,
CircleDot: CircleDot,
circledR: circledR,
circledS: circledS,
CircleMinus: CircleMinus,
CirclePlus: CirclePlus,
CircleTimes: CircleTimes,
cir: cir,
cirE: cirE,
cire: cire,
cirfnint: cirfnint,
cirmid: cirmid,
cirscir: cirscir,
ClockwiseContourIntegral: ClockwiseContourIntegral,
CloseCurlyDoubleQuote: CloseCurlyDoubleQuote,
CloseCurlyQuote: CloseCurlyQuote,
clubs: clubs,
clubsuit: clubsuit,
colon: colon,
Colon: Colon,
Colone: Colone,
colone: colone,
coloneq: coloneq,
comma: comma,
commat: commat,
comp: comp,
compfn: compfn,
complement: complement,
complexes: complexes,
cong: cong,
congdot: congdot,
Congruent: Congruent,
conint: conint,
Conint: Conint,
ContourIntegral: ContourIntegral,
copf: copf,
Copf: Copf,
coprod: coprod,
Coproduct: Coproduct,
copy: copy,
COPY: COPY,
copysr: copysr,
CounterClockwiseContourIntegral: CounterClockwiseContourIntegral,
crarr: crarr,
cross: cross,
Cross: Cross,
Cscr: Cscr,
cscr: cscr,
csub: csub,
csube: csube,
csup: csup,
csupe: csupe,
ctdot: ctdot,
cudarrl: cudarrl,
cudarrr: cudarrr,
cuepr: cuepr,
cuesc: cuesc,
cularr: cularr,
cularrp: cularrp,
cupbrcap: cupbrcap,
cupcap: cupcap,
CupCap: CupCap,
cup: cup,
Cup: Cup,
cupcup: cupcup,
cupdot: cupdot,
cupor: cupor,
cups: cups,
curarr: curarr,
curarrm: curarrm,
curlyeqprec: curlyeqprec,
curlyeqsucc: curlyeqsucc,
curlyvee: curlyvee,
curlywedge: curlywedge,
curren: curren,
curvearrowleft: curvearrowleft,
curvearrowright: curvearrowright,
cuvee: cuvee,
cuwed: cuwed,
cwconint: cwconint,
cwint: cwint,
cylcty: cylcty,
dagger: dagger,
Dagger: Dagger,
daleth: daleth,
darr: darr,
Darr: Darr,
dArr: dArr,
dash: dash,
Dashv: Dashv,
dashv: dashv,
dbkarow: dbkarow,
dblac: dblac,
Dcaron: Dcaron,
dcaron: dcaron,
Dcy: Dcy,
dcy: dcy,
ddagger: ddagger,
ddarr: ddarr,
DD: DD,
dd: dd,
DDotrahd: DDotrahd,
ddotseq: ddotseq,
deg: deg,
Del: Del,
Delta: Delta,
delta: delta,
demptyv: demptyv,
dfisht: dfisht,
Dfr: Dfr,
dfr: dfr,
dHar: dHar,
dharl: dharl,
dharr: dharr,
DiacriticalAcute: DiacriticalAcute,
DiacriticalDot: DiacriticalDot,
DiacriticalDoubleAcute: DiacriticalDoubleAcute,
DiacriticalGrave: DiacriticalGrave,
DiacriticalTilde: DiacriticalTilde,
diam: diam,
diamond: diamond,
Diamond: Diamond,
diamondsuit: diamondsuit,
diams: diams,
die: die,
DifferentialD: DifferentialD,
digamma: digamma,
disin: disin,
div: div,
divide: divide,
divideontimes: divideontimes,
divonx: divonx,
DJcy: DJcy,
djcy: djcy,
dlcorn: dlcorn,
dlcrop: dlcrop,
dollar: dollar,
Dopf: Dopf,
dopf: dopf,
Dot: Dot,
dot: dot,
DotDot: DotDot,
doteq: doteq,
doteqdot: doteqdot,
DotEqual: DotEqual,
dotminus: dotminus,
dotplus: dotplus,
dotsquare: dotsquare,
doublebarwedge: doublebarwedge,
DoubleContourIntegral: DoubleContourIntegral,
DoubleDot: DoubleDot,
DoubleDownArrow: DoubleDownArrow,
DoubleLeftArrow: DoubleLeftArrow,
DoubleLeftRightArrow: DoubleLeftRightArrow,
DoubleLeftTee: DoubleLeftTee,
DoubleLongLeftArrow: DoubleLongLeftArrow,
DoubleLongLeftRightArrow: DoubleLongLeftRightArrow,
DoubleLongRightArrow: DoubleLongRightArrow,
DoubleRightArrow: DoubleRightArrow,
DoubleRightTee: DoubleRightTee,
DoubleUpArrow: DoubleUpArrow,
DoubleUpDownArrow: DoubleUpDownArrow,
DoubleVerticalBar: DoubleVerticalBar,
DownArrowBar: DownArrowBar,
downarrow: downarrow,
DownArrow: DownArrow,
Downarrow: Downarrow,
DownArrowUpArrow: DownArrowUpArrow,
DownBreve: DownBreve,
downdownarrows: downdownarrows,
downharpoonleft: downharpoonleft,
downharpoonright: downharpoonright,
DownLeftRightVector: DownLeftRightVector,
DownLeftTeeVector: DownLeftTeeVector,
DownLeftVectorBar: DownLeftVectorBar,
DownLeftVector: DownLeftVector,
DownRightTeeVector: DownRightTeeVector,
DownRightVectorBar: DownRightVectorBar,
DownRightVector: DownRightVector,
DownTeeArrow: DownTeeArrow,
DownTee: DownTee,
drbkarow: drbkarow,
drcorn: drcorn,
drcrop: drcrop,
Dscr: Dscr,
dscr: dscr,
DScy: DScy,
dscy: dscy,
dsol: dsol,
Dstrok: Dstrok,
dstrok: dstrok,
dtdot: dtdot,
dtri: dtri,
dtrif: dtrif,
duarr: duarr,
duhar: duhar,
dwangle: dwangle,
DZcy: DZcy,
dzcy: dzcy,
dzigrarr: dzigrarr,
Eacute: Eacute,
eacute: eacute,
easter: easter,
Ecaron: Ecaron,
ecaron: ecaron,
Ecirc: Ecirc,
ecirc: ecirc,
ecir: ecir,
ecolon: ecolon,
Ecy: Ecy,
ecy: ecy,
eDDot: eDDot,
Edot: Edot,
edot: edot,
eDot: eDot,
ee: ee,
efDot: efDot,
Efr: Efr,
efr: efr,
eg: eg,
Egrave: Egrave,
egrave: egrave,
egs: egs,
egsdot: egsdot,
el: el,
Element: Element,
elinters: elinters,
ell: ell,
els: els,
elsdot: elsdot,
Emacr: Emacr,
emacr: emacr,
empty: empty,
emptyset: emptyset,
EmptySmallSquare: EmptySmallSquare,
emptyv: emptyv,
EmptyVerySmallSquare: EmptyVerySmallSquare,
emsp13: emsp13,
emsp14: emsp14,
emsp: emsp,
ENG: ENG,
eng: eng,
ensp: ensp,
Eogon: Eogon,
eogon: eogon,
Eopf: Eopf,
eopf: eopf,
epar: epar,
eparsl: eparsl,
eplus: eplus,
epsi: epsi,
Epsilon: Epsilon,
epsilon: epsilon,
epsiv: epsiv,
eqcirc: eqcirc,
eqcolon: eqcolon,
eqsim: eqsim,
eqslantgtr: eqslantgtr,
eqslantless: eqslantless,
Equal: Equal,
equals: equals,
EqualTilde: EqualTilde,
equest: equest,
Equilibrium: Equilibrium,
equiv: equiv,
equivDD: equivDD,
eqvparsl: eqvparsl,
erarr: erarr,
erDot: erDot,
escr: escr,
Escr: Escr,
esdot: esdot,
Esim: Esim,
esim: esim,
Eta: Eta,
eta: eta,
ETH: ETH,
eth: eth,
Euml: Euml,
euml: euml,
euro: euro,
excl: excl,
exist: exist,
Exists: Exists,
expectation: expectation,
exponentiale: exponentiale,
ExponentialE: ExponentialE,
fallingdotseq: fallingdotseq,
Fcy: Fcy,
fcy: fcy,
female: female,
ffilig: ffilig,
fflig: fflig,
ffllig: ffllig,
Ffr: Ffr,
ffr: ffr,
filig: filig,
FilledSmallSquare: FilledSmallSquare,
FilledVerySmallSquare: FilledVerySmallSquare,
fjlig: fjlig,
flat: flat,
fllig: fllig,
fltns: fltns,
fnof: fnof,
Fopf: Fopf,
fopf: fopf,
forall: forall,
ForAll: ForAll,
fork: fork,
forkv: forkv,
Fouriertrf: Fouriertrf,
fpartint: fpartint,
frac12: frac12,
frac13: frac13,
frac14: frac14,
frac15: frac15,
frac16: frac16,
frac18: frac18,
frac23: frac23,
frac25: frac25,
frac34: frac34,
frac35: frac35,
frac38: frac38,
frac45: frac45,
frac56: frac56,
frac58: frac58,
frac78: frac78,
frasl: frasl,
frown: frown,
fscr: fscr,
Fscr: Fscr,
gacute: gacute,
Gamma: Gamma,
gamma: gamma,
Gammad: Gammad,
gammad: gammad,
gap: gap,
Gbreve: Gbreve,
gbreve: gbreve,
Gcedil: Gcedil,
Gcirc: Gcirc,
gcirc: gcirc,
Gcy: Gcy,
gcy: gcy,
Gdot: Gdot,
gdot: gdot,
ge: ge,
gE: gE,
gEl: gEl,
gel: gel,
geq: geq,
geqq: geqq,
geqslant: geqslant,
gescc: gescc,
ges: ges,
gesdot: gesdot,
gesdoto: gesdoto,
gesdotol: gesdotol,
gesl: gesl,
gesles: gesles,
Gfr: Gfr,
gfr: gfr,
gg: gg,
Gg: Gg,
ggg: ggg,
gimel: gimel,
GJcy: GJcy,
gjcy: gjcy,
gla: gla,
gl: gl,
glE: glE,
glj: glj,
gnap: gnap,
gnapprox: gnapprox,
gne: gne,
gnE: gnE,
gneq: gneq,
gneqq: gneqq,
gnsim: gnsim,
Gopf: Gopf,
gopf: gopf,
grave: grave,
GreaterEqual: GreaterEqual,
GreaterEqualLess: GreaterEqualLess,
GreaterFullEqual: GreaterFullEqual,
GreaterGreater: GreaterGreater,
GreaterLess: GreaterLess,
GreaterSlantEqual: GreaterSlantEqual,
GreaterTilde: GreaterTilde,
Gscr: Gscr,
gscr: gscr,
gsim: gsim,
gsime: gsime,
gsiml: gsiml,
gtcc: gtcc,
gtcir: gtcir,
gt: gt,
GT: GT,
Gt: Gt,
gtdot: gtdot,
gtlPar: gtlPar,
gtquest: gtquest,
gtrapprox: gtrapprox,
gtrarr: gtrarr,
gtrdot: gtrdot,
gtreqless: gtreqless,
gtreqqless: gtreqqless,
gtrless: gtrless,
gtrsim: gtrsim,
gvertneqq: gvertneqq,
gvnE: gvnE,
Hacek: Hacek,
hairsp: hairsp,
half: half,
hamilt: hamilt,
HARDcy: HARDcy,
hardcy: hardcy,
harrcir: harrcir,
harr: harr,
hArr: hArr,
harrw: harrw,
Hat: Hat,
hbar: hbar,
Hcirc: Hcirc,
hcirc: hcirc,
hearts: hearts,
heartsuit: heartsuit,
hellip: hellip,
hercon: hercon,
hfr: hfr,
Hfr: Hfr,
HilbertSpace: HilbertSpace,
hksearow: hksearow,
hkswarow: hkswarow,
hoarr: hoarr,
homtht: homtht,
hookleftarrow: hookleftarrow,
hookrightarrow: hookrightarrow,
hopf: hopf,
Hopf: Hopf,
horbar: horbar,
HorizontalLine: HorizontalLine,
hscr: hscr,
Hscr: Hscr,
hslash: hslash,
Hstrok: Hstrok,
hstrok: hstrok,
HumpDownHump: HumpDownHump,
HumpEqual: HumpEqual,
hybull: hybull,
hyphen: hyphen,
Iacute: Iacute,
iacute: iacute,
ic: ic,
Icirc: Icirc,
icirc: icirc,
Icy: Icy,
icy: icy,
Idot: Idot,
IEcy: IEcy,
iecy: iecy,
iexcl: iexcl,
iff: iff,
ifr: ifr,
Ifr: Ifr,
Igrave: Igrave,
igrave: igrave,
ii: ii,
iiiint: iiiint,
iiint: iiint,
iinfin: iinfin,
iiota: iiota,
IJlig: IJlig,
ijlig: ijlig,
Imacr: Imacr,
imacr: imacr,
image: image,
ImaginaryI: ImaginaryI,
imagline: imagline,
imagpart: imagpart,
imath: imath,
Im: Im,
imof: imof,
imped: imped,
Implies: Implies,
incare: incare,
"in": "∈",
infin: infin,
infintie: infintie,
inodot: inodot,
intcal: intcal,
int: int,
Int: Int,
integers: integers,
Integral: Integral,
intercal: intercal,
Intersection: Intersection,
intlarhk: intlarhk,
intprod: intprod,
InvisibleComma: InvisibleComma,
InvisibleTimes: InvisibleTimes,
IOcy: IOcy,
iocy: iocy,
Iogon: Iogon,
iogon: iogon,
Iopf: Iopf,
iopf: iopf,
Iota: Iota,
iota: iota,
iprod: iprod,
iquest: iquest,
iscr: iscr,
Iscr: Iscr,
isin: isin,
isindot: isindot,
isinE: isinE,
isins: isins,
isinsv: isinsv,
isinv: isinv,
it: it,
Itilde: Itilde,
itilde: itilde,
Iukcy: Iukcy,
iukcy: iukcy,
Iuml: Iuml,
iuml: iuml,
Jcirc: Jcirc,
jcirc: jcirc,
Jcy: Jcy,
jcy: jcy,
Jfr: Jfr,
jfr: jfr,
jmath: jmath,
Jopf: Jopf,
jopf: jopf,
Jscr: Jscr,
jscr: jscr,
Jsercy: Jsercy,
jsercy: jsercy,
Jukcy: Jukcy,
jukcy: jukcy,
Kappa: Kappa,
kappa: kappa,
kappav: kappav,
Kcedil: Kcedil,
kcedil: kcedil,
Kcy: Kcy,
kcy: kcy,
Kfr: Kfr,
kfr: kfr,
kgreen: kgreen,
KHcy: KHcy,
khcy: khcy,
KJcy: KJcy,
kjcy: kjcy,
Kopf: Kopf,
kopf: kopf,
Kscr: Kscr,
kscr: kscr,
lAarr: lAarr,
Lacute: Lacute,
lacute: lacute,
laemptyv: laemptyv,
lagran: lagran,
Lambda: Lambda,
lambda: lambda,
lang: lang,
Lang: Lang,
langd: langd,
langle: langle,
lap: lap,
Laplacetrf: Laplacetrf,
laquo: laquo,
larrb: larrb,
larrbfs: larrbfs,
larr: larr,
Larr: Larr,
lArr: lArr,
larrfs: larrfs,
larrhk: larrhk,
larrlp: larrlp,
larrpl: larrpl,
larrsim: larrsim,
larrtl: larrtl,
latail: latail,
lAtail: lAtail,
lat: lat,
late: late,
lates: lates,
lbarr: lbarr,
lBarr: lBarr,
lbbrk: lbbrk,
lbrace: lbrace,
lbrack: lbrack,
lbrke: lbrke,
lbrksld: lbrksld,
lbrkslu: lbrkslu,
Lcaron: Lcaron,
lcaron: lcaron,
Lcedil: Lcedil,
lcedil: lcedil,
lceil: lceil,
lcub: lcub,
Lcy: Lcy,
lcy: lcy,
ldca: ldca,
ldquo: ldquo,
ldquor: ldquor,
ldrdhar: ldrdhar,
ldrushar: ldrushar,
ldsh: ldsh,
le: le,
lE: lE,
LeftAngleBracket: LeftAngleBracket,
LeftArrowBar: LeftArrowBar,
leftarrow: leftarrow,
LeftArrow: LeftArrow,
Leftarrow: Leftarrow,
LeftArrowRightArrow: LeftArrowRightArrow,
leftarrowtail: leftarrowtail,
LeftCeiling: LeftCeiling,
LeftDoubleBracket: LeftDoubleBracket,
LeftDownTeeVector: LeftDownTeeVector,
LeftDownVectorBar: LeftDownVectorBar,
LeftDownVector: LeftDownVector,
LeftFloor: LeftFloor,
leftharpoondown: leftharpoondown,
leftharpoonup: leftharpoonup,
leftleftarrows: leftleftarrows,
leftrightarrow: leftrightarrow,
LeftRightArrow: LeftRightArrow,
Leftrightarrow: Leftrightarrow,
leftrightarrows: leftrightarrows,
leftrightharpoons: leftrightharpoons,
leftrightsquigarrow: leftrightsquigarrow,
LeftRightVector: LeftRightVector,
LeftTeeArrow: LeftTeeArrow,
LeftTee: LeftTee,
LeftTeeVector: LeftTeeVector,
leftthreetimes: leftthreetimes,
LeftTriangleBar: LeftTriangleBar,
LeftTriangle: LeftTriangle,
LeftTriangleEqual: LeftTriangleEqual,
LeftUpDownVector: LeftUpDownVector,
LeftUpTeeVector: LeftUpTeeVector,
LeftUpVectorBar: LeftUpVectorBar,
LeftUpVector: LeftUpVector,
LeftVectorBar: LeftVectorBar,
LeftVector: LeftVector,
lEg: lEg,
leg: leg,
leq: leq,
leqq: leqq,
leqslant: leqslant,
lescc: lescc,
les: les,
lesdot: lesdot,
lesdoto: lesdoto,
lesdotor: lesdotor,
lesg: lesg,
lesges: lesges,
lessapprox: lessapprox,
lessdot: lessdot,
lesseqgtr: lesseqgtr,
lesseqqgtr: lesseqqgtr,
LessEqualGreater: LessEqualGreater,
LessFullEqual: LessFullEqual,
LessGreater: LessGreater,
lessgtr: lessgtr,
LessLess: LessLess,
lesssim: lesssim,
LessSlantEqual: LessSlantEqual,
LessTilde: LessTilde,
lfisht: lfisht,
lfloor: lfloor,
Lfr: Lfr,
lfr: lfr,
lg: lg,
lgE: lgE,
lHar: lHar,
lhard: lhard,
lharu: lharu,
lharul: lharul,
lhblk: lhblk,
LJcy: LJcy,
ljcy: ljcy,
llarr: llarr,
ll: ll,
Ll: Ll,
llcorner: llcorner,
Lleftarrow: Lleftarrow,
llhard: llhard,
lltri: lltri,
Lmidot: Lmidot,
lmidot: lmidot,
lmoustache: lmoustache,
lmoust: lmoust,
lnap: lnap,
lnapprox: lnapprox,
lne: lne,
lnE: lnE,
lneq: lneq,
lneqq: lneqq,
lnsim: lnsim,
loang: loang,
loarr: loarr,
lobrk: lobrk,
longleftarrow: longleftarrow,
LongLeftArrow: LongLeftArrow,
Longleftarrow: Longleftarrow,
longleftrightarrow: longleftrightarrow,
LongLeftRightArrow: LongLeftRightArrow,
Longleftrightarrow: Longleftrightarrow,
longmapsto: longmapsto,
longrightarrow: longrightarrow,
LongRightArrow: LongRightArrow,
Longrightarrow: Longrightarrow,
looparrowleft: looparrowleft,
looparrowright: looparrowright,
lopar: lopar,
Lopf: Lopf,
lopf: lopf,
loplus: loplus,
lotimes: lotimes,
lowast: lowast,
lowbar: lowbar,
LowerLeftArrow: LowerLeftArrow,
LowerRightArrow: LowerRightArrow,
loz: loz,
lozenge: lozenge,
lozf: lozf,
lpar: lpar,
lparlt: lparlt,
lrarr: lrarr,
lrcorner: lrcorner,
lrhar: lrhar,
lrhard: lrhard,
lrm: lrm,
lrtri: lrtri,
lsaquo: lsaquo,
lscr: lscr,
Lscr: Lscr,
lsh: lsh,
Lsh: Lsh,
lsim: lsim,
lsime: lsime,
lsimg: lsimg,
lsqb: lsqb,
lsquo: lsquo,
lsquor: lsquor,
Lstrok: Lstrok,
lstrok: lstrok,
ltcc: ltcc,
ltcir: ltcir,
lt: lt,
LT: LT,
Lt: Lt,
ltdot: ltdot,
lthree: lthree,
ltimes: ltimes,
ltlarr: ltlarr,
ltquest: ltquest,
ltri: ltri,
ltrie: ltrie,
ltrif: ltrif,
ltrPar: ltrPar,
lurdshar: lurdshar,
luruhar: luruhar,
lvertneqq: lvertneqq,
lvnE: lvnE,
macr: macr,
male: male,
malt: malt,
maltese: maltese,
"Map": "⤅",
map: map,
mapsto: mapsto,
mapstodown: mapstodown,
mapstoleft: mapstoleft,
mapstoup: mapstoup,
marker: marker,
mcomma: mcomma,
Mcy: Mcy,
mcy: mcy,
mdash: mdash,
mDDot: mDDot,
measuredangle: measuredangle,
MediumSpace: MediumSpace,
Mellintrf: Mellintrf,
Mfr: Mfr,
mfr: mfr,
mho: mho,
micro: micro,
midast: midast,
midcir: midcir,
mid: mid,
middot: middot,
minusb: minusb,
minus: minus,
minusd: minusd,
minusdu: minusdu,
MinusPlus: MinusPlus,
mlcp: mlcp,
mldr: mldr,
mnplus: mnplus,
models: models,
Mopf: Mopf,
mopf: mopf,
mp: mp,
mscr: mscr,
Mscr: Mscr,
mstpos: mstpos,
Mu: Mu,
mu: mu,
multimap: multimap,
mumap: mumap,
nabla: nabla,
Nacute: Nacute,
nacute: nacute,
nang: nang,
nap: nap,
napE: napE,
napid: napid,
napos: napos,
napprox: napprox,
natural: natural,
naturals: naturals,
natur: natur,
nbsp: nbsp,
nbump: nbump,
nbumpe: nbumpe,
ncap: ncap,
Ncaron: Ncaron,
ncaron: ncaron,
Ncedil: Ncedil,
ncedil: ncedil,
ncong: ncong,
ncongdot: ncongdot,
ncup: ncup,
Ncy: Ncy,
ncy: ncy,
ndash: ndash,
nearhk: nearhk,
nearr: nearr,
neArr: neArr,
nearrow: nearrow,
ne: ne,
nedot: nedot,
NegativeMediumSpace: NegativeMediumSpace,
NegativeThickSpace: NegativeThickSpace,
NegativeThinSpace: NegativeThinSpace,
NegativeVeryThinSpace: NegativeVeryThinSpace,
nequiv: nequiv,
nesear: nesear,
nesim: nesim,
NestedGreaterGreater: NestedGreaterGreater,
NestedLessLess: NestedLessLess,
NewLine: NewLine,
nexist: nexist,
nexists: nexists,
Nfr: Nfr,
nfr: nfr,
ngE: ngE,
nge: nge,
ngeq: ngeq,
ngeqq: ngeqq,
ngeqslant: ngeqslant,
nges: nges,
nGg: nGg,
ngsim: ngsim,
nGt: nGt,
ngt: ngt,
ngtr: ngtr,
nGtv: nGtv,
nharr: nharr,
nhArr: nhArr,
nhpar: nhpar,
ni: ni,
nis: nis,
nisd: nisd,
niv: niv,
NJcy: NJcy,
njcy: njcy,
nlarr: nlarr,
nlArr: nlArr,
nldr: nldr,
nlE: nlE,
nle: nle,
nleftarrow: nleftarrow,
nLeftarrow: nLeftarrow,
nleftrightarrow: nleftrightarrow,
nLeftrightarrow: nLeftrightarrow,
nleq: nleq,
nleqq: nleqq,
nleqslant: nleqslant,
nles: nles,
nless: nless,
nLl: nLl,
nlsim: nlsim,
nLt: nLt,
nlt: nlt,
nltri: nltri,
nltrie: nltrie,
nLtv: nLtv,
nmid: nmid,
NoBreak: NoBreak,
NonBreakingSpace: NonBreakingSpace,
nopf: nopf,
Nopf: Nopf,
Not: Not,
not: not,
NotCongruent: NotCongruent,
NotCupCap: NotCupCap,
NotDoubleVerticalBar: NotDoubleVerticalBar,
NotElement: NotElement,
NotEqual: NotEqual,
NotEqualTilde: NotEqualTilde,
NotExists: NotExists,
NotGreater: NotGreater,
NotGreaterEqual: NotGreaterEqual,
NotGreaterFullEqual: NotGreaterFullEqual,
NotGreaterGreater: NotGreaterGreater,
NotGreaterLess: NotGreaterLess,
NotGreaterSlantEqual: NotGreaterSlantEqual,
NotGreaterTilde: NotGreaterTilde,
NotHumpDownHump: NotHumpDownHump,
NotHumpEqual: NotHumpEqual,
notin: notin,
notindot: notindot,
notinE: notinE,
notinva: notinva,
notinvb: notinvb,
notinvc: notinvc,
NotLeftTriangleBar: NotLeftTriangleBar,
NotLeftTriangle: NotLeftTriangle,
NotLeftTriangleEqual: NotLeftTriangleEqual,
NotLess: NotLess,
NotLessEqual: NotLessEqual,
NotLessGreater: NotLessGreater,
NotLessLess: NotLessLess,
NotLessSlantEqual: NotLessSlantEqual,
NotLessTilde: NotLessTilde,
NotNestedGreaterGreater: NotNestedGreaterGreater,
NotNestedLessLess: NotNestedLessLess,
notni: notni,
notniva: notniva,
notnivb: notnivb,
notnivc: notnivc,
NotPrecedes: NotPrecedes,
NotPrecedesEqual: NotPrecedesEqual,
NotPrecedesSlantEqual: NotPrecedesSlantEqual,
NotReverseElement: NotReverseElement,
NotRightTriangleBar: NotRightTriangleBar,
NotRightTriangle: NotRightTriangle,
NotRightTriangleEqual: NotRightTriangleEqual,
NotSquareSubset: NotSquareSubset,
NotSquareSubsetEqual: NotSquareSubsetEqual,
NotSquareSuperset: NotSquareSuperset,
NotSquareSupersetEqual: NotSquareSupersetEqual,
NotSubset: NotSubset,
NotSubsetEqual: NotSubsetEqual,
NotSucceeds: NotSucceeds,
NotSucceedsEqual: NotSucceedsEqual,
NotSucceedsSlantEqual: NotSucceedsSlantEqual,
NotSucceedsTilde: NotSucceedsTilde,
NotSuperset: NotSuperset,
NotSupersetEqual: NotSupersetEqual,
NotTilde: NotTilde,
NotTildeEqual: NotTildeEqual,
NotTildeFullEqual: NotTildeFullEqual,
NotTildeTilde: NotTildeTilde,
NotVerticalBar: NotVerticalBar,
nparallel: nparallel,
npar: npar,
nparsl: nparsl,
npart: npart,
npolint: npolint,
npr: npr,
nprcue: nprcue,
nprec: nprec,
npreceq: npreceq,
npre: npre,
nrarrc: nrarrc,
nrarr: nrarr,
nrArr: nrArr,
nrarrw: nrarrw,
nrightarrow: nrightarrow,
nRightarrow: nRightarrow,
nrtri: nrtri,
nrtrie: nrtrie,
nsc: nsc,
nsccue: nsccue,
nsce: nsce,
Nscr: Nscr,
nscr: nscr,
nshortmid: nshortmid,
nshortparallel: nshortparallel,
nsim: nsim,
nsime: nsime,
nsimeq: nsimeq,
nsmid: nsmid,
nspar: nspar,
nsqsube: nsqsube,
nsqsupe: nsqsupe,
nsub: nsub,
nsubE: nsubE,
nsube: nsube,
nsubset: nsubset,
nsubseteq: nsubseteq,
nsubseteqq: nsubseteqq,
nsucc: nsucc,
nsucceq: nsucceq,
nsup: nsup,
nsupE: nsupE,
nsupe: nsupe,
nsupset: nsupset,
nsupseteq: nsupseteq,
nsupseteqq: nsupseteqq,
ntgl: ntgl,
Ntilde: Ntilde,
ntilde: ntilde,
ntlg: ntlg,
ntriangleleft: ntriangleleft,
ntrianglelefteq: ntrianglelefteq,
ntriangleright: ntriangleright,
ntrianglerighteq: ntrianglerighteq,
Nu: Nu,
nu: nu,
num: num,
numero: numero,
numsp: numsp,
nvap: nvap,
nvdash: nvdash,
nvDash: nvDash,
nVdash: nVdash,
nVDash: nVDash,
nvge: nvge,
nvgt: nvgt,
nvHarr: nvHarr,
nvinfin: nvinfin,
nvlArr: nvlArr,
nvle: nvle,
nvlt: nvlt,
nvltrie: nvltrie,
nvrArr: nvrArr,
nvrtrie: nvrtrie,
nvsim: nvsim,
nwarhk: nwarhk,
nwarr: nwarr,
nwArr: nwArr,
nwarrow: nwarrow,
nwnear: nwnear,
Oacute: Oacute,
oacute: oacute,
oast: oast,
Ocirc: Ocirc,
ocirc: ocirc,
ocir: ocir,
Ocy: Ocy,
ocy: ocy,
odash: odash,
Odblac: Odblac,
odblac: odblac,
odiv: odiv,
odot: odot,
odsold: odsold,
OElig: OElig,
oelig: oelig,
ofcir: ofcir,
Ofr: Ofr,
ofr: ofr,
ogon: ogon,
Ograve: Ograve,
ograve: ograve,
ogt: ogt,
ohbar: ohbar,
ohm: ohm,
oint: oint,
olarr: olarr,
olcir: olcir,
olcross: olcross,
oline: oline,
olt: olt,
Omacr: Omacr,
omacr: omacr,
Omega: Omega,
omega: omega,
Omicron: Omicron,
omicron: omicron,
omid: omid,
ominus: ominus,
Oopf: Oopf,
oopf: oopf,
opar: opar,
OpenCurlyDoubleQuote: OpenCurlyDoubleQuote,
OpenCurlyQuote: OpenCurlyQuote,
operp: operp,
oplus: oplus,
orarr: orarr,
Or: Or,
or: or,
ord: ord,
order: order,
orderof: orderof,
ordf: ordf,
ordm: ordm,
origof: origof,
oror: oror,
orslope: orslope,
orv: orv,
oS: oS,
Oscr: Oscr,
oscr: oscr,
Oslash: Oslash,
oslash: oslash,
osol: osol,
Otilde: Otilde,
otilde: otilde,
otimesas: otimesas,
Otimes: Otimes,
otimes: otimes,
Ouml: Ouml,
ouml: ouml,
ovbar: ovbar,
OverBar: OverBar,
OverBrace: OverBrace,
OverBracket: OverBracket,
OverParenthesis: OverParenthesis,
para: para,
parallel: parallel,
par: par,
parsim: parsim,
parsl: parsl,
part: part,
PartialD: PartialD,
Pcy: Pcy,
pcy: pcy,
percnt: percnt,
period: period,
permil: permil,
perp: perp,
pertenk: pertenk,
Pfr: Pfr,
pfr: pfr,
Phi: Phi,
phi: phi,
phiv: phiv,
phmmat: phmmat,
phone: phone,
Pi: Pi,
pi: pi,
pitchfork: pitchfork,
piv: piv,
planck: planck,
planckh: planckh,
plankv: plankv,
plusacir: plusacir,
plusb: plusb,
pluscir: pluscir,
plus: plus,
plusdo: plusdo,
plusdu: plusdu,
pluse: pluse,
PlusMinus: PlusMinus,
plusmn: plusmn,
plussim: plussim,
plustwo: plustwo,
pm: pm,
Poincareplane: Poincareplane,
pointint: pointint,
popf: popf,
Popf: Popf,
pound: pound,
prap: prap,
Pr: Pr,
pr: pr,
prcue: prcue,
precapprox: precapprox,
prec: prec,
preccurlyeq: preccurlyeq,
Precedes: Precedes,
PrecedesEqual: PrecedesEqual,
PrecedesSlantEqual: PrecedesSlantEqual,
PrecedesTilde: PrecedesTilde,
preceq: preceq,
precnapprox: precnapprox,
precneqq: precneqq,
precnsim: precnsim,
pre: pre,
prE: prE,
precsim: precsim,
prime: prime,
Prime: Prime,
primes: primes,
prnap: prnap,
prnE: prnE,
prnsim: prnsim,
prod: prod,
Product: Product,
profalar: profalar,
profline: profline,
profsurf: profsurf,
prop: prop,
Proportional: Proportional,
Proportion: Proportion,
propto: propto,
prsim: prsim,
prurel: prurel,
Pscr: Pscr,
pscr: pscr,
Psi: Psi,
psi: psi,
puncsp: puncsp,
Qfr: Qfr,
qfr: qfr,
qint: qint,
qopf: qopf,
Qopf: Qopf,
qprime: qprime,
Qscr: Qscr,
qscr: qscr,
quaternions: quaternions,
quatint: quatint,
quest: quest,
questeq: questeq,
quot: quot,
QUOT: QUOT,
rAarr: rAarr,
race: race,
Racute: Racute,
racute: racute,
radic: radic,
raemptyv: raemptyv,
rang: rang,
Rang: Rang,
rangd: rangd,
range: range,
rangle: rangle,
raquo: raquo,
rarrap: rarrap,
rarrb: rarrb,
rarrbfs: rarrbfs,
rarrc: rarrc,
rarr: rarr,
Rarr: Rarr,
rArr: rArr,
rarrfs: rarrfs,
rarrhk: rarrhk,
rarrlp: rarrlp,
rarrpl: rarrpl,
rarrsim: rarrsim,
Rarrtl: Rarrtl,
rarrtl: rarrtl,
rarrw: rarrw,
ratail: ratail,
rAtail: rAtail,
ratio: ratio,
rationals: rationals,
rbarr: rbarr,
rBarr: rBarr,
RBarr: RBarr,
rbbrk: rbbrk,
rbrace: rbrace,
rbrack: rbrack,
rbrke: rbrke,
rbrksld: rbrksld,
rbrkslu: rbrkslu,
Rcaron: Rcaron,
rcaron: rcaron,
Rcedil: Rcedil,
rcedil: rcedil,
rceil: rceil,
rcub: rcub,
Rcy: Rcy,
rcy: rcy,
rdca: rdca,
rdldhar: rdldhar,
rdquo: rdquo,
rdquor: rdquor,
rdsh: rdsh,
real: real,
realine: realine,
realpart: realpart,
reals: reals,
Re: Re,
rect: rect,
reg: reg,
REG: REG,
ReverseElement: ReverseElement,
ReverseEquilibrium: ReverseEquilibrium,
ReverseUpEquilibrium: ReverseUpEquilibrium,
rfisht: rfisht,
rfloor: rfloor,
rfr: rfr,
Rfr: Rfr,
rHar: rHar,
rhard: rhard,
rharu: rharu,
rharul: rharul,
Rho: Rho,
rho: rho,
rhov: rhov,
RightAngleBracket: RightAngleBracket,
RightArrowBar: RightArrowBar,
rightarrow: rightarrow,
RightArrow: RightArrow,
Rightarrow: Rightarrow,
RightArrowLeftArrow: RightArrowLeftArrow,
rightarrowtail: rightarrowtail,
RightCeiling: RightCeiling,
RightDoubleBracket: RightDoubleBracket,
RightDownTeeVector: RightDownTeeVector,
RightDownVectorBar: RightDownVectorBar,
RightDownVector: RightDownVector,
RightFloor: RightFloor,
rightharpoondown: rightharpoondown,
rightharpoonup: rightharpoonup,
rightleftarrows: rightleftarrows,
rightleftharpoons: rightleftharpoons,
rightrightarrows: rightrightarrows,
rightsquigarrow: rightsquigarrow,
RightTeeArrow: RightTeeArrow,
RightTee: RightTee,
RightTeeVector: RightTeeVector,
rightthreetimes: rightthreetimes,
RightTriangleBar: RightTriangleBar,
RightTriangle: RightTriangle,
RightTriangleEqual: RightTriangleEqual,
RightUpDownVector: RightUpDownVector,
RightUpTeeVector: RightUpTeeVector,
RightUpVectorBar: RightUpVectorBar,
RightUpVector: RightUpVector,
RightVectorBar: RightVectorBar,
RightVector: RightVector,
ring: ring,
risingdotseq: risingdotseq,
rlarr: rlarr,
rlhar: rlhar,
rlm: rlm,
rmoustache: rmoustache,
rmoust: rmoust,
rnmid: rnmid,
roang: roang,
roarr: roarr,
robrk: robrk,
ropar: ropar,
ropf: ropf,
Ropf: Ropf,
roplus: roplus,
rotimes: rotimes,
RoundImplies: RoundImplies,
rpar: rpar,
rpargt: rpargt,
rppolint: rppolint,
rrarr: rrarr,
Rrightarrow: Rrightarrow,
rsaquo: rsaquo,
rscr: rscr,
Rscr: Rscr,
rsh: rsh,
Rsh: Rsh,
rsqb: rsqb,
rsquo: rsquo,
rsquor: rsquor,
rthree: rthree,
rtimes: rtimes,
rtri: rtri,
rtrie: rtrie,
rtrif: rtrif,
rtriltri: rtriltri,
RuleDelayed: RuleDelayed,
ruluhar: ruluhar,
rx: rx,
Sacute: Sacute,
sacute: sacute,
sbquo: sbquo,
scap: scap,
Scaron: Scaron,
scaron: scaron,
Sc: Sc,
sc: sc,
sccue: sccue,
sce: sce,
scE: scE,
Scedil: Scedil,
scedil: scedil,
Scirc: Scirc,
scirc: scirc,
scnap: scnap,
scnE: scnE,
scnsim: scnsim,
scpolint: scpolint,
scsim: scsim,
Scy: Scy,
scy: scy,
sdotb: sdotb,
sdot: sdot,
sdote: sdote,
searhk: searhk,
searr: searr,
seArr: seArr,
searrow: searrow,
sect: sect,
semi: semi,
seswar: seswar,
setminus: setminus,
setmn: setmn,
sext: sext,
Sfr: Sfr,
sfr: sfr,
sfrown: sfrown,
sharp: sharp,
SHCHcy: SHCHcy,
shchcy: shchcy,
SHcy: SHcy,
shcy: shcy,
ShortDownArrow: ShortDownArrow,
ShortLeftArrow: ShortLeftArrow,
shortmid: shortmid,
shortparallel: shortparallel,
ShortRightArrow: ShortRightArrow,
ShortUpArrow: ShortUpArrow,
shy: shy,
Sigma: Sigma,
sigma: sigma,
sigmaf: sigmaf,
sigmav: sigmav,
sim: sim,
simdot: simdot,
sime: sime,
simeq: simeq,
simg: simg,
simgE: simgE,
siml: siml,
simlE: simlE,
simne: simne,
simplus: simplus,
simrarr: simrarr,
slarr: slarr,
SmallCircle: SmallCircle,
smallsetminus: smallsetminus,
smashp: smashp,
smeparsl: smeparsl,
smid: smid,
smile: smile,
smt: smt,
smte: smte,
smtes: smtes,
SOFTcy: SOFTcy,
softcy: softcy,
solbar: solbar,
solb: solb,
sol: sol,
Sopf: Sopf,
sopf: sopf,
spades: spades,
spadesuit: spadesuit,
spar: spar,
sqcap: sqcap,
sqcaps: sqcaps,
sqcup: sqcup,
sqcups: sqcups,
Sqrt: Sqrt,
sqsub: sqsub,
sqsube: sqsube,
sqsubset: sqsubset,
sqsubseteq: sqsubseteq,
sqsup: sqsup,
sqsupe: sqsupe,
sqsupset: sqsupset,
sqsupseteq: sqsupseteq,
square: square,
Square: Square,
SquareIntersection: SquareIntersection,
SquareSubset: SquareSubset,
SquareSubsetEqual: SquareSubsetEqual,
SquareSuperset: SquareSuperset,
SquareSupersetEqual: SquareSupersetEqual,
SquareUnion: SquareUnion,
squarf: squarf,
squ: squ,
squf: squf,
srarr: srarr,
Sscr: Sscr,
sscr: sscr,
ssetmn: ssetmn,
ssmile: ssmile,
sstarf: sstarf,
Star: Star,
star: star,
starf: starf,
straightepsilon: straightepsilon,
straightphi: straightphi,
strns: strns,
sub: sub,
Sub: Sub,
subdot: subdot,
subE: subE,
sube: sube,
subedot: subedot,
submult: submult,
subnE: subnE,
subne: subne,
subplus: subplus,
subrarr: subrarr,
subset: subset,
Subset: Subset,
subseteq: subseteq,
subseteqq: subseteqq,
SubsetEqual: SubsetEqual,
subsetneq: subsetneq,
subsetneqq: subsetneqq,
subsim: subsim,
subsub: subsub,
subsup: subsup,
succapprox: succapprox,
succ: succ,
succcurlyeq: succcurlyeq,
Succeeds: Succeeds,
SucceedsEqual: SucceedsEqual,
SucceedsSlantEqual: SucceedsSlantEqual,
SucceedsTilde: SucceedsTilde,
succeq: succeq,
succnapprox: succnapprox,
succneqq: succneqq,
succnsim: succnsim,
succsim: succsim,
SuchThat: SuchThat,
sum: sum,
Sum: Sum,
sung: sung,
sup1: sup1,
sup2: sup2,
sup3: sup3,
sup: sup,
Sup: Sup,
supdot: supdot,
supdsub: supdsub,
supE: supE,
supe: supe,
supedot: supedot,
Superset: Superset,
SupersetEqual: SupersetEqual,
suphsol: suphsol,
suphsub: suphsub,
suplarr: suplarr,
supmult: supmult,
supnE: supnE,
supne: supne,
supplus: supplus,
supset: supset,
Supset: Supset,
supseteq: supseteq,
supseteqq: supseteqq,
supsetneq: supsetneq,
supsetneqq: supsetneqq,
supsim: supsim,
supsub: supsub,
supsup: supsup,
swarhk: swarhk,
swarr: swarr,
swArr: swArr,
swarrow: swarrow,
swnwar: swnwar,
szlig: szlig,
Tab: Tab,
target: target,
Tau: Tau,
tau: tau,
tbrk: tbrk,
Tcaron: Tcaron,
tcaron: tcaron,
Tcedil: Tcedil,
tcedil: tcedil,
Tcy: Tcy,
tcy: tcy,
tdot: tdot,
telrec: telrec,
Tfr: Tfr,
tfr: tfr,
there4: there4,
therefore: therefore,
Therefore: Therefore,
Theta: Theta,
theta: theta,
thetasym: thetasym,
thetav: thetav,
thickapprox: thickapprox,
thicksim: thicksim,
ThickSpace: ThickSpace,
ThinSpace: ThinSpace,
thinsp: thinsp,
thkap: thkap,
thksim: thksim,
THORN: THORN,
thorn: thorn,
tilde: tilde,
Tilde: Tilde,
TildeEqual: TildeEqual,
TildeFullEqual: TildeFullEqual,
TildeTilde: TildeTilde,
timesbar: timesbar,
timesb: timesb,
times: times,
timesd: timesd,
tint: tint,
toea: toea,
topbot: topbot,
topcir: topcir,
top: top,
Topf: Topf,
topf: topf,
topfork: topfork,
tosa: tosa,
tprime: tprime,
trade: trade,
TRADE: TRADE,
triangle: triangle,
triangledown: triangledown,
triangleleft: triangleleft,
trianglelefteq: trianglelefteq,
triangleq: triangleq,
triangleright: triangleright,
trianglerighteq: trianglerighteq,
tridot: tridot,
trie: trie,
triminus: triminus,
TripleDot: TripleDot,
triplus: triplus,
trisb: trisb,
tritime: tritime,
trpezium: trpezium,
Tscr: Tscr,
tscr: tscr,
TScy: TScy,
tscy: tscy,
TSHcy: TSHcy,
tshcy: tshcy,
Tstrok: Tstrok,
tstrok: tstrok,
twixt: twixt,
twoheadleftarrow: twoheadleftarrow,
twoheadrightarrow: twoheadrightarrow,
Uacute: Uacute,
uacute: uacute,
uarr: uarr,
Uarr: Uarr,
uArr: uArr,
Uarrocir: Uarrocir,
Ubrcy: Ubrcy,
ubrcy: ubrcy,
Ubreve: Ubreve,
ubreve: ubreve,
Ucirc: Ucirc,
ucirc: ucirc,
Ucy: Ucy,
ucy: ucy,
udarr: udarr,
Udblac: Udblac,
udblac: udblac,
udhar: udhar,
ufisht: ufisht,
Ufr: Ufr,
ufr: ufr,
Ugrave: Ugrave,
ugrave: ugrave,
uHar: uHar,
uharl: uharl,
uharr: uharr,
uhblk: uhblk,
ulcorn: ulcorn,
ulcorner: ulcorner,
ulcrop: ulcrop,
ultri: ultri,
Umacr: Umacr,
umacr: umacr,
uml: uml,
UnderBar: UnderBar,
UnderBrace: UnderBrace,
UnderBracket: UnderBracket,
UnderParenthesis: UnderParenthesis,
Union: Union,
UnionPlus: UnionPlus,
Uogon: Uogon,
uogon: uogon,
Uopf: Uopf,
uopf: uopf,
UpArrowBar: UpArrowBar,
uparrow: uparrow,
UpArrow: UpArrow,
Uparrow: Uparrow,
UpArrowDownArrow: UpArrowDownArrow,
updownarrow: updownarrow,
UpDownArrow: UpDownArrow,
Updownarrow: Updownarrow,
UpEquilibrium: UpEquilibrium,
upharpoonleft: upharpoonleft,
upharpoonright: upharpoonright,
uplus: uplus,
UpperLeftArrow: UpperLeftArrow,
UpperRightArrow: UpperRightArrow,
upsi: upsi,
Upsi: Upsi,
upsih: upsih,
Upsilon: Upsilon,
upsilon: upsilon,
UpTeeArrow: UpTeeArrow,
UpTee: UpTee,
upuparrows: upuparrows,
urcorn: urcorn,
urcorner: urcorner,
urcrop: urcrop,
Uring: Uring,
uring: uring,
urtri: urtri,
Uscr: Uscr,
uscr: uscr,
utdot: utdot,
Utilde: Utilde,
utilde: utilde,
utri: utri,
utrif: utrif,
uuarr: uuarr,
Uuml: Uuml,
uuml: uuml,
uwangle: uwangle,
vangrt: vangrt,
varepsilon: varepsilon,
varkappa: varkappa,
varnothing: varnothing,
varphi: varphi,
varpi: varpi,
varpropto: varpropto,
varr: varr,
vArr: vArr,
varrho: varrho,
varsigma: varsigma,
varsubsetneq: varsubsetneq,
varsubsetneqq: varsubsetneqq,
varsupsetneq: varsupsetneq,
varsupsetneqq: varsupsetneqq,
vartheta: vartheta,
vartriangleleft: vartriangleleft,
vartriangleright: vartriangleright,
vBar: vBar,
Vbar: Vbar,
vBarv: vBarv,
Vcy: Vcy,
vcy: vcy,
vdash: vdash,
vDash: vDash,
Vdash: Vdash,
VDash: VDash,
Vdashl: Vdashl,
veebar: veebar,
vee: vee,
Vee: Vee,
veeeq: veeeq,
vellip: vellip,
verbar: verbar,
Verbar: Verbar,
vert: vert,
Vert: Vert,
VerticalBar: VerticalBar,
VerticalLine: VerticalLine,
VerticalSeparator: VerticalSeparator,
VerticalTilde: VerticalTilde,
VeryThinSpace: VeryThinSpace,
Vfr: Vfr,
vfr: vfr,
vltri: vltri,
vnsub: vnsub,
vnsup: vnsup,
Vopf: Vopf,
vopf: vopf,
vprop: vprop,
vrtri: vrtri,
Vscr: Vscr,
vscr: vscr,
vsubnE: vsubnE,
vsubne: vsubne,
vsupnE: vsupnE,
vsupne: vsupne,
Vvdash: Vvdash,
vzigzag: vzigzag,
Wcirc: Wcirc,
wcirc: wcirc,
wedbar: wedbar,
wedge: wedge,
Wedge: Wedge,
wedgeq: wedgeq,
weierp: weierp,
Wfr: Wfr,
wfr: wfr,
Wopf: Wopf,
wopf: wopf,
wp: wp,
wr: wr,
wreath: wreath,
Wscr: Wscr,
wscr: wscr,
xcap: xcap,
xcirc: xcirc,
xcup: xcup,
xdtri: xdtri,
Xfr: Xfr,
xfr: xfr,
xharr: xharr,
xhArr: xhArr,
Xi: Xi,
xi: xi,
xlarr: xlarr,
xlArr: xlArr,
xmap: xmap,
xnis: xnis,
xodot: xodot,
Xopf: Xopf,
xopf: xopf,
xoplus: xoplus,
xotime: xotime,
xrarr: xrarr,
xrArr: xrArr,
Xscr: Xscr,
xscr: xscr,
xsqcup: xsqcup,
xuplus: xuplus,
xutri: xutri,
xvee: xvee,
xwedge: xwedge,
Yacute: Yacute,
yacute: yacute,
YAcy: YAcy,
yacy: yacy,
Ycirc: Ycirc,
ycirc: ycirc,
Ycy: Ycy,
ycy: ycy,
yen: yen,
Yfr: Yfr,
yfr: yfr,
YIcy: YIcy,
yicy: yicy,
Yopf: Yopf,
yopf: yopf,
Yscr: Yscr,
yscr: yscr,
YUcy: YUcy,
yucy: yucy,
yuml: yuml,
Yuml: Yuml,
Zacute: Zacute,
zacute: zacute,
Zcaron: Zcaron,
zcaron: zcaron,
Zcy: Zcy,
zcy: zcy,
Zdot: Zdot,
zdot: zdot,
zeetrf: zeetrf,
ZeroWidthSpace: ZeroWidthSpace,
Zeta: Zeta,
zeta: zeta,
zfr: zfr,
Zfr: Zfr,
ZHcy: ZHcy,
zhcy: zhcy,
zigrarr: zigrarr,
zopf: zopf,
Zopf: Zopf,
Zscr: Zscr,
zscr: zscr,
zwj: zwj,
zwnj: zwnj
};
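// `entities$1` below appears to be bundler-generated output (e.g. Rollup's
// frozen-namespace pattern): it re-exposes the same entity map as an immutable
// module namespace object and intentionally duplicates the keys above.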
 
var entities$1 = /*#__PURE__*/Object.freeze({
__proto__: null,
Aacute: Aacute,
aacute: aacute,
Abreve: Abreve,
abreve: abreve,
ac: ac,
acd: acd,
acE: acE,
Acirc: Acirc,
acirc: acirc,
acute: acute,
Acy: Acy,
acy: acy,
AElig: AElig,
aelig: aelig,
af: af,
Afr: Afr,
afr: afr,
Agrave: Agrave,
agrave: agrave,
alefsym: alefsym,
aleph: aleph,
Alpha: Alpha,
alpha: alpha,
Amacr: Amacr,
amacr: amacr,
amalg: amalg,
amp: amp,
AMP: AMP,
andand: andand,
And: And,
and: and,
andd: andd,
andslope: andslope,
andv: andv,
ang: ang,
ange: ange,
angle: angle,
angmsdaa: angmsdaa,
angmsdab: angmsdab,
angmsdac: angmsdac,
angmsdad: angmsdad,
angmsdae: angmsdae,
angmsdaf: angmsdaf,
angmsdag: angmsdag,
angmsdah: angmsdah,
angmsd: angmsd,
angrt: angrt,
angrtvb: angrtvb,
angrtvbd: angrtvbd,
angsph: angsph,
angst: angst,
angzarr: angzarr,
Aogon: Aogon,
aogon: aogon,
Aopf: Aopf,
aopf: aopf,
apacir: apacir,
ap: ap,
apE: apE,
ape: ape,
apid: apid,
apos: apos,
ApplyFunction: ApplyFunction,
approx: approx,
approxeq: approxeq,
Aring: Aring,
aring: aring,
Ascr: Ascr,
ascr: ascr,
Assign: Assign,
ast: ast,
asymp: asymp,
asympeq: asympeq,
Atilde: Atilde,
atilde: atilde,
Auml: Auml,
auml: auml,
awconint: awconint,
awint: awint,
backcong: backcong,
backepsilon: backepsilon,
backprime: backprime,
backsim: backsim,
backsimeq: backsimeq,
Backslash: Backslash,
Barv: Barv,
barvee: barvee,
barwed: barwed,
Barwed: Barwed,
barwedge: barwedge,
bbrk: bbrk,
bbrktbrk: bbrktbrk,
bcong: bcong,
Bcy: Bcy,
bcy: bcy,
bdquo: bdquo,
becaus: becaus,
because: because,
Because: Because,
bemptyv: bemptyv,
bepsi: bepsi,
bernou: bernou,
Bernoullis: Bernoullis,
Beta: Beta,
beta: beta,
beth: beth,
between: between,
Bfr: Bfr,
bfr: bfr,
bigcap: bigcap,
bigcirc: bigcirc,
bigcup: bigcup,
bigodot: bigodot,
bigoplus: bigoplus,
bigotimes: bigotimes,
bigsqcup: bigsqcup,
bigstar: bigstar,
bigtriangledown: bigtriangledown,
bigtriangleup: bigtriangleup,
biguplus: biguplus,
bigvee: bigvee,
bigwedge: bigwedge,
bkarow: bkarow,
blacklozenge: blacklozenge,
blacksquare: blacksquare,
blacktriangle: blacktriangle,
blacktriangledown: blacktriangledown,
blacktriangleleft: blacktriangleleft,
blacktriangleright: blacktriangleright,
blank: blank,
blk12: blk12,
blk14: blk14,
blk34: blk34,
block: block,
bne: bne,
bnequiv: bnequiv,
bNot: bNot,
bnot: bnot,
Bopf: Bopf,
bopf: bopf,
bot: bot,
bottom: bottom,
bowtie: bowtie,
boxbox: boxbox,
boxdl: boxdl,
boxdL: boxdL,
boxDl: boxDl,
boxDL: boxDL,
boxdr: boxdr,
boxdR: boxdR,
boxDr: boxDr,
boxDR: boxDR,
boxh: boxh,
boxH: boxH,
boxhd: boxhd,
boxHd: boxHd,
boxhD: boxhD,
boxHD: boxHD,
boxhu: boxhu,
boxHu: boxHu,
boxhU: boxhU,
boxHU: boxHU,
boxminus: boxminus,
boxplus: boxplus,
boxtimes: boxtimes,
boxul: boxul,
boxuL: boxuL,
boxUl: boxUl,
boxUL: boxUL,
boxur: boxur,
boxuR: boxuR,
boxUr: boxUr,
boxUR: boxUR,
boxv: boxv,
boxV: boxV,
boxvh: boxvh,
boxvH: boxvH,
boxVh: boxVh,
boxVH: boxVH,
boxvl: boxvl,
boxvL: boxvL,
boxVl: boxVl,
boxVL: boxVL,
boxvr: boxvr,
boxvR: boxvR,
boxVr: boxVr,
boxVR: boxVR,
bprime: bprime,
breve: breve,
Breve: Breve,
brvbar: brvbar,
bscr: bscr,
Bscr: Bscr,
bsemi: bsemi,
bsim: bsim,
bsime: bsime,
bsolb: bsolb,
bsol: bsol,
bsolhsub: bsolhsub,
bull: bull,
bullet: bullet,
bump: bump,
bumpE: bumpE,
bumpe: bumpe,
Bumpeq: Bumpeq,
bumpeq: bumpeq,
Cacute: Cacute,
cacute: cacute,
capand: capand,
capbrcup: capbrcup,
capcap: capcap,
cap: cap,
Cap: Cap,
capcup: capcup,
capdot: capdot,
CapitalDifferentialD: CapitalDifferentialD,
caps: caps,
caret: caret,
caron: caron,
Cayleys: Cayleys,
ccaps: ccaps,
Ccaron: Ccaron,
ccaron: ccaron,
Ccedil: Ccedil,
ccedil: ccedil,
Ccirc: Ccirc,
ccirc: ccirc,
Cconint: Cconint,
ccups: ccups,
ccupssm: ccupssm,
Cdot: Cdot,
cdot: cdot,
cedil: cedil,
Cedilla: Cedilla,
cemptyv: cemptyv,
cent: cent,
centerdot: centerdot,
CenterDot: CenterDot,
cfr: cfr,
Cfr: Cfr,
CHcy: CHcy,
chcy: chcy,
check: check,
checkmark: checkmark,
Chi: Chi,
chi: chi,
circ: circ,
circeq: circeq,
circlearrowleft: circlearrowleft,
circlearrowright: circlearrowright,
circledast: circledast,
circledcirc: circledcirc,
circleddash: circleddash,
CircleDot: CircleDot,
circledR: circledR,
circledS: circledS,
CircleMinus: CircleMinus,
CirclePlus: CirclePlus,
CircleTimes: CircleTimes,
cir: cir,
cirE: cirE,
cire: cire,
cirfnint: cirfnint,
cirmid: cirmid,
cirscir: cirscir,
ClockwiseContourIntegral: ClockwiseContourIntegral,
CloseCurlyDoubleQuote: CloseCurlyDoubleQuote,
CloseCurlyQuote: CloseCurlyQuote,
clubs: clubs,
clubsuit: clubsuit,
colon: colon,
Colon: Colon,
Colone: Colone,
colone: colone,
coloneq: coloneq,
comma: comma,
commat: commat,
comp: comp,
compfn: compfn,
complement: complement,
complexes: complexes,
cong: cong,
congdot: congdot,
Congruent: Congruent,
conint: conint,
Conint: Conint,
ContourIntegral: ContourIntegral,
copf: copf,
Copf: Copf,
coprod: coprod,
Coproduct: Coproduct,
copy: copy,
COPY: COPY,
copysr: copysr,
CounterClockwiseContourIntegral: CounterClockwiseContourIntegral,
crarr: crarr,
cross: cross,
Cross: Cross,
Cscr: Cscr,
cscr: cscr,
csub: csub,
csube: csube,
csup: csup,
csupe: csupe,
ctdot: ctdot,
cudarrl: cudarrl,
cudarrr: cudarrr,
cuepr: cuepr,
cuesc: cuesc,
cularr: cularr,
cularrp: cularrp,
cupbrcap: cupbrcap,
cupcap: cupcap,
CupCap: CupCap,
cup: cup,
Cup: Cup,
cupcup: cupcup,
cupdot: cupdot,
cupor: cupor,
cups: cups,
curarr: curarr,
curarrm: curarrm,
curlyeqprec: curlyeqprec,
curlyeqsucc: curlyeqsucc,
curlyvee: curlyvee,
curlywedge: curlywedge,
curren: curren,
curvearrowleft: curvearrowleft,
curvearrowright: curvearrowright,
cuvee: cuvee,
cuwed: cuwed,
cwconint: cwconint,
cwint: cwint,
cylcty: cylcty,
dagger: dagger,
Dagger: Dagger,
daleth: daleth,
darr: darr,
Darr: Darr,
dArr: dArr,
dash: dash,
Dashv: Dashv,
dashv: dashv,
dbkarow: dbkarow,
dblac: dblac,
Dcaron: Dcaron,
dcaron: dcaron,
Dcy: Dcy,
dcy: dcy,
ddagger: ddagger,
ddarr: ddarr,
DD: DD,
dd: dd,
DDotrahd: DDotrahd,
ddotseq: ddotseq,
deg: deg,
Del: Del,
Delta: Delta,
delta: delta,
demptyv: demptyv,
dfisht: dfisht,
Dfr: Dfr,
dfr: dfr,
dHar: dHar,
dharl: dharl,
dharr: dharr,
DiacriticalAcute: DiacriticalAcute,
DiacriticalDot: DiacriticalDot,
DiacriticalDoubleAcute: DiacriticalDoubleAcute,
DiacriticalGrave: DiacriticalGrave,
DiacriticalTilde: DiacriticalTilde,
diam: diam,
diamond: diamond,
Diamond: Diamond,
diamondsuit: diamondsuit,
diams: diams,
die: die,
DifferentialD: DifferentialD,
digamma: digamma,
disin: disin,
div: div,
divide: divide,
divideontimes: divideontimes,
divonx: divonx,
DJcy: DJcy,
djcy: djcy,
dlcorn: dlcorn,
dlcrop: dlcrop,
dollar: dollar,
Dopf: Dopf,
dopf: dopf,
Dot: Dot,
dot: dot,
DotDot: DotDot,
doteq: doteq,
doteqdot: doteqdot,
DotEqual: DotEqual,
dotminus: dotminus,
dotplus: dotplus,
dotsquare: dotsquare,
doublebarwedge: doublebarwedge,
DoubleContourIntegral: DoubleContourIntegral,
DoubleDot: DoubleDot,
DoubleDownArrow: DoubleDownArrow,
DoubleLeftArrow: DoubleLeftArrow,
DoubleLeftRightArrow: DoubleLeftRightArrow,
DoubleLeftTee: DoubleLeftTee,
DoubleLongLeftArrow: DoubleLongLeftArrow,
DoubleLongLeftRightArrow: DoubleLongLeftRightArrow,
DoubleLongRightArrow: DoubleLongRightArrow,
DoubleRightArrow: DoubleRightArrow,
DoubleRightTee: DoubleRightTee,
DoubleUpArrow: DoubleUpArrow,
DoubleUpDownArrow: DoubleUpDownArrow,
DoubleVerticalBar: DoubleVerticalBar,
DownArrowBar: DownArrowBar,
downarrow: downarrow,
DownArrow: DownArrow,
Downarrow: Downarrow,
DownArrowUpArrow: DownArrowUpArrow,
DownBreve: DownBreve,
downdownarrows: downdownarrows,
downharpoonleft: downharpoonleft,
downharpoonright: downharpoonright,
DownLeftRightVector: DownLeftRightVector,
DownLeftTeeVector: DownLeftTeeVector,
DownLeftVectorBar: DownLeftVectorBar,
DownLeftVector: DownLeftVector,
DownRightTeeVector: DownRightTeeVector,
DownRightVectorBar: DownRightVectorBar,
DownRightVector: DownRightVector,
DownTeeArrow: DownTeeArrow,
DownTee: DownTee,
drbkarow: drbkarow,
drcorn: drcorn,
drcrop: drcrop,
Dscr: Dscr,
dscr: dscr,
DScy: DScy,
dscy: dscy,
dsol: dsol,
Dstrok: Dstrok,
dstrok: dstrok,
dtdot: dtdot,
dtri: dtri,
dtrif: dtrif,
duarr: duarr,
duhar: duhar,
dwangle: dwangle,
DZcy: DZcy,
dzcy: dzcy,
dzigrarr: dzigrarr,
Eacute: Eacute,
eacute: eacute,
easter: easter,
Ecaron: Ecaron,
ecaron: ecaron,
Ecirc: Ecirc,
ecirc: ecirc,
ecir: ecir,
ecolon: ecolon,
Ecy: Ecy,
ecy: ecy,
eDDot: eDDot,
Edot: Edot,
edot: edot,
eDot: eDot,
ee: ee,
efDot: efDot,
Efr: Efr,
efr: efr,
eg: eg,
Egrave: Egrave,
egrave: egrave,
egs: egs,
egsdot: egsdot,
el: el,
Element: Element,
elinters: elinters,
ell: ell,
els: els,
elsdot: elsdot,
Emacr: Emacr,
emacr: emacr,
empty: empty,
emptyset: emptyset,
EmptySmallSquare: EmptySmallSquare,
emptyv: emptyv,
EmptyVerySmallSquare: EmptyVerySmallSquare,
emsp13: emsp13,
emsp14: emsp14,
emsp: emsp,
ENG: ENG,
eng: eng,
ensp: ensp,
Eogon: Eogon,
eogon: eogon,
Eopf: Eopf,
eopf: eopf,
epar: epar,
eparsl: eparsl,
eplus: eplus,
epsi: epsi,
Epsilon: Epsilon,
epsilon: epsilon,
epsiv: epsiv,
eqcirc: eqcirc,
eqcolon: eqcolon,
eqsim: eqsim,
eqslantgtr: eqslantgtr,
eqslantless: eqslantless,
Equal: Equal,
equals: equals,
EqualTilde: EqualTilde,
equest: equest,
Equilibrium: Equilibrium,
equiv: equiv,
equivDD: equivDD,
eqvparsl: eqvparsl,
erarr: erarr,
erDot: erDot,
escr: escr,
Escr: Escr,
esdot: esdot,
Esim: Esim,
esim: esim,
Eta: Eta,
eta: eta,
ETH: ETH,
eth: eth,
Euml: Euml,
euml: euml,
euro: euro,
excl: excl,
exist: exist,
Exists: Exists,
expectation: expectation,
exponentiale: exponentiale,
ExponentialE: ExponentialE,
fallingdotseq: fallingdotseq,
Fcy: Fcy,
fcy: fcy,
female: female,
ffilig: ffilig,
fflig: fflig,
ffllig: ffllig,
Ffr: Ffr,
ffr: ffr,
filig: filig,
FilledSmallSquare: FilledSmallSquare,
FilledVerySmallSquare: FilledVerySmallSquare,
fjlig: fjlig,
flat: flat,
fllig: fllig,
fltns: fltns,
fnof: fnof,
Fopf: Fopf,
fopf: fopf,
forall: forall,
ForAll: ForAll,
fork: fork,
forkv: forkv,
Fouriertrf: Fouriertrf,
fpartint: fpartint,
frac12: frac12,
frac13: frac13,
frac14: frac14,
frac15: frac15,
frac16: frac16,
frac18: frac18,
frac23: frac23,
frac25: frac25,
frac34: frac34,
frac35: frac35,
frac38: frac38,
frac45: frac45,
frac56: frac56,
frac58: frac58,
frac78: frac78,
frasl: frasl,
frown: frown,
fscr: fscr,
Fscr: Fscr,
gacute: gacute,
Gamma: Gamma,
gamma: gamma,
Gammad: Gammad,
gammad: gammad,
gap: gap,
Gbreve: Gbreve,
gbreve: gbreve,
Gcedil: Gcedil,
Gcirc: Gcirc,
gcirc: gcirc,
Gcy: Gcy,
gcy: gcy,
Gdot: Gdot,
gdot: gdot,
ge: ge,
gE: gE,
gEl: gEl,
gel: gel,
geq: geq,
geqq: geqq,
geqslant: geqslant,
gescc: gescc,
ges: ges,
gesdot: gesdot,
gesdoto: gesdoto,
gesdotol: gesdotol,
gesl: gesl,
gesles: gesles,
Gfr: Gfr,
gfr: gfr,
gg: gg,
Gg: Gg,
ggg: ggg,
gimel: gimel,
GJcy: GJcy,
gjcy: gjcy,
gla: gla,
gl: gl,
glE: glE,
glj: glj,
gnap: gnap,
gnapprox: gnapprox,
gne: gne,
gnE: gnE,
gneq: gneq,
gneqq: gneqq,
gnsim: gnsim,
Gopf: Gopf,
gopf: gopf,
grave: grave,
GreaterEqual: GreaterEqual,
GreaterEqualLess: GreaterEqualLess,
GreaterFullEqual: GreaterFullEqual,
GreaterGreater: GreaterGreater,
GreaterLess: GreaterLess,
GreaterSlantEqual: GreaterSlantEqual,
GreaterTilde: GreaterTilde,
Gscr: Gscr,
gscr: gscr,
gsim: gsim,
gsime: gsime,
gsiml: gsiml,
gtcc: gtcc,
gtcir: gtcir,
gt: gt,
GT: GT,
Gt: Gt,
gtdot: gtdot,
gtlPar: gtlPar,
gtquest: gtquest,
gtrapprox: gtrapprox,
gtrarr: gtrarr,
gtrdot: gtrdot,
gtreqless: gtreqless,
gtreqqless: gtreqqless,
gtrless: gtrless,
gtrsim: gtrsim,
gvertneqq: gvertneqq,
gvnE: gvnE,
Hacek: Hacek,
hairsp: hairsp,
half: half,
hamilt: hamilt,
HARDcy: HARDcy,
hardcy: hardcy,
harrcir: harrcir,
harr: harr,
hArr: hArr,
harrw: harrw,
Hat: Hat,
hbar: hbar,
Hcirc: Hcirc,
hcirc: hcirc,
hearts: hearts,
heartsuit: heartsuit,
hellip: hellip,
hercon: hercon,
hfr: hfr,
Hfr: Hfr,
HilbertSpace: HilbertSpace,
hksearow: hksearow,
hkswarow: hkswarow,
hoarr: hoarr,
homtht: homtht,
hookleftarrow: hookleftarrow,
hookrightarrow: hookrightarrow,
hopf: hopf,
Hopf: Hopf,
horbar: horbar,
HorizontalLine: HorizontalLine,
hscr: hscr,
Hscr: Hscr,
hslash: hslash,
Hstrok: Hstrok,
hstrok: hstrok,
HumpDownHump: HumpDownHump,
HumpEqual: HumpEqual,
hybull: hybull,
hyphen: hyphen,
Iacute: Iacute,
iacute: iacute,
ic: ic,
Icirc: Icirc,
icirc: icirc,
Icy: Icy,
icy: icy,
Idot: Idot,
IEcy: IEcy,
iecy: iecy,
iexcl: iexcl,
iff: iff,
ifr: ifr,
Ifr: Ifr,
Igrave: Igrave,
igrave: igrave,
ii: ii,
iiiint: iiiint,
iiint: iiint,
iinfin: iinfin,
iiota: iiota,
IJlig: IJlig,
ijlig: ijlig,
Imacr: Imacr,
imacr: imacr,
image: image,
ImaginaryI: ImaginaryI,
imagline: imagline,
imagpart: imagpart,
imath: imath,
Im: Im,
imof: imof,
imped: imped,
Implies: Implies,
incare: incare,
infin: infin,
infintie: infintie,
inodot: inodot,
intcal: intcal,
int: int,
Int: Int,
integers: integers,
Integral: Integral,
intercal: intercal,
Intersection: Intersection,
intlarhk: intlarhk,
intprod: intprod,
InvisibleComma: InvisibleComma,
InvisibleTimes: InvisibleTimes,
IOcy: IOcy,
iocy: iocy,
Iogon: Iogon,
iogon: iogon,
Iopf: Iopf,
iopf: iopf,
Iota: Iota,
iota: iota,
iprod: iprod,
iquest: iquest,
iscr: iscr,
Iscr: Iscr,
isin: isin,
isindot: isindot,
isinE: isinE,
isins: isins,
isinsv: isinsv,
isinv: isinv,
it: it,
Itilde: Itilde,
itilde: itilde,
Iukcy: Iukcy,
iukcy: iukcy,
Iuml: Iuml,
iuml: iuml,
Jcirc: Jcirc,
jcirc: jcirc,
Jcy: Jcy,
jcy: jcy,
Jfr: Jfr,
jfr: jfr,
jmath: jmath,
Jopf: Jopf,
jopf: jopf,
Jscr: Jscr,
jscr: jscr,
Jsercy: Jsercy,
jsercy: jsercy,
Jukcy: Jukcy,
jukcy: jukcy,
Kappa: Kappa,
kappa: kappa,
kappav: kappav,
Kcedil: Kcedil,
kcedil: kcedil,
Kcy: Kcy,
kcy: kcy,
Kfr: Kfr,
kfr: kfr,
kgreen: kgreen,
KHcy: KHcy,
khcy: khcy,
KJcy: KJcy,
kjcy: kjcy,
Kopf: Kopf,
kopf: kopf,
Kscr: Kscr,
kscr: kscr,
lAarr: lAarr,
Lacute: Lacute,
lacute: lacute,
laemptyv: laemptyv,
lagran: lagran,
Lambda: Lambda,
lambda: lambda,
lang: lang,
Lang: Lang,
langd: langd,
langle: langle,
lap: lap,
Laplacetrf: Laplacetrf,
laquo: laquo,
larrb: larrb,
larrbfs: larrbfs,
larr: larr,
Larr: Larr,
lArr: lArr,
larrfs: larrfs,
larrhk: larrhk,
larrlp: larrlp,
larrpl: larrpl,
larrsim: larrsim,
larrtl: larrtl,
latail: latail,
lAtail: lAtail,
lat: lat,
late: late,
lates: lates,
lbarr: lbarr,
lBarr: lBarr,
lbbrk: lbbrk,
lbrace: lbrace,
lbrack: lbrack,
lbrke: lbrke,
lbrksld: lbrksld,
lbrkslu: lbrkslu,
Lcaron: Lcaron,
lcaron: lcaron,
Lcedil: Lcedil,
lcedil: lcedil,
lceil: lceil,
lcub: lcub,
Lcy: Lcy,
lcy: lcy,
ldca: ldca,
ldquo: ldquo,
ldquor: ldquor,
ldrdhar: ldrdhar,
ldrushar: ldrushar,
ldsh: ldsh,
le: le,
lE: lE,
LeftAngleBracket: LeftAngleBracket,
LeftArrowBar: LeftArrowBar,
leftarrow: leftarrow,
LeftArrow: LeftArrow,
Leftarrow: Leftarrow,
LeftArrowRightArrow: LeftArrowRightArrow,
leftarrowtail: leftarrowtail,
LeftCeiling: LeftCeiling,
LeftDoubleBracket: LeftDoubleBracket,
LeftDownTeeVector: LeftDownTeeVector,
LeftDownVectorBar: LeftDownVectorBar,
LeftDownVector: LeftDownVector,
LeftFloor: LeftFloor,
leftharpoondown: leftharpoondown,
leftharpoonup: leftharpoonup,
leftleftarrows: leftleftarrows,
leftrightarrow: leftrightarrow,
LeftRightArrow: LeftRightArrow,
Leftrightarrow: Leftrightarrow,
leftrightarrows: leftrightarrows,
leftrightharpoons: leftrightharpoons,
leftrightsquigarrow: leftrightsquigarrow,
LeftRightVector: LeftRightVector,
LeftTeeArrow: LeftTeeArrow,
LeftTee: LeftTee,
LeftTeeVector: LeftTeeVector,
leftthreetimes: leftthreetimes,
LeftTriangleBar: LeftTriangleBar,
LeftTriangle: LeftTriangle,
LeftTriangleEqual: LeftTriangleEqual,
LeftUpDownVector: LeftUpDownVector,
LeftUpTeeVector: LeftUpTeeVector,
LeftUpVectorBar: LeftUpVectorBar,
LeftUpVector: LeftUpVector,
LeftVectorBar: LeftVectorBar,
LeftVector: LeftVector,
lEg: lEg,
leg: leg,
leq: leq,
leqq: leqq,
leqslant: leqslant,
lescc: lescc,
les: les,
lesdot: lesdot,
lesdoto: lesdoto,
lesdotor: lesdotor,
lesg: lesg,
lesges: lesges,
lessapprox: lessapprox,
lessdot: lessdot,
lesseqgtr: lesseqgtr,
lesseqqgtr: lesseqqgtr,
LessEqualGreater: LessEqualGreater,
LessFullEqual: LessFullEqual,
LessGreater: LessGreater,
lessgtr: lessgtr,
LessLess: LessLess,
lesssim: lesssim,
LessSlantEqual: LessSlantEqual,
LessTilde: LessTilde,
lfisht: lfisht,
lfloor: lfloor,
Lfr: Lfr,
lfr: lfr,
lg: lg,
lgE: lgE,
lHar: lHar,
lhard: lhard,
lharu: lharu,
lharul: lharul,
lhblk: lhblk,
LJcy: LJcy,
ljcy: ljcy,
llarr: llarr,
ll: ll,
Ll: Ll,
llcorner: llcorner,
Lleftarrow: Lleftarrow,
llhard: llhard,
lltri: lltri,
Lmidot: Lmidot,
lmidot: lmidot,
lmoustache: lmoustache,
lmoust: lmoust,
lnap: lnap,
lnapprox: lnapprox,
lne: lne,
lnE: lnE,
lneq: lneq,
lneqq: lneqq,
lnsim: lnsim,
loang: loang,
loarr: loarr,
lobrk: lobrk,
longleftarrow: longleftarrow,
LongLeftArrow: LongLeftArrow,
Longleftarrow: Longleftarrow,
longleftrightarrow: longleftrightarrow,
LongLeftRightArrow: LongLeftRightArrow,
Longleftrightarrow: Longleftrightarrow,
longmapsto: longmapsto,
longrightarrow: longrightarrow,
LongRightArrow: LongRightArrow,
Longrightarrow: Longrightarrow,
looparrowleft: looparrowleft,
looparrowright: looparrowright,
lopar: lopar,
Lopf: Lopf,
lopf: lopf,
loplus: loplus,
lotimes: lotimes,
lowast: lowast,
lowbar: lowbar,
LowerLeftArrow: LowerLeftArrow,
LowerRightArrow: LowerRightArrow,
loz: loz,
lozenge: lozenge,
lozf: lozf,
lpar: lpar,
lparlt: lparlt,
lrarr: lrarr,
lrcorner: lrcorner,
lrhar: lrhar,
lrhard: lrhard,
lrm: lrm,
lrtri: lrtri,
lsaquo: lsaquo,
lscr: lscr,
Lscr: Lscr,
lsh: lsh,
Lsh: Lsh,
lsim: lsim,
lsime: lsime,
lsimg: lsimg,
lsqb: lsqb,
lsquo: lsquo,
lsquor: lsquor,
Lstrok: Lstrok,
lstrok: lstrok,
ltcc: ltcc,
ltcir: ltcir,
lt: lt,
LT: LT,
Lt: Lt,
ltdot: ltdot,
lthree: lthree,
ltimes: ltimes,
ltlarr: ltlarr,
ltquest: ltquest,
ltri: ltri,
ltrie: ltrie,
ltrif: ltrif,
ltrPar: ltrPar,
lurdshar: lurdshar,
luruhar: luruhar,
lvertneqq: lvertneqq,
lvnE: lvnE,
macr: macr,
male: male,
malt: malt,
maltese: maltese,
map: map,
mapsto: mapsto,
mapstodown: mapstodown,
mapstoleft: mapstoleft,
mapstoup: mapstoup,
marker: marker,
mcomma: mcomma,
Mcy: Mcy,
mcy: mcy,
mdash: mdash,
mDDot: mDDot,
measuredangle: measuredangle,
MediumSpace: MediumSpace,
Mellintrf: Mellintrf,
Mfr: Mfr,
mfr: mfr,
mho: mho,
micro: micro,
midast: midast,
midcir: midcir,
mid: mid,
middot: middot,
minusb: minusb,
minus: minus,
minusd: minusd,
minusdu: minusdu,
MinusPlus: MinusPlus,
mlcp: mlcp,
mldr: mldr,
mnplus: mnplus,
models: models,
Mopf: Mopf,
mopf: mopf,
mp: mp,
mscr: mscr,
Mscr: Mscr,
mstpos: mstpos,
Mu: Mu,
mu: mu,
multimap: multimap,
mumap: mumap,
nabla: nabla,
Nacute: Nacute,
nacute: nacute,
nang: nang,
nap: nap,
napE: napE,
napid: napid,
napos: napos,
napprox: napprox,
natural: natural,
naturals: naturals,
natur: natur,
nbsp: nbsp,
nbump: nbump,
nbumpe: nbumpe,
ncap: ncap,
Ncaron: Ncaron,
ncaron: ncaron,
Ncedil: Ncedil,
ncedil: ncedil,
ncong: ncong,
ncongdot: ncongdot,
ncup: ncup,
Ncy: Ncy,
ncy: ncy,
ndash: ndash,
nearhk: nearhk,
nearr: nearr,
neArr: neArr,
nearrow: nearrow,
ne: ne,
nedot: nedot,
NegativeMediumSpace: NegativeMediumSpace,
NegativeThickSpace: NegativeThickSpace,
NegativeThinSpace: NegativeThinSpace,
NegativeVeryThinSpace: NegativeVeryThinSpace,
nequiv: nequiv,
nesear: nesear,
nesim: nesim,
NestedGreaterGreater: NestedGreaterGreater,
NestedLessLess: NestedLessLess,
NewLine: NewLine,
nexist: nexist,
nexists: nexists,
Nfr: Nfr,
nfr: nfr,
ngE: ngE,
nge: nge,
ngeq: ngeq,
ngeqq: ngeqq,
ngeqslant: ngeqslant,
nges: nges,
nGg: nGg,
ngsim: ngsim,
nGt: nGt,
ngt: ngt,
ngtr: ngtr,
nGtv: nGtv,
nharr: nharr,
nhArr: nhArr,
nhpar: nhpar,
ni: ni,
nis: nis,
nisd: nisd,
niv: niv,
NJcy: NJcy,
njcy: njcy,
nlarr: nlarr,
nlArr: nlArr,
nldr: nldr,
nlE: nlE,
nle: nle,
nleftarrow: nleftarrow,
nLeftarrow: nLeftarrow,
nleftrightarrow: nleftrightarrow,
nLeftrightarrow: nLeftrightarrow,
nleq: nleq,
nleqq: nleqq,
nleqslant: nleqslant,
nles: nles,
nless: nless,
nLl: nLl,
nlsim: nlsim,
nLt: nLt,
nlt: nlt,
nltri: nltri,
nltrie: nltrie,
nLtv: nLtv,
nmid: nmid,
NoBreak: NoBreak,
NonBreakingSpace: NonBreakingSpace,
nopf: nopf,
Nopf: Nopf,
Not: Not,
not: not,
NotCongruent: NotCongruent,
NotCupCap: NotCupCap,
NotDoubleVerticalBar: NotDoubleVerticalBar,
NotElement: NotElement,
NotEqual: NotEqual,
NotEqualTilde: NotEqualTilde,
NotExists: NotExists,
NotGreater: NotGreater,
NotGreaterEqual: NotGreaterEqual,
NotGreaterFullEqual: NotGreaterFullEqual,
NotGreaterGreater: NotGreaterGreater,
NotGreaterLess: NotGreaterLess,
NotGreaterSlantEqual: NotGreaterSlantEqual,
NotGreaterTilde: NotGreaterTilde,
NotHumpDownHump: NotHumpDownHump,
NotHumpEqual: NotHumpEqual,
notin: notin,
notindot: notindot,
notinE: notinE,
notinva: notinva,
notinvb: notinvb,
notinvc: notinvc,
NotLeftTriangleBar: NotLeftTriangleBar,
NotLeftTriangle: NotLeftTriangle,
NotLeftTriangleEqual: NotLeftTriangleEqual,
NotLess: NotLess,
NotLessEqual: NotLessEqual,
NotLessGreater: NotLessGreater,
NotLessLess: NotLessLess,
NotLessSlantEqual: NotLessSlantEqual,
NotLessTilde: NotLessTilde,
NotNestedGreaterGreater: NotNestedGreaterGreater,
NotNestedLessLess: NotNestedLessLess,
notni: notni,
notniva: notniva,
notnivb: notnivb,
notnivc: notnivc,
NotPrecedes: NotPrecedes,
NotPrecedesEqual: NotPrecedesEqual,
NotPrecedesSlantEqual: NotPrecedesSlantEqual,
NotReverseElement: NotReverseElement,
NotRightTriangleBar: NotRightTriangleBar,
NotRightTriangle: NotRightTriangle,
NotRightTriangleEqual: NotRightTriangleEqual,
NotSquareSubset: NotSquareSubset,
NotSquareSubsetEqual: NotSquareSubsetEqual,
NotSquareSuperset: NotSquareSuperset,
NotSquareSupersetEqual: NotSquareSupersetEqual,
NotSubset: NotSubset,
NotSubsetEqual: NotSubsetEqual,
NotSucceeds: NotSucceeds,
NotSucceedsEqual: NotSucceedsEqual,
NotSucceedsSlantEqual: NotSucceedsSlantEqual,
NotSucceedsTilde: NotSucceedsTilde,
NotSuperset: NotSuperset,
NotSupersetEqual: NotSupersetEqual,
NotTilde: NotTilde,
NotTildeEqual: NotTildeEqual,
NotTildeFullEqual: NotTildeFullEqual,
NotTildeTilde: NotTildeTilde,
NotVerticalBar: NotVerticalBar,
nparallel: nparallel,
npar: npar,
nparsl: nparsl,
npart: npart,
npolint: npolint,
npr: npr,
nprcue: nprcue,
nprec: nprec,
npreceq: npreceq,
npre: npre,
nrarrc: nrarrc,
nrarr: nrarr,
nrArr: nrArr,
nrarrw: nrarrw,
nrightarrow: nrightarrow,
nRightarrow: nRightarrow,
nrtri: nrtri,
nrtrie: nrtrie,
nsc: nsc,
nsccue: nsccue,
nsce: nsce,
Nscr: Nscr,
nscr: nscr,
nshortmid: nshortmid,
nshortparallel: nshortparallel,
nsim: nsim,
nsime: nsime,
nsimeq: nsimeq,
nsmid: nsmid,
nspar: nspar,
nsqsube: nsqsube,
nsqsupe: nsqsupe,
nsub: nsub,
nsubE: nsubE,
nsube: nsube,
nsubset: nsubset,
nsubseteq: nsubseteq,
nsubseteqq: nsubseteqq,
nsucc: nsucc,
nsucceq: nsucceq,
nsup: nsup,
nsupE: nsupE,
nsupe: nsupe,
nsupset: nsupset,
nsupseteq: nsupseteq,
nsupseteqq: nsupseteqq,
ntgl: ntgl,
Ntilde: Ntilde,
ntilde: ntilde,
ntlg: ntlg,
ntriangleleft: ntriangleleft,
ntrianglelefteq: ntrianglelefteq,
ntriangleright: ntriangleright,
ntrianglerighteq: ntrianglerighteq,
Nu: Nu,
nu: nu,
num: num,
numero: numero,
numsp: numsp,
nvap: nvap,
nvdash: nvdash,
nvDash: nvDash,
nVdash: nVdash,
nVDash: nVDash,
nvge: nvge,
nvgt: nvgt,
nvHarr: nvHarr,
nvinfin: nvinfin,
nvlArr: nvlArr,
nvle: nvle,
nvlt: nvlt,
nvltrie: nvltrie,
nvrArr: nvrArr,
nvrtrie: nvrtrie,
nvsim: nvsim,
nwarhk: nwarhk,
nwarr: nwarr,
nwArr: nwArr,
nwarrow: nwarrow,
nwnear: nwnear,
Oacute: Oacute,
oacute: oacute,
oast: oast,
Ocirc: Ocirc,
ocirc: ocirc,
ocir: ocir,
Ocy: Ocy,
ocy: ocy,
odash: odash,
Odblac: Odblac,
odblac: odblac,
odiv: odiv,
odot: odot,
odsold: odsold,
OElig: OElig,
oelig: oelig,
ofcir: ofcir,
Ofr: Ofr,
ofr: ofr,
ogon: ogon,
Ograve: Ograve,
ograve: ograve,
ogt: ogt,
ohbar: ohbar,
ohm: ohm,
oint: oint,
olarr: olarr,
olcir: olcir,
olcross: olcross,
oline: oline,
olt: olt,
Omacr: Omacr,
omacr: omacr,
Omega: Omega,
omega: omega,
Omicron: Omicron,
omicron: omicron,
omid: omid,
ominus: ominus,
Oopf: Oopf,
oopf: oopf,
opar: opar,
OpenCurlyDoubleQuote: OpenCurlyDoubleQuote,
OpenCurlyQuote: OpenCurlyQuote,
operp: operp,
oplus: oplus,
orarr: orarr,
Or: Or,
or: or,
ord: ord,
order: order,
orderof: orderof,
ordf: ordf,
ordm: ordm,
origof: origof,
oror: oror,
orslope: orslope,
orv: orv,
oS: oS,
Oscr: Oscr,
oscr: oscr,
Oslash: Oslash,
oslash: oslash,
osol: osol,
Otilde: Otilde,
otilde: otilde,
otimesas: otimesas,
Otimes: Otimes,
otimes: otimes,
Ouml: Ouml,
ouml: ouml,
ovbar: ovbar,
OverBar: OverBar,
OverBrace: OverBrace,
OverBracket: OverBracket,
OverParenthesis: OverParenthesis,
para: para,
parallel: parallel,
par: par,
parsim: parsim,
parsl: parsl,
part: part,
PartialD: PartialD,
Pcy: Pcy,
pcy: pcy,
percnt: percnt,
period: period,
permil: permil,
perp: perp,
pertenk: pertenk,
Pfr: Pfr,
pfr: pfr,
Phi: Phi,
phi: phi,
phiv: phiv,
phmmat: phmmat,
phone: phone,
Pi: Pi,
pi: pi,
pitchfork: pitchfork,
piv: piv,
planck: planck,
planckh: planckh,
plankv: plankv,
plusacir: plusacir,
plusb: plusb,
pluscir: pluscir,
plus: plus,
plusdo: plusdo,
plusdu: plusdu,
pluse: pluse,
PlusMinus: PlusMinus,
plusmn: plusmn,
plussim: plussim,
plustwo: plustwo,
pm: pm,
Poincareplane: Poincareplane,
pointint: pointint,
popf: popf,
Popf: Popf,
pound: pound,
prap: prap,
Pr: Pr,
pr: pr,
prcue: prcue,
precapprox: precapprox,
prec: prec,
preccurlyeq: preccurlyeq,
Precedes: Precedes,
PrecedesEqual: PrecedesEqual,
PrecedesSlantEqual: PrecedesSlantEqual,
PrecedesTilde: PrecedesTilde,
preceq: preceq,
precnapprox: precnapprox,
precneqq: precneqq,
precnsim: precnsim,
pre: pre,
prE: prE,
precsim: precsim,
prime: prime,
Prime: Prime,
primes: primes,
prnap: prnap,
prnE: prnE,
prnsim: prnsim,
prod: prod,
Product: Product,
profalar: profalar,
profline: profline,
profsurf: profsurf,
prop: prop,
Proportional: Proportional,
Proportion: Proportion,
propto: propto,
prsim: prsim,
prurel: prurel,
Pscr: Pscr,
pscr: pscr,
Psi: Psi,
psi: psi,
puncsp: puncsp,
Qfr: Qfr,
qfr: qfr,
qint: qint,
qopf: qopf,
Qopf: Qopf,
qprime: qprime,
Qscr: Qscr,
qscr: qscr,
quaternions: quaternions,
quatint: quatint,
quest: quest,
questeq: questeq,
quot: quot,
QUOT: QUOT,
rAarr: rAarr,
race: race,
Racute: Racute,
racute: racute,
radic: radic,
raemptyv: raemptyv,
rang: rang,
Rang: Rang,
rangd: rangd,
range: range,
rangle: rangle,
raquo: raquo,
rarrap: rarrap,
rarrb: rarrb,
rarrbfs: rarrbfs,
rarrc: rarrc,
rarr: rarr,
Rarr: Rarr,
rArr: rArr,
rarrfs: rarrfs,
rarrhk: rarrhk,
rarrlp: rarrlp,
rarrpl: rarrpl,
rarrsim: rarrsim,
Rarrtl: Rarrtl,
rarrtl: rarrtl,
rarrw: rarrw,
ratail: ratail,
rAtail: rAtail,
ratio: ratio,
rationals: rationals,
rbarr: rbarr,
rBarr: rBarr,
RBarr: RBarr,
rbbrk: rbbrk,
rbrace: rbrace,
rbrack: rbrack,
rbrke: rbrke,
rbrksld: rbrksld,
rbrkslu: rbrkslu,
Rcaron: Rcaron,
rcaron: rcaron,
Rcedil: Rcedil,
rcedil: rcedil,
rceil: rceil,
rcub: rcub,
Rcy: Rcy,
rcy: rcy,
rdca: rdca,
rdldhar: rdldhar,
rdquo: rdquo,
rdquor: rdquor,
rdsh: rdsh,
real: real,
realine: realine,
realpart: realpart,
reals: reals,
Re: Re,
rect: rect,
reg: reg,
REG: REG,
ReverseElement: ReverseElement,
ReverseEquilibrium: ReverseEquilibrium,
ReverseUpEquilibrium: ReverseUpEquilibrium,
rfisht: rfisht,
rfloor: rfloor,
rfr: rfr,
Rfr: Rfr,
rHar: rHar,
rhard: rhard,
rharu: rharu,
rharul: rharul,
Rho: Rho,
rho: rho,
rhov: rhov,
RightAngleBracket: RightAngleBracket,
RightArrowBar: RightArrowBar,
rightarrow: rightarrow,
RightArrow: RightArrow,
Rightarrow: Rightarrow,
RightArrowLeftArrow: RightArrowLeftArrow,
rightarrowtail: rightarrowtail,
RightCeiling: RightCeiling,
RightDoubleBracket: RightDoubleBracket,
RightDownTeeVector: RightDownTeeVector,
RightDownVectorBar: RightDownVectorBar,
RightDownVector: RightDownVector,
RightFloor: RightFloor,
rightharpoondown: rightharpoondown,
rightharpoonup: rightharpoonup,
rightleftarrows: rightleftarrows,
rightleftharpoons: rightleftharpoons,
rightrightarrows: rightrightarrows,
rightsquigarrow: rightsquigarrow,
RightTeeArrow: RightTeeArrow,
RightTee: RightTee,
RightTeeVector: RightTeeVector,
rightthreetimes: rightthreetimes,
RightTriangleBar: RightTriangleBar,
RightTriangle: RightTriangle,
RightTriangleEqual: RightTriangleEqual,
RightUpDownVector: RightUpDownVector,
RightUpTeeVector: RightUpTeeVector,
RightUpVectorBar: RightUpVectorBar,
RightUpVector: RightUpVector,
RightVectorBar: RightVectorBar,
RightVector: RightVector,
ring: ring,
risingdotseq: risingdotseq,
rlarr: rlarr,
rlhar: rlhar,
rlm: rlm,
rmoustache: rmoustache,
rmoust: rmoust,
rnmid: rnmid,
roang: roang,
roarr: roarr,
robrk: robrk,
ropar: ropar,
ropf: ropf,
Ropf: Ropf,
roplus: roplus,
rotimes: rotimes,
RoundImplies: RoundImplies,
rpar: rpar,
rpargt: rpargt,
rppolint: rppolint,
rrarr: rrarr,
Rrightarrow: Rrightarrow,
rsaquo: rsaquo,
rscr: rscr,
Rscr: Rscr,
rsh: rsh,
Rsh: Rsh,
rsqb: rsqb,
rsquo: rsquo,
rsquor: rsquor,
rthree: rthree,
rtimes: rtimes,
rtri: rtri,
rtrie: rtrie,
rtrif: rtrif,
rtriltri: rtriltri,
RuleDelayed: RuleDelayed,
ruluhar: ruluhar,
rx: rx,
Sacute: Sacute,
sacute: sacute,
sbquo: sbquo,
scap: scap,
Scaron: Scaron,
scaron: scaron,
Sc: Sc,
sc: sc,
sccue: sccue,
sce: sce,
scE: scE,
Scedil: Scedil,
scedil: scedil,
Scirc: Scirc,
scirc: scirc,
scnap: scnap,
scnE: scnE,
scnsim: scnsim,
scpolint: scpolint,
scsim: scsim,
Scy: Scy,
scy: scy,
sdotb: sdotb,
sdot: sdot,
sdote: sdote,
searhk: searhk,
searr: searr,
seArr: seArr,
searrow: searrow,
sect: sect,
semi: semi,
seswar: seswar,
setminus: setminus,
setmn: setmn,
sext: sext,
Sfr: Sfr,
sfr: sfr,
sfrown: sfrown,
sharp: sharp,
SHCHcy: SHCHcy,
shchcy: shchcy,
SHcy: SHcy,
shcy: shcy,
ShortDownArrow: ShortDownArrow,
ShortLeftArrow: ShortLeftArrow,
shortmid: shortmid,
shortparallel: shortparallel,
ShortRightArrow: ShortRightArrow,
ShortUpArrow: ShortUpArrow,
shy: shy,
Sigma: Sigma,
sigma: sigma,
sigmaf: sigmaf,
sigmav: sigmav,
sim: sim,
simdot: simdot,
sime: sime,
simeq: simeq,
simg: simg,
simgE: simgE,
siml: siml,
simlE: simlE,
simne: simne,
simplus: simplus,
simrarr: simrarr,
slarr: slarr,
SmallCircle: SmallCircle,
smallsetminus: smallsetminus,
smashp: smashp,
smeparsl: smeparsl,
smid: smid,
smile: smile,
smt: smt,
smte: smte,
smtes: smtes,
SOFTcy: SOFTcy,
softcy: softcy,
solbar: solbar,
solb: solb,
sol: sol,
Sopf: Sopf,
sopf: sopf,
spades: spades,
spadesuit: spadesuit,
spar: spar,
sqcap: sqcap,
sqcaps: sqcaps,
sqcup: sqcup,
sqcups: sqcups,
Sqrt: Sqrt,
sqsub: sqsub,
sqsube: sqsube,
sqsubset: sqsubset,
sqsubseteq: sqsubseteq,
sqsup: sqsup,
sqsupe: sqsupe,
sqsupset: sqsupset,
sqsupseteq: sqsupseteq,
square: square,
Square: Square,
SquareIntersection: SquareIntersection,
SquareSubset: SquareSubset,
SquareSubsetEqual: SquareSubsetEqual,
SquareSuperset: SquareSuperset,
SquareSupersetEqual: SquareSupersetEqual,
SquareUnion: SquareUnion,
squarf: squarf,
squ: squ,
squf: squf,
srarr: srarr,
Sscr: Sscr,
sscr: sscr,
ssetmn: ssetmn,
ssmile: ssmile,
sstarf: sstarf,
Star: Star,
star: star,
starf: starf,
straightepsilon: straightepsilon,
straightphi: straightphi,
strns: strns,
sub: sub,
Sub: Sub,
subdot: subdot,
subE: subE,
sube: sube,
subedot: subedot,
submult: submult,
subnE: subnE,
subne: subne,
subplus: subplus,
subrarr: subrarr,
subset: subset,
Subset: Subset,
subseteq: subseteq,
subseteqq: subseteqq,
SubsetEqual: SubsetEqual,
subsetneq: subsetneq,
subsetneqq: subsetneqq,
subsim: subsim,
subsub: subsub,
subsup: subsup,
succapprox: succapprox,
succ: succ,
succcurlyeq: succcurlyeq,
Succeeds: Succeeds,
SucceedsEqual: SucceedsEqual,
SucceedsSlantEqual: SucceedsSlantEqual,
SucceedsTilde: SucceedsTilde,
succeq: succeq,
succnapprox: succnapprox,
succneqq: succneqq,
succnsim: succnsim,
succsim: succsim,
SuchThat: SuchThat,
sum: sum,
Sum: Sum,
sung: sung,
sup1: sup1,
sup2: sup2,
sup3: sup3,
sup: sup,
Sup: Sup,
supdot: supdot,
supdsub: supdsub,
supE: supE,
supe: supe,
supedot: supedot,
Superset: Superset,
SupersetEqual: SupersetEqual,
suphsol: suphsol,
suphsub: suphsub,
suplarr: suplarr,
supmult: supmult,
supnE: supnE,
supne: supne,
supplus: supplus,
supset: supset,
Supset: Supset,
supseteq: supseteq,
supseteqq: supseteqq,
supsetneq: supsetneq,
supsetneqq: supsetneqq,
supsim: supsim,
supsub: supsub,
supsup: supsup,
swarhk: swarhk,
swarr: swarr,
swArr: swArr,
swarrow: swarrow,
swnwar: swnwar,
szlig: szlig,
Tab: Tab,
target: target,
Tau: Tau,
tau: tau,
tbrk: tbrk,
Tcaron: Tcaron,
tcaron: tcaron,
Tcedil: Tcedil,
tcedil: tcedil,
Tcy: Tcy,
tcy: tcy,
tdot: tdot,
telrec: telrec,
Tfr: Tfr,
tfr: tfr,
there4: there4,
therefore: therefore,
Therefore: Therefore,
Theta: Theta,
theta: theta,
thetasym: thetasym,
thetav: thetav,
thickapprox: thickapprox,
thicksim: thicksim,
ThickSpace: ThickSpace,
ThinSpace: ThinSpace,
thinsp: thinsp,
thkap: thkap,
thksim: thksim,
THORN: THORN,
thorn: thorn,
tilde: tilde,
Tilde: Tilde,
TildeEqual: TildeEqual,
TildeFullEqual: TildeFullEqual,
TildeTilde: TildeTilde,
timesbar: timesbar,
timesb: timesb,
times: times,
timesd: timesd,
tint: tint,
toea: toea,
topbot: topbot,
topcir: topcir,
top: top,
Topf: Topf,
topf: topf,
topfork: topfork,
tosa: tosa,
tprime: tprime,
trade: trade,
TRADE: TRADE,
triangle: triangle,
triangledown: triangledown,
triangleleft: triangleleft,
trianglelefteq: trianglelefteq,
triangleq: triangleq,
triangleright: triangleright,
trianglerighteq: trianglerighteq,
tridot: tridot,
trie: trie,
triminus: triminus,
TripleDot: TripleDot,
triplus: triplus,
trisb: trisb,
tritime: tritime,
trpezium: trpezium,
Tscr: Tscr,
tscr: tscr,
TScy: TScy,
tscy: tscy,
TSHcy: TSHcy,
tshcy: tshcy,
Tstrok: Tstrok,
tstrok: tstrok,
twixt: twixt,
twoheadleftarrow: twoheadleftarrow,
twoheadrightarrow: twoheadrightarrow,
Uacute: Uacute,
uacute: uacute,
uarr: uarr,
Uarr: Uarr,
uArr: uArr,
Uarrocir: Uarrocir,
Ubrcy: Ubrcy,
ubrcy: ubrcy,
Ubreve: Ubreve,
ubreve: ubreve,
Ucirc: Ucirc,
ucirc: ucirc,
Ucy: Ucy,
ucy: ucy,
udarr: udarr,
Udblac: Udblac,
udblac: udblac,
udhar: udhar,
ufisht: ufisht,
Ufr: Ufr,
ufr: ufr,
Ugrave: Ugrave,
ugrave: ugrave,
uHar: uHar,
uharl: uharl,
uharr: uharr,
uhblk: uhblk,
ulcorn: ulcorn,
ulcorner: ulcorner,
ulcrop: ulcrop,
ultri: ultri,
Umacr: Umacr,
umacr: umacr,
uml: uml,
UnderBar: UnderBar,
UnderBrace: UnderBrace,
UnderBracket: UnderBracket,
UnderParenthesis: UnderParenthesis,
Union: Union,
UnionPlus: UnionPlus,
Uogon: Uogon,
uogon: uogon,
Uopf: Uopf,
uopf: uopf,
UpArrowBar: UpArrowBar,
uparrow: uparrow,
UpArrow: UpArrow,
Uparrow: Uparrow,
UpArrowDownArrow: UpArrowDownArrow,
updownarrow: updownarrow,
UpDownArrow: UpDownArrow,
Updownarrow: Updownarrow,
UpEquilibrium: UpEquilibrium,
upharpoonleft: upharpoonleft,
upharpoonright: upharpoonright,
uplus: uplus,
UpperLeftArrow: UpperLeftArrow,
UpperRightArrow: UpperRightArrow,
upsi: upsi,
Upsi: Upsi,
upsih: upsih,
Upsilon: Upsilon,
upsilon: upsilon,
UpTeeArrow: UpTeeArrow,
UpTee: UpTee,
upuparrows: upuparrows,
urcorn: urcorn,
urcorner: urcorner,
urcrop: urcrop,
Uring: Uring,
uring: uring,
urtri: urtri,
Uscr: Uscr,
uscr: uscr,
utdot: utdot,
Utilde: Utilde,
utilde: utilde,
utri: utri,
utrif: utrif,
uuarr: uuarr,
Uuml: Uuml,
uuml: uuml,
uwangle: uwangle,
vangrt: vangrt,
varepsilon: varepsilon,
varkappa: varkappa,
varnothing: varnothing,
varphi: varphi,
varpi: varpi,
varpropto: varpropto,
varr: varr,
vArr: vArr,
varrho: varrho,
varsigma: varsigma,
varsubsetneq: varsubsetneq,
varsubsetneqq: varsubsetneqq,
varsupsetneq: varsupsetneq,
varsupsetneqq: varsupsetneqq,
vartheta: vartheta,
vartriangleleft: vartriangleleft,
vartriangleright: vartriangleright,
vBar: vBar,
Vbar: Vbar,
vBarv: vBarv,
Vcy: Vcy,
vcy: vcy,
vdash: vdash,
vDash: vDash,
Vdash: Vdash,
VDash: VDash,
Vdashl: Vdashl,
veebar: veebar,
vee: vee,
Vee: Vee,
veeeq: veeeq,
vellip: vellip,
verbar: verbar,
Verbar: Verbar,
vert: vert,
Vert: Vert,
VerticalBar: VerticalBar,
VerticalLine: VerticalLine,
VerticalSeparator: VerticalSeparator,
VerticalTilde: VerticalTilde,
VeryThinSpace: VeryThinSpace,
Vfr: Vfr,
vfr: vfr,
vltri: vltri,
vnsub: vnsub,
vnsup: vnsup,
Vopf: Vopf,
vopf: vopf,
vprop: vprop,
vrtri: vrtri,
Vscr: Vscr,
vscr: vscr,
vsubnE: vsubnE,
vsubne: vsubne,
vsupnE: vsupnE,
vsupne: vsupne,
Vvdash: Vvdash,
vzigzag: vzigzag,
Wcirc: Wcirc,
wcirc: wcirc,
wedbar: wedbar,
wedge: wedge,
Wedge: Wedge,
wedgeq: wedgeq,
weierp: weierp,
Wfr: Wfr,
wfr: wfr,
Wopf: Wopf,
wopf: wopf,
wp: wp,
wr: wr,
wreath: wreath,
Wscr: Wscr,
wscr: wscr,
xcap: xcap,
xcirc: xcirc,
xcup: xcup,
xdtri: xdtri,
Xfr: Xfr,
xfr: xfr,
xharr: xharr,
xhArr: xhArr,
Xi: Xi,
xi: xi,
xlarr: xlarr,
xlArr: xlArr,
xmap: xmap,
xnis: xnis,
xodot: xodot,
Xopf: Xopf,
xopf: xopf,
xoplus: xoplus,
xotime: xotime,
xrarr: xrarr,
xrArr: xrArr,
Xscr: Xscr,
xscr: xscr,
xsqcup: xsqcup,
xuplus: xuplus,
xutri: xutri,
xvee: xvee,
xwedge: xwedge,
Yacute: Yacute,
yacute: yacute,
YAcy: YAcy,
yacy: yacy,
Ycirc: Ycirc,
ycirc: ycirc,
Ycy: Ycy,
ycy: ycy,
yen: yen,
Yfr: Yfr,
yfr: yfr,
YIcy: YIcy,
yicy: yicy,
Yopf: Yopf,
yopf: yopf,
Yscr: Yscr,
yscr: yscr,
YUcy: YUcy,
yucy: yucy,
yuml: yuml,
Yuml: Yuml,
Zacute: Zacute,
zacute: zacute,
Zcaron: Zcaron,
zcaron: zcaron,
Zcy: Zcy,
zcy: zcy,
Zdot: Zdot,
zdot: zdot,
zeetrf: zeetrf,
ZeroWidthSpace: ZeroWidthSpace,
Zeta: Zeta,
zeta: zeta,
zfr: zfr,
Zfr: Zfr,
ZHcy: ZHcy,
zhcy: zhcy,
zigrarr: zigrarr,
zopf: zopf,
Zopf: Zopf,
Zscr: Zscr,
zscr: zscr,
zwj: zwj,
zwnj: zwnj,
'default': entities
});
 
var Aacute$1 = "Á";
var aacute$1 = "á";
var Acirc$1 = "Â";
var acirc$1 = "â";
var acute$1 = "´";
var AElig$1 = "Æ";
var aelig$1 = "æ";
var Agrave$1 = "À";
var agrave$1 = "à";
var amp$1 = "&";
var AMP$1 = "&";
var Aring$1 = "Å";
var aring$1 = "å";
var Atilde$1 = "Ã";
var atilde$1 = "ã";
var Auml$1 = "Ä";
var auml$1 = "ä";
var brvbar$1 = "¦";
var Ccedil$1 = "Ç";
var ccedil$1 = "ç";
var cedil$1 = "¸";
var cent$1 = "¢";
var copy$1 = "©";
var COPY$1 = "©";
var curren$1 = "¤";
var deg$1 = "°";
var divide$1 = "÷";
var Eacute$1 = "É";
var eacute$1 = "é";
var Ecirc$1 = "Ê";
var ecirc$1 = "ê";
var Egrave$1 = "È";
var egrave$1 = "è";
var ETH$1 = "Ð";
var eth$1 = "ð";
var Euml$1 = "Ë";
var euml$1 = "ë";
var frac12$1 = "½";
var frac14$1 = "¼";
var frac34$1 = "¾";
var gt$1 = ">";
var GT$1 = ">";
var Iacute$1 = "Í";
var iacute$1 = "í";
var Icirc$1 = "Î";
var icirc$1 = "î";
var iexcl$1 = "¡";
var Igrave$1 = "Ì";
var igrave$1 = "ì";
var iquest$1 = "¿";
var Iuml$1 = "Ï";
var iuml$1 = "ï";
var laquo$1 = "«";
var lt$1 = "<";
var LT$1 = "<";
var macr$1 = "¯";
var micro$1 = "µ";
var middot$1 = "·";
var nbsp$1 = " ";
var not$1 = "¬";
var Ntilde$1 = "Ñ";
var ntilde$1 = "ñ";
var Oacute$1 = "Ó";
var oacute$1 = "ó";
var Ocirc$1 = "Ô";
var ocirc$1 = "ô";
var Ograve$1 = "Ò";
var ograve$1 = "ò";
var ordf$1 = "ª";
var ordm$1 = "º";
var Oslash$1 = "Ø";
var oslash$1 = "ø";
var Otilde$1 = "Õ";
var otilde$1 = "õ";
var Ouml$1 = "Ö";
var ouml$1 = "ö";
var para$1 = "¶";
var plusmn$1 = "±";
var pound$1 = "£";
var quot$1 = "\"";
var QUOT$1 = "\"";
var raquo$1 = "»";
var reg$1 = "®";
var REG$1 = "®";
var sect$1 = "§";
var shy$1 = "­";
var sup1$1 = "¹";
var sup2$1 = "²";
var sup3$1 = "³";
var szlig$1 = "ß";
var THORN$1 = "Þ";
var thorn$1 = "þ";
var times$1 = "×";
var Uacute$1 = "Ú";
var uacute$1 = "ú";
var Ucirc$1 = "Û";
var ucirc$1 = "û";
var Ugrave$1 = "Ù";
var ugrave$1 = "ù";
var uml$1 = "¨";
var Uuml$1 = "Ü";
var uuml$1 = "ü";
var Yacute$1 = "Ý";
var yacute$1 = "ý";
var yen$1 = "¥";
var yuml$1 = "ÿ";
var legacy = {
Aacute: Aacute$1,
aacute: aacute$1,
Acirc: Acirc$1,
acirc: acirc$1,
acute: acute$1,
AElig: AElig$1,
aelig: aelig$1,
Agrave: Agrave$1,
agrave: agrave$1,
amp: amp$1,
AMP: AMP$1,
Aring: Aring$1,
aring: aring$1,
Atilde: Atilde$1,
atilde: atilde$1,
Auml: Auml$1,
auml: auml$1,
brvbar: brvbar$1,
Ccedil: Ccedil$1,
ccedil: ccedil$1,
cedil: cedil$1,
cent: cent$1,
copy: copy$1,
COPY: COPY$1,
curren: curren$1,
deg: deg$1,
divide: divide$1,
Eacute: Eacute$1,
eacute: eacute$1,
Ecirc: Ecirc$1,
ecirc: ecirc$1,
Egrave: Egrave$1,
egrave: egrave$1,
ETH: ETH$1,
eth: eth$1,
Euml: Euml$1,
euml: euml$1,
frac12: frac12$1,
frac14: frac14$1,
frac34: frac34$1,
gt: gt$1,
GT: GT$1,
Iacute: Iacute$1,
iacute: iacute$1,
Icirc: Icirc$1,
icirc: icirc$1,
iexcl: iexcl$1,
Igrave: Igrave$1,
igrave: igrave$1,
iquest: iquest$1,
Iuml: Iuml$1,
iuml: iuml$1,
laquo: laquo$1,
lt: lt$1,
LT: LT$1,
macr: macr$1,
micro: micro$1,
middot: middot$1,
nbsp: nbsp$1,
not: not$1,
Ntilde: Ntilde$1,
ntilde: ntilde$1,
Oacute: Oacute$1,
oacute: oacute$1,
Ocirc: Ocirc$1,
ocirc: ocirc$1,
Ograve: Ograve$1,
ograve: ograve$1,
ordf: ordf$1,
ordm: ordm$1,
Oslash: Oslash$1,
oslash: oslash$1,
Otilde: Otilde$1,
otilde: otilde$1,
Ouml: Ouml$1,
ouml: ouml$1,
para: para$1,
plusmn: plusmn$1,
pound: pound$1,
quot: quot$1,
QUOT: QUOT$1,
raquo: raquo$1,
reg: reg$1,
REG: REG$1,
sect: sect$1,
shy: shy$1,
sup1: sup1$1,
sup2: sup2$1,
sup3: sup3$1,
szlig: szlig$1,
THORN: THORN$1,
thorn: thorn$1,
times: times$1,
Uacute: Uacute$1,
uacute: uacute$1,
Ucirc: Ucirc$1,
ucirc: ucirc$1,
Ugrave: Ugrave$1,
ugrave: ugrave$1,
uml: uml$1,
Uuml: Uuml$1,
uuml: uuml$1,
Yacute: Yacute$1,
yacute: yacute$1,
yen: yen$1,
yuml: yuml$1
};
 
var legacy$1 = /*#__PURE__*/Object.freeze({
__proto__: null,
Aacute: Aacute$1,
aacute: aacute$1,
Acirc: Acirc$1,
acirc: acirc$1,
acute: acute$1,
AElig: AElig$1,
aelig: aelig$1,
Agrave: Agrave$1,
agrave: agrave$1,
amp: amp$1,
AMP: AMP$1,
Aring: Aring$1,
aring: aring$1,
Atilde: Atilde$1,
atilde: atilde$1,
Auml: Auml$1,
auml: auml$1,
brvbar: brvbar$1,
Ccedil: Ccedil$1,
ccedil: ccedil$1,
cedil: cedil$1,
cent: cent$1,
copy: copy$1,
COPY: COPY$1,
curren: curren$1,
deg: deg$1,
divide: divide$1,
Eacute: Eacute$1,
eacute: eacute$1,
Ecirc: Ecirc$1,
ecirc: ecirc$1,
Egrave: Egrave$1,
egrave: egrave$1,
ETH: ETH$1,
eth: eth$1,
Euml: Euml$1,
euml: euml$1,
frac12: frac12$1,
frac14: frac14$1,
frac34: frac34$1,
gt: gt$1,
GT: GT$1,
Iacute: Iacute$1,
iacute: iacute$1,
Icirc: Icirc$1,
icirc: icirc$1,
iexcl: iexcl$1,
Igrave: Igrave$1,
igrave: igrave$1,
iquest: iquest$1,
Iuml: Iuml$1,
iuml: iuml$1,
laquo: laquo$1,
lt: lt$1,
LT: LT$1,
macr: macr$1,
micro: micro$1,
middot: middot$1,
nbsp: nbsp$1,
not: not$1,
Ntilde: Ntilde$1,
ntilde: ntilde$1,
Oacute: Oacute$1,
oacute: oacute$1,
Ocirc: Ocirc$1,
ocirc: ocirc$1,
Ograve: Ograve$1,
ograve: ograve$1,
ordf: ordf$1,
ordm: ordm$1,
Oslash: Oslash$1,
oslash: oslash$1,
Otilde: Otilde$1,
otilde: otilde$1,
Ouml: Ouml$1,
ouml: ouml$1,
para: para$1,
plusmn: plusmn$1,
pound: pound$1,
quot: quot$1,
QUOT: QUOT$1,
raquo: raquo$1,
reg: reg$1,
REG: REG$1,
sect: sect$1,
shy: shy$1,
sup1: sup1$1,
sup2: sup2$1,
sup3: sup3$1,
szlig: szlig$1,
THORN: THORN$1,
thorn: thorn$1,
times: times$1,
Uacute: Uacute$1,
uacute: uacute$1,
Ucirc: Ucirc$1,
ucirc: ucirc$1,
Ugrave: Ugrave$1,
ugrave: ugrave$1,
uml: uml$1,
Uuml: Uuml$1,
uuml: uuml$1,
Yacute: Yacute$1,
yacute: yacute$1,
yen: yen$1,
yuml: yuml$1,
'default': legacy
});
 
var amp$2 = "&";
var apos$1 = "'";
var gt$2 = ">";
var lt$2 = "<";
var quot$2 = "\"";
var xml = {
amp: amp$2,
apos: apos$1,
gt: gt$2,
lt: lt$2,
quot: quot$2
};
 
var xml$1 = /*#__PURE__*/Object.freeze({
__proto__: null,
amp: amp$2,
apos: apos$1,
gt: gt$2,
lt: lt$2,
quot: quot$2,
'default': xml
});
 
var decode = {
"0": 65533,
"128": 8364,
"130": 8218,
"131": 402,
"132": 8222,
"133": 8230,
"134": 8224,
"135": 8225,
"136": 710,
"137": 8240,
"138": 352,
"139": 8249,
"140": 338,
"142": 381,
"145": 8216,
"146": 8217,
"147": 8220,
"148": 8221,
"149": 8226,
"150": 8211,
"151": 8212,
"152": 732,
"153": 8482,
"154": 353,
"155": 8250,
"156": 339,
"158": 382,
"159": 376
};
 
var decode$1 = /*#__PURE__*/Object.freeze({
__proto__: null,
'default': decode
});
 
var require$$0 = getCjsExportFromNamespace(decode$1);
 
var decode_codepoint = createCommonjsModule(function (module, exports) {
var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var decode_json_1 = __importDefault(require$$0);
// modified version of https://github.com/mathiasbynens/he/blob/master/src/he.js#L94-L119
function decodeCodePoint(codePoint) {
if ((codePoint >= 0xd800 && codePoint <= 0xdfff) || codePoint > 0x10ffff) {
return "\uFFFD";
}
if (codePoint in decode_json_1.default) {
codePoint = decode_json_1.default[codePoint];
}
var output = "";
if (codePoint > 0xffff) {
codePoint -= 0x10000;
output += String.fromCharCode(((codePoint >>> 10) & 0x3ff) | 0xd800);
codePoint = 0xdc00 | (codePoint & 0x3ff);
}
output += String.fromCharCode(codePoint);
return output;
}
exports.default = decodeCodePoint;
});
 
unwrapExports(decode_codepoint);
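
// Editorial illustration (not part of the vendored entities code): decodeCodePoint first
// remaps the Windows-1252 code points listed in the decode table above, then splits astral
// code points into UTF-16 surrogate pairs. Assuming the createCommonjsModule helper exposes
// module.exports as usual:
//
//   decode_codepoint.default(151);      // "\u2014" -- 151 is remapped to 8212 (em dash)
//   decode_codepoint.default(0x1F600);  // "\uD83D\uDE00" -- astral code point, surrogate pair
//   decode_codepoint.default(0xD800);   // "\uFFFD" -- lone surrogates become the replacement char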
 
var require$$1 = getCjsExportFromNamespace(entities$1);
 
var require$$1$1 = getCjsExportFromNamespace(legacy$1);
 
var require$$0$1 = getCjsExportFromNamespace(xml$1);
 
var decode$2 = createCommonjsModule(function (module, exports) {
var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.decodeHTML = exports.decodeHTMLStrict = exports.decodeXML = void 0;
var entities_json_1 = __importDefault(require$$1);
var legacy_json_1 = __importDefault(require$$1$1);
var xml_json_1 = __importDefault(require$$0$1);
var decode_codepoint_1 = __importDefault(decode_codepoint);
exports.decodeXML = getStrictDecoder(xml_json_1.default);
exports.decodeHTMLStrict = getStrictDecoder(entities_json_1.default);
function getStrictDecoder(map) {
var keys = Object.keys(map).join("|");
var replace = getReplacer(map);
keys += "|#[xX][\\da-fA-F]+|#\\d+";
var re = new RegExp("&(?:" + keys + ");", "g");
return function (str) { return String(str).replace(re, replace); };
}
var sorter = function (a, b) { return (a < b ? 1 : -1); };
exports.decodeHTML = (function () {
var legacy = Object.keys(legacy_json_1.default).sort(sorter);
var keys = Object.keys(entities_json_1.default).sort(sorter);
for (var i = 0, j = 0; i < keys.length; i++) {
if (legacy[j] === keys[i]) {
keys[i] += ";?";
j++;
}
else {
keys[i] += ";";
}
}
var re = new RegExp("&(?:" + keys.join("|") + "|#[xX][\\da-fA-F]+;?|#\\d+;?)", "g");
var replace = getReplacer(entities_json_1.default);
function replacer(str) {
if (str.substr(-1) !== ";")
str += ";";
return replace(str);
}
//TODO consider creating a merged map
return function (str) { return String(str).replace(re, replacer); };
})();
function getReplacer(map) {
return function replace(str) {
if (str.charAt(1) === "#") {
var secondChar = str.charAt(2);
if (secondChar === "X" || secondChar === "x") {
return decode_codepoint_1.default(parseInt(str.substr(3), 16));
}
return decode_codepoint_1.default(parseInt(str.substr(2), 10));
}
return map[str.slice(1, -1)];
};
}
});
 
unwrapExports(decode$2);
var decode_1 = decode$2.decodeHTML;
var decode_2 = decode$2.decodeHTMLStrict;
var decode_3 = decode$2.decodeXML;
 
var encode$1 = createCommonjsModule(function (module, exports) {
var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.escape = exports.encodeHTML = exports.encodeXML = void 0;
var xml_json_1 = __importDefault(require$$0$1);
var inverseXML = getInverseObj(xml_json_1.default);
var xmlReplacer = getInverseReplacer(inverseXML);
exports.encodeXML = getInverse(inverseXML, xmlReplacer);
var entities_json_1 = __importDefault(require$$1);
var inverseHTML = getInverseObj(entities_json_1.default);
var htmlReplacer = getInverseReplacer(inverseHTML);
exports.encodeHTML = getInverse(inverseHTML, htmlReplacer);
function getInverseObj(obj) {
return Object.keys(obj)
.sort()
.reduce(function (inverse, name) {
inverse[obj[name]] = "&" + name + ";";
return inverse;
}, {});
}
function getInverseReplacer(inverse) {
var single = [];
var multiple = [];
for (var _i = 0, _a = Object.keys(inverse); _i < _a.length; _i++) {
var k = _a[_i];
if (k.length === 1) {
// Add value to single array
single.push("\\" + k);
}
else {
// Add value to multiple array
multiple.push(k);
}
}
// Add ranges to single characters.
single.sort();
for (var start = 0; start < single.length - 1; start++) {
// Find the end of a run of characters
var end = start;
while (end < single.length - 1 &&
single[end].charCodeAt(1) + 1 === single[end + 1].charCodeAt(1)) {
end += 1;
}
var count = 1 + end - start;
// We want to replace at least three characters
if (count < 3)
continue;
single.splice(start, count, single[start] + "-" + single[end]);
}
multiple.unshift("[" + single.join("") + "]");
return new RegExp(multiple.join("|"), "g");
}
var reNonASCII = /(?:[\x80-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])/g;
function singleCharReplacer(c) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return "&#x" + c.codePointAt(0).toString(16).toUpperCase() + ";";
}
function getInverse(inverse, re) {
return function (data) {
return data
.replace(re, function (name) { return inverse[name]; })
.replace(reNonASCII, singleCharReplacer);
};
}
var reXmlChars = getInverseReplacer(inverseXML);
function escape(data) {
return data
.replace(reXmlChars, singleCharReplacer)
.replace(reNonASCII, singleCharReplacer);
}
exports.escape = escape;
});
 
unwrapExports(encode$1);
var encode_1$1 = encode$1.escape;
var encode_2 = encode$1.encodeHTML;
var encode_3 = encode$1.encodeXML;
 
var lib = createCommonjsModule(function (module, exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.encode = exports.decodeStrict = exports.decode = void 0;
 
 
/**
* Decodes a string with entities.
*
* @param data String to decode.
* @param level Optional level to decode at. 0 = XML, 1 = HTML. Default is 0.
*/
function decode(data, level) {
return (!level || level <= 0 ? decode$2.decodeXML : decode$2.decodeHTML)(data);
}
exports.decode = decode;
/**
* Decodes a string with entities. Does not allow missing trailing semicolons for entities.
*
* @param data String to decode.
* @param level Optional level to decode at. 0 = XML, 1 = HTML. Default is 0.
*/
function decodeStrict(data, level) {
return (!level || level <= 0 ? decode$2.decodeXML : decode$2.decodeHTMLStrict)(data);
}
exports.decodeStrict = decodeStrict;
/**
* Encodes a string with entities.
*
* @param data String to encode.
* @param level Optional level to encode at. 0 = XML, 1 = HTML. Default is 0.
*/
function encode(data, level) {
return (!level || level <= 0 ? encode$1.encodeXML : encode$1.encodeHTML)(data);
}
exports.encode = encode;
var encode_2 = encode$1;
Object.defineProperty(exports, "encodeXML", { enumerable: true, get: function () { return encode_2.encodeXML; } });
Object.defineProperty(exports, "encodeHTML", { enumerable: true, get: function () { return encode_2.encodeHTML; } });
Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return encode_2.escape; } });
// Legacy aliases
Object.defineProperty(exports, "encodeHTML4", { enumerable: true, get: function () { return encode_2.encodeHTML; } });
Object.defineProperty(exports, "encodeHTML5", { enumerable: true, get: function () { return encode_2.encodeHTML; } });
var decode_2 = decode$2;
Object.defineProperty(exports, "decodeXML", { enumerable: true, get: function () { return decode_2.decodeXML; } });
Object.defineProperty(exports, "decodeHTML", { enumerable: true, get: function () { return decode_2.decodeHTML; } });
Object.defineProperty(exports, "decodeHTMLStrict", { enumerable: true, get: function () { return decode_2.decodeHTMLStrict; } });
// Legacy aliases
Object.defineProperty(exports, "decodeHTML4", { enumerable: true, get: function () { return decode_2.decodeHTML; } });
Object.defineProperty(exports, "decodeHTML5", { enumerable: true, get: function () { return decode_2.decodeHTML; } });
Object.defineProperty(exports, "decodeHTML4Strict", { enumerable: true, get: function () { return decode_2.decodeHTMLStrict; } });
Object.defineProperty(exports, "decodeHTML5Strict", { enumerable: true, get: function () { return decode_2.decodeHTMLStrict; } });
Object.defineProperty(exports, "decodeXMLStrict", { enumerable: true, get: function () { return decode_2.decodeXML; } });
});
 
unwrapExports(lib);
var lib_1 = lib.encode;
var lib_2 = lib.decodeStrict;
var lib_3 = lib.decode;
var lib_4 = lib.encodeXML;
var lib_5 = lib.encodeHTML;
var lib_6 = lib.encodeHTML4;
var lib_7 = lib.encodeHTML5;
var lib_8 = lib.decodeXML;
var lib_9 = lib.decodeHTML;
var lib_10 = lib.decodeHTMLStrict;
var lib_11 = lib.decodeHTML4;
var lib_12 = lib.decodeHTML5;
var lib_13 = lib.decodeHTML4Strict;
var lib_14 = lib.decodeHTML5Strict;
var lib_15 = lib.decodeXMLStrict;
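
// Editorial illustration (not part of the vendored bundle): the lib_N aliases above are the
// entry points the rest of this file uses for entity handling. Roughly:
//
//   lib_9("&copy 2024");     // "© 2024"  -- decodeHTML accepts legacy entities without ";"
//   lib_10("&copy 2024");    // "&copy 2024"  -- decodeHTMLStrict requires the semicolon
//   lib_4("5 > 3 & 2 < 4");  // "5 &gt; 3 &amp; 2 &lt; 4"  -- encodeXML escapes XML specials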
 
var C_BACKSLASH = 92;
 
var ENTITY = "&(?:#x[a-f0-9]{1,6}|#[0-9]{1,7}|[a-z][a-z0-9]{1,31});";
 
var TAGNAME = "[A-Za-z][A-Za-z0-9-]*";
var ATTRIBUTENAME = "[a-zA-Z_:][a-zA-Z0-9:._-]*";
var UNQUOTEDVALUE = "[^\"'=<>`\\x00-\\x20]+";
var SINGLEQUOTEDVALUE = "'[^']*'";
var DOUBLEQUOTEDVALUE = '"[^"]*"';
var ATTRIBUTEVALUE =
"(?:" +
UNQUOTEDVALUE +
"|" +
SINGLEQUOTEDVALUE +
"|" +
DOUBLEQUOTEDVALUE +
")";
var ATTRIBUTEVALUESPEC = "(?:" + "\\s*=" + "\\s*" + ATTRIBUTEVALUE + ")";
var ATTRIBUTE = "(?:" + "\\s+" + ATTRIBUTENAME + ATTRIBUTEVALUESPEC + "?)";
var OPENTAG = "<" + TAGNAME + ATTRIBUTE + "*" + "\\s*/?>";
var CLOSETAG = "</" + TAGNAME + "\\s*[>]";
var HTMLCOMMENT = "<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->";
var PROCESSINGINSTRUCTION = "[<][?][\\s\\S]*?[?][>]";
var DECLARATION = "<![A-Z]+" + "\\s+[^>]*>";
var CDATA = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>";
var HTMLTAG =
"(?:" +
OPENTAG +
"|" +
CLOSETAG +
"|" +
HTMLCOMMENT +
"|" +
PROCESSINGINSTRUCTION +
"|" +
DECLARATION +
"|" +
CDATA +
")";
var reHtmlTag = new RegExp("^" + HTMLTAG);
 
var reBackslashOrAmp = /[\\&]/;
 
var ESCAPABLE = "[!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]";
 
var reEntityOrEscapedChar = new RegExp("\\\\" + ESCAPABLE + "|" + ENTITY, "gi");
 
var XMLSPECIAL = '[&<>"]';
 
var reXmlSpecial = new RegExp(XMLSPECIAL, "g");
 
var unescapeChar = function(s) {
if (s.charCodeAt(0) === C_BACKSLASH) {
return s.charAt(1);
} else {
return lib_9(s);
}
};
 
// Replace entities and backslash escapes with literal characters.
var unescapeString = function(s) {
if (reBackslashOrAmp.test(s)) {
return s.replace(reEntityOrEscapedChar, unescapeChar);
} else {
return s;
}
};
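
// Editorial illustration (not part of the vendored code): unescapeString undoes both
// CommonMark backslash escapes and HTML entities in a single pass, e.g.
//
//   unescapeString("\\*not emphasis\\* &amp; &#x2764;");  // "*not emphasis* & \u2764"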
 
var normalizeURI = function(uri) {
try {
return encode_1(uri);
} catch (err) {
return uri;
}
};
 
var replaceUnsafeChar = function(s) {
switch (s) {
case "&":
return "&amp;";
case "<":
return "&lt;";
case ">":
return "&gt;";
case '"':
return "&quot;";
default:
return s;
}
};
 
var escapeXml = function(s) {
if (reXmlSpecial.test(s)) {
return s.replace(reXmlSpecial, replaceUnsafeChar);
} else {
return s;
}
};
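
// Editorial illustration (not part of the vendored code): escapeXml only rewrites the four
// characters matched by reXmlSpecial and leaves everything else untouched, e.g.
//
//   escapeXml('a < b & "c"');  // 'a &lt; b &amp; &quot;c&quot;'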
 
// derived from https://github.com/mathiasbynens/String.fromCodePoint
/*! http://mths.be/fromcodepoint v0.2.1 by @mathias */
 
var _fromCodePoint;
 
function fromCodePoint(_) {
return _fromCodePoint(_);
}
 
if (String.fromCodePoint) {
_fromCodePoint = function(_) {
try {
return String.fromCodePoint(_);
} catch (e) {
if (e instanceof RangeError) {
return String.fromCharCode(0xfffd);
}
throw e;
}
};
} else {
var stringFromCharCode = String.fromCharCode;
var floor = Math.floor;
_fromCodePoint = function() {
var MAX_SIZE = 0x4000;
var codeUnits = [];
var highSurrogate;
var lowSurrogate;
var index = -1;
var length = arguments.length;
if (!length) {
return "";
}
var result = "";
while (++index < length) {
var codePoint = Number(arguments[index]);
if (
!isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`
codePoint < 0 || // not a valid Unicode code point
codePoint > 0x10ffff || // not a valid Unicode code point
floor(codePoint) !== codePoint // not an integer
) {
return String.fromCharCode(0xfffd);
}
if (codePoint <= 0xffff) {
// BMP code point
codeUnits.push(codePoint);
} else {
// Astral code point; split in surrogate halves
// http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
codePoint -= 0x10000;
highSurrogate = (codePoint >> 10) + 0xd800;
lowSurrogate = (codePoint % 0x400) + 0xdc00;
codeUnits.push(highSurrogate, lowSurrogate);
}
if (index + 1 === length || codeUnits.length > MAX_SIZE) {
result += stringFromCharCode.apply(null, codeUnits);
codeUnits.length = 0;
}
}
return result;
};
}
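
// Editorial illustration (not part of the vendored polyfill): for an astral code point the
// fallback above emits a UTF-16 surrogate pair. Worked through for U+1F600:
//
//   0x1F600 - 0x10000          == 0xF600
//   (0xF600 >> 10) + 0xD800    == 0xD83D   // high surrogate
//   (0xF600 % 0x400) + 0xDC00  == 0xDE00   // low surrogate
//   fromCodePoint(0x1F600)     == "\uD83D\uDE00"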
 
/*! http://mths.be/repeat v0.2.0 by @mathias */
if (!String.prototype.repeat) {
(function() {
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch(error) {}
return result;
}());
var repeat = function(count) {
if (this == null) {
throw TypeError();
}
var string = String(this);
// `ToInteger`
var n = count ? Number(count) : 0;
if (n != n) { // better `isNaN`
n = 0;
}
// Account for out-of-bounds indices
if (n < 0 || n == Infinity) {
throw RangeError();
}
var result = '';
while (n) {
if (n % 2 == 1) {
result += string;
}
if (n > 1) {
string += string;
}
n >>= 1;
}
return result;
};
if (defineProperty) {
defineProperty(String.prototype, 'repeat', {
'value': repeat,
'configurable': true,
'writable': true
});
} else {
String.prototype.repeat = repeat;
}
}());
}
 
var normalizeURI$1 = normalizeURI;
var unescapeString$1 = unescapeString;
 
// Constants for character codes:
 
var C_NEWLINE = 10;
var C_ASTERISK = 42;
var C_UNDERSCORE = 95;
var C_BACKTICK = 96;
var C_OPEN_BRACKET = 91;
var C_CLOSE_BRACKET = 93;
var C_LESSTHAN = 60;
var C_BANG = 33;
var C_BACKSLASH$1 = 92;
var C_AMPERSAND = 38;
var C_OPEN_PAREN = 40;
var C_CLOSE_PAREN = 41;
var C_COLON = 58;
var C_SINGLEQUOTE = 39;
var C_DOUBLEQUOTE = 34;
 
// Some regexps used in inline parser:
 
var ESCAPABLE$1 = ESCAPABLE;
var ESCAPED_CHAR = "\\\\" + ESCAPABLE$1;
 
var ENTITY$1 = ENTITY;
var reHtmlTag$1 = reHtmlTag;
 
var rePunctuation = new RegExp(
/^[!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u0AF0\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166D\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E42\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]|\uD800[\uDD00-\uDD02\uDF9F\uDFD0]|\uD801\uDD6F|\uD802[\uDC57\uDD1F\uDD3F\uDE50-\uDE58\uDE7F\uDEF0-\uDEF6\uDF39-\uDF3F\uDF99-\uDF9C]|\uD804[\uDC47-\uDC4D\uDCBB\uDCBC\uDCBE-\uDCC1\uDD40-\uDD43\uDD74\uDD75\uDDC5-\uDDC9\uDDCD\uDDDB\uDDDD-\uDDDF\uDE38-\uDE3D\uDEA9]|\uD805[\uDCC6\uDDC1-\uDDD7\uDE41-\uDE43\uDF3C-\uDF3E]|\uD809[\uDC70-\uDC74]|\uD81A[\uDE6E\uDE6F\uDEF5\uDF37-\uDF3B\uDF44]|\uD82F\uDC9F|\uD836[\uDE87-\uDE8B]/
);
 
var reLinkTitle = new RegExp(
'^(?:"(' +
ESCAPED_CHAR +
'|[^"\\x00])*"' +
"|" +
"'(" +
ESCAPED_CHAR +
"|[^'\\x00])*'" +
"|" +
"\\((" +
ESCAPED_CHAR +
"|[^()\\x00])*\\))"
);
 
var reLinkDestinationBraces = /^(?:<(?:[^<>\n\\\x00]|\\.)*>)/;
 
var reEscapable = new RegExp("^" + ESCAPABLE$1);
 
var reEntityHere = new RegExp("^" + ENTITY$1, "i");
 
var reTicks = /`+/;
 
var reTicksHere = /^`+/;
 
var reEllipses = /\.\.\./g;
 
var reDash = /--+/g;
 
var reEmailAutolink = /^<([a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*)>/;
 
var reAutolink = /^<[A-Za-z][A-Za-z0-9.+-]{1,31}:[^<>\x00-\x20]*>/i;
 
var reSpnl = /^ *(?:\n *)?/;
 
var reWhitespaceChar = /^[ \t\n\x0b\x0c\x0d]/;
 
var reUnicodeWhitespaceChar = /^\s/;
 
var reFinalSpace = / *$/;
 
var reInitialSpace = /^ */;
 
var reSpaceAtEndOfLine = /^ *(?:\n|$)/;
 
var reLinkLabel = /^\[(?:[^\\\[\]]|\\.){0,1000}\]/s;
 
// Matches a string of non-special characters.
var reMain = /^[^\n`\[\]\\!<&*_'"]+/m;
 
var text = function(s) {
var node = new Node("text");
node._literal = s;
return node;
};
 
// normalize a reference in reference link (remove []s, trim,
// collapse internal space, unicode case fold).
// See commonmark/commonmark.js#168.
var normalizeReference = function(string) {
return string
.slice(1, string.length - 1)
.trim()
.replace(/[ \t\r\n]+/, " ")
.toLowerCase()
.toUpperCase();
};
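
// Editorial illustration (not part of the vendored code): the toLowerCase().toUpperCase()
// pair above acts as a cheap Unicode case fold, so reference labels that differ only by
// case (including expansions such as "ß" -> "SS") normalize to the same key:
//
//   normalizeReference("[Straße]");   // "STRASSE"
//   normalizeReference("[STRASSE]");  // "STRASSE"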
 
// INLINE PARSER
 
// These are methods of an InlineParser object, defined below.
// An InlineParser keeps track of a subject (a string to be
// parsed) and a position in that subject.
 
// If re matches at current position in the subject, advance
// position in subject and return the match; otherwise return null.
var match = function(re) {
var m = re.exec(this.subject.slice(this.pos));
if (m === null) {
return null;
} else {
this.pos += m.index + m[0].length;
return m[0];
}
};
 
// Returns the code for the character at the current subject position, or -1
// if there are no more characters.
var peek = function() {
if (this.pos < this.subject.length) {
return this.subject.charCodeAt(this.pos);
} else {
return -1;
}
};
 
// Parse zero or more space characters, including at most one newline
var spnl = function() {
this.match(reSpnl);
return true;
};
 
// All of the parsers below try to match something at the current position
// in the subject. If they succeed in matching anything, they
// return the inline matched, advancing the subject.
 
// Attempt to parse backticks, adding either a backtick code span or a
// literal sequence of backticks.
var parseBackticks = function(block) {
var ticks = this.match(reTicksHere);
if (ticks === null) {
return false;
}
var afterOpenTicks = this.pos;
var matched;
var node;
var contents;
while ((matched = this.match(reTicks)) !== null) {
if (matched === ticks) {
node = new Node("code");
contents = this.subject
.slice(afterOpenTicks, this.pos - ticks.length)
.replace(/\n/gm, " ");
if (
contents.length > 0 &&
contents.match(/[^ ]/) !== null &&
contents[0] == " " &&
contents[contents.length - 1] == " "
) {
node._literal = contents.slice(1, contents.length - 1);
} else {
node._literal = contents;
}
const doc = this.options.autoDoc;
if (doc) {
const decl_hash = doc.detectDeclPath(contents);
if (decl_hash) {
var l = new Node("link");
l.destination = decl_hash;
l.appendChild(node);
node = l;
}
}
block.appendChild(node);
return true;
}
}
// If we got here, we didn't match a closing backtick sequence.
this.pos = afterOpenTicks;
block.appendChild(text(ticks));
return true;
};
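
// Editorial note: the options.autoDoc branch above appears to be a local modification for
// autodoc rather than upstream commonmark.js behavior. When the renderer supplies an
// autoDoc object whose detectDeclPath(contents) returns a location hash for a code span
// such as `std.ArrayList`, the plain "code" node is wrapped in a "link" node whose
// destination is that hash; when detectDeclPath returns a falsy value, the code span is
// emitted unchanged.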
 
// Parse a backslash-escaped special character, adding either the escaped
// character, a hard line break (if the backslash is followed by a newline),
// or a literal backslash to the block's children. Assumes current character
// is a backslash.
var parseBackslash = function(block) {
var subj = this.subject;
var node;
this.pos += 1;
if (this.peek() === C_NEWLINE) {
this.pos += 1;
node = new Node("linebreak");
block.appendChild(node);
} else if (reEscapable.test(subj.charAt(this.pos))) {
block.appendChild(text(subj.charAt(this.pos)));
this.pos += 1;
} else {
block.appendChild(text("\\"));
}
return true;
};
 
// Attempt to parse an autolink (URL or email in pointy brackets).
var parseAutolink = function(block) {
var m;
var dest;
var node;
if ((m = this.match(reEmailAutolink))) {
dest = m.slice(1, m.length - 1);
node = new Node("link");
node._destination = normalizeURI$1("mailto:" + dest);
node._title = "";
node.appendChild(text(dest));
block.appendChild(node);
return true;
} else if ((m = this.match(reAutolink))) {
dest = m.slice(1, m.length - 1);
node = new Node("link");
node._destination = normalizeURI$1(dest);
node._title = "";
node.appendChild(text(dest));
block.appendChild(node);
return true;
} else {
return false;
}
};
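
// Editorial illustration (not part of the vendored code): an autolink in pointy brackets
// becomes a link node whose destination and visible text are the same (URI-normalized)
// string, e.g.
//
//   <https://ziglang.org>    -> link with destination "https://ziglang.org"
//   <someone@example.com>    -> link with destination "mailto:someone@example.com"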
 
// Attempt to parse a raw HTML tag.
var parseHtmlTag = function(block) {
var m = this.match(reHtmlTag$1);
if (m === null) {
return false;
} else {
var node = new Node("html_inline");
node._literal = m;
block.appendChild(node);
return true;
}
};
 
// Scan a sequence of characters with code cc, and return information about
// the number of delimiters and whether they are positioned such that
// they can open and/or close emphasis or strong emphasis. A utility
// function for strong/emph parsing.
var scanDelims = function(cc) {
var numdelims = 0;
var char_before, char_after, cc_after;
var startpos = this.pos;
var left_flanking, right_flanking, can_open, can_close;
var after_is_whitespace,
after_is_punctuation,
before_is_whitespace,
before_is_punctuation;
 
if (cc === C_SINGLEQUOTE || cc === C_DOUBLEQUOTE) {
numdelims++;
this.pos++;
} else {
while (this.peek() === cc) {
numdelims++;
this.pos++;
}
}
 
if (numdelims === 0) {
return null;
}
 
char_before = startpos === 0 ? "\n" : this.subject.charAt(startpos - 1);
 
cc_after = this.peek();
if (cc_after === -1) {
char_after = "\n";
} else {
char_after = fromCodePoint(cc_after);
}
 
after_is_whitespace = reUnicodeWhitespaceChar.test(char_after);
after_is_punctuation = rePunctuation.test(char_after);
before_is_whitespace = reUnicodeWhitespaceChar.test(char_before);
before_is_punctuation = rePunctuation.test(char_before);
 
left_flanking =
!after_is_whitespace &&
(!after_is_punctuation ||
before_is_whitespace ||
before_is_punctuation);
right_flanking =
!before_is_whitespace &&
(!before_is_punctuation || after_is_whitespace || after_is_punctuation);
if (cc === C_UNDERSCORE) {
can_open = left_flanking && (!right_flanking || before_is_punctuation);
can_close = right_flanking && (!left_flanking || after_is_punctuation);
} else if (cc === C_SINGLEQUOTE || cc === C_DOUBLEQUOTE) {
can_open = left_flanking && !right_flanking;
can_close = right_flanking;
} else {
can_open = left_flanking;
can_close = right_flanking;
}
this.pos = startpos;
return { numdelims: numdelims, can_open: can_open, can_close: can_close };
};
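
// Editorial illustration (not part of the vendored code): for the subject "*foo*", scanning
// at the opening "*" sees char_before "\n" (whitespace) and char_after "f", so the run is
// left-flanking and can_open; at the closing "*" char_before is "o" and char_after is "\n",
// so the run is right-flanking and can_close. The extra conditions for C_UNDERSCORE are what
// keep intra-word underscores, as in snake_case_names, from opening or closing emphasis.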
 
// Handle a delimiter marker for emphasis or a quote.
var handleDelim = function(cc, block) {
var res = this.scanDelims(cc);
if (!res) {
return false;
}
var numdelims = res.numdelims;
var startpos = this.pos;
var contents;
 
this.pos += numdelims;
if (cc === C_SINGLEQUOTE) {
contents = "\u2019";
} else if (cc === C_DOUBLEQUOTE) {
contents = "\u201C";
} else {
contents = this.subject.slice(startpos, this.pos);
}
var node = text(contents);
block.appendChild(node);
 
// Add entry to stack for this opener
if (
(res.can_open || res.can_close) &&
(this.options.smart || (cc !== C_SINGLEQUOTE && cc !== C_DOUBLEQUOTE))
) {
this.delimiters = {
cc: cc,
numdelims: numdelims,
origdelims: numdelims,
node: node,
previous: this.delimiters,
next: null,
can_open: res.can_open,
can_close: res.can_close
};
if (this.delimiters.previous !== null) {
this.delimiters.previous.next = this.delimiters;
}
}
 
return true;
};
 
var removeDelimiter = function(delim) {
if (delim.previous !== null) {
delim.previous.next = delim.next;
}
if (delim.next === null) {
// top of stack
this.delimiters = delim.previous;
} else {
delim.next.previous = delim.previous;
}
};
 
var removeDelimitersBetween = function(bottom, top) {
if (bottom.next !== top) {
bottom.next = top;
top.previous = bottom;
}
};
 
var processEmphasis = function(stack_bottom) {
var opener, closer, old_closer;
var opener_inl, closer_inl;
var tempstack;
var use_delims;
var tmp, next;
var opener_found;
var openers_bottom = [];
var openers_bottom_index;
var odd_match = false;
 
for (var i = 0; i < 8; i++) {
openers_bottom[i] = stack_bottom;
}
// find first closer above stack_bottom:
closer = this.delimiters;
while (closer !== null && closer.previous !== stack_bottom) {
closer = closer.previous;
}
// move forward, looking for closers, and handling each
while (closer !== null) {
var closercc = closer.cc;
if (!closer.can_close) {
closer = closer.next;
} else {
// found emphasis closer. now look back for first matching opener:
opener = closer.previous;
opener_found = false;
switch (closercc) {
case C_SINGLEQUOTE:
openers_bottom_index = 0;
break;
case C_DOUBLEQUOTE:
openers_bottom_index = 1;
break;
case C_UNDERSCORE:
openers_bottom_index = 2;
break;
case C_ASTERISK:
openers_bottom_index = 3 + (closer.can_open ? 3 : 0)
+ (closer.origdelims % 3);
break;
}
while (
opener !== null &&
opener !== stack_bottom &&
opener !== openers_bottom[openers_bottom_index]
) {
odd_match =
(closer.can_open || opener.can_close) &&
closer.origdelims % 3 !== 0 &&
(opener.origdelims + closer.origdelims) % 3 === 0;
if (opener.cc === closer.cc && opener.can_open && !odd_match) {
opener_found = true;
break;
}
opener = opener.previous;
}
old_closer = closer;
 
if (closercc === C_ASTERISK || closercc === C_UNDERSCORE) {
if (!opener_found) {
closer = closer.next;
} else {
// calculate actual number of delimiters used from closer
use_delims =
closer.numdelims >= 2 && opener.numdelims >= 2 ? 2 : 1;
 
opener_inl = opener.node;
closer_inl = closer.node;
 
// remove used delimiters from stack elts and inlines
opener.numdelims -= use_delims;
closer.numdelims -= use_delims;
opener_inl._literal = opener_inl._literal.slice(
0,
opener_inl._literal.length - use_delims
);
closer_inl._literal = closer_inl._literal.slice(
0,
closer_inl._literal.length - use_delims
);
 
// build contents for new emph element
var emph = new Node(use_delims === 1 ? "emph" : "strong");
 
tmp = opener_inl._next;
while (tmp && tmp !== closer_inl) {
next = tmp._next;
tmp.unlink();
emph.appendChild(tmp);
tmp = next;
}
 
opener_inl.insertAfter(emph);
 
// remove elts between opener and closer in delimiters stack
removeDelimitersBetween(opener, closer);
 
// if opener has 0 delims, remove it and the inline
if (opener.numdelims === 0) {
opener_inl.unlink();
this.removeDelimiter(opener);
}
 
if (closer.numdelims === 0) {
closer_inl.unlink();
tempstack = closer.next;
this.removeDelimiter(closer);
closer = tempstack;
}
}
} else if (closercc === C_SINGLEQUOTE) {
closer.node._literal = "\u2019";
if (opener_found) {
opener.node._literal = "\u2018";
}
closer = closer.next;
} else if (closercc === C_DOUBLEQUOTE) {
closer.node._literal = "\u201D";
if (opener_found) {
opener.node.literal = "\u201C";
}
closer = closer.next;
}
if (!opener_found) {
// Set lower bound for future searches for openers:
openers_bottom[openers_bottom_index] =
old_closer.previous;
if (!old_closer.can_open) {
// We can remove a closer that can't be an opener,
// once we've seen there's no matching opener:
this.removeDelimiter(old_closer);
}
}
}
}
 
// remove all delimiters
while (this.delimiters !== null && this.delimiters !== stack_bottom) {
this.removeDelimiter(this.delimiters);
}
};
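
// Editorial illustration (not part of the vendored code): processEmphasis consumes the
// delimiter stack built by handleDelim. For "**hi**" both opener and closer carry two "*"
// delimiters, so use_delims is 2 and a single "strong" node is produced; for "*hi*" it is 1
// and an "emph" node is produced. Smart quotes (only pushed when options.smart is enabled)
// ride the same stack but are merely rewritten to curly quotes, never turned into emphasis.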
 
// Attempt to parse link title (sans quotes), returning the string
// or null if no match.
var parseLinkTitle = function() {
var title = this.match(reLinkTitle);
if (title === null) {
return null;
} else {
// chop off quotes from title and unescape:
return unescapeString$1(title.substr(1, title.length - 2));
}
};
 
// Attempt to parse link destination, returning the string or
// null if no match.
var parseLinkDestination = function() {
var res = this.match(reLinkDestinationBraces);
if (res === null) {
if (this.peek() === C_LESSTHAN) {
return null;
}
// TODO handrolled parser; res should be null or the string
var savepos = this.pos;
var openparens = 0;
var c;
while ((c = this.peek()) !== -1) {
if (
c === C_BACKSLASH$1 &&
reEscapable.test(this.subject.charAt(this.pos + 1))
) {
this.pos += 1;
if (this.peek() !== -1) {
this.pos += 1;
}
} else if (c === C_OPEN_PAREN) {
this.pos += 1;
openparens += 1;
} else if (c === C_CLOSE_PAREN) {
if (openparens < 1) {
break;
} else {
this.pos += 1;
openparens -= 1;
}
} else if (reWhitespaceChar.exec(fromCodePoint(c)) !== null) {
break;
} else {
this.pos += 1;
}
}
if (this.pos === savepos && c !== C_CLOSE_PAREN) {
return null;
}
if (openparens !== 0) {
return null;
}
res = this.subject.substr(savepos, this.pos - savepos);
return normalizeURI$1(unescapeString$1(res));
} else {
// chop off surrounding <..>:
return normalizeURI$1(unescapeString$1(res.substr(1, res.length - 2)));
}
};
 
// Attempt to parse a link label, returning number of characters parsed.
var parseLinkLabel = function() {
var m = this.match(reLinkLabel);
if (m === null || m.length > 1001) {
return 0;
} else {
return m.length;
}
};
 
// Add open bracket to delimiter stack and add a text node to block's children.
var parseOpenBracket = function(block) {
var startpos = this.pos;
this.pos += 1;
 
var node = text("[");
block.appendChild(node);
 
// Add entry to stack for this opener
this.addBracket(node, startpos, false);
return true;
};
 
// If the next character is [, add a "![" opener to the bracket stack and
// add a text node to block's children. Otherwise just add a "!" text node.
var parseBang = function(block) {
var startpos = this.pos;
this.pos += 1;
if (this.peek() === C_OPEN_BRACKET) {
this.pos += 1;
 
var node = text("![");
block.appendChild(node);
 
// Add entry to stack for this opener
this.addBracket(node, startpos + 1, true);
} else {
block.appendChild(text("!"));
}
return true;
};
 
// Try to match close bracket against an opening in the delimiter
// stack. Add either a link or image, or a plain [ character,
// to block's children. If there is a matching delimiter,
// remove it from the delimiter stack.
var parseCloseBracket = function(block) {
var startpos;
var is_image;
var dest;
var title;
var matched = false;
var reflabel;
var opener;
 
this.pos += 1;
startpos = this.pos;
 
// get last [ or ![
opener = this.brackets;
 
if (opener === null) {
// no matched opener, just return a literal
block.appendChild(text("]"));
return true;
}
 
if (!opener.active) {
// no matched opener, just return a literal
block.appendChild(text("]"));
// take opener off brackets stack
this.removeBracket();
return true;
}
 
// If we got here, open is a potential opener
is_image = opener.image;
 
// Check to see if we have a link/image
 
var savepos = this.pos;
 
// Inline link?
if (this.peek() === C_OPEN_PAREN) {
this.pos++;
if (
this.spnl() &&
(dest = this.parseLinkDestination()) !== null &&
this.spnl() &&
// make sure there's a space before the title:
((reWhitespaceChar.test(this.subject.charAt(this.pos - 1)) &&
(title = this.parseLinkTitle())) ||
true) &&
this.spnl() &&
this.peek() === C_CLOSE_PAREN
) {
this.pos += 1;
matched = true;
} else {
this.pos = savepos;
}
}
 
if (!matched) {
// Next, see if there's a link label
var beforelabel = this.pos;
var n = this.parseLinkLabel();
if (n > 2) {
reflabel = this.subject.slice(beforelabel, beforelabel + n);
} else if (!opener.bracketAfter) {
// Empty or missing second label means to use the first label as the reference.
// The reference must not contain a bracket. If we know there's a bracket, we don't even bother checking it.
reflabel = this.subject.slice(opener.index, startpos);
}
if (n === 0) {
// If shortcut reference link, rewind before spaces we skipped.
this.pos = savepos;
}
 
if (reflabel) {
// lookup rawlabel in refmap
var link = this.refmap[normalizeReference(reflabel)];
if (link) {
dest = link.destination;
title = link.title;
matched = true;
}
}
}
 
if (matched) {
var node = new Node(is_image ? "image" : "link");
node._destination = dest;
node._title = title || "";
 
var tmp, next;
tmp = opener.node._next;
while (tmp) {
next = tmp._next;
tmp.unlink();
node.appendChild(tmp);
tmp = next;
}
block.appendChild(node);
this.processEmphasis(opener.previousDelimiter);
this.removeBracket();
opener.node.unlink();
 
// We remove this bracket and processEmphasis will remove later delimiters.
// Now, for a link, we also deactivate earlier link openers.
// (no links in links)
if (!is_image) {
opener = this.brackets;
while (opener !== null) {
if (!opener.image) {
opener.active = false; // deactivate this opener
}
opener = opener.previous;
}
}
 
return true;
} else {
// no match
 
this.removeBracket(); // remove this opener from stack
this.pos = startpos;
block.appendChild(text("]"));
return true;
}
};
 
var addBracket = function(node, index, image) {
if (this.brackets !== null) {
this.brackets.bracketAfter = true;
}
this.brackets = {
node: node,
previous: this.brackets,
previousDelimiter: this.delimiters,
index: index,
image: image,
active: true
};
};
 
var removeBracket = function() {
this.brackets = this.brackets.previous;
};
 
// Attempt to parse an entity.
var parseEntity = function(block) {
var m;
if ((m = this.match(reEntityHere))) {
block.appendChild(text(lib_9(m)));
return true;
} else {
return false;
}
};
 
// Parse a run of ordinary characters, or a single character with
// a special meaning in markdown, as a plain string.
var parseString = function(block) {
var m;
if ((m = this.match(reMain))) {
if (this.options.smart) {
block.appendChild(
text(
m
.replace(reEllipses, "\u2026")
.replace(reDash, function(chars) {
var enCount = 0;
var emCount = 0;
if (chars.length % 3 === 0) {
// If divisible by 3, use all em dashes
emCount = chars.length / 3;
} else if (chars.length % 2 === 0) {
// If divisible by 2, use all en dashes
enCount = chars.length / 2;
} else if (chars.length % 3 === 2) {
// If 2 extra dashes, use en dash for last 2; em dashes for rest
enCount = 1;
emCount = (chars.length - 2) / 3;
} else {
// Use en dashes for last 4 hyphens; em dashes for rest
enCount = 2;
emCount = (chars.length - 4) / 3;
}
return (
"\u2014".repeat(emCount) +
"\u2013".repeat(enCount)
);
})
)
);
} else {
block.appendChild(text(m));
}
return true;
} else {
return false;
}
};
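// Editor's note (illustrative): with options.smart enabled, a run of 7 hyphens
// hits the final branch of the reDash replacement above (7 % 3 === 1), giving
// emCount = 1 and enCount = 2, i.e. one em dash followed by two en dashes;
// 6 hyphens (divisible by 3) become two em dashes.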
 
// Parse a newline. If it was preceded by two spaces, return a hard
// line break; otherwise a soft line break.
var parseNewline = function(block) {
this.pos += 1; // assume we're at a \n
// check previous node for trailing spaces
var lastc = block._lastChild;
if (
lastc &&
lastc.type === "text" &&
lastc._literal[lastc._literal.length - 1] === " "
) {
var hardbreak = lastc._literal[lastc._literal.length - 2] === " ";
lastc._literal = lastc._literal.replace(reFinalSpace, "");
block.appendChild(new Node(hardbreak ? "linebreak" : "softbreak"));
} else {
block.appendChild(new Node("softbreak"));
}
this.match(reInitialSpace); // gobble leading spaces in next line
return true;
};
 
// Attempt to parse a link reference, modifying refmap.
var parseReference = function(s, refmap) {
this.subject = s;
this.pos = 0;
var rawlabel;
var dest;
var title;
var matchChars;
var startpos = this.pos;
 
// label:
matchChars = this.parseLinkLabel();
if (matchChars === 0) {
return 0;
} else {
rawlabel = this.subject.substr(0, matchChars);
}
 
// colon:
if (this.peek() === C_COLON) {
this.pos++;
} else {
this.pos = startpos;
return 0;
}
 
// link url
this.spnl();
 
dest = this.parseLinkDestination();
if (dest === null) {
this.pos = startpos;
return 0;
}
 
var beforetitle = this.pos;
this.spnl();
if (this.pos !== beforetitle) {
title = this.parseLinkTitle();
}
if (title === null) {
title = "";
// rewind before spaces
this.pos = beforetitle;
}
 
// make sure we're at line end:
var atLineEnd = true;
if (this.match(reSpaceAtEndOfLine) === null) {
if (title === "") {
atLineEnd = false;
} else {
// the potential title we found is not at the line end,
// but it could still be a legal link reference if we
// discard the title
title = "";
// rewind before spaces
this.pos = beforetitle;
// and instead check if the link URL is at the line end
atLineEnd = this.match(reSpaceAtEndOfLine) !== null;
}
}
 
if (!atLineEnd) {
this.pos = startpos;
return 0;
}
 
var normlabel = normalizeReference(rawlabel);
if (normlabel === "") {
// label must contain non-whitespace characters
this.pos = startpos;
return 0;
}
 
if (!refmap[normlabel]) {
refmap[normlabel] = { destination: dest, title: title };
}
return this.pos - startpos;
};
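// Editor's note (illustrative): given the subject '[foo]: /url "title"',
// parseReference consumes the whole line, stores
// { destination: "/url", title: "title" } in refmap under the normalized form
// of the label "foo", and returns the number of characters parsed.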
 
// Parse the next inline element in subject, advancing subject position.
// On success, add the result to block's children and return true.
// On failure, return false.
var parseInline = function(block) {
var res = false;
var c = this.peek();
if (c === -1) {
return false;
}
switch (c) {
case C_NEWLINE:
res = this.parseNewline(block);
break;
case C_BACKSLASH$1:
res = this.parseBackslash(block);
break;
case C_BACKTICK:
res = this.parseBackticks(block);
break;
case C_ASTERISK:
case C_UNDERSCORE:
res = this.handleDelim(c, block);
break;
case C_SINGLEQUOTE:
case C_DOUBLEQUOTE:
res = this.options.smart && this.handleDelim(c, block);
break;
case C_OPEN_BRACKET:
res = this.parseOpenBracket(block);
break;
case C_BANG:
res = this.parseBang(block);
break;
case C_CLOSE_BRACKET:
res = this.parseCloseBracket(block);
break;
case C_LESSTHAN:
res = this.parseAutolink(block) || this.parseHtmlTag(block);
break;
case C_AMPERSAND:
res = this.parseEntity(block);
break;
default:
res = this.parseString(block);
break;
}
if (!res) {
this.pos += 1;
block.appendChild(text(fromCodePoint(c)));
}
 
return true;
};
 
// Parse string content in block into inline children,
// using refmap to resolve references.
var parseInlines = function(block) {
this.subject = block._string_content.trim();
this.pos = 0;
this.delimiters = null;
this.brackets = null;
while (this.parseInline(block)) {}
block._string_content = null; // allow raw string to be garbage collected
this.processEmphasis(null);
};
 
// The InlineParser object.
function InlineParser(options) {
return {
subject: "",
delimiters: null, // used by handleDelim method
brackets: null,
pos: 0,
refmap: {},
match: match,
peek: peek,
spnl: spnl,
parseBackticks: parseBackticks,
parseBackslash: parseBackslash,
parseAutolink: parseAutolink,
parseHtmlTag: parseHtmlTag,
scanDelims: scanDelims,
handleDelim: handleDelim,
parseLinkTitle: parseLinkTitle,
parseLinkDestination: parseLinkDestination,
parseLinkLabel: parseLinkLabel,
parseOpenBracket: parseOpenBracket,
parseBang: parseBang,
parseCloseBracket: parseCloseBracket,
addBracket: addBracket,
removeBracket: removeBracket,
parseEntity: parseEntity,
parseString: parseString,
parseNewline: parseNewline,
parseReference: parseReference,
parseInline: parseInline,
processEmphasis: processEmphasis,
removeDelimiter: removeDelimiter,
options: options || {},
parse: parseInlines
};
}
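// Editor's note: the block parser defined below owns a single InlineParser
// (see `inlineParser` in Parser) and, once block structure is final, calls
// inlineParser.parse(block) on each paragraph and heading from processInlines,
// turning block._string_content into inline child nodes.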
 
var CODE_INDENT = 4;
 
var C_TAB = 9;
var C_NEWLINE$1 = 10;
var C_GREATERTHAN = 62;
var C_LESSTHAN$1 = 60;
var C_SPACE = 32;
var C_OPEN_BRACKET$1 = 91;
 
var reHtmlBlockOpen = [
/./, // dummy for 0
/^<(?:script|pre|textarea|style)(?:\s|>|$)/i,
/^<!--/,
/^<[?]/,
/^<![A-Z]/,
/^<!\[CDATA\[/,
/^<[/]?(?:address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[123456]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|nav|noframes|ol|optgroup|option|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul)(?:\s|[/]?[>]|$)/i,
new RegExp("^(?:" + OPENTAG + "|" + CLOSETAG + ")\\s*$", "i")
];
 
var reHtmlBlockClose = [
/./, // dummy for 0
/<\/(?:script|pre|textarea|style)>/i,
/-->/,
/\?>/,
/>/,
/\]\]>/
];
 
var reThematicBreak = /^(?:\*[ \t]*){3,}$|^(?:_[ \t]*){3,}$|^(?:-[ \t]*){3,}$/;
 
var reMaybeSpecial = /^[#`~*+_=<>0-9-]/;
 
var reNonSpace = /[^ \t\f\v\r\n]/;
 
var reBulletListMarker = /^[*+-]/;
 
var reOrderedListMarker = /^(\d{1,9})([.)])/;
 
var reATXHeadingMarker = /^#{1,6}(?:[ \t]+|$)/;
 
var reCodeFence = /^`{3,}(?!.*`)|^~{3,}/;
 
var reClosingCodeFence = /^(?:`{3,}|~{3,})(?= *$)/;
 
var reSetextHeadingLine = /^(?:=+|-+)[ \t]*$/;
 
var reLineEnding = /\r\n|\n|\r/;
 
// Returns true if string contains only space characters.
var isBlank = function(s) {
return !reNonSpace.test(s);
};
 
var isSpaceOrTab = function(c) {
return c === C_SPACE || c === C_TAB;
};
 
var peek$1 = function(ln, pos) {
if (pos < ln.length) {
return ln.charCodeAt(pos);
} else {
return -1;
}
};
 
// DOC PARSER
 
// These are methods of a Parser object, defined below.
 
// Returns true if block ends with a blank line, descending if needed
// into lists and sublists.
var endsWithBlankLine = function(block) {
while (block) {
if (block._lastLineBlank) {
return true;
}
var t = block.type;
if (!block._lastLineChecked && (t === "list" || t === "item")) {
block._lastLineChecked = true;
block = block._lastChild;
} else {
block._lastLineChecked = true;
break;
}
}
return false;
};
 
// Add a line to the block at the tip. We assume the tip
// can accept lines -- that check should be done before calling this.
var addLine = function() {
if (this.partiallyConsumedTab) {
this.offset += 1; // skip over tab
// add space characters:
var charsToTab = 4 - (this.column % 4);
this.tip._string_content += " ".repeat(charsToTab);
}
this.tip._string_content += this.currentLine.slice(this.offset) + "\n";
};
 
// Add block of type tag as a child of the tip. If the tip can't
// accept children, close and finalize it and try its parent,
// and so on til we find a block that can accept children.
var addChild = function(tag, offset) {
while (!this.blocks[this.tip.type].canContain(tag)) {
this.finalize(this.tip, this.lineNumber - 1);
}
 
var column_number = offset + 1; // offset 0 = column 1
var newBlock = new Node(tag, [
[this.lineNumber, column_number],
[0, 0]
]);
newBlock._string_content = "";
this.tip.appendChild(newBlock);
this.tip = newBlock;
return newBlock;
};
 
// Parse a list marker and return data on the marker (type,
// start, delimiter, bullet character, padding) or null.
var parseListMarker = function(parser, container) {
var rest = parser.currentLine.slice(parser.nextNonspace);
var match;
var nextc;
var spacesStartCol;
var spacesStartOffset;
var data = {
type: null,
tight: true, // lists are tight by default
bulletChar: null,
start: null,
delimiter: null,
padding: null,
markerOffset: parser.indent
};
if (parser.indent >= 4) {
return null;
}
if ((match = rest.match(reBulletListMarker))) {
data.type = "bullet";
data.bulletChar = match[0][0];
} else if (
(match = rest.match(reOrderedListMarker)) &&
(container.type !== "paragraph" || match[1] == 1)
) {
data.type = "ordered";
data.start = parseInt(match[1]);
data.delimiter = match[2];
} else {
return null;
}
// make sure we have spaces after
nextc = peek$1(parser.currentLine, parser.nextNonspace + match[0].length);
if (!(nextc === -1 || nextc === C_TAB || nextc === C_SPACE)) {
return null;
}
 
// if it interrupts paragraph, make sure first line isn't blank
if (
container.type === "paragraph" &&
!parser.currentLine
.slice(parser.nextNonspace + match[0].length)
.match(reNonSpace)
) {
return null;
}
 
// we've got a match! advance offset and calculate padding
parser.advanceNextNonspace(); // to start of marker
parser.advanceOffset(match[0].length, true); // to end of marker
spacesStartCol = parser.column;
spacesStartOffset = parser.offset;
do {
parser.advanceOffset(1, true);
nextc = peek$1(parser.currentLine, parser.offset);
} while (parser.column - spacesStartCol < 5 && isSpaceOrTab(nextc));
var blank_item = peek$1(parser.currentLine, parser.offset) === -1;
var spaces_after_marker = parser.column - spacesStartCol;
if (spaces_after_marker >= 5 || spaces_after_marker < 1 || blank_item) {
data.padding = match[0].length + 1;
parser.column = spacesStartCol;
parser.offset = spacesStartOffset;
if (isSpaceOrTab(peek$1(parser.currentLine, parser.offset))) {
parser.advanceOffset(1, true);
}
} else {
data.padding = match[0].length + spaces_after_marker;
}
return data;
};
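// Editor's note (illustrative): for the unindented line "2) item" inside a
// non-paragraph container, parseListMarker returns roughly
//   { type: "ordered", start: 2, delimiter: ")", bulletChar: null,
//     padding: 3, markerOffset: 0, tight: true }
// since the marker "2)" is two characters wide and is followed by one space.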
 
// Returns true if the two list items are of the same type,
// with the same delimiter and bullet character. This is used
// in agglomerating list items into lists.
var listsMatch = function(list_data, item_data) {
return (
list_data.type === item_data.type &&
list_data.delimiter === item_data.delimiter &&
list_data.bulletChar === item_data.bulletChar
);
};
 
// Finalize and close any unmatched blocks.
var closeUnmatchedBlocks = function() {
if (!this.allClosed) {
// finalize any blocks not matched
while (this.oldtip !== this.lastMatchedContainer) {
var parent = this.oldtip._parent;
this.finalize(this.oldtip, this.lineNumber - 1);
this.oldtip = parent;
}
this.allClosed = true;
}
};
 
// 'finalize' is run when the block is closed.
// 'continue' is run to check whether the block is continuing
// at a certain line and offset (e.g. whether a block quote
// contains a `>`). It returns 0 for matched, 1 for not matched,
// and 2 for "we've dealt with this line completely, go to next."
var blocks = {
document: {
continue: function() {
return 0;
},
finalize: function() {
return;
},
canContain: function(t) {
return t !== "item";
},
acceptsLines: false
},
list: {
continue: function() {
return 0;
},
finalize: function(parser, block) {
var item = block._firstChild;
while (item) {
// check for non-final list item ending with blank line:
if (endsWithBlankLine(item) && item._next) {
block._listData.tight = false;
break;
}
// recurse into children of list item, to see if there are
// spaces between any of them:
var subitem = item._firstChild;
while (subitem) {
if (
endsWithBlankLine(subitem) &&
(item._next || subitem._next)
) {
block._listData.tight = false;
break;
}
subitem = subitem._next;
}
item = item._next;
}
},
canContain: function(t) {
return t === "item";
},
acceptsLines: false
},
block_quote: {
continue: function(parser) {
var ln = parser.currentLine;
if (
!parser.indented &&
peek$1(ln, parser.nextNonspace) === C_GREATERTHAN
) {
parser.advanceNextNonspace();
parser.advanceOffset(1, false);
if (isSpaceOrTab(peek$1(ln, parser.offset))) {
parser.advanceOffset(1, true);
}
} else {
return 1;
}
return 0;
},
finalize: function() {
return;
},
canContain: function(t) {
return t !== "item";
},
acceptsLines: false
},
item: {
continue: function(parser, container) {
if (parser.blank) {
if (container._firstChild == null) {
// Blank line after empty list item
return 1;
} else {
parser.advanceNextNonspace();
}
} else if (
parser.indent >=
container._listData.markerOffset + container._listData.padding
) {
parser.advanceOffset(
container._listData.markerOffset +
container._listData.padding,
true
);
} else {
return 1;
}
return 0;
},
finalize: function() {
return;
},
canContain: function(t) {
return t !== "item";
},
acceptsLines: false
},
heading: {
continue: function() {
// a heading can never contain > 1 line, so fail to match:
return 1;
},
finalize: function() {
return;
},
canContain: function() {
return false;
},
acceptsLines: false
},
thematic_break: {
continue: function() {
// a thematic break can never contain > 1 line, so fail to match:
return 1;
},
finalize: function() {
return;
},
canContain: function() {
return false;
},
acceptsLines: false
},
code_block: {
continue: function(parser, container) {
var ln = parser.currentLine;
var indent = parser.indent;
if (container._isFenced) {
// fenced
var match =
indent <= 3 &&
ln.charAt(parser.nextNonspace) === container._fenceChar &&
ln.slice(parser.nextNonspace).match(reClosingCodeFence);
if (match && match[0].length >= container._fenceLength) {
// closing fence - we're at end of line, so we can return
parser.lastLineLength =
parser.offset + indent + match[0].length;
parser.finalize(container, parser.lineNumber);
return 2;
} else {
// skip optional spaces of fence offset
var i = container._fenceOffset;
while (i > 0 && isSpaceOrTab(peek$1(ln, parser.offset))) {
parser.advanceOffset(1, true);
i--;
}
}
} else {
// indented
if (indent >= CODE_INDENT) {
parser.advanceOffset(CODE_INDENT, true);
} else if (parser.blank) {
parser.advanceNextNonspace();
} else {
return 1;
}
}
return 0;
},
finalize: function(parser, block) {
if (block._isFenced) {
// fenced
// first line becomes info string
var content = block._string_content;
var newlinePos = content.indexOf("\n");
var firstLine = content.slice(0, newlinePos);
var rest = content.slice(newlinePos + 1);
block.info = unescapeString(firstLine.trim());
block._literal = rest;
} else {
// indented
block._literal = block._string_content.replace(
/(\n *)+$/,
"\n"
);
}
block._string_content = null; // allow GC
},
canContain: function() {
return false;
},
acceptsLines: true
},
html_block: {
continue: function(parser, container) {
return parser.blank &&
(container._htmlBlockType === 6 ||
container._htmlBlockType === 7)
? 1
: 0;
},
finalize: function(parser, block) {
block._literal = block._string_content.replace(/(\n *)+$/, "");
block._string_content = null; // allow GC
},
canContain: function() {
return false;
},
acceptsLines: true
},
paragraph: {
continue: function(parser) {
return parser.blank ? 1 : 0;
},
finalize: function(parser, block) {
var pos;
var hasReferenceDefs = false;
 
// try parsing the beginning as link reference definitions:
while (
peek$1(block._string_content, 0) === C_OPEN_BRACKET$1 &&
(pos = parser.inlineParser.parseReference(
block._string_content,
parser.refmap
))
) {
block._string_content = block._string_content.slice(pos);
hasReferenceDefs = true;
}
if (hasReferenceDefs && isBlank(block._string_content)) {
block.unlink();
}
},
canContain: function() {
return false;
},
acceptsLines: true
}
};
 
// block start functions. Return values:
// 0 = no match
// 1 = matched container, keep going
// 2 = matched leaf, no more block starts
var blockStarts = [
// block quote
function(parser) {
if (
!parser.indented &&
peek$1(parser.currentLine, parser.nextNonspace) === C_GREATERTHAN
) {
parser.advanceNextNonspace();
parser.advanceOffset(1, false);
// optional following space
if (isSpaceOrTab(peek$1(parser.currentLine, parser.offset))) {
parser.advanceOffset(1, true);
}
parser.closeUnmatchedBlocks();
parser.addChild("block_quote", parser.nextNonspace);
return 1;
} else {
return 0;
}
},
 
// ATX heading
function(parser) {
var match;
if (
!parser.indented &&
(match = parser.currentLine
.slice(parser.nextNonspace)
.match(reATXHeadingMarker))
) {
parser.advanceNextNonspace();
parser.advanceOffset(match[0].length, false);
parser.closeUnmatchedBlocks();
var container = parser.addChild("heading", parser.nextNonspace);
container.level = match[0].trim().length; // number of #s
// remove trailing ###s:
container._string_content = parser.currentLine
.slice(parser.offset)
.replace(/^[ \t]*#+[ \t]*$/, "")
.replace(/[ \t]+#+[ \t]*$/, "");
parser.advanceOffset(parser.currentLine.length - parser.offset);
return 2;
} else {
return 0;
}
},
 
// Fenced code block
function(parser) {
var match;
if (
!parser.indented &&
(match = parser.currentLine
.slice(parser.nextNonspace)
.match(reCodeFence))
) {
var fenceLength = match[0].length;
parser.closeUnmatchedBlocks();
var container = parser.addChild("code_block", parser.nextNonspace);
container._isFenced = true;
container._fenceLength = fenceLength;
container._fenceChar = match[0][0];
container._fenceOffset = parser.indent;
parser.advanceNextNonspace();
parser.advanceOffset(fenceLength, false);
return 2;
} else {
return 0;
}
},
 
// HTML block
function(parser, container) {
if (
!parser.indented &&
peek$1(parser.currentLine, parser.nextNonspace) === C_LESSTHAN$1
) {
var s = parser.currentLine.slice(parser.nextNonspace);
var blockType;
 
for (blockType = 1; blockType <= 7; blockType++) {
if (
reHtmlBlockOpen[blockType].test(s) &&
(blockType < 7 || (container.type !== "paragraph" &&
!(!parser.allClosed && !parser.blank &&
parser.tip.type === "paragraph") // maybe lazy
))
) {
parser.closeUnmatchedBlocks();
// We don't adjust parser.offset;
// spaces are part of the HTML block:
var b = parser.addChild("html_block", parser.offset);
b._htmlBlockType = blockType;
return 2;
}
}
}
 
return 0;
},
 
// Setext heading
function(parser, container) {
var match;
if (
!parser.indented &&
container.type === "paragraph" &&
(match = parser.currentLine
.slice(parser.nextNonspace)
.match(reSetextHeadingLine))
) {
parser.closeUnmatchedBlocks();
// resolve reference link definitions
var pos;
while (
peek$1(container._string_content, 0) === C_OPEN_BRACKET$1 &&
(pos = parser.inlineParser.parseReference(
container._string_content,
parser.refmap
))
) {
container._string_content = container._string_content.slice(
pos
);
}
if (container._string_content.length > 0) {
var heading = new Node("heading", container.sourcepos);
heading.level = match[0][0] === "=" ? 1 : 2;
heading._string_content = container._string_content;
container.insertAfter(heading);
container.unlink();
parser.tip = heading;
parser.advanceOffset(
parser.currentLine.length - parser.offset,
false
);
return 2;
} else {
return 0;
}
} else {
return 0;
}
},
 
// thematic break
function(parser) {
if (
!parser.indented &&
reThematicBreak.test(parser.currentLine.slice(parser.nextNonspace))
) {
parser.closeUnmatchedBlocks();
parser.addChild("thematic_break", parser.nextNonspace);
parser.advanceOffset(
parser.currentLine.length - parser.offset,
false
);
return 2;
} else {
return 0;
}
},
 
// list item
function(parser, container) {
var data;
 
if (
(!parser.indented || container.type === "list") &&
(data = parseListMarker(parser, container))
) {
parser.closeUnmatchedBlocks();
 
// add the list if needed
if (
parser.tip.type !== "list" ||
!listsMatch(container._listData, data)
) {
container = parser.addChild("list", parser.nextNonspace);
container._listData = data;
}
 
// add the list item
container = parser.addChild("item", parser.nextNonspace);
container._listData = data;
return 1;
} else {
return 0;
}
},
 
// indented code block
function(parser) {
if (
parser.indented &&
parser.tip.type !== "paragraph" &&
!parser.blank
) {
// indented code
parser.advanceOffset(CODE_INDENT, true);
parser.closeUnmatchedBlocks();
parser.addChild("code_block", parser.offset);
return 2;
} else {
return 0;
}
}
];
 
var advanceOffset = function(count, columns) {
var currentLine = this.currentLine;
var charsToTab, charsToAdvance;
var c;
while (count > 0 && (c = currentLine[this.offset])) {
if (c === "\t") {
charsToTab = 4 - (this.column % 4);
if (columns) {
this.partiallyConsumedTab = charsToTab > count;
charsToAdvance = charsToTab > count ? count : charsToTab;
this.column += charsToAdvance;
this.offset += this.partiallyConsumedTab ? 0 : 1;
count -= charsToAdvance;
} else {
this.partiallyConsumedTab = false;
this.column += charsToTab;
this.offset += 1;
count -= 1;
}
} else {
this.partiallyConsumedTab = false;
this.offset += 1;
this.column += 1; // assume ascii; block starts are ascii
count -= 1;
}
}
};
 
var advanceNextNonspace = function() {
this.offset = this.nextNonspace;
this.column = this.nextNonspaceColumn;
this.partiallyConsumedTab = false;
};
 
var findNextNonspace = function() {
var currentLine = this.currentLine;
var i = this.offset;
var cols = this.column;
var c;
 
while ((c = currentLine.charAt(i)) !== "") {
if (c === " ") {
i++;
cols++;
} else if (c === "\t") {
i++;
cols += 4 - (cols % 4);
} else {
break;
}
}
this.blank = c === "\n" || c === "\r" || c === "";
this.nextNonspace = i;
this.nextNonspaceColumn = cols;
this.indent = this.nextNonspaceColumn - this.column;
this.indented = this.indent >= CODE_INDENT;
};
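// Editor's note (illustrative): tabs advance the column to the next multiple
// of 4, so for a line starting "\t>" with this.offset and this.column at 0,
// findNextNonspace leaves nextNonspace === 1, nextNonspaceColumn === 4,
// indent === 4 and indented === true (indent >= CODE_INDENT).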
 
// Analyze a line of text and update the document appropriately.
// We parse markdown text by calling this on each line of input,
// then finalizing the document.
var incorporateLine = function(ln) {
var all_matched = true;
var t;
 
var container = this.doc;
this.oldtip = this.tip;
this.offset = 0;
this.column = 0;
this.blank = false;
this.partiallyConsumedTab = false;
this.lineNumber += 1;
 
// replace NUL characters for security
if (ln.indexOf("\u0000") !== -1) {
ln = ln.replace(/\0/g, "\uFFFD");
}
 
this.currentLine = ln;
 
// For each containing block, try to parse the associated line start.
// Bail out on failure: container will point to the last matching block.
// Set all_matched to false if not all containers match.
var lastChild;
while ((lastChild = container._lastChild) && lastChild._open) {
container = lastChild;
 
this.findNextNonspace();
 
switch (this.blocks[container.type].continue(this, container)) {
case 0: // we've matched, keep going
break;
case 1: // we've failed to match a block
all_matched = false;
break;
case 2: // we've hit end of line for fenced code close and can return
return;
default:
throw "continue returned illegal value, must be 0, 1, or 2";
}
if (!all_matched) {
container = container._parent; // back up to last matching block
break;
}
}
 
this.allClosed = container === this.oldtip;
this.lastMatchedContainer = container;
 
var matchedLeaf =
container.type !== "paragraph" && blocks[container.type].acceptsLines;
var starts = this.blockStarts;
var startsLen = starts.length;
// Unless last matched container is a code block, try new container starts,
// adding children to the last matched container:
while (!matchedLeaf) {
this.findNextNonspace();
 
// this is a little performance optimization:
if (
!this.indented &&
!reMaybeSpecial.test(ln.slice(this.nextNonspace))
) {
this.advanceNextNonspace();
break;
}
 
var i = 0;
while (i < startsLen) {
var res = starts[i](this, container);
if (res === 1) {
container = this.tip;
break;
} else if (res === 2) {
container = this.tip;
matchedLeaf = true;
break;
} else {
i++;
}
}
 
if (i === startsLen) {
// nothing matched
this.advanceNextNonspace();
break;
}
}
 
// What remains at the offset is a text line. Add the text to the
// appropriate container.
 
// First check for a lazy paragraph continuation:
if (!this.allClosed && !this.blank && this.tip.type === "paragraph") {
// lazy paragraph continuation
this.addLine();
} else {
// not a lazy continuation
 
// finalize any blocks not matched
this.closeUnmatchedBlocks();
if (this.blank && container.lastChild) {
container.lastChild._lastLineBlank = true;
}
 
t = container.type;
 
// Block quote lines are never blank as they start with >
// and we don't count blanks in fenced code for purposes of tight/loose
// lists or breaking out of lists. We also don't set _lastLineBlank
// on an empty list item, or if we just closed a fenced block.
var lastLineBlank =
this.blank &&
!(
t === "block_quote" ||
(t === "code_block" && container._isFenced) ||
(t === "item" &&
!container._firstChild &&
container.sourcepos[0][0] === this.lineNumber)
);
 
// propagate lastLineBlank up through parents:
var cont = container;
while (cont) {
cont._lastLineBlank = lastLineBlank;
cont = cont._parent;
}
 
if (this.blocks[t].acceptsLines) {
this.addLine();
// if HtmlBlock, check for end condition
if (
t === "html_block" &&
container._htmlBlockType >= 1 &&
container._htmlBlockType <= 5 &&
reHtmlBlockClose[container._htmlBlockType].test(
this.currentLine.slice(this.offset)
)
) {
this.lastLineLength = ln.length;
this.finalize(container, this.lineNumber);
}
} else if (this.offset < ln.length && !this.blank) {
// create paragraph container for line
container = this.addChild("paragraph", this.offset);
this.advanceNextNonspace();
this.addLine();
}
}
this.lastLineLength = ln.length;
};
 
// Finalize a block. Close it and do any necessary postprocessing,
// e.g. creating string_content from strings, setting the 'tight'
// or 'loose' status of a list, and parsing the beginnings
// of paragraphs for reference definitions. Reset the tip to the
// parent of the closed block.
var finalize = function(block, lineNumber) {
var above = block._parent;
block._open = false;
block.sourcepos[1] = [lineNumber, this.lastLineLength];
 
this.blocks[block.type].finalize(this, block);
 
this.tip = above;
};
 
// Walk through a block & children recursively, parsing string content
// into inline content where appropriate.
var processInlines = function(block) {
var node, event, t;
var walker = block.walker();
this.inlineParser.refmap = this.refmap;
this.inlineParser.options = this.options;
while ((event = walker.next())) {
node = event.node;
t = node.type;
if (!event.entering && (t === "paragraph" || t === "heading")) {
this.inlineParser.parse(node);
}
}
};
 
var Document = function() {
var doc = new Node("document", [
[1, 1],
[0, 0]
]);
return doc;
};
 
// The main parsing function. Returns a parsed document AST.
var parse = function(input) {
this.doc = new Document();
this.tip = this.doc;
this.refmap = {};
this.lineNumber = 0;
this.lastLineLength = 0;
this.offset = 0;
this.column = 0;
this.lastMatchedContainer = this.doc;
this.currentLine = "";
if (this.options.time) {
console.time("preparing input");
}
var lines = input.split(reLineEnding);
var len = lines.length;
if (input.charCodeAt(input.length - 1) === C_NEWLINE$1) {
// ignore last blank line created by final newline
len -= 1;
}
if (this.options.time) {
console.timeEnd("preparing input");
}
if (this.options.time) {
console.time("block parsing");
}
for (var i = 0; i < len; i++) {
this.incorporateLine(lines[i]);
}
while (this.tip) {
this.finalize(this.tip, len);
}
if (this.options.time) {
console.timeEnd("block parsing");
}
if (this.options.time) {
console.time("inline parsing");
}
this.processInlines(this.doc);
if (this.options.time) {
console.timeEnd("inline parsing");
}
return this.doc;
};
 
// The Parser object.
function Parser(options) {
return {
doc: new Document(),
blocks: blocks,
blockStarts: blockStarts,
tip: this.doc,
oldtip: this.doc,
currentLine: "",
lineNumber: 0,
offset: 0,
column: 0,
nextNonspace: 0,
nextNonspaceColumn: 0,
indent: 0,
indented: false,
blank: false,
partiallyConsumedTab: false,
allClosed: true,
lastMatchedContainer: this.doc,
refmap: {},
lastLineLength: 0,
inlineParser: new InlineParser(options),
findNextNonspace: findNextNonspace,
advanceOffset: advanceOffset,
advanceNextNonspace: advanceNextNonspace,
addLine: addLine,
addChild: addChild,
incorporateLine: incorporateLine,
finalize: finalize,
processInlines: processInlines,
closeUnmatchedBlocks: closeUnmatchedBlocks,
parse: parse,
options: options || {}
};
}
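// Editor's note (illustrative sketch, not part of the vendored commonmark.js):
// a Parser instance is reusable; parse() returns a fresh "document" Node each
// time. Kept as an unused helper so it has no effect on the bundle.
function exampleParseDocument(markdownSource) {
    var parser = new Parser();
    return parser.parse(markdownSource); // Node of type "document"
}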
 
function Renderer() {}
 
/**
* Walks the AST and calls member methods for each Node type.
*
* @param ast {Node} The root of the abstract syntax tree.
*/
function render(ast) {
var walker = ast.walker(),
event,
type;
 
this.buffer = "";
this.lastOut = "\n";
this.heading_count = 0;
 
while ((event = walker.next())) {
type = event.node.type;
if (this[type]) {
this[type](event.node, event.entering);
}
}
return this.buffer;
}
 
/**
* Concatenate a literal string to the buffer.
*
* @param str {String} The string to concatenate.
*/
function lit(str) {
this.buffer += str;
this.lastOut = str;
}
 
/**
* Output a newline to the buffer.
*/
function cr() {
if (this.lastOut !== "\n") {
this.lit("\n");
}
}
 
/**
* Concatenate a string to the buffer possibly escaping the content.
*
* Concrete renderer implementations should override this method.
*
* @param str {String} The string to concatenate.
*/
function out(str) {
this.lit(str);
}
 
/**
* Escape a string for the target renderer.
*
* Abstract function that should be implemented by concrete
* renderer implementations.
*
* @param str {String} The string to escape.
*/
function esc(str) {
return str;
}
 
Renderer.prototype.render = render;
Renderer.prototype.out = out;
Renderer.prototype.lit = lit;
Renderer.prototype.cr = cr;
Renderer.prototype.esc = esc;
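// Editor's note (illustrative sketch, not part of the vendored library): a
// custom renderer only needs to inherit from Renderer and define a method per
// node type it cares about; render() skips node types without a handler.
// Unused here, so it has no effect on the bundle.
function ExamplePlainTextRenderer() {}
ExamplePlainTextRenderer.prototype = Object.create(Renderer.prototype);
ExamplePlainTextRenderer.prototype.text = function(node) {
    this.out(node.literal);
};
ExamplePlainTextRenderer.prototype.softbreak = function() {
    this.lit(" ");
};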
 
var reUnsafeProtocol = /^javascript:|vbscript:|file:|data:/i;
var reSafeDataProtocol = /^data:image\/(?:png|gif|jpeg|webp)/i;
 
var potentiallyUnsafe = function(url) {
return reUnsafeProtocol.test(url) && !reSafeDataProtocol.test(url);
};
 
// Helper function to produce an HTML tag.
function tag(name, attrs, selfclosing) {
if (this.disableTags > 0) {
return;
}
this.buffer += "<" + name;
if (attrs && attrs.length > 0) {
var i = 0;
var attrib;
while ((attrib = attrs[i]) !== undefined) {
this.buffer += " " + attrib[0] + '="' + attrib[1] + '"';
i++;
}
}
if (selfclosing) {
this.buffer += " /";
}
this.buffer += ">";
this.lastOut = ">";
}
 
function HtmlRenderer(options) {
options = options || {};
// by default, soft breaks are rendered as newlines in HTML
options.softbreak = options.softbreak || "\n";
// set to "<br />" to make them hard breaks
// set to " " if you want to ignore line wrapping in source
this.esc = options.esc || escapeXml;
// escape html with a custom function
// else use escapeXml
 
this.disableTags = 0;
this.lastOut = "\n";
this.options = options;
}
 
/* Node methods */
 
function text$1(node) {
this.out(node.literal);
}
 
function softbreak() {
this.lit(this.options.softbreak);
}
 
function linebreak() {
this.tag("br", [], true);
this.cr();
}
 
function link(node, entering) {
var attrs = this.attrs(node);
if (entering) {
if (!(this.options.safe && potentiallyUnsafe(node.destination))) {
attrs.push(["href", this.esc(node.destination)]);
}
if (node.title) {
attrs.push(["title", this.esc(node.title)]);
}
this.tag("a", attrs);
} else {
this.tag("/a");
}
}
 
function image$1(node, entering) {
if (entering) {
if (this.disableTags === 0) {
if (this.options.safe && potentiallyUnsafe(node.destination)) {
this.lit('<img src="" alt="');
} else {
this.lit('<img src="' + this.esc(node.destination) + '" alt="');
}
}
this.disableTags += 1;
} else {
this.disableTags -= 1;
if (this.disableTags === 0) {
if (node.title) {
this.lit('" title="' + this.esc(node.title));
}
this.lit('" />');
}
}
}
 
function emph(node, entering) {
this.tag(entering ? "em" : "/em");
}
 
function strong(node, entering) {
this.tag(entering ? "strong" : "/strong");
}
 
function paragraph(node, entering) {
var grandparent = node.parent.parent,
attrs = this.attrs(node);
if (grandparent !== null && grandparent.type === "list") {
if (grandparent.listTight) {
return;
}
}
if (entering) {
this.cr();
this.tag("p", attrs);
} else {
this.tag("/p");
this.cr();
}
}
 
function heading(node, entering) {
var tagname = "h" + node.level,
attrs = this.attrs(node);
if (entering) {
if (node.level != 1) {
attrs.push(["id", ":" + this.heading_count]);
this.heading_count += 1;
}
this.cr();
this.tag(tagname, attrs);
} else {
this.tag("/" + tagname);
this.cr();
}
}
 
function code(node) {
this.tag("code");
this.out(node.literal);
this.tag("/code");
}
 
function code_block(node) {
var info_words = node.info ? node.info.split(/\s+/) : [],
attrs = this.attrs(node);
if (info_words.length > 0 && info_words[0].length > 0) {
attrs.push(["class", "language-" + this.esc(info_words[0])]);
}
this.cr();
this.tag("pre");
this.tag("code", attrs);
this.out(node.literal);
this.tag("/code");
this.tag("/pre");
this.cr();
}
 
function thematic_break(node) {
var attrs = this.attrs(node);
this.cr();
this.tag("hr", attrs, true);
this.cr();
}
 
function block_quote(node, entering) {
var attrs = this.attrs(node);
if (entering) {
this.cr();
this.tag("blockquote", attrs);
this.cr();
} else {
this.cr();
this.tag("/blockquote");
this.cr();
}
}
 
function list(node, entering) {
var tagname = node.listType === "bullet" ? "ul" : "ol",
attrs = this.attrs(node);
 
if (entering) {
var start = node.listStart;
if (start !== null && start !== 1) {
attrs.push(["start", start.toString()]);
}
this.cr();
this.tag(tagname, attrs);
this.cr();
} else {
this.cr();
this.tag("/" + tagname);
this.cr();
}
}
 
function item(node, entering) {
var attrs = this.attrs(node);
if (entering) {
this.tag("li", attrs);
} else {
this.tag("/li");
this.cr();
}
}
 
function html_inline(node) {
if (this.options.safe) {
this.lit("<!-- raw HTML omitted -->");
} else {
this.lit(node.literal);
}
}
 
function html_block(node) {
this.cr();
if (this.options.safe) {
this.lit("<!-- raw HTML omitted -->");
} else {
this.lit(node.literal);
}
this.cr();
}
 
function custom_inline(node, entering) {
if (entering && node.onEnter) {
this.lit(node.onEnter);
} else if (!entering && node.onExit) {
this.lit(node.onExit);
}
}
 
function custom_block(node, entering) {
this.cr();
if (entering && node.onEnter) {
this.lit(node.onEnter);
} else if (!entering && node.onExit) {
this.lit(node.onExit);
}
this.cr();
}
 
/* Helper methods */
 
function out$1(s) {
this.lit(this.esc(s));
}
 
function attrs(node) {
var att = [];
if (this.options.sourcepos) {
var pos = node.sourcepos;
if (pos) {
att.push([
"data-sourcepos",
String(pos[0][0]) +
":" +
String(pos[0][1]) +
"-" +
String(pos[1][0]) +
":" +
String(pos[1][1])
]);
}
}
return att;
}
 
// quick browser-compatible inheritance
HtmlRenderer.prototype = Object.create(Renderer.prototype);
 
HtmlRenderer.prototype.text = text$1;
HtmlRenderer.prototype.html_inline = html_inline;
HtmlRenderer.prototype.html_block = html_block;
HtmlRenderer.prototype.softbreak = softbreak;
HtmlRenderer.prototype.linebreak = linebreak;
HtmlRenderer.prototype.link = link;
HtmlRenderer.prototype.image = image$1;
HtmlRenderer.prototype.emph = emph;
HtmlRenderer.prototype.strong = strong;
HtmlRenderer.prototype.paragraph = paragraph;
HtmlRenderer.prototype.heading = heading;
HtmlRenderer.prototype.code = code;
HtmlRenderer.prototype.code_block = code_block;
HtmlRenderer.prototype.thematic_break = thematic_break;
HtmlRenderer.prototype.block_quote = block_quote;
HtmlRenderer.prototype.list = list;
HtmlRenderer.prototype.item = item;
HtmlRenderer.prototype.custom_inline = custom_inline;
HtmlRenderer.prototype.custom_block = custom_block;
 
HtmlRenderer.prototype.esc = escapeXml;
 
HtmlRenderer.prototype.out = out$1;
HtmlRenderer.prototype.tag = tag;
HtmlRenderer.prototype.attrs = attrs;
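// Editor's note (illustrative sketch, not part of the vendored library):
// typical end-to-end use of the exports below. `safe` makes html_inline /
// html_block emit a placeholder and drops unsafe URLs; `softbreak` controls
// how soft line breaks are rendered. Unused here, so it has no effect.
function exampleMarkdownToHtml(markdownSource) {
    var ast = new Parser().parse(markdownSource);
    return new HtmlRenderer({ safe: true, softbreak: "\n" }).render(ast);
}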
 
var reXMLTag = /\<[^>]*\>/;
 
function toTagName(s) {
return s.replace(/([a-z])([A-Z])/g, "$1_$2").toLowerCase();
}
 
function XmlRenderer(options) {
options = options || {};
 
this.disableTags = 0;
this.lastOut = "\n";
 
this.indentLevel = 0;
this.indent = " ";
this.esc = options.esc || escapeXml;
// escape html with a custom function
// else use escapeXml
 
this.options = options;
}
 
function render$1(ast) {
this.buffer = "";
 
var attrs;
var tagname;
var walker = ast.walker();
var event, node, entering;
var container;
var selfClosing;
var nodetype;
 
var options = this.options;
 
if (options.time) {
console.time("rendering");
}
 
this.buffer += '<?xml version="1.0" encoding="UTF-8"?>\n';
this.buffer += '<!DOCTYPE document SYSTEM "CommonMark.dtd">\n';
 
while ((event = walker.next())) {
entering = event.entering;
node = event.node;
nodetype = node.type;
 
container = node.isContainer;
 
selfClosing =
nodetype === "thematic_break" ||
nodetype === "linebreak" ||
nodetype === "softbreak";
 
tagname = toTagName(nodetype);
 
if (entering) {
attrs = [];
 
switch (nodetype) {
case "document":
attrs.push(["xmlns", "http://commonmark.org/xml/1.0"]);
break;
case "list":
if (node.listType !== null) {
attrs.push(["type", node.listType.toLowerCase()]);
}
if (node.listStart !== null) {
attrs.push(["start", String(node.listStart)]);
}
if (node.listTight !== null) {
attrs.push([
"tight",
node.listTight ? "true" : "false"
]);
}
var delim = node.listDelimiter;
if (delim !== null) {
var delimword = "";
if (delim === ".") {
delimword = "period";
} else {
delimword = "paren";
}
attrs.push(["delimiter", delimword]);
}
break;
case "code_block":
if (node.info) {
attrs.push(["info", node.info]);
}
break;
case "heading":
attrs.push(["level", String(node.level)]);
break;
case "link":
case "image":
attrs.push(["destination", node.destination]);
attrs.push(["title", node.title]);
break;
case "custom_inline":
case "custom_block":
attrs.push(["on_enter", node.onEnter]);
attrs.push(["on_exit", node.onExit]);
break;
}
if (options.sourcepos) {
var pos = node.sourcepos;
if (pos) {
attrs.push([
"sourcepos",
String(pos[0][0]) +
":" +
String(pos[0][1]) +
"-" +
String(pos[1][0]) +
":" +
String(pos[1][1])
]);
}
}
 
this.cr();
this.out(this.tag(tagname, attrs, selfClosing));
if (container) {
this.indentLevel += 1;
} else if (!container && !selfClosing) {
var lit = node.literal;
if (lit) {
this.out(this.esc(lit));
}
this.out(this.tag("/" + tagname));
}
} else {
this.indentLevel -= 1;
this.cr();
this.out(this.tag("/" + tagname));
}
}
if (options.time) {
console.timeEnd("rendering");
}
this.buffer += "\n";
return this.buffer;
}
 
function out$2(s) {
if (this.disableTags > 0) {
this.buffer += s.replace(reXMLTag, "");
} else {
this.buffer += s;
}
this.lastOut = s;
}
 
function cr$1() {
if (this.lastOut !== "\n") {
this.buffer += "\n";
this.lastOut = "\n";
for (var i = this.indentLevel; i > 0; i--) {
this.buffer += this.indent;
}
}
}
 
// Helper function to produce an XML tag.
function tag$1(name, attrs, selfclosing) {
var result = "<" + name;
if (attrs && attrs.length > 0) {
var i = 0;
var attrib;
while ((attrib = attrs[i]) !== undefined) {
result += " " + attrib[0] + '="' + this.esc(attrib[1]) + '"';
i++;
}
}
if (selfclosing) {
result += " /";
}
result += ">";
return result;
}
 
// quick browser-compatible inheritance
XmlRenderer.prototype = Object.create(Renderer.prototype);
 
XmlRenderer.prototype.render = render$1;
XmlRenderer.prototype.out = out$2;
XmlRenderer.prototype.cr = cr$1;
XmlRenderer.prototype.tag = tag$1;
XmlRenderer.prototype.esc = escapeXml;
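// Editor's note (illustrative): new XmlRenderer({ sourcepos: true }).render(ast)
// walks the same AST and emits the CommonMark XML form, one element per node
// type via toTagName, preceded by the XML declaration and "CommonMark.dtd"
// DOCTYPE written in render$1 above.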
 
exports.HtmlRenderer = HtmlRenderer;
exports.Node = Node;
exports.Parser = Parser;
exports.Renderer = Renderer;
exports.XmlRenderer = XmlRenderer;
 
Object.defineProperty(exports, '__esModule', { value: true });
 
})));
 
lib/docs/index.html added: 7501, removed: 25316, total 0
@@ -1,476 +1,158 @@
<!doctype html>
<html lang="en">
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Documentation - Zig</title>
<link rel="icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAgklEQVR4AWMYWuD7EllJIM4G4g4g5oIJ/odhOJ8wToOxSTXgNxDHoeiBMfA4+wGShjyYOCkG/IGqWQziEzYAoUAeiF9D5U+DxEg14DRU7jWIT5IBIOdCxf+A+CQZAAoopEB7QJwBCBwHiip8UYmRdrAlDpIMgApwQZNnNii5Dq0MBgCxxycBnwEd+wAAAABJRU5ErkJggg==">
<title>Zig Documentation</title>
<link rel="icon" href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNTMgMTQwIj48ZyBmaWxsPSIjRjdBNDFEIj48Zz48cG9seWdvbiBwb2ludHM9IjQ2LDIyIDI4LDQ0IDE5LDMwIi8+PHBvbHlnb24gcG9pbnRzPSI0NiwyMiAzMywzMyAyOCw0NCAyMiw0NCAyMiw5NSAzMSw5NSAyMCwxMDAgMTIsMTE3IDAsMTE3IDAsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMzEsOTUgMTIsMTE3IDQsMTA2Ii8+PC9nPjxnPjxwb2x5Z29uIHBvaW50cz0iNTYsMjIgNjIsMzYgMzcsNDQiLz48cG9seWdvbiBwb2ludHM9IjU2LDIyIDExMSwyMiAxMTEsNDQgMzcsNDQgNTYsMzIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTE2LDk1IDk3LDExNyA5MCwxMDQiLz48cG9seWdvbiBwb2ludHM9IjExNiw5NSAxMDAsMTA0IDk3LDExNyA0MiwxMTcgNDIsOTUiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTUwLDAgNTIsMTE3IDMsMTQwIDEwMSwyMiIvPjwvZz48Zz48cG9seWdvbiBwb2ludHM9IjE0MSwyMiAxNDAsNDAgMTIyLDQ1Ii8+PHBvbHlnb24gcG9pbnRzPSIxNTMsMjIgMTUzLDExNyAxMDYsMTE3IDEyMCwxMDUgMTI1LDk1IDEzMSw5NSAxMzEsNDUgMTIyLDQ1IDEzMiwzNiAxNDEsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTI1LDk1IDEzMCwxMTAgMTA2LDExNyIvPjwvZz48L2c+PC9zdmc+">
<style>
:root {
font-size: 1em;
--ui: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
--mono: "Source Code Pro", monospace;
--tx-color: #141414;
--bg-color: #ffffff;
--link-color: #2A6286;
--sidebar-sh-color: rgba(0, 0, 0, 0.09);
--sidebar-mod-bg-color: #f1f1f1;
--sidebar-modlnk-tx-color: #141414;
--sidebar-modlnk-tx-color-hover: #fff;
--sidebar-modlnk-tx-color-active: #000;
--sidebar-modlnk-bg-color: transparent;
--sidebar-modlnk-bg-color-hover: #555;
--sidebar-modlnk-bg-color-active: #FFBB4D;
--search-bg-color: #f3f3f3;
--search-bg-color-focus: #ffffff;
--search-sh-color: rgba(0, 0, 0, 0.18);
--search-other-results-color: rgb(100, 100, 100);
--modal-sh-color: rgba(0, 0, 0, 0.75);
--modal-bg-color: #aaa;
--warning-popover-bg-color: #ff4747;
<style type="text/css">
body {
font-family: system-ui, -apple-system, Roboto, "Segoe UI", sans-serif;
color: #000000;
}
 
html, body { margin: 0; padding: 0; height: 100%; }
 
a {
text-decoration: none;
}
 
pre a {
text-decoration: underline;
color: unset;
}
 
a:hover {
text-decoration: underline;
}
 
a[href^="src/"] {
border-bottom: 2px dotted var(--tx-color);
}
 
.hidden {
display: none !important;
}
 
/* layout */
.canvas {
display:flex;
flex-direction: column;
width: 100vw;
height: 100vh;
margin: 0;
padding: 0;
font-family: var(--ui);
color: var(--tx-color);
background-color: var(--bg-color);
}
 
.flex-main {
display: flex;
flex-direction: column;
justify-content: center;
 
height: 100%;
overflow: hidden;
 
z-index: 100;
}
 
.flex-horizontal {
display: flex;
flex-direction: row;
align-items: center;
}
 
.flex-filler {
flex-grow: 1;
flex-shrink: 1;
}
 
.flex-left {
overflow: auto;
-webkit-overflow-scrolling: touch;
overflow-wrap: break-word;
flex-shrink: 0;
flex-grow: 0;
margin-right: 0.5rem;
 
z-index: 300;
}
 
.flex-right {
display: flex;
flex-direction: column;
overflow: auto;
-webkit-overflow-scrolling: touch;
flex-grow: 1;
flex-shrink: 1;
 
z-index: 200;
}
 
.flex-right > .wrap {
width: 60rem;
max-width: 85vw;
flex-shrink: 1;
}
 
.modal-container {
z-index: 400;
}
 
.understated {
color: var(--search-other-results-color);
}
 
.sidebar {
background-color: var(--bg-color);
box-shadow: 0 0 1rem var(--sidebar-sh-color);
clip-path: inset(0px -15px 0px 0px);
}
 
.logo {
margin: 0.5rem;
width: 130px;
}
 
.logo > svg {
display: block;
}
 
ul.guides-api-switch {
display: flex;
flex-direction: row;
justify-content: center;
text-align: center;
list-style-type: none;
margin: 0;
padding: 0;
}
 
.guides-api-switch a {
display: block;
padding: 0.5rem 1rem;
color: var(--sidebar-modlnk-tx-color);
background-color: var(--sidebar-modlnk-bg-color);
border: 1px solid var(--tx-color);
}
 
 
#ApiSwitch {
border-radius: 10px 0 0 10px;
}
 
#guideSwitch {
border-radius: 0 10px 10px 0;
}
 
#ApiSwitch:hover, #guideSwitch:hover {
text-decoration: none;
}
 
#ApiSwitch:hover:not(.active), #guideSwitch:hover:not(.active) {
color: var(--sidebar-modlnk-tx-color-hover);
background-color: var(--sidebar-modlnk-bg-color-hover);
}
 
.guides-api-switch .active {
color: var(--sidebar-modlnk-tx-color-active);
background-color: var(--sidebar-modlnk-bg-color-active);
}
 
#guidesMenu {
height: 100%;
overflow: hidden;
width: 30%;
margin-right: 2rem;
}
#activeGuide {
overflow-y: scroll;
height: 100%;
width: 70%;
padding-right: 1rem;
}
.sidebar h2 {
margin: 0.5rem;
padding: 0;
font-size: 1.2rem;
}
 
.sidebar h2 > span {
border-bottom: 0.125rem dotted var(--tx-color);
}
 
.sidebar .modules {
list-style-type: none;
margin: 0;
padding: 0;
background-color: var(--sidebar-mod-bg-color);
}
 
.sidebar .modules > li > a {
display: block;
padding: 0.5rem 1rem;
color: var(--sidebar-modlnk-tx-color);
background-color: var(--sidebar-modlnk-bg-color);
text-decoration: none;
}
 
.sidebar .modules > li > a:hover {
color: var(--sidebar-modlnk-tx-color-hover);
background-color: var(--sidebar-modlnk-bg-color-hover);
}
 
.sidebar .modules > li > a.active {
color: var(--sidebar-modlnk-tx-color-active);
background-color: var(--sidebar-modlnk-bg-color-active);
}
 
.sidebar p.str {
margin: 0.5rem;
font-family: var(--mono);
}
 
#guideTocList {
padding: 0 1rem;
}
#guideTocList ul {
padding-left: 1rem;
margin: 0;
}
 
#guides {
box-sizing: border-box;
font-size: 1rem;
background-color: var(--bg-color);
overflow-wrap: break-word;
}
 
/* docs section */
.docs {
flex-grow: 2;
padding: 0rem 0.7rem 0rem 1.4rem;
font-size: 1rem;
background-color: var(--bg-color);
overflow-wrap: break-word;
height: 100%;
overflow-y: scroll;
}
 
#noDocsNamespaces {
margin: 1rem;
border: 1px solid var(--search-other-results-color);
padding: 0.5rem 1rem;
background-color: var(--help-bg-color);
}
 
.column {
flex-basis: 0;
flex-grow: 1;
min-width: min(24rem, 90%);
}
 
.search-container {
flex-grow: 2;
}
 
.search {
width: 100%;
padding: 0.5rem;
font-family: var(--ui);
font-size: 1rem;
color: var(--tx-color);
background-color: var(--search-bg-color);
border-top: 0;
border-left: 0;
border-right: 0;
border-bottom-width: 0.125rem;
border-bottom-style: solid;
border-bottom-color: var(--tx-color);
outline: none;
transition: border-bottom-color 0.35s, background 0.35s, box-shadow 0.35s;
border-radius: 0;
-webkit-appearance: none;
}
 
.search:focus {
background-color: var(--search-bg-color-focus);
border-bottom-color: #ffbb4d;
box-shadow: 0 0.3em 1em 0.125em var(--search-sh-color);
}
 
#searchPlaceholder {
position: absolute;
pointer-events: none;
height: 100%;
display: flex;
align-items: center;
padding-left: 5px;
}
 
#searchPlaceholderTextMobile {
display: none;
}
 
#dotsPopover:before {
position: absolute;
content: "";
left: 20px;
top: -8px;
border-style: solid;
border-width: 0 10px 10px 10px;
border-color: transparent transparent var(--warning-popover-bg-color) transparent;
transition-duration: 0.3s;
transition-property: transform;
z-index: 10;
}
#dotsPopover {
position: absolute;
opacity: 0;
visibility: hidden;
background-color: var(--warning-popover-bg-color);
border-radius: 10px;
left: 10px;
transform: translate(0, -20px);
padding: 0.5rem 1rem;
box-shadow: 0 2px 5px 0 rgba(0, 0, 0, 0.26);
transition: all 0.5s cubic-bezier(0.75, -0.02, 0.2, 0.97);
z-index: 20;
}
 
#dotsPopover.active {
opacity: 1;
visibility: visible;
transform: translate(0, 0);
}
 
#sectSearchResults {
box-sizing: border-box;
}
 
#searchHelp summary {
color: red;
list-style-position: outside;
}
 
#searchHelp summary.normal {
color: var(--search-other-results-color);
transition: all 0.5s cubic-bezier(0.75, -0.02, 0.2, 0.97);
}
 
#searchHelp div {
background-color: var(--modal-bg-color);
padding: 0.5rem 1rem;
}
.other-results {
line-height: 1em;
position: relative;
outline: 0;
border: 0;
color: var(--search-other-results-color);
text-align: center;
height: 1.5em;
opacity: .5;
}
.other-results:before {
content: '';
background: var(--search-other-results-color);
position: absolute;
left: 0;
top: 50%;
table {
width: 100%;
height: 1px;
}
.other-results:after {
content: "other results";
position: relative;
display: inline-block;
padding: 0 .5em;
line-height: 1.5em;
color: var(--search-other-results-color);
background-color: var(--bg-color);
}
a {
color: var(--link-color);
color: #2A6286;
}
 
p {
margin: 0.8rem 0;
}
 
pre {
font-family: var(--mono);
font-size: 1em;
background-color: #F5F5F5;
pre{
font-family:"Source Code Pro",monospace;
font-size:1em;
background-color:#F5F5F5;
padding: 1em;
margin: 0;
overflow-x: auto;
}
 
pre.inline {
background-color: var(--bg-color);
padding: 0;
display: inline;
}
 
 
code {
font-family: var(--mono);
font-size: 1em;
font-family:"Source Code Pro",monospace;
font-size: 0.9em;
}
code a {
color: #000000;
}
#listFields > div, #listParams > div {
margin-bottom: 1em;
}
#hdrName a {
font-size: 0.7em;
padding-left: 1em;
}
.fieldDocs {
border: 1px solid #F5F5F5;
border-top: 0px;
padding: 1px 1em;
}
 
h1 {
font-size: 1.4em;
margin: 0.8em 0;
#logo {
width: 8em;
padding: 0.5em 1em;
}
 
#navWrap {
width: -moz-available;
width: -webkit-fill-available;
width: stretch;
margin-left: 11em;
}
 
#search {
width: 100%;
}
 
nav {
width: 10em;
float: left;
}
nav h2 {
font-size: 1.2em;
text-decoration: underline;
margin: 0;
padding: 0.5em 0;
text-align: center;
}
nav p {
margin: 0;
padding: 0;
border-bottom: 0.0625rem dashed;
text-align: center;
}
 
h2 {
section {
clear: both;
padding-top: 1em;
}
section h1 {
border-bottom: 1px dashed;
margin: 0 0;
}
section h2 {
font-size: 1.3em;
margin: 0.5em 0;
padding: 0;
border-bottom: 0.0625rem solid;
border-bottom: 1px solid;
}
.listNav {
#listNav {
list-style-type: none;
margin: 0;
margin: 0.5em 0 0 0;
padding: 0;
overflow: hidden;
background-color: #f1f1f1;
display: flex;
flex-direction: row;
}
.listNav li {
#listNav li {
float:left;
}
.listNav li a {
#listNav li a {
display: block;
color: #000;
text-align: center;
padding: .5em .8em;
text-decoration: none;
}
.listNav li a:hover {
#listNav li a:hover {
background-color: #555;
color: #fff;
}
.listNav li a.active {
#listNav li a.active {
background-color: #FFBB4D;
color: #000;
}
 
#helpDialog {
width: 21em;
height: 21em;
position: fixed;
top: 0;
left: 0;
background-color: #333;
color: #fff;
border: 1px solid #fff;
}
#helpDialog h1 {
text-align: center;
font-size: 1.5em;
}
#helpDialog dt, #helpDialog dd {
display: inline;
margin: 0 0.2em;
}
kbd {
color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
box-shadow-color: #c6cbd1;
display: inline-block;
padding: 0.3em 0.2em;
font: 1.2em monospace;
line-height: 0.8em;
vertical-align: middle;
border: solid 1px;
border-radius: 3px;
box-shadow: inset 0 -1px 0;
cursor: default;
}
 
#listSearchResults li.selected {
background-color: #93e196;
}
@@ -479,156 +161,25 @@
font-weight: bold;
}
 
.expand[open] .sum-less {
display: none;
dl > div {
padding: 0.5em;
border: 1px solid #c0c0c0;
margin-top: 0.5em;
}
 
.expand[open] .sum-more {
display: block;
}
 
.expand .sum-more {
display: none;
}
 
.expand {
position: relative;
}
 
.expand .button:before {
content: "[+] ";
font-family: var(--mono);
color: var(--link-color);
position: sticky;
float: left;
top: 0.5em;
right: -16px;
z-index: 1;
margin-left: -2em;
pointer-events: all;
cursor: pointer;
}
 
.expand[open] .button:before {
content: "[-] ";
}
 
.examples {
list-style-type: none;
margin: 0;
padding: 0;
}
.examples li {
padding: 0.5em 0;
white-space: nowrap;
overflow-x: auto;
}
 
.docs td {
td {
vertical-align: top;
margin: 0;
padding: 0.5em;
max-width: 27em;
max-width: 20em;
text-overflow: ellipsis;
overflow-x: hidden;
}
 
.fieldHasDocs {
margin-bottom: 0;
ul.columns {
column-width: 20em;
}
 
.fieldDocs {
border: 1px solid #F5F5F5;
border-top: 0px;
padding: 1px 1em;
}
 
/* modals */
.modal-container {
display: flex;
width: 100%;
height: 100%;
position: fixed;
top: 0;
left: 0;
justify-content: center;
align-items: center;
background-color: rgba(0, 0, 0, 0.15);
backdrop-filter: blur(0.3em);
}
 
.modal-container > .modal {
max-width: 97vw;
max-height: 97vh;
overflow: auto;
font-size: 1rem;
color: #fff;
background-color: var(--modal-bg-color);
border: 0.125rem solid #000;
box-shadow: 0 0.5rem 2.5rem 0.3rem var(--modal-sh-color);
}
 
.modal-container h1 {
margin: 0.75em 2.5em 1em 2.5em;
font-size: 1.5em;
text-align: center;
}
 
.modal-container dt, .modal-container dd {
display: inline;
margin: 0 0.2em;
}
 
.modal-container dl {
margin-left: 0.5em;
margin-right: 0.5em;
}
 
.prefs-list {
list-style: none;
padding: 0;
margin-left: 0.5em;
margin-right: 0.5em;
}
 
kbd {
display: inline-block;
padding: 0.3em 0.2em;
font-family: var(--mono);
font-size: 1em;
line-height: 0.8em;
vertical-align: middle;
color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
border: solid 0.0625em;
border-radius: 0.1875em;
box-shadow: inset 0 -0.2em 0 #c6cbd1;
cursor: default;
}
#listFns > div {
padding-bottom: 10px;
}
 
#listFns dt {
font-family: var(--mono);
display: flex;
flex-direction: column;
justify-content: space-between;
}
#listFns dt .fnSignature {
overflow-x: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.argBreaker {
display: none;
}
 
/* tokens */
.tok-kw {
color: #333;
font-weight: bold;
@@ -657,51 +208,36 @@
color: #458;
font-weight: bold;
}
.tok-decl-ref {
color: #0086b3;
font-weight: bold;
}
 
/* dark mode */
@media (prefers-color-scheme: dark) {
:root {
--tx-color: #bbb;
--bg-color: #111;
--link-color: #88f;
--sidebar-sh-color: rgba(128, 128, 128, 0.5);
--sidebar-mod-bg-color: #333;
--sidebar-modlnk-tx-color: #fff;
--sidebar-modlnk-tx-color-hover: #fff;
--sidebar-modlnk-tx-color-active: #000;
--sidebar-modlnk-bg-color: transparent;
--sidebar-modlnk-bg-color-hover: #555;
--sidebar-modlnk-bg-color-active: #FFBB4D;
--search-bg-color: #3c3c3c;
--search-bg-color-focus: #000;
--search-sh-color: rgba(255, 255, 255, 0.28);
--search-other-results-color: rgba(255, 255, 255, 0.28);
--modal-sh-color: rgba(142, 142, 142, 0.5);
--modal-bg-color: #333;
--warning-popover-bg-color: #600000;
body {
background-color: #111;
color: #bbb;
}
 
pre {
background-color:#2A2A2A;
background-color: #222;
color: #ccc;
}
a {
color: #88f;
}
code a {
color: #ccc;
}
.fieldDocs {
border-color:#2A2A2A;
}
.listNav {
#listNav {
background-color: #333;
}
.listNav li a {
#listNav li a {
color: #fff;
}
.listNav li a:hover {
#listNav li a:hover {
background-color: #555;
color: #fff;
}
.listNav li a.active {
#listNav li a.active {
background-color: #FFBB4D;
color: #000;
}
@@ -711,6 +247,9 @@
#listSearchResults li.selected a {
color: #fff;
}
dl > div {
border-color: #373737;
}
.tok-kw {
color: #eee;
}
@@ -724,7 +263,7 @@
color: #aa7;
}
.tok-fn {
color: #e33;
color: #B1A0F8;
}
.tok-null {
color: #ff8080;
@@ -735,511 +274,142 @@
.tok-type {
color: #68f;
}
.tok-decl-ref {
color: lightblue;
}
}
 
@media only screen and (max-width: 750px) {
.canvas {
overflow: auto;
}
.flex-main {
flex-direction: column;
}
.sidebar {
min-width: calc(100vw - 2.8rem);
padding-left: 1.4rem;
padding-right: 1.4rem;
}
.flex-main > .flex-filler {
display: none;
}
.flex-main > .flex-right > .flex-filler {
display: none;
}
.flex-main > .flex-right > .wrap {
max-width: 100vw;
}
.flex-main > .flex-right > .wrap > .docs {
padding-right: 1.4rem;
background: transparent;
}
.modules {
display: flex;
flex-wrap: wrap;
}
.table-container table {
display: flex;
flex-direction: column;
}
.table-container tr {
display: flex;
flex-direction: column;
}
.examples {
overflow-x: scroll;
-webkit-overflow-scrolling: touch;
max-width: 100vw;
margin-left: -1.4rem;
margin-right: -1.4rem;
}
.examples li {
width: max-content;
padding-left: 1.4rem;
padding-right: 1.4rem;
}
.mobile-scroll-container {
overflow-x: scroll;
-webkit-overflow-scrolling: touch;
margin-left: -1.4rem;
margin-right: -1.4rem;
max-width: 100vw;
}
.mobile-scroll-container > .scroll-item {
margin-left: 1.4rem;
margin-right: 1.4rem;
box-sizing: border-box;
width: max-content;
display: inline-block;
min-width: calc(100% - 2.8rem);
}
#searchPlaceholderText {
display: none;
}
#searchPlaceholderTextMobile {
display: inline;
}
}
.banner {
background-color: orange;
text-align: center;
color: black;
padding: 5px 5px;
}
.banner a {
color: black;
text-decoration: underline;
}
 
</style>
 
<style>
pre {
--zig-keyword: #333;
--zig-builtin: #0086b3;
--zig-identifier: black;
--zig-decl-identifier: #0086b3;
--zig-string-literal: #d14;
--zig-type: #458;
--zig-fn: #900;
}
@media (prefers-color-scheme: dark) {
pre {
--zig-keyword: #eee;
--zig-builtin: #ff894c;
--zig-identifier: #bbbbbb;
--zig-decl-identifier: lightblue;
--zig-string-literal: #2e5;
--zig-type: #68f;
--zig-fn: #e33;
}
}
 
.zig_keyword_addrspace,
.zig_keyword_align,
.zig_keyword_and,
.zig_keyword_asm,
.zig_keyword_async,
.zig_keyword_await,
.zig_keyword_break,
.zig_keyword_catch,
.zig_keyword_comptime,
.zig_keyword_const,
.zig_keyword_continue,
.zig_keyword_defer,
.zig_keyword_else,
.zig_keyword_enum,
.zig_keyword_errdefer,
.zig_keyword_error,
.zig_keyword_export,
.zig_keyword_extern,
.zig_keyword_for,
.zig_keyword_if,
.zig_keyword_inline,
.zig_keyword_noalias,
.zig_keyword_noinline,
.zig_keyword_nosuspend,
.zig_keyword_opaque,
.zig_keyword_or,
.zig_keyword_orelse,
.zig_keyword_packed,
.zig_keyword_anyframe,
.zig_keyword_pub,
.zig_keyword_resume,
.zig_keyword_return,
.zig_keyword_linksection,
.zig_keyword_callconv,
.zig_keyword_struct,
.zig_keyword_suspend,
.zig_keyword_switch,
.zig_keyword_test,
.zig_keyword_threadlocal,
.zig_keyword_try,
.zig_keyword_union,
.zig_keyword_unreachable,
.zig_keyword_usingnamespace,
.zig_keyword_var,
.zig_keyword_volatile,
.zig_keyword_allowzero,
.zig_keyword_while,
.zig_keyword_anytype,
.zig_keyword_fn
{
color: var(--zig-keyword);
font-weight: bold;
}
 
 
.zig_string_literal,
.zig_multiline_string_literal_line,
.zig_char_literal
{
color: var(--zig-string-literal);
}
 
.zig_builtin
{
color: var(--zig-builtin);
}
 
.zig_doc_comment,
.zig_container_doc_comment,
.zig_line_comment {
color: #545454;
font-style: italic;
}
 
.zig_identifier {
color: var(--zig-identifier);
font-weight: bold;
}
.zig_decl_identifier {
color: var(--zig-decl-identifier);
font-weight: bold;
}
 
.zig_number_literal,
.zig_special {
color: #ff8080;
}
 
.zig_type {
color: var(--zig-type);
font-weight: bold;
}
 
.zig_fn {
color: var(--zig-fn);
font-weight: bold;
}
</style>
</head>
<body class="canvas">
<div id="banner" class="banner">
This is a beta autodoc build; expect bugs and missing information.
<a href="https://github.com/ziglang/zig/wiki/How-to-contribute-to-Autodoc">Report an Issue</a>,
<a href="https://github.com/ziglang/zig/wiki/How-to-contribute-to-Autodoc">Contribute</a>,
<a href="https://github.com/ziglang/zig/wiki/How-to-read-the-standard-library-source-code">Learn more about stdlib source code</a>.
<body>
<nav>
<a class="logo" href="#">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400 140">
<g fill="#F7A41D">
<g>
<polygon points="46,22 28,44 19,30"/>
<polygon points="46,22 33,33 28,44 22,44 22,95 31,95 20,100 12,117 0,117 0,22" shape-rendering="crispEdges"/>
<polygon points="31,95 12,117 4,106"/>
</g>
<g>
<polygon points="56,22 62,36 37,44"/>
<polygon points="56,22 111,22 111,44 37,44 56,32" shape-rendering="crispEdges"/>
<polygon points="116,95 97,117 90,104"/>
<polygon points="116,95 100,104 97,117 42,117 42,95" shape-rendering="crispEdges"/>
<polygon points="150,0 52,117 3,140 101,22"/>
</g>
<g>
<polygon points="141,22 140,40 122,45"/>
<polygon points="153,22 153,117 106,117 120,105 125,95 131,95 131,45 122,45 132,36 141,22" shape-rendering="crispEdges"/>
<polygon points="125,95 130,110 106,117"/>
</g>
</g>
<style>
#text { fill: #121212 }
@media (prefers-color-scheme: dark) { #text { fill: #f2f2f2 } }
</style>
<g id="text">
<g>
<polygon points="260,22 260,37 229,40 177,40 177,22" shape-rendering="crispEdges"/>
<polygon points="260,37 207,99 207,103 176,103 229,40 229,37"/>
<polygon points="261,99 261,117 176,117 176,103 206,99" shape-rendering="crispEdges"/>
</g>
<rect x="272" y="22" shape-rendering="crispEdges" width="22" height="95"/>
<g>
<polygon points="394,67 394,106 376,106 376,81 360,70 346,67" shape-rendering="crispEdges"/>
<polygon points="360,68 376,81 346,67"/>
<path d="M394,106c-10.2,7.3-24,12-37.7,12c-29,0-51.1-20.8-51.1-48.3c0-27.3,22.5-48.1,52-48.1 c14.3,0,29.2,5.5,38.9,14l-13,15c-7.1-6.3-16.8-10-25.9-10c-17,0-30.2,12.9-30.2,29.5c0,16.8,13.3,29.6,30.3,29.6 c5.7,0,12.8-2.3,19-5.5L394,106z"/>
</g>
</g>
</svg>
</a>
</nav>
<div id="navWrap">
<input type="search" id="search" autocomplete="off" spellcheck="false" placeholder="`s` to search, `?` to see more options">
<div id="sectNav" class="hidden"><ul id="listNav"></ul></div>
</div>
<div id="main" class="flex-main">
<div class="flex-horizontal" style="justify-content: center; padding: 0 0.5rem;">
<div class="flex-left">
<div class="logo">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400 140">
<g fill="#F7A41D">
<g>
<polygon points="46,22 28,44 19,30"/>
<polygon points="46,22 33,33 28,44 22,44 22,95 31,95 20,100 12,117 0,117 0,22" shape-rendering="crispEdges"/>
<polygon points="31,95 12,117 4,106"/>
</g>
<g>
<polygon points="56,22 62,36 37,44"/>
<polygon points="56,22 111,22 111,44 37,44 56,32" shape-rendering="crispEdges"/>
<polygon points="116,95 97,117 90,104"/>
<polygon points="116,95 100,104 97,117 42,117 42,95" shape-rendering="crispEdges"/>
<polygon points="150,0 52,117 3,140 101,22"/>
</g>
<g>
<polygon points="141,22 140,40 122,45"/>
<polygon points="153,22 153,117 106,117 120,105 125,95 131,95 131,45 122,45 132,36 141,22" shape-rendering="crispEdges"/>
<polygon points="125,95 130,110 106,117"/>
</g>
</g>
<style>
#text { fill: #121212 }
@media (prefers-color-scheme: dark) { #text { fill: #f2f2f2 } }
</style>
<g id="text">
<g>
<polygon points="260,22 260,37 229,40 177,40 177,22" shape-rendering="crispEdges"/>
<polygon points="260,37 207,99 207,103 176,103 229,40 229,37"/>
<polygon points="261,99 261,117 176,117 176,103 206,99" shape-rendering="crispEdges"/>
</g>
<rect x="272" y="22" shape-rendering="crispEdges" width="22" height="95"/>
<g>
<polygon points="394,67 394,106 376,106 376,81 360,70 346,67" shape-rendering="crispEdges"/>
<polygon points="360,68 376,81 346,67"/>
<path d="M394,106c-10.2,7.3-24,12-37.7,12c-29,0-51.1-20.8-51.1-48.3c0-27.3,22.5-48.1,52-48.1 c14.3,0,29.2,5.5,38.9,14l-13,15c-7.1-6.3-16.8-10-25.9-10c-17,0-30.2,12.9-30.2,29.5c0,16.8,13.3,29.6,30.3,29.6 c5.7,0,12.8-2.3,19-5.5L394,106z"/>
</g>
</g>
</svg>
</div>
<div id="sectGuideApiSwitch">
<ul class="guides-api-switch">
<li><a id="ApiSwitch" class="active" href="#A;">API</a></li>
<li><a id="guideSwitch" class="" href="#G;">Guides</a></li>
</ul>
</div>
</div>
<div class="flex-right" style="padding-top: 0.5rem;overflow:visible;">
<div class="search-container" style="position:relative;">
<div id="searchPlaceholder">
<span id="searchPlaceholderText"><!-- populated by setPrefSlashSearch --></span>
<span id="searchPlaceholderTextMobile">Search</span>
</div>
<input type="search" class="search" id="search" autocomplete="off" spellcheck="false" disabled>
<div id="dotsPopover">
Use spaces instead of dots. See $resource for more info.
</div>
</div>
<div id="sectNavAPI" style="margin-top: 0.5rem;"><ul id="listNavAPI" class="listNav"></ul></div>
<div id="sectNavGuides" class="hidden" style="margin-top: 0.5rem">
<ul id="listNavGuides" class="listNav">
<li>
<a href="#G;" class="active">Index</a>
</li>
<li style="flex-grow:1;">
<a href="#G;" class="active" onclick="scrollGuidesTop(event);"></a>
</li>
</ul>
</div>
</div>
</div>
<div style="height:100%; overflow:hidden;">
<div id="sectSearchResults" class="docs hidden">
<details id="searchHelp">
<summary id="searchHelpSummary" class="normal">How to search effectively</summary>
<div>
<h2>How To Search Effectively</h2>
<h3>Matching</h3>
<ul>
<li>Search is case-insensitive by default.</li>
<li>Using uppercase letters in your query will make the search
case-sensitive.</li>
<li>Given <code>ArrayListUnmanaged</code>:
<ul>
<li>the following search terms (and their prefixes) will match:
<ul>
<li><code>array</code></li>
<li><code>list</code></li>
<li><code>unmanaged</code></li>
</ul>
</li>
<li>the following search terms will <b>NOT</b> match:
<ul>
<li><code>stun</code></li>
<li><code>ray</code></li>
<li><code>managed</code></li>
</ul>
</li>
</ul>
</li>
<li>More precisely, the search system is based on a Radix Tree. The Radix Tree contains full decl names plus some suffixes, split by following the official style guide (e.g. <code>HashMapUnmanaged</code> also produces <code>MapUnmanaged</code> and <code>Unmanaged</code>; the same applies to snake_case and camelCase names). A sketch of this splitting is shown after this list.</li>
</ul>
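<p>For illustration only: a hypothetical helper (<code>nameSuffixes</code>, not the actual Autodoc indexer) sketches how a PascalCase decl name could be expanded into the suffixes described above:</p>
<pre><code>function nameSuffixes(name) {
  // "HashMapUnmanaged" splits into ["Hash", "Map", "Unmanaged"]
  const words = name.split(/(?=[A-Z])/);
  return words.map(function (_, i) {
    return words.slice(i).join("");
  }); // ["HashMapUnmanaged", "MapUnmanaged", "Unmanaged"]
}</code></pre>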
 
<h3>Multiple terms</h3>
<ul>
<li>When a search query contains multiple terms, order doesn't matter when
all terms match within a single decl name (e.g. "map auto" will match <code>AutoHashMap</code>).</li>
<li>Query term order does matter when the terms match different decls along
a path (e.g. "js parse" matching <code>std.json.parse</code>); in that
case the order of the terms determines whether a match goes above or
below the "other results" line.</li>
<li>For example, "fs create" will put everything related to creating files and directories inside <code>std.fs</code> above the line, while still showing (below the line) matches from <code>std.Build</code>.</li>
<li>As another example, "fs windows" will prioritize Windows-related results in <code>std.fs</code>, while "windows fs" will prioritize "fs"-related results in <code>std.windows</code>.</li>
<li>This means that if you're searching inside a target namespace, you never have to read below the "other results" line.</li>
<li>Since matching doesn't have to be perfect, you can also target a group of namespaces to search into. For example "array orderedremove" will show you all "Array-" namespaces that support <code>orderedRemove</code>.</li>
<li>Periods are replaced by spaces because the Radix Tree doesn't index full paths, and in practice you should expect the match scoring system to consistently give you what you're looking for even when your query path is split into multiple terms.</li>
</ul>
</div>
</details>
<h2>Search Results</h2>
<ul id="listSearchResults"></ul>
<p id="sectSearchAllResultsLink" class="hidden"><a href="">show all results</a></p>
</div>
<div id="sectSearchNoResults" class="docs hidden">
<h2>No Results Found</h2>
<p>Here are some things you can try:</p>
<ul>
<li>Check out the <a id="langRefLink">Language Reference</a> for the language itself.</li>
<li>Check out the <a href="https://ziglang.org/learn/">Learn page</a> for other helpful resources for learning Zig.</li>
<li>Use your search engine.</li>
</ul>
<p>Press <kbd>?</kbd> to see keyboard shortcuts and <kbd>Esc</kbd> to return.</p>
</div>
<div id="guides" class="flex-horizontal hidden" style="align-items:flex-start;height:100%;overflow:hidden;">
<div id="guidesMenu" class="sidebar">
<h2 id="guidesMenuTitle">Table of Contents</h2>
<div id="guideTocListEmpty" style="margin:0 1rem;"><i>No content to display.</i></div>
<div id="guideTocList" style="height:100%;overflow-y:scroll;"></div>
</div>
<div id="activeGuide" class="hidden"></div>
</div>
<div id="docs" class="hidden" style="align-items:flex-start;height:100%;overflow:hidden;">
<section id="docs-scroll" class="docs">
<p id="status">Loading...</p>
<div id="fnProto" class="hidden">
<div class="mobile-scroll-container"><pre id="fnProtoCode" class="scroll-item"></pre></div>
<div id="fnSourceLink" style="display:flex;flex-direction:row;justify-content:flex-end;"></div>
</div>
<h1 id="hdrName" class="hidden"></h1>
<div id="fnNoExamples" class="hidden">
<p>This function is not tested or referenced.</p>
</div>
<div id="declNoRef" class="hidden">
<p>
This declaration is not tested or referenced, so it has not been included in
semantic analysis; the only documentation available is whatever appears in its
doc comments.
</p>
</div>
<div id="tldDocs" class="hidden"></div>
<div id="sectParams" class="hidden">
<h2>Parameters</h2>
<div id="listParams"></div>
</div>
<div id="sectFnErrors" class="hidden">
<h2>Errors</h2>
<div id="fnErrorsAnyError">
<p><span class="tok-type">anyerror</span> means the error set is known only at runtime.</p>
</div>
<div id="tableFnErrors"><dl id="listFnErrors"></dl></div>
</div>
<div id="sectFields" class="hidden">
<h2>Fields</h2>
<div id="listFields"></div>
</div>
<div id="sectNamespaces" class="hidden">
<div style="position:relative;">
<h2 style="position:sticky; top:0; background-color:var(--bg-color)">Namespaces</h2>
<div class="flex-horizontal" style="justify-content:space-around;align-items:flex-start;flex-wrap:wrap;">
<ul id="listNamespacesLeft" class="column"></ul>
<ul id="listNamespacesRight" class="column"></ul>
</div>
</div>
<h3>Other Namespaces <span style="font-size:1.1rem; cursor:pointer;" title="This box contains namespaces that are exported without a doc comment.">&#9432;</span></h3>
<div id="noDocsNamespaces"></div>
</div>
<div id="sectTypes" class="hidden">
<div style="position:relative;">
<h2 style="position:sticky; top:0; background-color:var(--bg-color)">Types</h2>
<div class="flex-horizontal" style="justify-content:space-around;align-items:flex-start;flex-wrap:wrap;">
<ul id="listTypesLeft" class="column"></ul>
<ul id="listTypesRight" class="column"></ul>
</div>
</div>
</div>
<div id="sectGlobalVars" class="hidden">
<h2>Global Variables</h2>
<div class="table-container">
<table>
<tbody id="listGlobalVars"></tbody>
</table>
</div>
</div>
<div id="sectFns" class="hidden">
<h2>Functions</h2>
<div class="table-container">
<dl id="listFns"></dl>
</div>
</div>
<div id="sectValues" class="hidden">
<h2>Values</h2>
<div class="table-container">
<table>
<tbody id="listValues"></tbody>
</table>
</div>
</div>
<div id="sectErrSets" class="hidden">
<h2>Error Sets</h2>
<ul id="listErrSets"></ul>
</div>
<div id="fnExamples" class="hidden">
<h2>Examples</h2>
<ul id="listFnExamples" class="examples"></ul>
</div>
<div id="sectDocTests" class="hidden">
<h2>Usage Examples <span style="font-size:1.1rem; cursor:pointer;" title="See `doctests` in the language reference to learn more.">&#9432;</span></h2>
<pre id="docTestsCode"></pre>
</div>
<div id="sectTests" class="hidden">
<h2>Tests</h2>
<div class="table-container">
<table>
<tbody id="listTests"></tbody>
</table>
</div>
</div>
</section>
</div>
<div class="flex-filler"></div>
<section>
<p id="status">Loading...</p>
<h1 id="hdrName" class="hidden"><span></span><a href="#">[src]</a></h1>
<div id="fnProto" class="hidden">
<pre><code id="fnProtoCode"></code></pre>
</div>
<div id="tldDocs" class="hidden"></div>
<div id="sectParams" class="hidden">
<h2>Parameters</h2>
<div id="listParams">
</div>
</div>
<div id="helpModal" class="hidden">
<div class="modal-container">
<div class="modal">
<h1>Keyboard Shortcuts</h1>
<dl><dt><kbd>?</kbd></dt><dd>Toggle this help modal</dd></dl>
<dl><dt id="searchKeys"><!-- populated by setPrefSlashSearch --></dt><dd>Focus the search field</dd></dl>
<div style="margin-left: 1em">
<dl><dt><kbd>↑</kbd></dt><dd>Move up in search results</dd></dl>
<dl><dt><kbd>↓</kbd></dt><dd>Move down in search results</dd></dl>
<dl><dt><kbd>⏎</kbd></dt><dd>Go to active search result</dd></dl>
</div>
<dl><dt><kbd>p</kbd></dt><dd>Open preferences</dd></dl>
<dl><dt><kbd>Esc</kbd></dt><dd>Clear focus; close this modal</dd></dl>
</div>
<div id="sectFnErrors" class="hidden">
<h2>Errors</h2>
<div id="fnErrorsAnyError">
<p><span class="tok-type">anyerror</span> means the error set is known only at runtime.</p>
</div>
<div id="tableFnErrors"><dl id="listFnErrors"></dl></div>
</div>
<div id="sectSearchResults" class="hidden">
<h2>Search Results</h2>
<ul id="listSearchResults"></ul>
</div>
<div id="sectSearchNoResults" class="hidden">
<h2>No Results Found</h2>
<p>Press escape to exit search and then '?' to see more options.</p>
</div>
<div id="sectFields" class="hidden">
<h2>Fields</h2>
<div id="listFields">
</div>
</div>
<div id="prefsModal" class="hidden">
<div class="modal-container">
<div class="modal">
<h1>Preferences</h1>
<ul class="prefs-list">
<li><input id="prefSlashSearch" type="checkbox"><label for="prefSlashSearch">Enable <kbd>/</kbd> for search</label></li>
</ul>
</div>
</div>
<div id="sectTypes" class="hidden">
<h2>Types</h2>
<ul id="listTypes" class="columns">
</ul>
</div>
<div id="sectNamespaces" class="hidden">
<h2>Namespaces</h2>
<ul id="listNamespaces" class="columns">
</ul>
</div>
<div id="sectGlobalVars" class="hidden">
<h2>Global Variables</h2>
<table>
<tbody id="listGlobalVars">
</tbody>
</table>
</div>
<div id="sectValues" class="hidden">
<h2>Values</h2>
<table>
<tbody id="listValues">
</tbody>
</table>
</div>
<div id="sectFns" class="hidden">
<h2>Functions</h2>
<dl id="listFns">
</dl>
</div>
<div id="sectErrSets" class="hidden">
<h2>Error Sets</h2>
<ul id="listErrSets" class="columns">
</ul>
</div>
<div id="sectDocTests" class="hidden">
<h2>Example Usage</h2>
<pre><code id="docTestsCode"></code></pre>
</div>
<div id="sectSource" class="hidden">
<h2>Source Code</h2>
<pre><code id="sourceText"></code></pre>
</div>
</section>
<div id="helpDialog" class="hidden">
<h1>Keyboard Shortcuts</h1>
<dl><dt><kbd>?</kbd></dt><dd>Show this help dialog</dd></dl>
<dl><dt><kbd>Esc</kbd></dt><dd>Clear focus; close this dialog</dd></dl>
<dl><dt><kbd>s</kbd></dt><dd>Focus the search field</dd></dl>
<dl><dt><kbd>u</kbd></dt><dd>Go to source code</dd></dl>
<dl><dt><kbd>↑</kbd></dt><dd>Move up in search results</dd></dl>
<dl><dt><kbd>↓</kbd></dt><dd>Move down in search results</dd></dl>
<dl><dt><kbd>⏎</kbd></dt><dd>Go to active search result</dd></dl>
</div>
<script src="data-typeKinds.js"></script>
<script src="data-rootMod.js"></script>
<script src="data-modules.js"></script>
<script src="data-files.js"></script>
<script src="data-calls.js"></script>
<script src="data-types.js"></script>
<script src="data-decls.js"></script>
<script src="data-exprs.js"></script>
<script src="data-astNodes.js"></script>
<script src="data-comptimeExprs.js"></script>
<script src="data-guideSections.js"></script>
<script src="commonmark.js"></script>
<script src="ziglexer.js"></script>
<script src="main.js"></script>
</body>
</html>
 
 
lib/docs/main.js added: 7501, removed: 25316, total 0
@@ -1,5242 +1,987 @@
"use strict";
 
var zigAnalysis = {
typeKinds,
rootMod,
modules,
astNodes,
calls,
files,
decls,
exprs,
types,
comptimeExprs,
guideSections
};
 
let skipNextHashChange = null;
 
const NAV_MODES = {
API: "#A;",
GUIDES: "#G;",
};
 
 
var scrollHistory = {};
 
(function() {
const domBanner = document.getElementById("banner");
const domMain = document.getElementById("main");
const domStatus = document.getElementById("status");
const domSectNavAPI = document.getElementById("sectNavAPI");
const domListNavAPI = document.getElementById("listNavAPI");
const domSectNavGuides = document.getElementById("sectNavGuides");
const domListNavGuides = document.getElementById("listNavGuides");
const domApiSwitch = document.getElementById("ApiSwitch");
const domGuideSwitch = document.getElementById("guideSwitch");
const domGuidesMenu = document.getElementById("guidesMenu");
const domGuidesMenuTitle = document.getElementById("guidesMenuTitle");
const domGuideTocList = document.getElementById("guideTocList");
const domGuideTocListEmtpy = document.getElementById("guideTocListEmpty");
const domListMods = document.getElementById("listMods");
const domSectTypes = document.getElementById("sectTypes");
const domListTypesLeft = document.getElementById("listTypesLeft");
const domListTypesRight = document.getElementById("listTypesRight");
const domSectTests = document.getElementById("sectTests");
const domListTests = document.getElementById("listTests");
const domSectDocTests = document.getElementById("sectDocTests");
const domDocTestsCode = document.getElementById("docTestsCode");
const domSectNamespaces = document.getElementById("sectNamespaces");
const domListNamespacesLeft = document.getElementById("listNamespacesLeft");
const domListNamespacesRight = document.getElementById("listNamespacesRight");
const domNoDocsNamespaces = document.getElementById("noDocsNamespaces");
const domSectErrSets = document.getElementById("sectErrSets");
const domListErrSets = document.getElementById("listErrSets");
const domSectFns = document.getElementById("sectFns");
const domListFns = document.getElementById("listFns");
const domSectFields = document.getElementById("sectFields");
const domListFields = document.getElementById("listFields");
const domSectGlobalVars = document.getElementById("sectGlobalVars");
const domListGlobalVars = document.getElementById("listGlobalVars");
const domSectValues = document.getElementById("sectValues");
const domListValues = document.getElementById("listValues");
const domFnProto = document.getElementById("fnProto");
const domFnProtoCode = document.getElementById("fnProtoCode");
const domFnSourceLink = document.getElementById("fnSourceLink");
const domSectParams = document.getElementById("sectParams");
const domListParams = document.getElementById("listParams");
const domTldDocs = document.getElementById("tldDocs");
const domSectFnErrors = document.getElementById("sectFnErrors");
const domListFnErrors = document.getElementById("listFnErrors");
const domTableFnErrors = document.getElementById("tableFnErrors");
const domFnErrorsAnyError = document.getElementById("fnErrorsAnyError");
const domFnExamples = document.getElementById("fnExamples");
// const domListFnExamples = (document.getElementById("listFnExamples"));
const domFnNoExamples = document.getElementById("fnNoExamples");
const domDeclNoRef = document.getElementById("declNoRef");
const domSearch = document.getElementById("search");
const domSearchHelp = document.getElementById("searchHelp");
const domSearchHelpSummary = document.getElementById("searchHelpSummary");
const domSectSearchResults = document.getElementById("sectSearchResults");
const domSectSearchAllResultsLink = document.getElementById("sectSearchAllResultsLink");
const domDocs = document.getElementById("docs");
const domDocsScroll = document.getElementById("docs-scroll");
const domGuidesSection = document.getElementById("guides");
const domActiveGuide = document.getElementById("activeGuide");
const CAT_namespace = 0;
const CAT_global_variable = 1;
const CAT_function = 2;
const CAT_primitive = 3;
const CAT_error_set = 4;
const CAT_global_const = 5;
const CAT_alias = 6;
const CAT_type = 7;
const CAT_type_type = 8;
const CAT_type_function = 9;
 
const domListSearchResults = document.getElementById("listSearchResults");
const domSectSearchNoResults = document.getElementById("sectSearchNoResults");
// const domTdTarget = (document.getElementById("tdTarget"));
const domTdZigVer = document.getElementById("tdZigVer");
const domHdrName = document.getElementById("hdrName");
const domHelpModal = document.getElementById("helpModal");
const domSearchKeys = document.getElementById("searchKeys");
const domPrefsModal = document.getElementById("prefsModal");
const domSearchPlaceholder = document.getElementById("searchPlaceholder");
const domSearchPlaceholderText = document.getElementById("searchPlaceholderText");
const sourceFileUrlTemplate = "src/{{mod}}/{{file}}.html#L{{line}}"
const domLangRefLink = document.getElementById("langRefLink");
const domDocTestsCode = document.getElementById("docTestsCode");
const domFnErrorsAnyError = document.getElementById("fnErrorsAnyError");
const domFnProto = document.getElementById("fnProto");
const domFnProtoCode = document.getElementById("fnProtoCode");
const domHdrName = document.getElementById("hdrName");
const domHelpModal = document.getElementById("helpDialog");
const domListErrSets = document.getElementById("listErrSets");
const domListFields = document.getElementById("listFields");
const domListParams = document.getElementById("listParams");
const domListFnErrors = document.getElementById("listFnErrors");
const domListFns = document.getElementById("listFns");
const domListGlobalVars = document.getElementById("listGlobalVars");
const domListInfo = document.getElementById("listInfo");
const domListNamespaces = document.getElementById("listNamespaces");
const domListNav = document.getElementById("listNav");
const domListSearchResults = document.getElementById("listSearchResults");
const domListTypes = document.getElementById("listTypes");
const domListValues = document.getElementById("listValues");
const domSearch = document.getElementById("search");
const domSectDocTests = document.getElementById("sectDocTests");
const domSectErrSets = document.getElementById("sectErrSets");
const domSectFields = document.getElementById("sectFields");
const domSectParams = document.getElementById("sectParams");
const domSectFnErrors = document.getElementById("sectFnErrors");
const domSectFns = document.getElementById("sectFns");
const domSectGlobalVars = document.getElementById("sectGlobalVars");
const domSectNamespaces = document.getElementById("sectNamespaces");
const domSectNav = document.getElementById("sectNav");
const domSectSearchNoResults = document.getElementById("sectSearchNoResults");
const domSectSearchResults = document.getElementById("sectSearchResults");
const domSectSource = document.getElementById("sectSource");
const domSectTypes = document.getElementById("sectTypes");
const domSectValues = document.getElementById("sectValues");
const domSourceText = document.getElementById("sourceText");
const domStatus = document.getElementById("status");
const domTableFnErrors = document.getElementById("tableFnErrors");
const domTldDocs = document.getElementById("tldDocs");
 
const domPrefSlashSearch = document.getElementById("prefSlashSearch");
const prefs = getLocalStorage();
loadPrefs();
var searchTimer = null;
 
domPrefSlashSearch.addEventListener("change", () => setPrefSlashSearch(domPrefSlashSearch.checked));
const curNav = {
// 0 = home
// 1 = decl (decl)
// 2 = source (path)
tag: 0,
// unsigned int: decl index
decl: null,
// string file name matching tarball path
path: null,
 
const scrollMonitor = [
domActiveGuide,
domGuideTocList,
domDocsScroll,
domSectSearchResults,
];
// when this is populated, pressing the "view source" command will
// navigate to this hash.
viewSourceHash: null,
};
var curNavSearch = "";
var curSearchIndex = -1;
var imFeelingLucky = false;
 
computeGuideHashes();
// names of modules in the same order as wasm
const moduleList = [];
 
let searchTimer = null;
let searchTrimResults = true;
let wasm_promise = fetch("main.wasm");
let sources_promise = fetch("sources.tar").then(function(response) {
if (!response.ok) throw new Error("unable to download sources");
return response.arrayBuffer();
});
var wasm_exports = null;
 
let escapeHtmlReplacements = {
"&": "&amp;",
'"': "&quot;",
"<": "&lt;",
">": "&gt;",
};
const text_decoder = new TextDecoder();
const text_encoder = new TextEncoder();
 
let typeKinds = indexTypeKinds();
let typeTypeId = findTypeTypeId();
let pointerSizeEnum = { One: 0, Many: 1, Slice: 2, C: 3 };
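// Instantiate main.wasm, providing log/panic hooks so the Zig side can
// report messages and fatal errors back to the browser.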
WebAssembly.instantiateStreaming(wasm_promise, {
js: {
log: function(ptr, len) {
const msg = decodeString(ptr, len);
console.log(msg);
},
panic: function (ptr, len) {
const msg = decodeString(ptr, len);
throw new Error("panic: " + msg);
},
},
}).then(function(obj) {
wasm_exports = obj.instance.exports;
window.wasm = obj; // for debugging
 
let declSearchIndex = new RadixTree();
window.search = declSearchIndex;
sources_promise.then(function(buffer) {
const js_array = new Uint8Array(buffer);
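// Copy the downloaded sources.tar into the wasm module's linear memory,
// then let the Zig side unpack and index it.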
const ptr = wasm_exports.alloc(js_array.length);
const wasm_array = new Uint8Array(wasm_exports.memory.buffer, ptr, js_array.length);
wasm_array.set(js_array);
wasm_exports.unpack(ptr, js_array.length);
 
// for each module, is an array with modules to get to this one
let canonModPaths = computeCanonicalModulePaths();
updateModuleList();
 
// for each decl, is an array with {declNames, modNames} to get to this one
let canonDeclPaths = null; // lazy; use getCanonDeclPath
window.addEventListener('popstate', onPopState, false);
domSearch.addEventListener('keydown', onSearchKeyDown, false);
domSearch.addEventListener('input', onSearchChange, false);
window.addEventListener('keydown', onWindowKeyDown, false);
onHashChange(null);
});
});
 
// for each type, is an array with {declNames, modNames} to get to this one
let canonTypeDecls = null; // lazy; use getCanonTypeDecl
 
let curNav = {
hash: "",
mode: NAV_MODES.API,
activeGuide: "",
activeGuideScrollTo: null,
// each element is a module name, e.g. @import("a") then within there @import("b")
// starting implicitly from root module
modNames: [],
// same as above except actual modules, not names
modObjs: [],
// Each element is a decl name, `a.b.c`, a is 0, b is 1, c is 2, etc.
// empty array means refers to the module itself
declNames: [],
// these will be all types, except the last one may be a type or a decl
declObjs: [],
// (a, b, c, d) comptime call; result is the value the docs refer to
callName: null,
};
 
let curNavSearch = "";
let curSearchIndex = -1;
let imFeelingLucky = false;
 
let rootIsStd = detectRootIsStd();
 
// map of decl index to list of non-generic fn indexes
// let nodesToFnsMap = indexNodesToFns();
// map of decl index to list of comptime fn calls
// let nodesToCallsMap = indexNodesToCalls();
 
let guidesSearchIndex = {};
window.guideSearch = guidesSearchIndex;
parseGuides();
 
// identifiers can contain modal trigger characters so we want to allow typing
// such characters when the search is focused instead of toggling the modal
let canToggleModal = true;
 
domSearch.disabled = false;
domSearch.addEventListener("keydown", onSearchKeyDown, false);
domSearch.addEventListener("input", onSearchInput, false);
domSearch.addEventListener("focus", ev => {
domSearchPlaceholder.classList.add("hidden");
canToggleModal = false;
});
domSearch.addEventListener("blur", ev => {
if (domSearch.value.length == 0)
domSearchPlaceholder.classList.remove("hidden");
canToggleModal = true;
});
domSectSearchAllResultsLink.addEventListener('click', onClickSearchShowAllResults, false);
function onClickSearchShowAllResults(ev) {
ev.preventDefault();
ev.stopPropagation();
searchTrimResults = false;
onHashChange();
}
 
if (location.hash == "") {
location.hash = "#A;";
}
 
// make the modal disappear if you click outside it
function handleModalClick(ev) {
if (ev.target.classList.contains("modal-container")) {
hideModal(this);
function renderTitle() {
const suffix = " - Zig Documentation";
if (curNavSearch.length > 0) {
document.title = curNavSearch + " - Search" + suffix;
} else if (curNav.decl != null) {
document.title = fullyQualifiedName(curNav.decl) + suffix;
} else if (curNav.path != null) {
document.title = curNav.path + suffix;
} else {
document.title = moduleList[0] + suffix; // Home
}
}
}
domHelpModal.addEventListener("click", handleModalClick);
domPrefsModal.addEventListener("click", handleModalClick);
 
window.addEventListener("hashchange", onHashChange, false);
window.addEventListener("keydown", onWindowKeyDown, false);
onHashChange();
function render() {
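// Reset the page: hide every section first; the dispatch below re-shows
// only the parts needed for the current navigation state.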
domFnErrorsAnyError.classList.add("hidden");
domFnProto.classList.add("hidden");
domHdrName.classList.add("hidden");
domHelpModal.classList.add("hidden");
domSectErrSets.classList.add("hidden");
domSectDocTests.classList.add("hidden");
domSectFields.classList.add("hidden");
domSectParams.classList.add("hidden");
domSectFnErrors.classList.add("hidden");
domSectFns.classList.add("hidden");
domSectGlobalVars.classList.add("hidden");
domSectNamespaces.classList.add("hidden");
domSectNav.classList.add("hidden");
domSectSearchNoResults.classList.add("hidden");
domSectSearchResults.classList.add("hidden");
domSectSource.classList.add("hidden");
domSectTypes.classList.add("hidden");
domSectValues.classList.add("hidden");
domStatus.classList.add("hidden");
domTableFnErrors.classList.add("hidden");
domTldDocs.classList.add("hidden");
 
// TODO: fix this once langref becomes part of autodoc
let langRefVersion = "master";
domLangRefLink.href = `https://ziglang.org/documentation/${langRefVersion}/`;
renderTitle();
 
function renderTitle() {
let suffix = " - Zig";
switch (curNav.mode) {
case NAV_MODES.API:
let list = curNav.modNames.concat(curNav.declNames);
if (list.length === 0) {
document.title = zigAnalysis.modules[zigAnalysis.rootMod].name + suffix;
} else {
document.title = list.join(".") + suffix;
if (curNavSearch !== "") return renderSearch();
 
switch (curNav.tag) {
case 0: return renderHome();
case 1:
if (curNav.decl == null) {
return renderNotFound();
} else {
return renderDecl(curNav.decl);
}
case 2: return renderSource(curNav.path);
default: throw new Error("invalid navigation state");
}
return;
case NAV_MODES.GUIDES:
document.title = "[G] " + curNav.activeGuide + suffix;
return;
}
}
 
function isDecl(x) {
return "value" in x;
}
 
function isType(x) {
return "kind" in x && !("value" in x);
}
 
function isContainerType(x) {
return isType(x) && typeKindIsContainer(x.kind);
}
 
function typeShorthandName(expr) {
let resolvedExpr = resolveValue({ expr: expr });
if (!("type" in resolvedExpr)) {
return null;
function renderHome() {
if (moduleList.length == 0) {
domStatus.textContent = "sources.tar contains no modules";
domStatus.classList.remove("hidden");
return;
}
return renderModule(0);
}
let type = getType(resolvedExpr.type);
 
outer: for (let i = 0; i < 10000; i += 1) {
switch (type.kind) {
case typeKinds.Optional:
case typeKinds.Pointer:
let child = type.child;
let resolvedChild = resolveValue(child);
if ("type" in resolvedChild) {
type = getType(resolvedChild.type);
continue;
} else {
return null;
}
function renderModule(pkg_index) {
const root_decl = wasm_exports.find_module_root(pkg_index);
return renderDecl(root_decl);
}
 
function renderDecl(decl_index) {
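// Ask the wasm-side analyzer what kind of declaration this is and pick
// the matching page renderer.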
const category = wasm_exports.categorize_decl(decl_index, 0);
switch (category) {
case CAT_namespace:
return renderNamespacePage(decl_index);
case CAT_global_variable:
case CAT_primitive:
case CAT_global_const:
case CAT_type:
case CAT_type_type:
return renderGlobal(decl_index);
case CAT_function:
return renderFunction(decl_index);
case CAT_type_function:
return renderTypeFunction(decl_index);
case CAT_error_set:
return renderErrorSetPage(decl_index);
case CAT_alias:
return renderDecl(wasm_exports.get_aliasee());
default:
break outer;
}
 
if (i == 9999) throw "Exhausted typeShorthandName quota";
}
 
let name = undefined;
if (type.kind === typeKinds.Struct) {
name = "struct";
} else if (type.kind === typeKinds.Enum) {
name = "enum";
} else if (type.kind === typeKinds.Union) {
name = "union";
} else {
console.log("TODO: unhandled case in typeShortName");
return null;
}
 
return escapeHtml(name);
}
 
function typeKindIsContainer(typeKind) {
return (
typeKind === typeKinds.Struct ||
typeKind === typeKinds.Union ||
typeKind === typeKinds.Enum ||
typeKind === typeKinds.Opaque
);
}
 
function declCanRepresentTypeKind(typeKind) {
return typeKind === typeKinds.ErrorSet || typeKindIsContainer(typeKind);
}
 
//
// function findCteInRefPath(path) {
// for (let i = path.length - 1; i >= 0; i -= 1) {
// const ref = path[i];
// if ("string" in ref) continue;
// if ("comptimeExpr" in ref) return ref;
// if ("refPath" in ref) return findCteInRefPath(ref.refPath);
// return null;
// }
 
// return null;
// }
 
function resolveValue(value, trackDecls) {
let seenDecls = [];
let i = 0;
while (true) {
i += 1;
if (i >= 10000) {
throw "resolveValue quota exceeded"
}
 
if ("refPath" in value.expr) {
value = { expr: value.expr.refPath[value.expr.refPath.length - 1] };
continue;
}
 
if ("declRef" in value.expr) {
seenDecls.push(value.expr.declRef);
value = getDecl(value.expr.declRef).value;
continue;
}
 
if ("as" in value.expr) {
value = {
typeRef: zigAnalysis.exprs[value.expr.as.typeRefArg],
expr: zigAnalysis.exprs[value.expr.as.exprArg],
};
continue;
}
 
if (trackDecls) return { value, seenDecls };
return value;
}
}
 
function resolveGenericRet(genericFunc) {
if (genericFunc.generic_ret == null) return null;
let result = resolveValue({ expr: genericFunc.generic_ret });
 
let i = 0;
while (true) {
i += 1;
if (i >= 10000) {
throw "resolveGenericRet quota exceeded"
}
 
if ("call" in result.expr) {
let call = zigAnalysis.calls[result.expr.call];
let resolvedFunc = resolveValue({ expr: call.func });
if (!("type" in resolvedFunc.expr)) return null;
let callee = getType(resolvedFunc.expr.type);
if (!callee.generic_ret) return null;
result = resolveValue({ expr: callee.generic_ret });
continue;
}
 
return result;
}
}
 
// function typeOfDecl(decl){
// return decl.value.typeRef;
//
// let i = 0;
// while(i < 1000) {
// i += 1;
// console.assert(isDecl(decl));
// if ("type" in decl.value) {
// return ({ type: typeTypeId });
// }
//
//// if ("string" in decl.value) {
//// return ({ type: {
//// kind: typeKinds.Pointer,
//// size: pointerSizeEnum.One,
//// child: });
//// }
//
// if ("refPath" in decl.value) {
// decl = ({
// value: decl.value.refPath[decl.value.refPath.length -1]
// });
// continue;
// }
//
// if ("declRef" in decl.value) {
// decl = zigAnalysis.decls[decl.value.declRef];
// continue;
// }
//
// if ("int" in decl.value) {
// return decl.value.int.typeRef;
// }
//
// if ("float" in decl.value) {
// return decl.value.float.typeRef;
// }
//
// if ("array" in decl.value) {
// return decl.value.array.typeRef;
// }
//
// if ("struct" in decl.value) {
// return decl.value.struct.typeRef;
// }
//
// if ("comptimeExpr" in decl.value) {
// const cte = zigAnalysis.comptimeExprs[decl.value.comptimeExpr];
// return cte.typeRef;
// }
//
// if ("call" in decl.value) {
// const fn_call = zigAnalysis.calls[decl.value.call];
// let fn_decl = undefined;
// if ("declRef" in fn_call.func) {
// fn_decl = zigAnalysis.decls[fn_call.func.declRef];
// } else if ("refPath" in fn_call.func) {
// console.assert("declRef" in fn_call.func.refPath[fn_call.func.refPath.length -1]);
// fn_decl = zigAnalysis.decls[fn_call.func.refPath[fn_call.func.refPath.length -1].declRef];
// } else throw {};
//
// const fn_decl_value = resolveValue(fn_decl.value);
// console.assert("type" in fn_decl_value); //TODO handle comptimeExpr
// const fn_type = (zigAnalysis.types[fn_decl_value.type]);
// console.assert(fn_type.kind === typeKinds.Fn);
// return fn_type.ret;
// }
//
// if ("void" in decl.value) {
// return ({ type: typeTypeId });
// }
//
// if ("bool" in decl.value) {
// return ({ type: typeKinds.Bool });
// }
//
// console.log("TODO: handle in `typeOfDecl` more cases: ", decl);
// console.assert(false);
// throw {};
// }
// console.assert(false);
// return ({});
// }
function detectDeclPath(text, context) {
let result = "";
let separator = ":";
const components = text.split(".");
let curDeclOrType = undefined;
let curContext = context;
let limit = 10000;
while (curContext) {
limit -= 1;
if (limit == 0) {
throw "too many iterations";
}
curDeclOrType = findSubDecl(curContext, components[0]);
if (!curDeclOrType) {
if (curContext.parent_container == null) break;
curContext = getType(curContext.parent_container);
continue;
}
 
if (curContext == context) {
separator = '.';
result = location.hash + separator + components[0];
} else {
// We had to go up, which means we need a new path!
const canonPath = getCanonDeclPath(curDeclOrType.find_subdecl_idx);
if (!canonPath) return;
let lastModName = canonPath.modNames[canonPath.modNames.length - 1];
let fullPath = lastModName + ":" + canonPath.declNames.join(".");
separator = '.';
result = "#A;" + fullPath;
}
 
break;
}
 
if (!curDeclOrType) {
for (let i = 0; i < zigAnalysis.modules.length; i += 1){
const p = zigAnalysis.modules[i];
if (p.name == components[0]) {
curDeclOrType = getType(p.main);
result += "#A;" + components[0];
break;
}
}
}
 
if (!curDeclOrType) return null;
for (let i = 1; i < components.length; i += 1) {
curDeclOrType = findSubDecl(curDeclOrType, components[i]);
if (!curDeclOrType) return null;
result += separator + components[i];
separator = '.';
}
 
return result;
}
function renderGuides() {
renderTitle();
 
// set guide mode
domGuideSwitch.classList.add("active");
domApiSwitch.classList.remove("active");
domDocs.classList.add("hidden");
domSectNavAPI.classList.add("hidden");
domSectNavGuides.classList.remove("hidden");
domGuidesSection.classList.remove("hidden");
domActiveGuide.classList.add("hidden");
domSectSearchResults.classList.add("hidden");
domSectSearchAllResultsLink.classList.add("hidden");
domSectSearchNoResults.classList.add("hidden");
if (curNavSearch !== "") {
return renderSearchGuides();
}
 
let activeGuide = undefined;
outer: for (let i = 0; i < zigAnalysis.guideSections.length; i += 1) {
const section = zigAnalysis.guideSections[i];
for (let j = 0; j < section.guides.length; j += 1) {
const guide = section.guides[j];
if (guide.name == curNav.activeGuide) {
activeGuide = guide;
break outer;
}
throw new Error("unrecognized category " + category);
}
}
 
function renderSource(path) {
const decl_index = findFileRoot(path);
if (decl_index == null) return renderNotFound();
 
// navigation bar
const guideIndexDom = domListNavGuides.children[0].children[0];
const guideDom = domListNavGuides.children[1].children[0];
if (activeGuide){
guideDom.textContent = activeGuide.title;
guideDom.setAttribute("href", location.hash);
guideDom.classList.remove("hidden");
guideIndexDom.classList.remove("active");
} else {
guideDom.classList.add("hidden");
guideIndexDom.classList.add("active");
}
renderNavFancy(decl_index, [{
name: "[src]",
href: location.hash,
}]);
 
// main content
domGuidesMenuTitle.textContent = "Table of Contents";
if (activeGuide) {
if (activeGuide.toc != "") {
domGuideTocList.innerHTML = activeGuide.toc;
// add js callbacks to all links
function onLinkClick(ev) {
const link = ev.target.getAttribute("href");
skipNextHashChange = link;
location.replace(link);
scrollToHeading(":" + link.split(":")[1], true);
ev.preventDefault();
ev.stopPropagation();
}
for (let a of domGuideTocList.querySelectorAll("a")) {
a.addEventListener('click', onLinkClick, false);
}
domGuideTocList.classList.remove("hidden");
domGuideTocListEmtpy.classList.add("hidden");
} else {
domGuideTocListEmtpy.classList.remove("hidden");
domGuideTocList.classList.add("hidden");
}
let reader = new commonmark.Parser({
smart: true,
autoDoc: {
detectDeclPath: detectDeclPath,
}
});
let ast = reader.parse(activeGuide.body);
let writer = new commonmark.HtmlRenderer();
let result = writer.render(ast);
domActiveGuide.innerHTML = result;
if (curNav.activeGuideScrollTo !== null) {
scrollToHeading(curNav.activeGuideScrollTo, false);
}
} else {
domGuideTocList.classList.add("hidden");
domGuideTocListEmtpy.classList.remove("hidden");
if (zigAnalysis.guideSections.length > 1 || (zigAnalysis.guideSections[0].guides.length > 0)) {
renderGuidesIndex();
} else {
noGuidesAtAll();
}
domSourceText.innerHTML = declSourceHtml(decl_index);
 
domSectSource.classList.remove("hidden");
}
 
domGuidesMenu.classList.remove("hidden");
domActiveGuide.classList.remove("hidden");
}
function renderDeclHeading(decl_index) {
curNav.viewSourceHash = "#src/" + unwrapString(wasm_exports.decl_file_path(decl_index));
 
// TODO: ensure unique hashes
// TODO: hash also guides and their headings
function computeGuideHashes() {
for (let i = 1; i < zigAnalysis.guideSections.length; i += 1) {
const section = zigAnalysis.guideSections[i];
section.hash = "section-" + slugify(section.name || i);
}
}
 
function renderGuidesIndex() {
// main content
{
let html = "";
for (let i = 0; i < zigAnalysis.guideSections.length; i += 1) {
const section = zigAnalysis.guideSections[i];
if (i != 0) { // first section is the default section
html += "<h2 id='"+ section.hash +"'>" + section.name + "</h2>";
}
for (let guide of section.guides) {
html += "<ol><li><a href='"+ NAV_MODES.GUIDES + guide.name +"'>" + (guide.title || guide.name) + "</a></li>";
html += guide.toc + "</ol>";
}
}
domActiveGuide.innerHTML = html;
}
 
// sidebar / fast navigation
{
domGuidesMenuTitle.textContent = "Sections";
if (zigAnalysis.guideSections.length > 1) {
let html = "";
for (let i = 1; i < zigAnalysis.guideSections.length; i += 1) {
const section = zigAnalysis.guideSections[i];
html += "<li><a href='"+ NAV_MODES.GUIDES + ":" + section.hash +"'>" + section.name + "</a></li>";
}
domGuideTocList.innerHTML = "<ul>"+html+"</ul>";
 
function onLinkClick(ev) {
const link = ev.target.getAttribute("href");
skipNextHashChange = link;
location.replace(link);
scrollToHeading(link.split(":")[1], true);
ev.preventDefault();
ev.stopPropagation();
}
for (let a of domGuideTocList.querySelectorAll("a")) {
a.addEventListener('click', onLinkClick, false);
}
domGuideTocList.classList.remove("hidden");
domGuideTocListEmtpy.classList.add("hidden");
} else {
domGuideTocList.classList.add("hidden");
domGuideTocListEmtpy.classList.remove("hidden");
}
}
}
 
function noGuidesAtAll() {
const root_file_idx = zigAnalysis.modules[zigAnalysis.rootMod].file;
const root_file_name = getFile(root_file_idx).name;
let reader = new commonmark.Parser({smart: true});
let ast = reader.parse(`
# No Guides
These autodocs don't contain any guides.

While the API section is a reference autogenerated from Zig source code,
guides are meant to be handwritten explanations that provide, for example:
 
- how-to explanations for common use-cases
- technical documentation
- information about advanced usage patterns
 
You can add guides by specifying which markdown files to include
in the top level doc comment of your root file, like so:
 
(At the top of *${root_file_name}*)
\`\`\`
//!zig-autodoc-guide: intro.md
//!zig-autodoc-guide: quickstart.md
//!zig-autodoc-guide: advanced-docs/advanced-stuff.md
\`\`\`
 
You can also create sections to group guides together:
 
\`\`\`
//!zig-autodoc-section: CLI Usage
//!zig-autodoc-guide: cli-basics.md
//!zig-autodoc-guide: cli-advanced.md
\`\`\`
 
**Note that this feature is still under heavy development so expect bugs**
**and missing features!**
 
Happy writing!
`);
 
let writer = new commonmark.HtmlRenderer();
let result = writer.render(ast);
domActiveGuide.innerHTML = result;
 
}
 
function renderApi() {
// set Api mode
domApiSwitch.classList.add("active");
domGuideSwitch.classList.remove("active");
domGuidesSection.classList.add("hidden");
domSectNavAPI.classList.remove("hidden");
domSectNavGuides.classList.add("hidden");
domDocs.classList.remove("hidden");
domGuidesMenu.classList.add("hidden");
domStatus.classList.add("hidden");
domFnProto.classList.add("hidden");
domSectParams.classList.add("hidden");
domTldDocs.classList.add("hidden");
domSectTypes.classList.add("hidden");
domSectTests.classList.add("hidden");
domSectDocTests.classList.add("hidden");
domSectNamespaces.classList.add("hidden");
domListNamespacesLeft.classList.add("hidden");
domListNamespacesRight.classList.add("hidden");
domNoDocsNamespaces.classList.add("hidden");
domSectErrSets.classList.add("hidden");
domSectFns.classList.add("hidden");
domSectFields.classList.add("hidden");
domSectSearchResults.classList.add("hidden");
domSectSearchAllResultsLink.classList.add("hidden");
domSectSearchNoResults.classList.add("hidden");
domHdrName.classList.add("hidden");
domSectFnErrors.classList.add("hidden");
domFnExamples.classList.add("hidden");
domFnNoExamples.classList.add("hidden");
domFnSourceLink.classList.add("hidden");
domDeclNoRef.classList.add("hidden");
domFnErrorsAnyError.classList.add("hidden");
domTableFnErrors.classList.add("hidden");
domSectGlobalVars.classList.add("hidden");
domSectValues.classList.add("hidden");
 
renderTitle();
 
if (curNavSearch !== "") {
return renderSearchAPI();
}
 
let rootMod = zigAnalysis.modules[zigAnalysis.rootMod];
let mod = rootMod;
curNav.modObjs = [mod];
for (let i = 0; i < curNav.modNames.length; i += 1) {
let childMod = zigAnalysis.modules[mod.table[curNav.modNames[i]]];
if (childMod == null) {
return render404();
}
mod = childMod;
curNav.modObjs.push(mod);
}
 
let currentType = getType(mod.main);
curNav.declObjs = [currentType];
let lastDecl = mod.main;
for (let i = 0; i < curNav.declNames.length; i += 1) {
let childDecl = findSubDecl(currentType, curNav.declNames[i]);
window.last_decl = childDecl;
if (childDecl == null || childDecl.is_private === true) {
return render404();
}
lastDecl = childDecl;
 
let childDeclValue = resolveValue(childDecl.value).expr;
if ("type" in childDeclValue) {
const t = getType(childDeclValue.type);
if (t.kind != typeKinds.Fn) {
childDecl = t;
}
}
 
currentType = childDecl;
curNav.declObjs.push(currentType);
}
 
 
 
window.x = currentType;
 
renderNav();
 
let last = curNav.declObjs[curNav.declObjs.length - 1];
let lastIsDecl = isDecl(last);
let lastIsType = isType(last);
let lastIsContainerType = isContainerType(last);
 
renderDocTest(lastDecl);
 
if (lastIsContainerType) {
return renderContainer(last);
}
 
if (!lastIsDecl && !lastIsType) {
return renderUnknownDecl(last);
}
 
if (lastIsType) {
return renderType(last);
}
 
if (lastIsDecl && last.kind === "var") {
return renderVar(last);
}
 
if (lastIsDecl && last.kind === "const") {
const value = resolveValue(last.value);
if ("type" in value.expr) {
let typeObj = getType(value.expr.type);
if (typeObj.kind === typeKinds.Fn) {
return renderFn(last);
}
}
return renderValue(last);
}
 
}
 
function render() {
switch (curNav.mode) {
case NAV_MODES.API:
return renderApi();
case NAV_MODES.GUIDES:
return renderGuides();
default:
throw "?";
}
}
 
 
function renderDocTest(decl) {
if (!decl.decltest) return;
const astNode = getAstNode(decl.decltest);
domSectDocTests.classList.remove("hidden");
domDocTestsCode.innerHTML = renderTokens(
DecoratedTokenizer(astNode.code, decl));
}
 
function renderUnknownDecl(decl) {
domDeclNoRef.classList.remove("hidden");
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
} else {
domTldDocs.innerHTML =
"<p>There are no doc comments for this declaration.</p>";
}
domTldDocs.classList.remove("hidden");
}
 
function typeIsErrSet(typeIndex) {
let typeObj = getType(typeIndex);
return typeObj.kind === typeKinds.ErrorSet;
}
 
function typeIsStructWithNoFields(typeIndex) {
let typeObj = getType(typeIndex);
if (typeObj.kind !== typeKinds.Struct) return false;
return typeObj.field_types.length == 0;
}
 
function typeIsGenericFn(typeIndex) {
let typeObj = getType(typeIndex);
if (typeObj.kind !== typeKinds.Fn) {
return false;
}
return typeObj.generic_ret != null;
}
 
function renderFn(fnDecl) {
if ("refPath" in fnDecl.value.expr) {
let last = fnDecl.value.expr.refPath.length - 1;
let lastExpr = fnDecl.value.expr.refPath[last];
console.assert("declRef" in lastExpr);
fnDecl = getDecl(lastExpr.declRef);
}
 
let value = resolveValue(fnDecl.value);
console.assert("type" in value.expr);
let typeObj = getType(value.expr.type);
 
domFnProtoCode.innerHTML = renderTokens(ex(value.expr, { fnDecl: fnDecl }));
domFnSourceLink.classList.remove("hidden");
domFnSourceLink.innerHTML = "[<a target=\"_blank\" href=\"" + sourceFileLink(fnDecl) + "\">src</a>]";
 
let docsSource = null;
let srcNode = getAstNode(fnDecl.src);
if (srcNode.docs != null) {
docsSource = srcNode.docs;
}
 
renderFnParamDocs(fnDecl, typeObj);
 
let retExpr = resolveValue({ expr: typeObj.ret }).expr;
if ("type" in retExpr) {
let retIndex = retExpr.type;
let errSetTypeIndex = null;
let retType = getType(retIndex);
if (retType.kind === typeKinds.ErrorSet) {
errSetTypeIndex = retIndex;
} else if (retType.kind === typeKinds.ErrorUnion) {
errSetTypeIndex = retType.err.type;
}
if (errSetTypeIndex != null) {
let errSetType = getType(errSetTypeIndex);
renderErrorSet(errSetType);
}
}
 
let protoSrcIndex = fnDecl.src;
if (typeIsGenericFn(value.expr.type)) {
// does the generic_ret contain a container?
var resolvedGenericRet = resolveValue({ expr: typeObj.generic_ret });
 
if ("call" in resolvedGenericRet.expr) {
let call = zigAnalysis.calls[resolvedGenericRet.expr.call];
let resolvedFunc = resolveValue({ expr: call.func });
if (!("type" in resolvedFunc.expr)) return;
let callee = getType(resolvedFunc.expr.type);
if (!callee.generic_ret) return;
resolvedGenericRet = resolveValue({ expr: callee.generic_ret });
}
 
// TODO: see if unwrapping the `as` here is a good idea or not.
if ("as" in resolvedGenericRet.expr) {
resolvedGenericRet = {
expr: zigAnalysis.exprs[resolvedGenericRet.expr.as.exprArg],
};
}
 
if (!("type" in resolvedGenericRet.expr)) return;
const genericType = getType(resolvedGenericRet.expr.type);
if (isContainerType(genericType)) {
renderContainer(genericType);
}
 
// old code
// let instantiations = nodesToFnsMap[protoSrcIndex];
// let calls = nodesToCallsMap[protoSrcIndex];
// if (instantiations == null && calls == null) {
// domFnNoExamples.classList.remove("hidden");
// } else if (calls != null) {
// // if (fnObj.combined === undefined) fnObj.combined = allCompTimeFnCallsResult(calls);
// if (fnObj.combined != null) renderContainer(fnObj.combined);
 
// resizeDomList(domListFnExamples, calls.length, '<li></li>');
 
// for (let callI = 0; callI < calls.length; callI += 1) {
// let liDom = domListFnExamples.children[callI];
// liDom.innerHTML = getCallHtml(fnDecl, calls[callI]);
// }
 
// domFnExamples.classList.remove("hidden");
// } else if (instantiations != null) {
// // TODO
// }
} else {
domFnExamples.classList.add("hidden");
domFnNoExamples.classList.add("hidden");
}
 
let protoSrcNode = getAstNode(protoSrcIndex);
if (
docsSource == null &&
protoSrcNode != null &&
protoSrcNode.docs != null
) {
docsSource = protoSrcNode.docs;
}
if (docsSource != null) {
domTldDocs.innerHTML = markdown(docsSource, fnDecl);
domTldDocs.classList.remove("hidden");
}
domFnProto.classList.remove("hidden");
}
 
function renderFnParamDocs(fnDecl, typeObj) {
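// Render the parameters section: one entry per parameter that has a doc comment.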
let docCount = 0;
 
let fnNode = getAstNode(fnDecl.src);
let fields = fnNode.fields;
if (fields === null) {
fields = getAstNode(typeObj.src).fields;
}
let isVarArgs = typeObj.is_var_args;
 
for (let i = 0; i < fields.length; i += 1) {
let field = fields[i];
let fieldNode = getAstNode(field);
if (fieldNode.docs != null) {
docCount += 1;
}
}
if (docCount == 0) {
return;
}
 
resizeDomList(domListParams, docCount, "<div></div>");
let domIndex = 0;
 
for (let i = 0; i < fields.length; i += 1) {
let field = fields[i];
let fieldNode = getAstNode(field);
let docs = fieldNode.docs;
if (fieldNode.docs == null) {
continue;
}
let docsNonEmpty = docs !== "";
let divDom = domListParams.children[domIndex];
domIndex += 1;
 
let value = typeObj.params[i];
let preClass = docsNonEmpty ? ' class="fieldHasDocs"' : "";
let html = "<pre" + preClass + ">" + renderTokens((function*() {
yield Tok.identifier(fieldNode.name);
yield Tok.colon;
yield Tok.space;
if (isVarArgs && i === typeObj.params.length - 1) {
yield Tok.period;
yield Tok.period;
yield Tok.period;
} else {
yield* ex(value, {});
}
yield Tok.comma;
}()));
 
html += "</pre>";
 
if (docsNonEmpty) {
html += '<div class="fieldDocs">' + markdown(docs) + "</div>";
}
divDom.innerHTML = html;
}
domSectParams.classList.remove("hidden");
}
 
function renderNav() {
let len = curNav.modNames.length + curNav.declNames.length;
resizeDomList(domListNavAPI, len, '<li><a href="#"></a></li>');
let list = [];
let hrefModNames = [];
let hrefDeclNames = [];
for (let i = 0; i < curNav.modNames.length; i += 1) {
hrefModNames.push(curNav.modNames[i]);
let name = curNav.modNames[i];
list.push({
name: name,
link: navLink(hrefModNames, hrefDeclNames),
});
}
for (let i = 0; i < curNav.declNames.length; i += 1) {
hrefDeclNames.push(curNav.declNames[i]);
list.push({
name: curNav.declNames[i],
link: navLink(hrefModNames, hrefDeclNames),
});
}
 
for (let i = 0; i < list.length; i += 1) {
let liDom = domListNavAPI.children[i];
let aDom = liDom.children[0];
aDom.textContent = list[i].name;
aDom.setAttribute("href", list[i].link);
if (i + 1 == list.length) {
aDom.classList.add("active");
} else {
aDom.classList.remove("active");
}
}
 
}
 
 
function render404() {
domStatus.textContent = "404 Not Found";
domStatus.classList.remove("hidden");
}
 
// function renderModList() {
// const rootMod = zigAnalysis.modules[zigAnalysis.rootMod];
// let list = [];
// for (let key in rootMod.table) {
// let modIndex = rootMod.table[key];
// if (zigAnalysis.modules[modIndex] == null) continue;
// if (key == rootMod.name) continue;
// list.push({
// name: key,
// mod: modIndex,
// });
// }
 
// {
// let aDom = domSectMainMod.children[1].children[0].children[0];
// aDom.textContent = rootMod.name;
// aDom.setAttribute("href", navLinkMod(zigAnalysis.rootMod));
// if (rootMod.name === curNav.modNames[0]) {
// aDom.classList.add("active");
// } else {
// aDom.classList.remove("active");
// }
// domSectMainMod.classList.remove("hidden");
// }
 
// list.sort(function (a, b) {
// return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase());
// });
 
// if (list.length !== 0) {
// resizeDomList(domListMods, list.length, '<li><a href="#"></a></li>');
// for (let i = 0; i < list.length; i += 1) {
// let liDom = domListMods.children[i];
// let aDom = liDom.children[0];
// aDom.textContent = list[i].name;
// aDom.setAttribute("href", navLinkMod(list[i].mod));
// if (list[i].name === curNav.modNames[0]) {
// aDom.classList.add("active");
// } else {
// aDom.classList.remove("active");
// }
// }
 
// domSectMods.classList.remove("hidden");
// }
// }
 
function navLink(modNames, declNames, callName) {
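// Build a navigation link from module path, decl path, and optional call name, using the current nav mode as prefix.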
let base = curNav.mode;
 
if (modNames.length === 0 && declNames.length === 0) {
return base;
} else if (declNames.length === 0 && callName == null) {
return base + modNames.join(".");
} else if (callName == null) {
return base + modNames.join(".") + ":" + declNames.join(".");
} else {
return (
base + modNames.join(".") + ":" + declNames.join(".") + ";" + callName
);
}
}
 
function navLinkMod(modIndex) {
return navLink(canonModPaths[modIndex], []);
}
 
function navLinkDecl(childName) {
return navLink(curNav.modNames, curNav.declNames.concat([childName]));
}
 
function findDeclNavLink(declName) {
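// Search the current decl chain (innermost first) for declName; returns a nav link, a source link for private decls,
// or undefined when the name cannot be resolved.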
if (curNav.declObjs.length == 0) return null;
const curFile = getAstNode(curNav.declObjs[curNav.declObjs.length - 1].src).file;
 
for (let i = curNav.declObjs.length - 1; i >= 0; i--) {
const curDecl = curNav.declObjs[i];
const curDeclName = curNav.declNames[i - 1];
if (curDeclName == declName) {
const declPath = curNav.declNames.slice(0, i);
return navLink(curNav.modNames, declPath);
}
 
const subDecl = findSubDecl(curDecl, declName);
 
if (subDecl != null) {
if (subDecl.is_private === true) {
return sourceFileLink(subDecl);
} else {
const declPath = curNav.declNames.slice(0, i).concat([declName]);
return navLink(curNav.modNames, declPath);
}
}
}
 
//throw("could not resolve links for '" + declName + "'");
}
 
//
// function navLinkCall(callObj) {
// let declNamesCopy = curNav.declNames.concat([]);
// let callName = (declNamesCopy.pop());
 
// callName += '(';
// for (let arg_i = 0; arg_i < callObj.args.length; arg_i += 1) {
// if (arg_i !== 0) callName += ',';
// let argObj = callObj.args[arg_i];
// callName += getValueText(argObj, argObj, false, false);
// }
// callName += ')';
 
// declNamesCopy.push(callName);
// return navLink(curNav.modNames, declNamesCopy);
// }
 
function resizeDomListDl(dlDom, desiredLen) {
// add the missing dom entries
for (let i = dlDom.childElementCount / 2; i < desiredLen; i += 1) {
dlDom.insertAdjacentHTML("beforeend", "<dt></dt><dd></dd>");
}
// remove extra dom entries
while (desiredLen < dlDom.childElementCount / 2) {
dlDom.removeChild(dlDom.lastChild);
dlDom.removeChild(dlDom.lastChild);
}
}
 
function resizeDomList(listDom, desiredLen, templateHtml) {
// add the missing dom entries
for (let i = listDom.childElementCount; i < desiredLen; i += 1) {
listDom.insertAdjacentHTML("beforeend", templateHtml);
}
// remove extra dom entries
while (desiredLen < listDom.childElementCount) {
listDom.removeChild(listDom.lastChild);
}
}
 
function walkResultTypeRef(wr) {
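// Return the walk result's typeRef if present, otherwise resolve and recurse;
// falls back to an `undefined` expression when resolution stops making progress.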
if (wr.typeRef) return wr.typeRef;
let resolved = resolveValue(wr);
if (wr === resolved) {
return { "undefined": {} };
}
return walkResultTypeRef(resolved);
}
 
function* DecoratedTokenizer(src, context) {
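// Wrap Tokenizer, attaching a decl link to any identifier token that detectDeclPath can resolve in the given context.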
let tok_it = Tokenizer(src);
for (let t of tok_it) {
if (t.tag == Tag.identifier) {
const link = detectDeclPath(t.src, context);
if (link) {
t.link = link;
}
}
 
yield t;
}
}
 
 
function renderSingleToken(t) {
 
if (t.tag == Tag.whitespace) {
return t.src;
}
 
let src = t.src;
// if (t.tag == Tag.identifier) {
// src = escapeHtml(src);
// }
let result = "";
if (t.tag == Tag.identifier && isSimpleType(t.src)) {
result = `<span class="zig_type">${src}</span>`;
} else if (t.tag == Tag.identifier && isSpecialIndentifier(t.src)) {
result = `<span class="zig_special">${src}</span>`;
} else if (t.tag == Tag.identifier && t.fnDecl) {
result = `<span class="zig_fn">${src}</span>`;
} else if (t.tag == Tag.identifier && t.isDecl) {
result = `<span class="zig_decl_identifier">${src}</span>`;
} else {
result = `<span class="zig_${t.tag}">${src}</span>`;
}
 
if (t.link) {
result = `<a href="${t.link}">` + result + "</a>";
}
 
return result;
}
 
function renderTokens(tok_it) {
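// Drain a token iterator into a single HTML string, stopping at EOF and guarding against runaway iteration.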
var html = [];
 
const max_iter = 100000;
let i = 0;
for (const t of tok_it) {
i += 1;
if (i > max_iter)
throw "too many iterations";
 
if (t.tag == Tag.eof)
break;
 
html.push(renderSingleToken(t));
}
 
return html.join("");
}
 
function* ex(expr, opts) {
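// Generator that yields syntax-highlighting tokens for a serialized expression from zigAnalysis.exprs,
// recursing into sub-expressions. Unhandled expression kinds throw.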
switch (Object.keys(expr)[0]) {
default:
throw "this expression is not implemented yet: " + Object.keys(expr)[0];
case "comptimeExpr": {
const src = zigAnalysis.comptimeExprs[expr.comptimeExpr].code;
yield* DecoratedTokenizer(src);
return;
}
case "declName": {
yield { src: expr.declName, tag: Tag.identifier };
return;
}
case "declRef": {
const name = getDecl(expr.declRef).name;
const link = declLinkOrSrcLink(expr.declRef);
if (link) {
yield { src: name, tag: Tag.identifier, isDecl: true, link };
} else {
yield { src: name, tag: Tag.identifier, isDecl: true };
}
return;
}
case "refPath": {
for (let i = 0; i < expr.refPath.length; i += 1) {
if (i > 0) yield Tok.period;
yield* ex(expr.refPath[i], opts);
}
return;
}
case "fieldRef": {
const field_idx = expr.fieldRef.index;
const type = getType(expr.fieldRef.type);
const field = getAstNode(type.src).fields[field_idx];
const name = getAstNode(field).name;
yield { src: name, tag: Tag.identifier };
return;
}
case "bool": {
if (expr.bool) {
yield { src: "true", tag: Tag.identifier };
return;
}
yield { src: "false", tag: Tag.identifier };
return;
}
 
case "unreachable": {
yield { src: "unreachable", tag: Tag.identifier };
return;
}
 
case "&": {
yield { src: "&", tag: Tag.ampersand };
yield* ex(zigAnalysis.exprs[expr["&"]], opts);
return;
}
 
case "load": {
yield* ex(zigAnalysis.exprs[expr.load], opts);
yield Tok.period;
yield Tok.asterisk;
return;
}
 
case "call": {
 
let call = zigAnalysis.calls[expr.call];
 
switch (Object.keys(call.func)[0]) {
default:
throw "TODO";
case "declRef":
case "refPath": {
yield* ex(call.func, opts);
break;
}
}
yield Tok.l_paren;
 
for (let i = 0; i < call.args.length; i++) {
if (i != 0) {
yield Tok.comma;
yield Tok.space;
}
yield* ex(call.args[i], opts);
}
 
yield Tok.r_paren;
return;
}
case "typeOf_peer": {
yield { src: "@TypeOf", tag: Tag.builtin };
yield { src: "(", tag: Tag.l_paren };
for (let i = 0; i < expr.typeOf_peer.length; i+=1) {
const elem = zigAnalysis.exprs[expr.typeOf_peer[i]];
yield* ex(elem, opts);
if (i != expr.typeOf_peer.length - 1) {
yield Tok.comma;
yield Tok.space;
}
}
yield { src: ")", tag: Tag.r_paren };
return;
}
case "sizeOf": {
const sizeOf = zigAnalysis.exprs[expr.sizeOf];
yield { src: "@sizeOf", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(sizeOf, opts);
yield Tok.r_paren;
return;
}
case "bitSizeOf": {
const bitSizeOf = zigAnalysis.exprs[expr.bitSizeOf];
yield { src: "@bitSizeOf", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(bitSizeOf, opts);
yield Tok.r_paren;
return;
}
 
case "as": {
const exprArg = zigAnalysis.exprs[expr.as.exprArg];
yield* ex(exprArg, opts);
return;
}
 
case "int": {
yield { src: expr.int, tag: Tag.number_literal };
return;
}
 
case "int_big": {
if (expr.int_big.negated) {
yield { src: "-", tag: Tag.minus };
}
yield { src: expr.int_big.value, tag: Tag.number_literal };
return;
}
 
case "float": {
let float = expr.float;
if (Number.isSafeInteger(float)) float = float.toFixed(1);
yield { src: float, tag: Tag.number_literal };
return;
}
 
case "float128": {
yield { src: expr.float128, tag: Tag.number_literal };
return;
}
 
case "array": {
yield Tok.period;
yield Tok.l_brace;
for (let i = 0; i < expr.array.length; i++) {
if (i != 0) {
yield Tok.comma;
yield Tok.space;
}
let elem = zigAnalysis.exprs[expr.array[i]];
yield* ex(elem, opts);
}
yield Tok.r_brace;
return;
}
 
case "compileError": {
yield { src: "@compileError", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(zigAnalysis.exprs[expr.compileError], opts);
yield Tok.r_paren;
return;
}
 
case "optionalPayload": {
const opt = zigAnalysis.exprs[expr.optionalPayload];
yield* ex(opt, opts);
yield Tok.period;
yield Tok.question_mark;
return;
}
 
case "elemVal": {
const lhs = zigAnalysis.exprs[expr.elemVal.lhs];
const rhs = zigAnalysis.exprs[expr.elemVal.rhs];
yield* ex(lhs, opts);
yield Tok.l_bracket;
yield* ex(rhs, opts);
yield Tok.r_bracket;
return;
}
case "sliceIndex": {
const slice = zigAnalysis.exprs[expr.sliceIndex];
yield* ex(slice, opts);
return;
}
 
case "slice": {
const slice = expr.slice;
const lhs = zigAnalysis.exprs[slice.lhs];
const start = zigAnalysis.exprs[slice.start];
yield* ex(lhs, opts);
yield Tok.l_bracket;
yield* ex(start, opts);
yield Tok.period;
yield Tok.period;
if (slice.end !== null) {
const end = zigAnalysis.exprs[slice.end];
yield* ex(end, opts);
}
if (slice.sentinel !== null) {
yield Tok.colon;
const sent = zigAnalysis.exprs[slice.sentinel];
yield* ex(sent, opts);
}
yield Tok.r_bracket;
return;
}
 
case "sliceLength": {
const slice = expr.sliceLength;
const lhs = zigAnalysis.exprs[slice.lhs];
const start = zigAnalysis.exprs[slice.start];
const len = zigAnalysis.exprs[slice.len];
yield* ex(lhs, opts);
yield Tok.l_bracket;
yield* ex(start, opts);
yield Tok.period;
yield Tok.period;
yield Tok.r_bracket;
yield Tok.l_bracket;
yield { src: "0", tag: Tag.number_literal };
yield Tok.period;
yield Tok.period;
yield* ex(len, opts);
if (slice.sentinel !== null) {
yield Tok.colon;
const sent = zigAnalysis.exprs[slice.sentinel];
yield* ex(sent, opts);
}
yield Tok.r_bracket;
return;
}
 
case "string": {
yield { src: '"' + expr.string + '"', tag: Tag.string_literal };
return;
}
 
case "struct": {
yield Tok.period;
yield Tok.l_brace;
if (expr.struct.length > 0) yield Tok.space;
 
for (let i = 0; i < expr.struct.length; i++) {
const fv = expr.struct[i];
const field_name = fv.name;
const field_expr = zigAnalysis.exprs[fv.val.expr];
const field_value = ex(field_expr, opts);
yield Tok.period;
yield { src: field_name, tag: Tag.identifier };
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* field_value;
if (i !== expr.struct.length - 1) {
yield Tok.comma;
yield Tok.space;
} else {
yield Tok.space;
}
}
yield Tok.r_brace;
return;
}
 
case "unOpIndex": {
const unOp = zigAnalysis.exprs[expr.unOpIndex];
yield* ex(unOp, opts);
return;
}
 
case "unOp": {
const param = zigAnalysis.exprs[expr.unOp.param];
 
switch (expr.unOp.name) {
case "bit_not": {
yield { src: "~", tag: Tag.tilde };
break;
}
case "bool_not": {
yield { src: "!", tag: Tag.bang };
break;
}
case "negate_wrap": {
yield { src: "-%", tag: Tag.minus_percent };
break;
}
case "negate": {
yield { src: "-", tag: Tag.minus };
break;
}
default:
throw "unOp: `" + expr.unOp.name + "` not implemented yet!"
}
 
if (param["binOpIndex"] !== undefined) {
yield Tok.l_paren;
yield* ex(param, opts);
yield Tok.r_paren;
} else {
yield* ex(param, opts);
}
return;
}
case "fieldVal": {
const fv = expr.fieldVal;
const field_name = fv.name;
yield { src: field_name, tag: Tag.identifier };
return;
}
 
case "binOpIndex": {
const binOp = zigAnalysis.exprs[expr.binOpIndex];
yield* ex(binOp, opts);
return;
}
 
case "binOp": {
const lhsOp = zigAnalysis.exprs[expr.binOp.lhs];
const rhsOp = zigAnalysis.exprs[expr.binOp.rhs];
 
if (lhsOp["binOpIndex"] !== undefined) {
yield Tok.l_paren;
yield* ex(lhsOp, opts);
yield Tok.r_paren;
} else {
yield* ex(lhsOp, opts);
}
 
yield Tok.space;
 
switch (expr.binOp.name) {
case "add": {
yield { src: "+", tag: Tag.plus };
break;
}
case "addwrap": {
yield { src: "+%", tag: Tag.plus_percent };
break;
}
case "add_sat": {
yield { src: "+|", tag: Tag.plus_pipe };
break;
}
case "sub": {
yield { src: "-", tag: Tag.minus };
break;
}
case "subwrap": {
yield { src: "-%", tag: Tag.minus_percent };
break;
}
case "sub_sat": {
yield { src: "-|", tag: Tag.minus_pipe };
break;
}
case "mul": {
yield { src: "*", tag: Tag.asterisk };
break;
}
case "mulwrap": {
yield { src: "*%", tag: Tag.asterisk_percent };
break;
}
case "mul_sat": {
yield { src: "*|", tag: Tag.asterisk_pipe };
break;
}
case "div": {
yield { src: "/", tag: Tag.slash };
break;
}
case "xor": {
yield { src: "^", tag: Tag.caret };
break;
}
case "shl": {
yield { src: "<<", tag: Tag.angle_bracket_angle_bracket_left };
break;
}
case "shl_sat": {
yield { src: "<<|", tag: Tag.angle_bracket_angle_bracket_left_pipe };
break;
}
case "shr": {
yield { src: ">>", tag: Tag.angle_bracket_angle_bracket_right };
break;
}
case "bit_or": {
yield { src: "|", tag: Tag.pipe };
break;
}
case "bit_and": {
yield { src: "&", tag: Tag.ampersand };
break;
}
case "array_cat": {
yield { src: "++", tag: Tag.plus_plus };
break;
}
case "array_mul": {
yield { src: "**", tag: Tag.asterisk_asterisk };
break;
}
case "cmp_eq": {
yield { src: "==", tag: Tag.equal_equal };
break;
}
case "cmp_neq": {
yield { src: "!=", tag: Tag.bang_equal };
break;
}
case "cmp_gt": {
yield { src: ">", tag: Tag.angle_bracket_right };
break;
}
case "cmp_gte": {
yield { src: ">=", tag: Tag.angle_bracket_right_equal };
break;
}
case "cmp_lt": {
yield { src: "<", tag: Tag.angle_bracket_left };
break;
}
case "cmp_lte": {
yield { src: "<=", tag: Tag.angle_bracket_left_equal };
break;
}
case "bool_br_and": {
yield { src: "and", tag: Tag.keyword_and };
break;
}
case "bool_br_or": {
yield { src: "or", tag: Tag.keyword_or };
break;
}
default:
console.log("operator not handled yet or doesn't exist!");
}
 
yield Tok.space;
 
if (rhsOp["binOpIndex"] !== undefined) {
yield Tok.l_paren;
yield* ex(rhsOp, opts);
yield Tok.r_paren;
} else {
yield* ex(rhsOp, opts);
}
return;
}
 
case "builtinIndex": {
const builtin = zigAnalysis.exprs[expr.builtinIndex];
yield* ex(builtin, opts);
return;
}
 
case "builtin": {
const builtin = expr.builtin;
let name = "@";
const param = zigAnalysis.exprs[builtin.param];
switch (builtin.name) {
case "align_of": { name += "alignOf"; break; }
case "int_from_bool": { name += "intFromBool"; break; }
case "embed_file": { name += "embedFile"; break; }
case "error_name": { name += "errorName"; break; }
case "panic": { name += "panic"; break; }
case "set_runtime_safety": { name += "setRuntimeSafety"; break; }
case "sqrt": { name += "sqrt"; break; }
case "sin": { name += "sin"; break; }
case "cos": { name += "cos"; break; }
case "tan": { name += "tan"; break; }
case "exp": { name += "exp"; break; }
case "exp2": { name += "exp2"; break; }
case "log": { name += "log"; break; }
case "log2": { name += "log2"; break; }
case "log10": { name += "log10"; break; }
case "fabs": { name += "fabs"; break; }
case "floor": { name += "floor"; break; }
case "ceil": { name += "ceil"; break; }
case "trunc": { name += "trunc"; break; }
case "round": { name += "round"; break; }
case "tag_name": { name += "tagName"; break; }
case "type_name": { name += "typeName"; break; }
case "type_info": { name += "typeInfo"; break; }
case "frame_type": { name += "Frame"; break; }
case "frame_size": { name += "frameSize"; break; }
case "int_from_ptr": { name += "intFromPtr"; break; }
case "int_from_enum": { name += "intFromEnum"; break; }
case "clz": { name += "clz"; break; }
case "ctz": { name += "ctz"; break; }
case "pop_count": { name += "popCount"; break; }
case "byte_swap": { name += "byteSwap"; break; }
case "bit_reverse": { name += "bitReverse"; break; }
default: throw "builtin: `" + builtin.name + "` not implemented yet!";
}
yield { src: name, tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(param, opts);
yield Tok.r_paren;
return;
}
 
case "builtinBinIndex": {
const builtinBinIndex = zigAnalysis.exprs[expr.builtinBinIndex];
yield* ex(builtinBinIndex, opts);
return;
}
 
case "builtinBin": {
const lhsOp = zigAnalysis.exprs[expr.builtinBin.lhs];
const rhsOp = zigAnalysis.exprs[expr.builtinBin.rhs];
 
let builtinName = "@";
switch (expr.builtinBin.name) {
case "int_from_float": {
builtinName += "intFromFloat";
break;
}
case "float_from_int": {
builtinName += "floatFromInt";
break;
}
case "ptr_from_int": {
builtinName += "ptrFromInt";
break;
}
case "enum_from_int": {
builtinName += "enumFromInt";
break;
}
case "float_cast": {
builtinName += "floatCast";
break;
}
case "int_cast": {
builtinName += "intCast";
break;
}
case "ptr_cast": {
builtinName += "ptrCast";
break;
}
case "const_cast": {
builtinName += "constCast";
break;
}
case "volatile_cast": {
builtinName += "volatileCast";
break;
}
case "truncate": {
builtinName += "truncate";
break;
}
case "has_decl": {
builtinName += "hasDecl";
break;
}
case "has_field": {
builtinName += "hasField";
break;
}
case "bit_reverse": {
builtinName += "bitReverse";
break;
}
case "div_exact": {
builtinName += "divExact";
break;
}
case "div_floor": {
builtinName += "divFloor";
break;
}
case "div_trunc": {
builtinName += "divTrunc";
break;
}
case "mod": {
builtinName += "mod";
break;
}
case "rem": {
builtinName += "rem";
break;
}
case "mod_rem": {
builtinName += "rem";
break;
}
case "shl_exact": {
builtinName += "shlExact";
break;
}
case "shr_exact": {
builtinName += "shrExact";
break;
}
case "bitcast": {
builtinName += "bitCast";
break;
}
case "align_cast": {
builtinName += "alignCast";
break;
}
case "vector_type": {
builtinName += "Vector";
break;
}
case "reduce": {
builtinName += "reduce";
break;
}
case "splat": {
builtinName += "splat";
break;
}
case "offset_of": {
builtinName += "offsetOf";
break;
}
case "bit_offset_of": {
builtinName += "bitOffsetOf";
break;
}
default:
console.log("builtin function not handled yet or doesn't exist!");
}
 
yield { src: builtinName, tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(lhsOp, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(rhsOp, opts);
yield Tok.r_paren;
return;
}
 
case "unionInit": {
let ui = expr.unionInit;
let type = zigAnalysis.exprs[ui.type];
let field = zigAnalysis.exprs[ui.field];
let init = zigAnalysis.exprs[ui.init];
yield { src: "@unionInit", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(type, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(field, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(init, opts);
yield Tok.r_paren;
return;
}
 
case "builtinCall": {
let bcall = expr.builtinCall;
let mods = zigAnalysis.exprs[bcall.modifier];
let callee = zigAnalysis.exprs[bcall.function];
let args = zigAnalysis.exprs[bcall.args];
yield { src: "@call", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(mods, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(callee, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(args, opts);
yield Tok.r_paren;
return;
}
 
case "mulAdd": {
let muladd = expr.mulAdd;
let mul1 = zigAnalysis.exprs[muladd.mulend1];
let mul2 = zigAnalysis.exprs[muladd.mulend2];
let add = zigAnalysis.exprs[muladd.addend];
let type = zigAnalysis.exprs[muladd.type];
yield { src: "@mulAdd", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(type, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(mul1, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(mul2, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(add, opts);
yield Tok.r_paren;
return;
}
 
case "cmpxchgIndex": {
const cmpxchg = zigAnalysis.exprs[expr.cmpxchgIndex];
yield* ex(cmpxchg, opts);
return;
}
 
case "cmpxchg": {
const type = zigAnalysis.exprs[expr.cmpxchg.type];
const ptr = zigAnalysis.exprs[expr.cmpxchg.ptr];
const expectedValue = zigAnalysis.exprs[expr.cmpxchg.expected_value];
const newValue = zigAnalysis.exprs[expr.cmpxchg.new_value];
const successOrder = zigAnalysis.exprs[expr.cmpxchg.success_order];
const failureOrder = zigAnalysis.exprs[expr.cmpxchg.failure_order];
 
let fnName = "@";
switch (expr.cmpxchg.name) {
case "cmpxchg_strong": {
fnName += "cmpxchgStrong";
break;
}
case "cmpxchg_weak": {
fnName += "cmpxchgWeak";
break;
}
default:
throw "Unexpected cmpxchg name: `" + expr.cmpxchg.name + "`!";
}
yield { src: fnName, tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(type, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(ptr, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(expectedValue, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(newValue, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(successOrder, opts);
yield Tok.comma;
yield Tok.space;
yield* ex(failureOrder, opts);
yield Tok.r_paren;
return;
}
 
case "enumLiteral": {
let literal = expr.enumLiteral;
yield Tok.period;
yield { src: literal, tag: Tag.identifier };
return;
}
 
case "void": {
yield { src: "void", tag: Tag.identifier };
return;
}
 
case "null": {
yield { src: "null", tag: Tag.identifier };
return;
}
 
case "undefined": {
yield { src: "undefined", tag: Tag.identifier };
return;
}
 
case "anytype": {
yield { src: "anytype", tag: Tag.keyword_anytype };
return;
}
 
case "this": {
yield { src: "@This", tag: Tag.builtin };
yield Tok.l_paren;
yield Tok.r_paren;
return;
}
 
case "switchIndex": {
const switchIndex = zigAnalysis.exprs[expr.switchIndex];
yield* ex(switchIndex, opts);
return;
}
 
case "errorSets": {
const errSetsObj = getType(expr.errorSets);
yield* ex(errSetsObj.lhs, opts);
yield Tok.space;
yield { src: "||", tag: Tag.pipe_pipe };
yield Tok.space;
yield* ex(errSetsObj.rhs, opts);
return;
}
 
case "errorUnion": {
const errUnionObj = getType(expr.errorUnion);
yield* ex(errUnionObj.lhs, opts);
yield { src: "!", tag: Tag.bang };
yield* ex(errUnionObj.rhs, opts);
return;
}
 
case "type": {
let name = "";
 
let typeObj = expr.type;
if (typeof typeObj === "number") typeObj = getType(typeObj);
switch (typeObj.kind) {
default:
throw "TODO: " + typeObj.kind;
case typeKinds.Type: {
yield { src: typeObj.name, tag: Tag.identifier };
return;
}
case typeKinds.Void: {
yield { src: "void", tag: Tag.identifier };
return;
}
case typeKinds.NoReturn: {
yield { src: "noreturn", tag: Tag.identifier };
return;
}
case typeKinds.ComptimeExpr: {
yield { src: "anyopaque", tag: Tag.identifier };
return;
}
case typeKinds.Bool: {
yield { src: "bool", tag: Tag.identifier };
return;
}
case typeKinds.ComptimeInt: {
yield { src: "comptime_int", tag: Tag.identifier };
return;
}
case typeKinds.ComptimeFloat: {
yield { src: "comptime_float", tag: Tag.identifier };
return;
}
case typeKinds.Int: {
yield { src: typeObj.name, tag: Tag.identifier };
return;
}
case typeKinds.Float: {
yield { src: typeObj.name, tag: Tag.identifier };
return;
}
case typeKinds.Array: {
yield Tok.l_bracket;
yield* ex(typeObj.len, opts);
if (typeObj.sentinel) {
yield Tok.colon;
yield* ex(typeObj.sentinel, opts);
}
yield Tok.r_bracket;
yield* ex(typeObj.child, opts);
return;
}
case typeKinds.Optional: {
yield Tok.question_mark;
yield* ex(typeObj.child, opts);
return;
}
case typeKinds.Pointer: {
let ptrObj = typeObj;
switch (ptrObj.size) {
default:
console.log("TODO: implement unhandled pointer size case");
case pointerSizeEnum.One:
yield { src: "*", tag: Tag.asterisk };
break;
case pointerSizeEnum.Many:
yield Tok.l_bracket;
yield { src: "*", tag: Tag.asterisk };
if (ptrObj.sentinel !== null) {
yield Tok.colon;
yield* ex(ptrObj.sentinel, opts);
}
yield Tok.r_bracket;
break;
case pointerSizeEnum.Slice:
if (ptrObj.is_ref) {
yield { src: "*", tag: Tag.asterisk };
}
yield Tok.l_bracket;
if (ptrObj.sentinel !== null) {
yield Tok.colon;
yield* ex(ptrObj.sentinel, opts);
}
yield Tok.r_bracket;
break;
case pointerSizeEnum.C:
yield Tok.l_bracket;
yield { src: "*", tag: Tag.asterisk };
yield { src: "c", tag: Tag.identifier };
if (typeObj.sentinel !== null) {
yield Tok.colon;
yield* ex(ptrObj.sentinel, opts);
}
yield Tok.r_bracket;
break;
}
if (!ptrObj.is_mutable) {
yield Tok.const;
yield Tok.space;
}
if (ptrObj.is_allowzero) {
yield { src: "allowzero", tag: Tag.keyword_allowzero };
yield Tok.space;
}
if (ptrObj.is_volatile) {
yield { src: "volatile", tag: Tag.keyword_volatile };
}
if (ptrObj.has_addrspace) {
yield { src: "addrspace", tag: Tag.keyword_addrspace };
yield Tok.l_paren;
yield Tok.period;
yield Tok.r_paren;
}
if (ptrObj.has_align) {
yield { src: "align", tag: Tag.keyword_align };
yield Tok.l_paren;
yield* ex(ptrObj.align, opts);
if (ptrObj.hostIntBytes !== undefined && ptrObj.hostIntBytes !== null) {
yield Tok.colon;
yield* ex(ptrObj.bitOffsetInHost, opts);
yield Tok.colon;
yield* ex(ptrObj.hostIntBytes, opts);
}
yield Tok.r_paren;
yield Tok.space;
}
yield* ex(ptrObj.child, opts);
return;
}
case typeKinds.Struct: {
let structObj = typeObj;
if (structObj.layout !== null) {
switch (structObj.layout.enumLiteral) {
case "Packed": {
yield { src: "packed", tag: Tag.keyword_packed };
break;
}
case "Extern": {
yield { src: "extern", tag: Tag.keyword_extern };
break;
}
}
yield Tok.space;
}
yield { src: "struct", tag: Tag.keyword_struct };
if (structObj.backing_int !== null) {
yield Tok.l_paren;
yield* ex(structObj.backing_int, opts);
yield Tok.r_paren;
}
yield Tok.space;
yield Tok.l_brace;
 
if (structObj.field_types.length > 1) {
yield Tok.enter;
} else {
yield Tok.space;
}
 
let indent = 0;
if (structObj.field_types.length > 1) {
indent = 1;
}
if (opts.indent && structObj.field_types.length > 1) {
indent += opts.indent;
}
 
let structNode = getAstNode(structObj.src);
for (let i = 0; i < structObj.field_types.length; i += 1) {
let fieldNode = getAstNode(structNode.fields[i]);
let fieldName = fieldNode.name;
 
for (let j = 0; j < indent; j += 1) {
yield Tok.tab;
}
 
if (!typeObj.is_tuple) {
yield { src: fieldName, tag: Tag.identifier };
}
 
let fieldTypeExpr = structObj.field_types[i];
if (!typeObj.is_tuple) {
yield Tok.colon;
yield Tok.space;
}
yield* ex(fieldTypeExpr, { ...opts, indent: indent });
 
if (structObj.field_defaults[i] !== null) {
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(structObj.field_defaults[i], opts);
}
 
if (structObj.field_types.length > 1) {
yield Tok.comma;
yield Tok.enter;
} else {
yield Tok.space;
}
}
yield Tok.r_brace;
return;
}
case typeKinds.Enum: {
let enumObj = typeObj;
yield { src: "enum", tag: Tag.keyword_enum };
if (enumObj.tag) {
yield Tok.l_paren;
yield* ex(enumObj.tag, opts);
yield Tok.r_paren;
}
yield Tok.space;
yield Tok.l_brace;
 
let enumNode = getAstNode(enumObj.src);
let fields_len = enumNode.fields.length;
if (enumObj.nonexhaustive) {
fields_len += 1;
}
 
if (fields_len > 1) {
yield Tok.enter;
} else {
yield Tok.space;
}
 
let indent = 0;
if (fields_len > 1) {
indent = 1;
}
if (opts.indent) {
indent += opts.indent;
}
 
for (let i = 0; i < enumNode.fields.length; i += 1) {
let fieldNode = getAstNode(enumNode.fields[i]);
let fieldName = fieldNode.name;
 
for (let j = 0; j < indent; j += 1) yield Tok.tab;
yield { src: fieldName, tag: Tag.identifier };
 
if (enumObj.values[i] !== null) {
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(enumObj.values[i], opts);
}
 
if (fields_len > 1) {
yield Tok.comma;
yield Tok.enter;
}
}
if (enumObj.nonexhaustive) {
for (let j = 0; j < indent; j += 1) yield Tok.tab;
yield { src: "_", tag: Tag.identifier };
if (fields_len > 1) {
yield Tok.comma;
yield Tok.enter;
}
}
if (opts.indent) {
for (let j = 0; j < opts.indent; j += 1) yield Tok.tab;
}
yield Tok.r_brace;
return;
}
case typeKinds.Union: {
let unionObj = typeObj;
if (unionObj.layout !== null) {
switch (unionObj.layout.enumLiteral) {
case "Packed": {
yield { src: "packed", tag: Tag.keyword_packed };
break;
}
case "Extern": {
yield { src: "extern", tag: Tag.keyword_extern };
break;
}
}
yield Tok.space;
}
yield { src: "union", tag: Tag.keyword_union };
if (unionObj.auto_tag) {
yield Tok.l_paren;
yield { src: "enum", tag: Tag.keyword_enum };
if (unionObj.tag) {
yield Tok.l_paren;
yield* ex(unionObj.tag, opts);
yield Tok.r_paren;
yield Tok.r_paren;
} else {
yield Tok.r_paren;
}
} else if (unionObj.tag) {
yield Tok.l_paren;
yield* ex(unionObj.tag, opts);
yield Tok.r_paren;
}
yield Tok.space;
yield Tok.l_brace;
if (unionObj.field_types.length > 1) {
yield Tok.enter;
} else {
yield Tok.space;
}
let indent = 0;
if (unionObj.field_types.length > 1) {
indent = 1;
}
if (opts.indent) {
indent += opts.indent;
}
let unionNode = getAstNode(unionObj.src);
for (let i = 0; i < unionObj.field_types.length; i += 1) {
let fieldNode = getAstNode(unionNode.fields[i]);
let fieldName = fieldNode.name;
for (let j = 0; j < indent; j += 1) yield Tok.tab;
yield { src: fieldName, tag: Tag.identifier };
 
let fieldTypeExpr = unionObj.field_types[i];
yield Tok.colon;
yield Tok.space;
 
yield* ex(fieldTypeExpr, { ...opts, indent: indent });
 
if (unionObj.field_types.length > 1) {
yield Tok.comma;
yield Tok.enter;
} else {
yield Tok.space;
}
}
if (opts.indent) {
for (let j = 0; j < opts.indent; j += 1) yield Tok.tab;
}
yield Tok.r_brace;
return;
}
case typeKinds.Opaque: {
yield { src: "opaque", tag: Tag.keyword_opaque };
yield Tok.space;
yield Tok.l_brace;
yield Tok.r_brace;
return;
}
case typeKinds.EnumLiteral: {
yield { src: "(enum literal)", tag: Tag.identifier };
return;
}
case typeKinds.ErrorSet: {
let errSetObj = typeObj;
if (errSetObj.fields === null) {
yield { src: "anyerror", tag: Tag.identifier };
} else if (errSetObj.fields.length == 0) {
yield { src: "error", tag: Tag.keyword_error };
yield Tok.l_brace;
yield Tok.r_brace;
} else if (errSetObj.fields.length == 1) {
yield { src: "error", tag: Tag.keyword_error };
yield Tok.l_brace;
yield { src: errSetObj.fields[0].name, tag: Tag.identifier };
yield Tok.r_brace;
} else {
yield { src: "error", tag: Tag.keyword_error };
yield Tok.l_brace;
yield { src: errSetObj.fields[0].name, tag: Tag.identifier };
for (let i = 1; i < errSetObj.fields.length; i++) {
yield Tok.comma;
yield Tok.space;
yield { src: errSetObj.fields[i].name, tag: Tag.identifier };
}
yield Tok.r_brace;
}
return;
}
case typeKinds.ErrorUnion: {
let errUnionObj = typeObj;
yield* ex(errUnionObj.lhs, opts);
yield { src: "!", tag: Tag.bang };
yield* ex(errUnionObj.rhs, opts);
return;
}
case typeKinds.InferredErrorUnion: {
let errUnionObj = typeObj;
yield { src: "!", tag: Tag.bang };
yield* ex(errUnionObj.payload, opts);
return;
}
case typeKinds.Fn: {
let fnObj = typeObj;
let fnDecl = opts.fnDecl;
let linkFnNameDecl = opts.linkFnNameDecl;
opts.fnDecl = null;
opts.linkFnNameDecl = null;
if (opts.addParensIfFnSignature && fnObj.src == 0) {
yield Tok.l_paren;
}
if (fnObj.is_extern) {
yield { src: "extern", tag: Tag.keyword_extern };
yield Tok.space;
} else if (fnObj.has_cc) {
let cc_expr = zigAnalysis.exprs[fnObj.cc];
if (cc_expr.enumLiteral === "Inline") {
yield { src: "inline", tag: Tag.keyword_inline };
yield Tok.space;
}
}
if (fnObj.has_lib_name) {
yield { src: '"' + fnObj.lib_name + '"', tag: Tag.string_literal };
yield Tok.space;
}
yield { src: "fn", tag: Tag.keyword_fn };
yield Tok.space;
if (fnDecl) {
if (linkFnNameDecl) {
yield { src: fnDecl.name, tag: Tag.identifier, link: linkFnNameDecl, fnDecl: false };
} else {
yield { src: fnDecl.name, tag: Tag.identifier, fnDecl: true };
}
}
yield Tok.l_paren;
if (fnObj.params) {
let fields = null;
let isVarArgs = false;
if (fnObj.src != 0) {
let fnNode = getAstNode(fnObj.src);
fields = fnNode.fields;
isVarArgs = fnNode.varArgs;
}
 
for (let i = 0; i < fnObj.params.length; i += 1) {
if (i != 0) {
yield Tok.comma;
yield Tok.space;
}
 
let value = fnObj.params[i];
let paramValue = resolveValue({ expr: value });
 
if (fields != null) {
let paramNode = getAstNode(fields[i]);
 
if (paramNode.varArgs) {
yield Tok.period;
yield Tok.period;
yield Tok.period;
continue;
}
 
if (paramNode.noalias) {
yield { src: "noalias", tag: Tag.keyword_noalias };
yield Tok.space;
}
 
if (paramNode.comptime) {
yield { src: "comptime", tag: Tag.keyword_comptime };
yield Tok.space;
}
 
let paramName = paramNode.name;
if (paramName != null) {
// skip if it matches the type name
if (!shouldSkipParamName(paramValue, paramName)) {
if (paramName === "") {
paramName = "_";
}
yield { src: paramName, tag: Tag.identifier };
yield Tok.colon;
yield Tok.space;
}
}
}
 
// TODO: most of this seems redundant
if (isVarArgs && i === fnObj.params.length - 1) {
yield Tok.period;
yield Tok.period;
yield Tok.period;
} else if ("alignOf" in value) {
yield* ex(value, opts);
} else if ("typeOf" in value) {
yield* ex(value, opts);
} else if ("typeOf_peer" in value) {
yield* ex(value, opts);
} else if ("declRef" in value) {
yield* ex(value, opts);
} else if ("call" in value) {
yield* ex(value, opts);
} else if ("refPath" in value) {
yield* ex(value, opts);
} else if ("type" in value) {
yield* ex(value, opts);
//payloadHtml += '<span class="tok-kw">' + name + "</span>";
} else if ("binOpIndex" in value) {
yield* ex(value, opts);
} else if ("comptimeExpr" in value) {
let comptimeExpr =
zigAnalysis.comptimeExprs[value.comptimeExpr].code;
yield* Tokenizer(comptimeExpr);
} else {
yield { src: "anytype", tag: Tag.keyword_anytype };
}
}
}
 
yield Tok.r_paren;
yield Tok.space;
 
if (fnObj.has_align) {
let align = zigAnalysis.exprs[fnObj.align];
yield { src: "align", tag: Tag.keyword_align };
yield Tok.l_paren;
yield* ex(align, opts);
yield Tok.r_paren;
yield Tok.space;
}
if (fnObj.has_cc) {
let cc = zigAnalysis.exprs[fnObj.cc];
if (cc) {
if (cc.enumLiteral !== "Inline") {
yield { src: "callconv", tag: Tag.keyword_callconv };
yield Tok.l_paren;
yield* ex(cc, opts);
yield Tok.r_paren;
yield Tok.space;
}
}
}
 
if (fnObj.is_inferred_error) {
yield { src: "!", tag: Tag.bang };
}
if (fnObj.ret != null) {
yield* ex(fnObj.ret, {
...opts,
addParensIfFnSignature: true,
});
} else {
yield { src: "anytype", tag: Tag.keyword_anytype };
}
 
if (opts.addParensIfFnSignature && fnObj.src == 0) {
yield Tok.r_paren;
}
return;
}
}
}
 
case "typeOf": {
const typeRefArg = zigAnalysis.exprs[expr.typeOf];
yield { src: "@TypeOf", tag: Tag.builtin };
yield Tok.l_paren;
yield* ex(typeRefArg, opts);
yield Tok.r_paren;
return;
}
 
case "builtinField": {
yield { src: expr.builtinField, tag: Tag.identifier };
return;
}
}
 
 
}
 
 
 
function shouldSkipParamName(typeRef, paramName) {
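// True when the parameter is a single-item pointer whose pointee type name matches the parameter name,
// so the redundant name can be omitted from the rendered prototype.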
let resolvedTypeRef = resolveValue({ expr: typeRef });
if ("type" in resolvedTypeRef.expr) {
let typeObj = getType(resolvedTypeRef.expr.type);
if (typeObj.kind === typeKinds.Pointer) {
let ptrObj = typeObj;
if (getPtrSize(ptrObj) === pointerSizeEnum.One) {
const value = resolveValue({ expr: ptrObj.child });
return typeValueName(value, false, true).toLowerCase() === paramName;
}
}
}
return false;
}
 
function getPtrSize(typeObj) {
return typeObj.size == null ? pointerSizeEnum.One : typeObj.size;
}
 
function renderType(typeObj) {
let name;
if (
rootIsStd &&
typeObj ===
getType(zigAnalysis.modules[zigAnalysis.rootMod].main)
) {
name = renderSingleToken(Tok.identifier("std"));
} else {
name = renderTokens(ex({ type: typeObj }, {}));
}
if (name != null && name != "") {
domHdrName.innerHTML = "<pre class='inline'>" + name + "</pre> ("
+ zigAnalysis.typeKinds[typeObj.kind] + ")";
domHdrName.classList.remove("hidden");
}
if (typeObj.kind == typeKinds.ErrorSet) {
renderErrorSet(typeObj);
}
}
 
function renderErrorSet(errSetType) {
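// Render the errors section for an error set type: either the anyerror case or a sorted name/description list.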
if (errSetType.fields == null) {
domFnErrorsAnyError.classList.remove("hidden");
} else {
let errorList = [];
for (let i = 0; i < errSetType.fields.length; i += 1) {
let errObj = errSetType.fields[i];
//let srcObj = zigAnalysis.astNodes[errObj.src];
errorList.push(errObj);
}
errorList.sort(function(a, b) {
return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase());
});
 
resizeDomListDl(domListFnErrors, errorList.length);
for (let i = 0; i < errorList.length; i += 1) {
let nameTdDom = domListFnErrors.children[i * 2 + 0];
let descTdDom = domListFnErrors.children[i * 2 + 1];
nameTdDom.textContent = errorList[i].name;
let docs = errorList[i].docs;
if (docs != null) {
descTdDom.innerHTML = markdown(docs);
} else {
descTdDom.textContent = "";
}
}
domTableFnErrors.classList.remove("hidden");
}
domSectFnErrors.classList.remove("hidden");
}
 
// function allCompTimeFnCallsHaveTypeResult(typeIndex, value) {
// let srcIndex = zigAnalysis.fns[value].src;
// let calls = nodesToCallsMap[srcIndex];
// if (calls == null) return false;
// for (let i = 0; i < calls.length; i += 1) {
// let call = zigAnalysis.calls[calls[i]];
// if (call.result.type !== typeTypeId) return false;
// }
// return true;
// }
//
// function allCompTimeFnCallsResult(calls) {
// let firstTypeObj = null;
// let containerObj = {
// privDecls: [],
// };
// for (let callI = 0; callI < calls.length; callI += 1) {
// let call = zigAnalysis.calls[calls[callI]];
// if (call.result.type !== typeTypeId) return null;
// let typeObj = zigAnalysis.types[call.result.value];
// if (!typeKindIsContainer(typeObj.kind)) return null;
// if (firstTypeObj == null) {
// firstTypeObj = typeObj;
// containerObj.src = typeObj.src;
// } else if (firstTypeObj.src !== typeObj.src) {
// return null;
// }
//
// if (containerObj.fields == null) {
// containerObj.fields = (typeObj.fields || []).concat([]);
// } else for (let fieldI = 0; fieldI < typeObj.fields.length; fieldI += 1) {
// let prev = containerObj.fields[fieldI];
// let next = typeObj.fields[fieldI];
// if (prev === next) continue;
// if (typeof(prev) === 'object') {
// if (prev[next] == null) prev[next] = typeObj;
// } else {
// containerObj.fields[fieldI] = {};
// containerObj.fields[fieldI][prev] = firstTypeObj;
// containerObj.fields[fieldI][next] = typeObj;
// }
// }
//
// if (containerObj.pubDecls == null) {
// containerObj.pubDecls = (typeObj.pubDecls || []).concat([]);
// } else for (let declI = 0; declI < typeObj.pubDecls.length; declI += 1) {
// let prev = containerObj.pubDecls[declI];
// let next = typeObj.pubDecls[declI];
// if (prev === next) continue;
// // TODO instead of showing "examples" as the public declarations,
// // do logic like this:
// //if (typeof(prev) !== 'object') {
// // let newDeclId = zigAnalysis.decls.length;
// // prev = clone(zigAnalysis.decls[prev]);
// // prev.id = newDeclId;
// // zigAnalysis.decls.push(prev);
// // containerObj.pubDecls[declI] = prev;
// //}
// //mergeDecls(prev, next, firstTypeObj, typeObj);
// }
// }
// for (let declI = 0; declI < containerObj.pubDecls.length; declI += 1) {
// let decl = containerObj.pubDecls[declI];
// if (typeof(decl) === 'object') {
// containerObj.pubDecls[declI] = containerObj.pubDecls[declI].id;
// }
// }
// return containerObj;
// }
 
function renderValue(decl) {
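// Render a `const` decl: pick a presentation based on how its value resolves, then show its doc comments.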
let resolvedValue = resolveValue(decl.value);
if (resolvedValue.expr.fieldRef) {
const declRef = decl.value.expr.refPath[0].declRef;
const type = getDecl(declRef);
 
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.const;
yield Tok.space;
yield Tok.identifier(decl.name);
yield Tok.colon;
yield Tok.space;
yield Tok.identifier(type.name);
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
} else if (
resolvedValue.expr.string !== undefined ||
resolvedValue.expr.call !== undefined ||
resolvedValue.expr.comptimeExpr !== undefined
) {
// TODO: we're using the resolved value but
// not keeping track of how we got there
// that's important context that should
// be shown to the user!
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.const;
yield Tok.space;
yield Tok.identifier(decl.name);
if (decl.value.typeRef) {
yield Tok.colon;
yield Tok.space;
yield* ex(decl.value.typeRef, {});
}
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(resolvedValue.expr, {});
yield Tok.semi;
})());
} else if (resolvedValue.expr.compileError) {
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.const;
yield Tok.space;
yield Tok.identifier(decl.name);
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
} else {
const parent = getType(decl.parent_container);
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.const;
yield Tok.space;
yield Tok.identifier(decl.name);
if (decl.value.typeRef !== null) {
yield Tok.colon;
yield Tok.space;
yield* ex(decl.value.typeRef, {});
}
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
}
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
// TODO: it shouldn't just be decl.parent_container, but rather
// the type that the decl holds (if the value is a type)
domTldDocs.innerHTML = markdown(docs, decl);
 
domTldDocs.classList.remove("hidden");
}
 
domFnProto.classList.remove("hidden");
}
 
function renderVar(decl) {
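// Render a `var` decl, mirroring renderValue but with the `var` keyword.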
let resolvedVar = resolveValue(decl.value);
 
if (resolvedVar.expr.fieldRef) {
const declRef = decl.value.expr.refPath[0].declRef;
const type = getDecl(declRef);
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.var;
yield Tok.space;
yield Tok.identifier(decl.name);
yield Tok.colon;
yield Tok.space;
yield Tok.identifier(type.name);
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
} else if (
resolvedVar.expr.string !== undefined ||
resolvedVar.expr.call !== undefined ||
resolvedVar.expr.comptimeExpr !== undefined
) {
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.var;
yield Tok.space;
yield Tok.identifier(decl.name);
if (decl.value.typeRef) {
yield Tok.colon;
yield Tok.space;
yield* ex(decl.value.typeRef, {});
}
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
} else if (resolvedVar.expr.compileError) {
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.var;
yield Tok.space;
yield Tok.identifier(decl.name);
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
} else {
domFnProtoCode.innerHTML = renderTokens(
(function*() {
yield Tok.var;
yield Tok.space;
yield Tok.identifier(decl.name);
yield Tok.colon;
yield Tok.space;
yield* ex(resolvedVar.typeRef, {});
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(decl.value.expr, {});
yield Tok.semi;
})());
}
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs);
domTldDocs.classList.remove("hidden");
}
 
domFnProto.classList.remove("hidden");
}
 
function categorizeDecls(
decls,
typesList,
namespacesWithDocsList,
namespacesNoDocsList,
errSetsList,
fnsList,
varsList,
valsList,
testsList,
unsList
) {
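// Distribute decls into the given output lists by kind: vars, functions, types, namespaces
// (with and without docs), error sets, values, and usingnamespace (uns) decls.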
for (let i = 0; i < decls.length; i += 1) {
let decl = getDecl(decls[i]);
let declValue = resolveValue(decl.value);
 
// if (decl.isTest) {
// testsList.push(decl);
// continue;
// }
 
if (decl.kind === "var") {
varsList.push(decl);
continue;
}
 
if (decl.kind === "const") {
if ("type" in declValue.expr) {
// We have the actual type expression at hand.
const typeExpr = getType(declValue.expr.type);
if (typeExpr.kind == typeKinds.Fn) {
const funcRetExpr = resolveValue({
expr: typeExpr.ret,
});
if (
"type" in funcRetExpr.expr &&
funcRetExpr.expr.type == typeTypeId
) {
if (typeIsErrSet(declValue.expr.type)) {
errSetsList.push(decl);
} else if (typeIsStructWithNoFields(declValue.expr.type)) {
let docs = getAstNode(decl.src).docs;
if (!docs) {
// If this is a re-export, try to fetch docs from the actual definition
const { value, seenDecls } = resolveValue(decl.value, true);
if (seenDecls.length > 0) {
const definitionDecl = getDecl(seenDecls[seenDecls.length - 1]);
docs = getAstNode(definitionDecl.src).docs;
} else {
docs = getAstNode(getType(value.expr.type).src).docs;
}
}
if (docs) {
namespacesWithDocsList.push({decl, docs});
} else {
namespacesNoDocsList.push(decl);
}
} else {
typesList.push(decl);
}
} else {
fnsList.push(decl);
}
} else {
if (typeIsErrSet(declValue.expr.type)) {
errSetsList.push(decl);
} else if (typeIsStructWithNoFields(declValue.expr.type)) {
let docs = getAstNode(decl.src).docs;
if (!docs) {
// If this is a re-export, try to fetch docs from the actual definition
const { value, seenDecls } = resolveValue(decl.value, true);
if (seenDecls.length > 0) {
const definitionDecl = getDecl(seenDecls[seenDecls.length - 1]);
docs = getAstNode(definitionDecl.src).docs;
} else {
docs = getAstNode(getType(value.expr.type).src).docs;
}
}
if (docs) {
namespacesWithDocsList.push({decl, docs});
} else {
namespacesNoDocsList.push(decl);
}
} else {
typesList.push(decl);
}
}
} else if (declValue.typeRef) {
if ("type" in declValue.typeRef && declValue.typeRef.type == typeTypeId) {
// We don't know what the type expression is, but we know it's a type.
typesList.push(decl);
} else {
valsList.push(decl);
}
} else {
valsList.push(decl);
}
}
 
if (decl.is_uns) {
unsList.push(decl);
}
}
}
 
function sourceFileLink(decl) {
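// Expand sourceFileUrlTemplate with the decl's module name, file name, and 1-based line number.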
const srcNode = getAstNode(decl.src);
const srcFile = getFile(srcNode.file);
return sourceFileUrlTemplate.
replace("{{mod}}", zigAnalysis.modules[srcFile.modIndex].name).
replace("{{file}}", srcFile.name).
replace("{{line}}", srcNode.line + 1);
}
 
function renderContainer(container) {
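// Render a container type (struct/enum/union/...): categorize its decls, then fill in the types, namespaces,
// error sets, functions, fields, variables, values, and tests sections.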
let typesList = [];
 
let namespacesWithDocsList = [];
let namespacesNoDocsList = [];
 
let errSetsList = [];
 
let fnsList = [];
 
let varsList = [];
 
let valsList = [];
 
let testsList = [];
 
let unsList = [];
 
categorizeDecls(
container.pubDecls,
typesList,
namespacesWithDocsList,
namespacesNoDocsList,
errSetsList,
fnsList,
varsList,
valsList,
testsList,
unsList
);
if (curNav.showPrivDecls)
categorizeDecls(
container.privDecls,
typesList,
namespacesWithDocsList,
namespacesNoDocsList,
errSetsList,
fnsList,
varsList,
valsList,
testsList,
unsList
);
 
while (unsList.length > 0) {
let uns = unsList.shift();
let declValue = resolveValue(uns.value);
if (!("type" in declValue.expr)) continue;
let uns_container = getType(declValue.expr.type);
if (!isContainerType(uns_container)) continue;
categorizeDecls(
uns_container.pubDecls,
typesList,
namespacesWithDocsList,
namespacesNoDocsList,
errSetsList,
fnsList,
varsList,
valsList,
testsList,
unsList
);
if (curNav.showPrivDecls)
categorizeDecls(
uns_container.privDecls,
typesList,
namespacesWithDocsList,
namespacesNoDocsList,
errSetsList,
fnsList,
varsList,
valsList,
testsList,
unsList
);
}
 
typesList.sort(byNameProperty);
namespacesWithDocsList.sort(byNameProperty);
namespacesNoDocsList.sort(byNameProperty);
errSetsList.sort(byNameProperty);
fnsList.sort(byNameProperty);
varsList.sort(byNameProperty);
valsList.sort(byNameProperty);
testsList.sort(byNameProperty);
 
if (container.src != null) {
let docs = getAstNode(container.src).docs;
if (docs != null) {
domTldDocs.innerHTML = markdown(docs, container);
domTldDocs.classList.remove("hidden");
}
}
 
if (typesList.length !== 0) {
const splitPoint = Math.ceil(typesList.length / 2);
const template = '<li><a href="#"></a><div></div></li>';
resizeDomList(domListTypesLeft, splitPoint, template);
resizeDomList(domListTypesRight, typesList.length - splitPoint, template);
 
let activeList = domListTypesLeft;
let offset = 0;
for (let i = 0; i < typesList.length; i += 1) {
let liDom = activeList.children[i - offset];
let aDom = liDom.children[0];
let decl = typesList[i];
aDom.textContent = decl.name;
aDom.setAttribute("href", navLinkDecl(decl.name));
let descDom = liDom.children[1];
let docs = getAstNode(decl.src).docs;
if (!docs) {
// If this is a re-export, try to fetch docs from the actual definition
const { value, seenDecls } = resolveValue(decl.value, true);
if (seenDecls.length > 0) {
const definitionDecl = getDecl(seenDecls[seenDecls.length - 1]);
docs = getAstNode(definitionDecl.src).docs;
} else {
const type = getType(value.expr.type);
if ("src" in type) {
docs = getAstNode(type.src).docs;
}
}
}
if (docs) {
descDom.innerHTML = markdown(shortDesc(docs));
} else {
descDom.innerHTML = "<p class='understated'><i>No documentation provided.</i></p>";
}
if (i == splitPoint - 1) {
activeList = domListTypesRight;
offset = splitPoint;
}
}
domSectTypes.classList.remove("hidden");
}
if (namespacesWithDocsList.length !== 0) {
const splitPoint = Math.ceil(namespacesWithDocsList.length / 2);
const template = '<li><a href="#"></a><div></div></li>';
resizeDomList(domListNamespacesLeft, splitPoint, template);
resizeDomList(domListNamespacesRight,
namespacesWithDocsList.length - splitPoint,
template);
 
let activeList = domListNamespacesLeft;
let offset = 0;
for (let i = 0; i < namespacesWithDocsList.length; i += 1) {
let liDom = activeList.children[i - offset];
let aDom = liDom.children[0];
let { decl, docs } = namespacesWithDocsList[i];
aDom.textContent = decl.name;
aDom.setAttribute("href", navLinkDecl(decl.name));
 
let descDom = liDom.children[1];
descDom.innerHTML = markdown(shortDesc(docs));
if (i == splitPoint - 1) {
activeList = domListNamespacesRight;
offset = splitPoint;
}
}
 
domListNamespacesLeft.classList.remove("hidden");
domListNamespacesRight.classList.remove("hidden");
domSectNamespaces.classList.remove("hidden");
}
 
if (namespacesNoDocsList.length !== 0) {
resizeDomList(
domNoDocsNamespaces,
namespacesNoDocsList.length,
'<span><a href="#"></a><span></span></span>'
);
for (let i = 0; i < namespacesNoDocsList.length; i += 1) {
let aDom = domNoDocsNamespaces.children[i].children[0];
let decl = namespacesNoDocsList[i];
aDom.textContent = decl.name;
aDom.setAttribute("href", navLinkDecl(decl.name));
let comma = domNoDocsNamespaces.children[i].children[1];
if (i == namespacesNoDocsList.length - 1) {
comma.textContent = "";
} else {
comma.textContent = ", ";
}
}
 
domNoDocsNamespaces.classList.remove("hidden");
domSectNamespaces.classList.remove("hidden");
}
 
 
 
if (errSetsList.length !== 0) {
resizeDomList(
domListErrSets,
errSetsList.length,
'<li><a href="#"></a></li>'
);
for (let i = 0; i < errSetsList.length; i += 1) {
let liDom = domListErrSets.children[i];
let aDom = liDom.children[0];
let decl = errSetsList[i];
aDom.textContent = decl.name;
aDom.setAttribute("href", navLinkDecl(decl.name));
}
domSectErrSets.classList.remove("hidden");
}
 
if (fnsList.length !== 0) {
resizeDomList(
domListFns,
fnsList.length,
'<div><dt><pre class="inline fnSignature"></pre><div></div></dt><dd></dd></div>'
);
 
for (let i = 0; i < fnsList.length; i += 1) {
let decl = fnsList[i];
let trDom = domListFns.children[i];
 
let tdFnSignature = trDom.children[0].children[0];
let tdFnSrc = trDom.children[0].children[1];
let tdDesc = trDom.children[1];
 
let declType = resolveValue(decl.value);
console.assert("type" in declType.expr);
tdFnSignature.innerHTML = renderTokens(ex(declType.expr, {
fnDecl: decl,
linkFnNameDecl: navLinkDecl(decl.name),
}));
tdFnSrc.innerHTML = "<a style=\"float: right;\" target=\"_blank\" href=\"" +
sourceFileLink(decl) + "\">[src]</a>";
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
docs = docs.trim();
var short = shortDesc(docs);
if (short != docs) {
short = markdown(short, container);
var long = markdown(docs, container); // TODO: this needs to be the file top lvl struct
tdDesc.innerHTML =
"<div class=\"expand\" ><span class=\"button\" onclick=\"toggleExpand(event)\"></span><div class=\"sum-less\">" + short + "</div>" + "<div class=\"sum-more\">" + long + "</div></div>";
}
else {
tdDesc.innerHTML = markdown(short, container);
}
} else {
tdDesc.innerHTML = "<p class='understated'><i>No documentation provided.</i></p>";
}
}
domSectFns.classList.remove("hidden");
}
 
let containerNode = getAstNode(container.src);
if (containerNode.fields && containerNode.fields.length > 0) {
resizeDomList(domListFields, containerNode.fields.length, "<div></div>");
 
for (let i = 0; i < containerNode.fields.length; i += 1) {
let fieldNode = getAstNode(containerNode.fields[i]);
let divDom = domListFields.children[i];
let fieldName = fieldNode.name;
let docs = fieldNode.docs;
let docsNonEmpty = docs != null && docs !== "";
let extraPreClass = docsNonEmpty ? " fieldHasDocs" : "";
 
let html =
'<div class="mobile-scroll-container"><pre class="scroll-item' +
extraPreClass +
'">' +
escapeHtml(fieldName);
 
if (container.kind === typeKinds.Enum) {
let value = container.values[i];
if (value !== null) {
html += renderTokens((function*() {
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(value, {});
})());
}
} else {
let fieldTypeExpr = container.field_types[i];
if (container.kind !== typeKinds.Struct || !container.is_tuple) {
html += renderTokens((function*() {
yield Tok.colon;
yield Tok.space;
})());
}
html += renderTokens(ex(fieldTypeExpr, {}));
let tsn = typeShorthandName(fieldTypeExpr);
if (tsn) {
html += "<span> (" + tsn + ")</span>";
}
if (container.kind === typeKinds.Struct && !container.is_tuple) {
let defaultInitExpr = container.field_defaults[i];
if (defaultInitExpr !== null) {
html += renderTokens((function*() {
yield Tok.space;
yield Tok.eql;
yield Tok.space;
yield* ex(defaultInitExpr, {});
})());
}
}
}
 
html += ",</pre></div>";
 
if (docsNonEmpty) {
html += '<div class="fieldDocs">' + markdown(docs) + "</div>";
}
divDom.innerHTML = html;
}
domSectFields.classList.remove("hidden");
}
 
if (varsList.length !== 0) {
resizeDomList(
domListGlobalVars,
varsList.length,
'<tr><td><a href="#"></a></td><td><pre class="inline"></pre></td><td></td></tr>'
);
for (let i = 0; i < varsList.length; i += 1) {
let decl = varsList[i];
let trDom = domListGlobalVars.children[i];
 
let tdName = trDom.children[0];
let tdNameA = tdName.children[0];
let tdType = trDom.children[1];
let preType = tdType.children[0];
let tdDesc = trDom.children[2];
 
tdNameA.setAttribute("href", navLinkDecl(decl.name));
tdNameA.textContent = decl.name;
 
preType.innerHTML = renderTokens(ex(walkResultTypeRef(decl.value), {}));
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
tdDesc.textContent = "";
}
}
domSectGlobalVars.classList.remove("hidden");
}
 
if (valsList.length !== 0) {
resizeDomList(
domListValues,
valsList.length,
'<tr><td><a href="#"></a></td><td><pre class="inline"></pre></td><td></td></tr>'
);
for (let i = 0; i < valsList.length; i += 1) {
let decl = valsList[i];
let trDom = domListValues.children[i];
 
let tdName = trDom.children[0];
let tdNameA = tdName.children[0];
let tdType = trDom.children[1];
let preType = tdType.children[0];
let tdDesc = trDom.children[2];
 
tdNameA.setAttribute("href", navLinkDecl(decl.name));
tdNameA.textContent = decl.name;
 
preType.innerHTML = renderTokens(ex(walkResultTypeRef(decl.value), {}));
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
tdDesc.textContent = "";
}
}
domSectValues.classList.remove("hidden");
}
 
if (testsList.length !== 0) {
resizeDomList(
domListTests,
testsList.length,
'<tr><td><pre class="inline"></pre></td><td><pre class="inline"></pre></td><td></td></tr>'
);
for (let i = 0; i < testsList.length; i += 1) {
let decl = testsList[i];
let trDom = domListTests.children[i];
 
let tdName = trDom.children[0];
let tdNamePre = tdName.children[0];
let tdType = trDom.children[1];
let tdTypePre = tdType.children[0];
let tdDesc = trDom.children[2];
 
tdNamePre.innerHTML = renderSingleToken(Tok.identifier(decl.name));
 
tdTypePre.innerHTML = ex(walkResultTypeRef(decl.value), {});
 
let docs = getAstNode(decl.src).docs;
if (docs != null) {
tdDesc.innerHTML = shortDescMarkdown(docs);
} else {
tdDesc.textContent = "";
}
}
domSectTests.classList.remove("hidden");
}
 
if (container.kind !== typeKinds.Struct || containerNode.fields.length > 0) {
domHdrName.innerHTML = "<pre class='inline'>" +
zigAnalysis.typeKinds[container.kind] +
"</pre>";
domHdrName.classList.remove("hidden");
}
}
 
function operatorCompare(a, b) {
if (a === b) {
return 0;
} else if (a < b) {
return -1;
} else {
return 1;
}
}
 
function detectRootIsStd() {
let rootMod = zigAnalysis.modules[zigAnalysis.rootMod];
if (rootMod.table["std"] == null) {
// no std mapped into the root module
return false;
}
let stdMod = zigAnalysis.modules[rootMod.table["std"]];
if (stdMod == null) return false;
return rootMod.file === stdMod.file;
}
 
function indexTypeKinds() {
let map = {};
for (let i = 0; i < zigAnalysis.typeKinds.length; i += 1) {
map[zigAnalysis.typeKinds[i]] = i;
}
// This is just for debugging purposes, not needed to function
let assertList = [
"Type",
"Void",
"Bool",
"NoReturn",
"Int",
"Float",
"Pointer",
"Array",
"Struct",
"ComptimeFloat",
"ComptimeInt",
"Undefined",
"Null",
"Optional",
"ErrorUnion",
"ErrorSet",
"Enum",
"Union",
"Fn",
"Opaque",
"Frame",
"AnyFrame",
"Vector",
"EnumLiteral",
];
for (let i = 0; i < assertList.length; i += 1) {
if (map[assertList[i]] == null)
throw new Error("No type kind '" + assertList[i] + "' found");
}
return map;
}
 
function findTypeTypeId() {
for (let i = 0; i < zigAnalysis.types.length; i += 1) {
if (getType(i).kind == typeKinds.Type) {
return i;
}
}
throw new Error("No type 'type' found");
}
 
 
function updateCurNav() {
curNav = {
hash: location.hash,
mode: NAV_MODES.API,
modNames: [],
modObjs: [],
declNames: [],
declObjs: [],
callName: null,
activeGuide: null,
activeGuideScrollTo: null,
};
curNavSearch = "";
 
const mode = location.hash.substring(0, 3);
let query = location.hash.substring(3);
 
let qpos = query.indexOf("?");
let nonSearchPart;
if (qpos === -1) {
nonSearchPart = query;
} else {
nonSearchPart = query.substring(0, qpos);
curNavSearch = decodeURIComponent(query.substring(qpos + 1));
}
 
const DEFAULT_HASH = NAV_MODES.API + zigAnalysis.modules[zigAnalysis.rootMod].name;
switch (mode) {
case NAV_MODES.API:
// #A;MODULE:decl.decl.decl?search-term
curNav.mode = mode;
{
let parts = nonSearchPart.split(":");
if (parts[0] == "") {
location.hash = DEFAULT_HASH;
} else {
curNav.modNames = decodeURIComponent(parts[0]).split(".");
}
 
if (parts[1] != null) {
curNav.declNames = decodeURIComponent(parts[1]).split(".");
}
}
return;
case NAV_MODES.GUIDES:
curNav.mode = mode;
 
{
let parts = nonSearchPart.split(":");
curNav.activeGuide = parts[0];
if (parts[1] != null) {
curNav.activeGuideScrollTo = decodeURIComponent(":" + parts[1]);
}
}
return;
default:
location.hash = DEFAULT_HASH;
return;
}
}
 
function onHashChange(ev) {
scrollHistory[curNav.hash] = scrollMonitor.map(function (x) {
return [x, x.scrollTop]
});
if (skipNextHashChange == decodeURIComponent(location.hash)) {
skipNextHashChange = null;
return;
}
skipNextHashChange = null;
updateCurNav();
 
if (domSearch.value !== curNavSearch) {
domSearch.value = curNavSearch;
if (domSearch.value.length == 0)
domSearchPlaceholder.classList.remove("hidden");
else
domSearchPlaceholder.classList.add("hidden");
}
render();
if (imFeelingLucky) {
imFeelingLucky = false;
activateSelectedResult();
}
 
scroll();
}
 
function scroll() {
const cur = scrollHistory[location.hash];
if (cur) {
for (let [elem, offset] of cur) {
elem.scrollTo(0, offset);
}
} else {
if (curNav.activeGuideScrollTo) return;
for (let elem of scrollMonitor) {
elem.scrollTo(0, 0);
}
}
}
 
function findSubDecl(parentTypeOrDecl, childName) {
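// Look up a child declaration by name inside a parent type (or a decl that
// resolves to one). For generic functions the search happens in the resolved
// return type, and `usingnamespace` decls (is_uns) are searched recursively.
// Returns the matching decl with find_subdecl_idx set, or null.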
let parentType = parentTypeOrDecl;
{
// Generic functions / resolving decls
if ("value" in parentType) {
const rv = resolveValue(parentType.value);
if ("type" in rv.expr) {
const t = getType(rv.expr.type);
parentType = t;
if (t.kind == typeKinds.Fn && t.generic_ret != null) {
let resolvedGenericRet = resolveValue({ expr: t.generic_ret });
 
if ("call" in resolvedGenericRet.expr) {
let call = zigAnalysis.calls[resolvedGenericRet.expr.call];
let resolvedFunc = resolveValue({ expr: call.func });
if (!("type" in resolvedFunc.expr)) return null;
let callee = getType(resolvedFunc.expr.type);
if (!callee.generic_ret) return null;
resolvedGenericRet = resolveValue({ expr: callee.generic_ret });
}
 
if ("type" in resolvedGenericRet.expr) {
parentType = getType(resolvedGenericRet.expr.type);
}
}
}
}
}
 
if (parentType.pubDecls) {
for (let i = 0; i < parentType.pubDecls.length; i += 1) {
let declIndex = parentType.pubDecls[i];
let childDecl = getDecl(declIndex);
if (childDecl.name === childName) {
childDecl.find_subdecl_idx = declIndex;
return childDecl;
} else if (childDecl.is_uns) {
let declValue = resolveValue(childDecl.value);
if (!("type" in declValue.expr)) continue;
let uns_container = getType(declValue.expr.type);
let uns_res = findSubDecl(uns_container, childName);
if (uns_res !== null) return uns_res;
}
}
}
 
if (parentType.privDecls) {
for (let i = 0; i < parentType.privDecls.length; i += 1) {
let declIndex = parentType.privDecls[i];
let childDecl = getDecl(declIndex);
if (childDecl.name === childName) {
childDecl.find_subdecl_idx = declIndex;
childDecl.is_private = true;
return childDecl;
} else if (childDecl.is_uns) {
let declValue = resolveValue(childDecl.value);
if (!("type" in declValue.expr)) continue;
let uns_container = getType(declValue.expr.type);
let uns_res = findSubDecl(uns_container, childName);
if (uns_res !== null) {
uns_res.is_private = true;
return uns_res;
}
}
}
}
 
return null;
}
 
function computeCanonicalModulePaths() {
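// Map each module index to the shortest chain of import names that reaches it
// from the root module.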
let list = new Array(zigAnalysis.modules.length);
// Now we try to find all the modules from root.
let rootMod = zigAnalysis.modules[zigAnalysis.rootMod];
// Breadth-first so that each path is as short as possible.
let stack = [
function renderNavFancy(cur_nav_decl, list) {
{
path: [],
mod: rootMod,
},
];
while (stack.length !== 0) {
let item = stack.shift();
for (let key in item.mod.table) {
let childModIndex = item.mod.table[key];
if (list[childModIndex] != null) continue;
let childMod = zigAnalysis.modules[childModIndex];
if (childMod == null) continue;
 
let newPath = item.path.concat([key]);
list[childModIndex] = newPath;
stack.push({
path: newPath,
mod: childMod,
});
}
}
 
for (let i = 0; i < zigAnalysis.modules.length; i += 1) {
const p = zigAnalysis.modules[i];
// TODO
// declSearchIndex.add(p.name, {moduleId: i});
}
return list;
}
 
function computeCanonDeclPaths() {
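// For every reachable public decl, record the module path, decl name chain,
// and decl index chain that identify it canonically; `usingnamespace` decls
// are expanded so their re-exported members get indexed under the parent.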
let list = new Array(zigAnalysis.decls.length);
canonTypeDecls = new Array(zigAnalysis.types.length);
 
for (let modI = 0; modI < zigAnalysis.modules.length; modI += 1) {
let mod = zigAnalysis.modules[modI];
let modNames = canonModPaths[modI];
if (modNames === undefined) continue;
 
let stack = [
{
declNames: [],
declIndexes: [],
type: getType(mod.main),
},
];
while (stack.length !== 0) {
let item = stack.shift();
 
if (isContainerType(item.type)) {
let t = item.type;
 
let len = t.pubDecls ? t.pubDecls.length : 0;
for (let declI = 0; declI < len; declI += 1) {
let declIndex = t.pubDecls[declI];
if (list[declIndex] != null) continue;
 
let decl = getDecl(declIndex);
 
if (decl.is_uns) {
let unsDeclList = [decl];
while (unsDeclList.length != 0) {
let unsDecl = unsDeclList.pop();
let unsDeclVal = resolveValue(unsDecl.value);
if (!("type" in unsDeclVal.expr)) continue;
let unsType = getType(unsDeclVal.expr.type);
if (!isContainerType(unsType)) continue;
let unsPubDeclLen = unsType.pubDecls ? unsType.pubDecls.length : 0;
for (let unsDeclI = 0; unsDeclI < unsPubDeclLen; unsDeclI += 1) {
let childDeclIndex = unsType.pubDecls[unsDeclI];
let childDecl = getDecl(childDeclIndex);
 
if (childDecl.is_uns) {
unsDeclList.push(childDecl);
} else {
addDeclToSearchResults(childDecl, childDeclIndex, modNames, item, list, stack);
}
}
}
} else {
addDeclToSearchResults(decl, declIndex, modNames, item, list, stack);
}
}
// First, walk backwards the decl parents within a file.
let decl_it = cur_nav_decl;
let prev_decl_it = null;
while (decl_it != null) {
list.push({
name: declIndexName(decl_it),
href: navLinkDeclIndex(decl_it),
});
prev_decl_it = decl_it;
decl_it = declParent(decl_it);
}
}
}
window.cdp = list;
return list;
}
 
function addDeclToSearchResults(decl, declIndex, modNames, item, list, stack) {
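// Record the canonical path for `decl` (and for any decls encountered while
// resolving its value), add its name to the search index, and push container
// types (including those returned by generic functions) onto the stack so
// their public decls get indexed too.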
let {value: declVal, seenDecls} = resolveValue(decl.value, true);
let declNames = item.declNames.concat([decl.name]);
let declIndexes = item.declIndexes.concat([declIndex]);
 
if (list[declIndex] != null) return;
list[declIndex] = {
modNames: modNames,
declNames: declNames,
declIndexes: declIndexes,
};
 
for (let sd of seenDecls) {
if (list[sd] != null) continue;
list[sd] = {
modNames: modNames,
declNames: declNames,
declIndexes: declIndexes,
};
}
 
// add to search index
{
declSearchIndex.add(decl.name, { declIndex });
}
 
 
if ("type" in declVal.expr) {
let value = getType(declVal.expr.type);
if (declCanRepresentTypeKind(value.kind)) {
canonTypeDecls[declVal.type] = declIndex;
}
 
if (isContainerType(value)) {
stack.push({
declNames: declNames,
declIndexes: declIndexes,
type: value,
});
}
 
// Generic function
if (typeIsGenericFn(declVal.expr.type)) {
let ret = resolveGenericRet(value);
if (ret != null && "type" in ret.expr) {
let generic_type = getType(ret.expr.type);
if (isContainerType(generic_type)) {
stack.push({
declNames: declNames,
declIndexes: declIndexes,
type: generic_type,
// Next, walk backwards the file path segments.
if (prev_decl_it != null) {
const file_path = fullyQualifiedName(prev_decl_it);
const parts = file_path.split(".");
parts.pop(); // skip last
for (;;) {
const href = navLinkFqn(parts.join("."));
const part = parts.pop();
if (!part) break;
list.push({
name: part,
href: href,
});
}
}
 
list.reverse();
}
resizeDomList(domListNav, list.length, '<li><a href="#"></a></li>');
 
for (let i = 0; i < list.length; i += 1) {
const liDom = domListNav.children[i];
const aDom = liDom.children[0];
aDom.textContent = list[i].name;
aDom.setAttribute('href', list[i].href);
if (i + 1 == list.length) {
aDom.classList.add("active");
} else {
aDom.classList.remove("active");
}
}
 
domSectNav.classList.remove("hidden");
}
}
 
function declLinkOrSrcLink(index) {
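// Return a navigation link for the decl at `index`. When no canonical path was
// precomputed, follow refPath/declRef/as expressions looking for a decl that
// has one, and fall back to a source file link if resolution fails.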
let match = getCanonDeclPath(index);
if (match) return navLink(match.modNames, match.declNames);
 
// could not find a precomputed decl path
const decl = getDecl(index);
// try to find a public decl by scanning declRefs and declPaths
let value = decl.value;
let i = 0;
while (true) {
i += 1;
if (i >= 10000) {
throw "declLinkOrSrcLink quota exceeded";
}
 
if ("refPath" in value.expr) {
value = { expr: value.expr.refPath[value.expr.refPath.length - 1] };
continue;
}
 
if ("declRef" in value.expr) {
let cp = canonDeclPaths[value.expr.declRef];
if (cp) return navLink(cp.modNames, cp.declNames);
value = getDecl(value.expr.declRef).value;
continue;
}
 
if ("as" in value.expr) {
value = {
typeRef: zigAnalysis.exprs[value.expr.as.typeRefArg],
expr: zigAnalysis.exprs[value.expr.as.exprArg],
};
continue;
}
 
// if we got here it means that we failed
// produce a link to source code instead
return sourceFileLink(decl);
 
function renderNotFound() {
domStatus.textContent = "Declaration not found.";
domStatus.classList.remove("hidden");
}
}
 
function getCanonDeclPath(index) {
if (canonDeclPaths == null) {
canonDeclPaths = computeCanonDeclPaths();
function navLinkFqn(full_name) {
return '#' + full_name;
}
return canonDeclPaths[index];
 
}
function navLinkDeclIndex(decl_index) {
return navLinkFqn(fullyQualifiedName(decl_index));
}
 
function getCanonTypeDecl(index) {
getCanonDeclPath(0);
//let ct = (canonTypeDecls);
return canonTypeDecls[index];
}
function resizeDomList(listDom, desiredLen, templateHtml) {
// add the missing dom entries
var i;
for (i = listDom.childElementCount; i < desiredLen; i += 1) {
listDom.insertAdjacentHTML('beforeend', templateHtml);
}
// remove extra dom entries
while (desiredLen < listDom.childElementCount) {
listDom.removeChild(listDom.lastChild);
}
}
 
function escapeHtml(text) {
return text.replace(/[&"<>]/g, function(m) {
return escapeHtmlReplacements[m];
});
}
function renderErrorSetPage(decl_index) {
renderNav(decl_index);
renderDeclHeading(decl_index);
 
function shortDesc(docs) {
const trimmed_docs = docs.trim();
let index = trimmed_docs.indexOf("\n\n");
let cut = false;
const errorSetList = declErrorSet(decl_index).slice();
renderErrorSet(decl_index, errorSetList);
}
 
if (index < 0 || index > 130) {
if (trimmed_docs.length > 130) {
index = 130;
cut = true;
function renderErrorSet(base_decl, errorSetList) {
if (errorSetList == null) {
domFnErrorsAnyError.classList.remove("hidden");
} else {
index = trimmed_docs.length;
resizeDomList(domListFnErrors, errorSetList.length, '<div></div>');
for (let i = 0; i < errorSetList.length; i += 1) {
const divDom = domListFnErrors.children[i];
const html = unwrapString(wasm_exports.error_html(base_decl, errorSetList[i]));
divDom.innerHTML = html;
}
domTableFnErrors.classList.remove("hidden");
}
domSectFnErrors.classList.remove("hidden");
}
 
function renderParams(decl_index) {
// Prevent params from being emptied next time wasm calls memory.grow.
const params = declParams(decl_index).slice();
if (params.length !== 0) {
resizeDomList(domListParams, params.length, '<div></div>');
for (let i = 0; i < params.length; i += 1) {
const divDom = domListParams.children[i];
divDom.innerHTML = unwrapString(wasm_exports.decl_param_html(decl_index, params[i]));
}
domSectParams.classList.remove("hidden");
}
}
 
let slice = trimmed_docs.slice(0, index);
if (cut) slice += "...";
return slice;
}
function renderTypeFunction(decl_index) {
renderNav(decl_index);
renderDeclHeading(decl_index);
renderTopLevelDocs(decl_index);
renderParams(decl_index);
renderDocTests(decl_index);
 
function shortDescMarkdown(docs) {
return markdown(shortDesc(docs));
}
const members = unwrapSlice32(wasm_exports.type_fn_members(decl_index, false)).slice();
const fields = unwrapSlice32(wasm_exports.type_fn_fields(decl_index)).slice();
if (members.length !== 0 || fields.length !== 0) {
renderNamespace(decl_index, members, fields);
} else {
domSourceText.innerHTML = declSourceHtml(decl_index);
domSectSource.classList.remove("hidden");
}
}
 
function parseGuides() {
for (let j = 0; j < zigAnalysis.guideSections.length; j += 1) {
const section = zigAnalysis.guideSections[j];
for (let i = 0; i < section.guides.length; i += 1) {
let reader = new commonmark.Parser({ smart: true });
const guide = section.guides[i];
function renderDocTests(decl_index) {
const doctest_html = declDoctestHtml(decl_index);
if (doctest_html.length > 0) {
domDocTestsCode.innerHTML = doctest_html;
domSectDocTests.classList.remove("hidden");
}
}
 
// Find the first text thing to use as a sidebar title
guide.title = null;
guide.toc = "";
function renderFunction(decl_index) {
renderNav(decl_index);
renderDeclHeading(decl_index);
renderTopLevelDocs(decl_index);
renderParams(decl_index);
renderDocTests(decl_index);
 
// Discover Title & TOC for this guide
{
let reader = new commonmark.Parser({smart: true});
let ast = reader.parse(guide.body);
let walker = ast.walker();
let heading_idx = 0;
let event, node, doc, last, last_ul;
while ((event = walker.next())) {
node = event.node;
if (event.entering) {
if (node.type === 'document') {
doc = node;
continue;
}
domFnProtoCode.innerHTML = fnProtoHtml(decl_index, false);
domFnProto.classList.remove("hidden");
 
if (node.next) {
walker.resumeAt(node.next, true);
 
const errorSetNode = fnErrorSet(decl_index);
if (errorSetNode != null) {
const base_decl = wasm_exports.fn_error_set_decl(decl_index, errorSetNode);
renderErrorSet(base_decl, errorSetNodeList(decl_index, errorSetNode));
}
 
domSourceText.innerHTML = declSourceHtml(decl_index);
domSectSource.classList.remove("hidden");
}
 
function renderGlobal(decl_index) {
renderNav(decl_index);
renderDeclHeading(decl_index);
 
const docs_html = declDocsHtmlShort(decl_index);
if (docs_html.length > 0) {
domTldDocs.innerHTML = docs_html;
domTldDocs.classList.remove("hidden");
}
 
domSourceText.innerHTML = declSourceHtml(decl_index);
domSectSource.classList.remove("hidden");
}
 
function renderNamespace(base_decl, members, fields) {
const typesList = [];
const namespacesList = [];
const errSetsList = [];
const fnsList = [];
const varsList = [];
const valsList = [];
 
member_loop: for (let i = 0; i < members.length; i += 1) {
let member = members[i];
const original = member;
while (true) {
const member_category = wasm_exports.categorize_decl(member, 0);
switch (member_category) {
case CAT_namespace:
if (wasm_exports.decl_field_count(member) > 0) {
typesList.push({original: original, member: member});
} else {
walker.resumeAt(node, false);
namespacesList.push({original: original, member: member});
}
node.unlink();
if (node.type === 'heading') {
if (node.level == 1) {
if (guide.title == null) {
let doc_node = new commonmark.Node("document", node.sourcepos);
while (node.firstChild) {
doc_node.appendChild(node.firstChild);
}
let writer = new commonmark.HtmlRenderer();
let result = writer.render(doc_node);
guide.title = result;
}
continue; // don't index H1
}
 
// turn heading node into list item & add link node to it
{
node._type = "link";
node.destination = NAV_MODES.GUIDES + guide.name + ":" + heading_idx;
heading_idx += 1;
let listItem = new commonmark.Node("item", node.sourcepos);
// TODO: strip links from inside node
listItem.appendChild(node);
listItem.level = node.level;
node = listItem;
}
if (last_ul) {
// are we inside or outside of it?
 
let target_ul = last_ul;
while(target_ul.level > node.level) {
target_ul = target_ul.parent;
}
while(target_ul.level < node.level) {
let ul_node = new commonmark.Node("list", node.sourcepos);
ul_node.level = target_ul.level + 1;
ul_node.listType = "bullet";
ul_node.listStart = null;
target_ul.appendChild(ul_node);
target_ul = ul_node;
}
 
target_ul.appendChild(node);
last_ul = target_ul;
} else {
let ul_node = new commonmark.Node("list", node.sourcepos);
ul_node.level = 2;
ul_node.listType = "bullet";
ul_node.listStart = null;
doc.prependChild(ul_node);
while (ul_node.level < node.level) {
let current_ul_node = new commonmark.Node("list", node.sourcepos);
current_ul_node.level = ul_node.level + 1;
current_ul_node.listType = "bullet";
current_ul_node.listStart = null;
ul_node.appendChild(current_ul_node);
ul_node = current_ul_node;
}
 
last_ul = ul_node;
 
ul_node.appendChild(node);
}
continue member_loop;
case CAT_namespace:
namespacesList.push({original: original, member: member});
continue member_loop;
case CAT_global_variable:
varsList.push(member);
continue member_loop;
case CAT_function:
fnsList.push(member);
continue member_loop;
case CAT_type:
case CAT_type_type:
case CAT_type_function:
typesList.push({original: original, member: member});
continue member_loop;
case CAT_error_set:
errSetsList.push({original: original, member: member});
continue member_loop;
case CAT_global_const:
case CAT_primitive:
valsList.push({original: original, member: member});
continue member_loop;
case CAT_alias:
member = wasm_exports.get_aliasee();
continue;
default:
throw new Error("unknown category: " + member_category);
}
}
}
let writer = new commonmark.HtmlRenderer();
let result = writer.render(ast);
guide.toc = result;
}
// Index this guide
{
// let walker = guide.ast.walker();
// let event, node;
// while ((event = walker.next())) {
// node = event.node;
// if (event.entering == true && node.type === 'text') {
// indexTextForGuide(j, i, node);
// }
// }
}
}
}
}
 
function indexTextForGuide(section_idx, guide_idx, node) {
const terms = node.literal.split(" ");
for (let i = 0; i < terms.length; i += 1) {
const t = terms[i];
if (!guidesSearchIndex[t]) guidesSearchIndex[t] = new Set();
node.guide = { section_idx, guide_idx };
guidesSearchIndex[t].add(node);
}
}
typesList.sort(byDeclIndexName2);
namespacesList.sort(byDeclIndexName2);
errSetsList.sort(byDeclIndexName2);
fnsList.sort(byDeclIndexName);
varsList.sort(byDeclIndexName);
valsList.sort(byDeclIndexName2);
 
if (typesList.length !== 0) {
resizeDomList(domListTypes, typesList.length, '<li><a href="#"></a></li>');
for (let i = 0; i < typesList.length; i += 1) {
const liDom = domListTypes.children[i];
const aDom = liDom.children[0];
const original_decl = typesList[i].original;
const decl = typesList[i].member;
aDom.textContent = declIndexName(original_decl);
aDom.setAttribute('href', navLinkDeclIndex(decl));
}
domSectTypes.classList.remove("hidden");
}
if (namespacesList.length !== 0) {
resizeDomList(domListNamespaces, namespacesList.length, '<li><a href="#"></a></li>');
for (let i = 0; i < namespacesList.length; i += 1) {
const liDom = domListNamespaces.children[i];
const aDom = liDom.children[0];
const original_decl = namespacesList[i].original;
const decl = namespacesList[i].member;
aDom.textContent = declIndexName(original_decl);
aDom.setAttribute('href', navLinkDeclIndex(decl));
}
domSectNamespaces.classList.remove("hidden");
}
 
function markdown(input, contextType) {
const parsed = new commonmark.Parser({ smart: true }).parse(input);
if (errSetsList.length !== 0) {
resizeDomList(domListErrSets, errSetsList.length, '<li><a href="#"></a></li>');
for (let i = 0; i < errSetsList.length; i += 1) {
const liDom = domListErrSets.children[i];
const aDom = liDom.children[0];
const original_decl = errSetsList[i].original;
const decl = errSetsList[i].member;
aDom.textContent = declIndexName(original_decl);
aDom.setAttribute('href', navLinkDeclIndex(decl));
}
domSectErrSets.classList.remove("hidden");
}
 
// Look for decl references in inline code (`ref`)
const walker = parsed.walker();
let event;
while ((event = walker.next())) {
const node = event.node;
if (node.type === "code") {
const declHash = detectDeclPath(node.literal, contextType);
if (declHash) {
const link = new commonmark.Node("link");
link.destination = declHash;
node.insertBefore(link);
link.appendChild(node);
}
if (fnsList.length !== 0) {
resizeDomList(domListFns, fnsList.length,
'<div><dt><code></code></dt><dd></dd></div>');
for (let i = 0; i < fnsList.length; i += 1) {
const decl = fnsList[i];
const divDom = domListFns.children[i];
 
const dtDom = divDom.children[0];
const ddDocs = divDom.children[1];
const protoCodeDom = dtDom.children[0];
 
protoCodeDom.innerHTML = fnProtoHtml(decl, true);
ddDocs.innerHTML = declDocsHtmlShort(decl);
}
domSectFns.classList.remove("hidden");
}
 
if (fields.length !== 0) {
resizeDomList(domListFields, fields.length, '<div></div>');
for (let i = 0; i < fields.length; i += 1) {
const divDom = domListFields.children[i];
divDom.innerHTML = unwrapString(wasm_exports.decl_field_html(base_decl, fields[i]));
}
domSectFields.classList.remove("hidden");
}
 
if (varsList.length !== 0) {
resizeDomList(domListGlobalVars, varsList.length,
'<tr><td><a href="#"></a></td><td></td><td></td></tr>');
for (let i = 0; i < varsList.length; i += 1) {
const decl = varsList[i];
const trDom = domListGlobalVars.children[i];
 
const tdName = trDom.children[0];
const tdNameA = tdName.children[0];
const tdType = trDom.children[1];
const tdDesc = trDom.children[2];
 
tdNameA.setAttribute('href', navLinkDeclIndex(decl));
tdNameA.textContent = declIndexName(decl);
 
tdType.innerHTML = declTypeHtml(decl);
tdDesc.innerHTML = declDocsHtmlShort(decl);
}
domSectGlobalVars.classList.remove("hidden");
}
 
if (valsList.length !== 0) {
resizeDomList(domListValues, valsList.length,
'<tr><td><a href="#"></a></td><td></td><td></td></tr>');
for (let i = 0; i < valsList.length; i += 1) {
const trDom = domListValues.children[i];
const tdName = trDom.children[0];
const tdNameA = tdName.children[0];
const tdType = trDom.children[1];
const tdDesc = trDom.children[2];
 
const original_decl = valsList[i].original;
const decl = valsList[i].member;
tdNameA.setAttribute('href', navLinkDeclIndex(decl));
tdNameA.textContent = declIndexName(original_decl);
 
tdType.innerHTML = declTypeHtml(decl);
tdDesc.innerHTML = declDocsHtmlShort(decl);
}
domSectValues.classList.remove("hidden");
}
}
 
return new commonmark.HtmlRenderer({ safe: true }).render(parsed);
 
}
 
 
 
// function detectDeclPath(text, context) {
// let result = "";
// let separator = ":";
// const components = text.split(".");
// let curDeclOrType = undefined;
 
// let curContext = context;
// let limit = 10000;
// while (curContext) {
// limit -= 1;
 
// if (limit == 0) {
// throw "too many iterations";
// }
 
// curDeclOrType = findSubDecl(curContext, components[0]);
 
// if (!curDeclOrType) {
// if (curContext.parent_container == null) break;
// curContext = getType(curContext.parent_container);
// continue;
// }
 
// if (curContext == context) {
// separator = '.';
// result = location.hash + separator + components[0];
// } else {
// // We had to go up, which means we need a new path!
// const canonPath = getCanonDeclPath(curDeclOrType.find_subdecl_idx);
// if (!canonPath) return;
 
// let lastModName = canonPath.modNames[canonPath.modNames.length - 1];
// let fullPath = lastModName + ":" + canonPath.declNames.join(".");
 
// separator = '.';
// result = "#A;" + fullPath;
// }
 
// break;
// }
 
// if (!curDeclOrType) {
// for (let i = 0; i < zigAnalysis.modules.length; i += 1) {
// const p = zigAnalysis.modules[i];
// if (p.name == components[0]) {
// curDeclOrType = getType(p.main);
// result += "#A;" + components[0];
// break;
// }
// }
// }
 
// if (!curDeclOrType) return null;
 
// for (let i = 1; i < components.length; i += 1) {
// curDeclOrType = findSubDecl(curDeclOrType, components[i]);
// if (!curDeclOrType) return null;
// result += separator + components[i];
// separator = '.';
// }
 
// return result;
 
// }
 
function activateSelectedResult() {
if (domSectSearchResults.classList.contains("hidden")) {
return;
function renderNamespacePage(decl_index) {
renderNav(decl_index);
renderDeclHeading(decl_index);
const members = namespaceMembers(decl_index, false).slice();
const fields = declFields(decl_index).slice();
renderNamespace(decl_index, members, fields);
}
 
const searchResults = domListSearchResults.getElementsByTagName("li");
let liDom = searchResults[curSearchIndex];
if (liDom == null && searchResults.length !== 0) {
liDom = searchResults[0];
function operatorCompare(a, b) {
if (a === b) {
return 0;
} else if (a < b) {
return -1;
} else {
return 1;
}
}
if (liDom != null) {
let aDom = liDom.children[0];
location.href = aDom.getAttribute("href");
curSearchIndex = -1;
 
function updateCurNav(location_hash) {
curNav.tag = 0;
curNav.decl = null;
curNav.path = null;
curNav.viewSourceHash = null;
curNavSearch = "";
 
if (location_hash.length > 1 && location_hash[0] === '#') {
const query = location_hash.substring(1);
const qpos = query.indexOf("?");
let nonSearchPart;
if (qpos === -1) {
nonSearchPart = query;
} else {
nonSearchPart = query.substring(0, qpos);
curNavSearch = decodeURIComponent(query.substring(qpos + 1));
}
 
if (nonSearchPart.length > 0) {
const source_mode = nonSearchPart.startsWith("src/");
if (source_mode) {
curNav.tag = 2;
curNav.path = nonSearchPart.substring(4);
} else {
curNav.tag = 1;
curNav.decl = findDecl(nonSearchPart);
}
}
}
}
domSearch.blur();
}
 
// hide the modal if it's visible or return to the previous result page and unfocus the search
function onEscape(ev) {
if (isModalVisible(domHelpModal)) {
hideModal(domHelpModal);
ev.preventDefault();
ev.stopPropagation();
} else if (isModalVisible(domPrefsModal)) {
hideModal(domPrefsModal);
ev.preventDefault();
ev.stopPropagation();
} else {
domSearch.value = "";
domSearch.blur();
domSearchPlaceholder.classList.remove("hidden");
curSearchIndex = -1;
ev.preventDefault();
ev.stopPropagation();
startSearch();
function onHashChange(state) {
history.replaceState({}, "");
navigate(location.hash);
if (state == null) window.scrollTo({top: 0});
}
}
 
 
function onSearchKeyDown(ev) {
switch (getKeyString(ev)) {
case "Enter":
// detect if this search changes anything
let terms1 = getSearchTerms();
startSearch();
updateCurNav();
let terms2 = getSearchTerms();
// we might have to wait for onHashChange to trigger
imFeelingLucky = terms1.join(" ") !== terms2.join(" ");
if (!imFeelingLucky) activateSelectedResult();
 
ev.preventDefault();
ev.stopPropagation();
return;
case "Esc":
onEscape(ev);
return
case "Up":
moveSearchCursor(-1);
ev.preventDefault();
ev.stopPropagation();
return;
case "Down":
// TODO: make the page scroll down if the search cursor is out of the screen
moveSearchCursor(1);
ev.preventDefault();
ev.stopPropagation();
return;
default:
// Search is triggered via an `input` event handler, not on arbitrary `keydown` events.
ev.stopPropagation();
return;
function onPopState(ev) {
onHashChange(ev.state);
}
}
 
let domDotsToggleTimeout = null;
function onSearchInput(ev) {
curSearchIndex = -1;
let replaced = domSearch.value.replaceAll(".", " ");
 
// Briefly flash the help text red if the user typed a dot.
if (replaced != domSearch.value) {
domSearchHelpSummary.classList.remove("normal");
if (domDotsToggleTimeout != null) {
clearTimeout(domDotsToggleTimeout);
domDotsToggleTimeout = null;
}
domDotsToggleTimeout = setTimeout(function () {
domSearchHelpSummary.classList.add("normal");
}, 1000);
}
replaced = replaced.replace(/ +/g, ' ');
if (replaced != domSearch.value) {
domSearch.value = replaced;
}
startAsyncSearch();
}
 
function moveSearchCursor(dir) {
const searchResults = domListSearchResults.getElementsByTagName("li");
if (
curSearchIndex < 0 ||
curSearchIndex >= searchResults.length
) {
if (dir > 0) {
curSearchIndex = -1 + dir;
} else if (dir < 0) {
curSearchIndex = searchResults.length + dir;
function navigate(location_hash) {
updateCurNav(location_hash);
if (domSearch.value !== curNavSearch) {
domSearch.value = curNavSearch;
}
render();
if (imFeelingLucky) {
imFeelingLucky = false;
activateSelectedResult();
}
} else {
curSearchIndex += dir;
}
if (curSearchIndex < 0) {
curSearchIndex = 0;
}
if (curSearchIndex >= searchResults.length) {
curSearchIndex = searchResults.length - 1;
}
renderSearchCursor();
}
 
function getKeyString(ev) {
let name;
let ignoreShift = false;
switch (ev.which) {
case 13:
name = "Enter";
break;
case 27:
name = "Esc";
break;
case 38:
name = "Up";
break;
case 40:
name = "Down";
break;
default:
ignoreShift = true;
name =
ev.key != null
? ev.key
: String.fromCharCode(ev.charCode || ev.keyCode);
function activateSelectedResult() {
if (domSectSearchResults.classList.contains("hidden")) {
return;
}
 
var liDom = domListSearchResults.children[curSearchIndex];
if (liDom == null && domListSearchResults.children.length !== 0) {
liDom = domListSearchResults.children[0];
}
if (liDom != null) {
var aDom = liDom.children[0];
location.href = aDom.getAttribute("href");
curSearchIndex = -1;
}
domSearch.blur();
}
if (!ignoreShift && ev.shiftKey) name = "Shift+" + name;
if (ev.altKey) name = "Alt+" + name;
if (ev.ctrlKey) name = "Ctrl+" + name;
return name;
}
 
function onWindowKeyDown(ev) {
switch (getKeyString(ev)) {
case "Esc":
onEscape(ev);
break;
case "/":
if (!getPrefSlashSearch()) break;
// fallthrough
case "s":
if (!isModalVisible(domHelpModal) && !isModalVisible(domPrefsModal)) {
if (ev.target == domSearch) break;
function onSearchKeyDown(ev) {
switch (ev.which) {
case 13:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
 
clearAsyncSearch();
imFeelingLucky = true;
location.hash = computeSearchHash();
 
domSearch.focus();
domSearch.select();
domDocs.scrollTo(0, 0);
ev.preventDefault();
ev.stopPropagation();
startAsyncSearch();
}
break;
case "?":
if (!canToggleModal) break;
 
if (isModalVisible(domPrefsModal)) {
hideModal(domPrefsModal);
}
 
// toggle the help modal
if (isModalVisible(domHelpModal)) {
hideModal(domHelpModal);
} else {
showModal(domHelpModal);
}
ev.preventDefault();
ev.stopPropagation();
break;
case "p":
if (!canToggleModal) break;
 
if (isModalVisible(domHelpModal)) {
hideModal(domHelpModal);
}
 
// toggle the preferences modal
if (isModalVisible(domPrefsModal)) {
hideModal(domPrefsModal);
} else {
showModal(domPrefsModal);
}
ev.preventDefault();
ev.stopPropagation();
}
}
 
function isModalVisible(modal) {
return !modal.classList.contains("hidden");
}
 
function showModal(modal) {
modal.classList.remove("hidden");
modal.style.left =
window.innerWidth / 2 - modal.clientWidth / 2 + "px";
modal.style.top =
window.innerHeight / 2 - modal.clientHeight / 2 + "px";
const firstInput = modal.querySelector("input");
if (firstInput) {
firstInput.focus();
} else {
modal.focus();
}
domSearch.blur();
domBanner.inert = true;
domMain.inert = true;
}
 
function hideModal(modal) {
modal.classList.add("hidden");
domBanner.inert = false;
domMain.inert = false;
modal.blur();
}
 
function clearAsyncSearch() {
if (searchTimer != null) {
clearTimeout(searchTimer);
searchTimer = null;
}
}
 
function startAsyncSearch() {
clearAsyncSearch();
searchTimer = setTimeout(startSearch, 100);
}
function startSearch() {
clearAsyncSearch();
let oldHash = location.hash;
let parts = oldHash.split("?");
let newPart2 = domSearch.value === "" ? "" : "?" + domSearch.value;
location.replace(parts.length === 1 ? oldHash + newPart2 : parts[0] + newPart2);
}
function getSearchTerms() {
let list = curNavSearch.trim().split(/[ \r\n\t]+/);
return list;
}
 
function renderSearchGuides() {
const searchTrimmed = false;
let ignoreCase = curNavSearch.toLowerCase() === curNavSearch;
 
let terms = getSearchTerms();
let matchedItems = new Set();
 
for (let i = 0; i < terms.length; i += 1) {
const nodes = guidesSearchIndex[terms[i]];
if (nodes) {
for (const n of nodes) {
matchedItems.add(n);
}
}
}
 
 
 
if (matchedItems.size !== 0) {
// Build up the list of search results
let matchedItemsHTML = "";
 
for (const node of matchedItems) {
const text = node.literal;
const href = "";
 
matchedItemsHTML += "<li><a href=\"" + href + "\">" + text + "</a></li>";
}
 
// Replace the search results using our newly constructed HTML string
domListSearchResults.innerHTML = matchedItemsHTML;
if (searchTrimmed) {
domSectSearchAllResultsLink.classList.remove("hidden");
}
renderSearchCursor();
 
domSectSearchResults.classList.remove("hidden");
} else {
domSectSearchNoResults.classList.remove("hidden");
}
}
 
function renderSearchAPI() {
domSectSearchResults.prepend(
domSearchHelp.parentElement.removeChild(domSearchHelp)
);
if (canonDeclPaths == null) {
canonDeclPaths = computeCanonDeclPaths();
}
let declSet = new Set();
let otherDeclSet = new Set(); // for low quality results
let declScores = {};
 
let ignoreCase = curNavSearch.toLowerCase() === curNavSearch;
let term_list = getSearchTerms();
for (let i = 0; i < term_list.length; i += 1) {
let term = term_list[i];
let result = declSearchIndex.search(term.toLowerCase());
if (result == null) {
domSectSearchNoResults.prepend(
domSearchHelp.parentElement.removeChild(domSearchHelp)
);
domSectSearchNoResults.classList.remove("hidden");
domSectSearchResults.classList.add("hidden");
return;
}
 
let termSet = new Set();
let termOtherSet = new Set();
 
for (let list of [result.full, result.partial]) {
for (let r of list) {
const d = r.declIndex;
const decl = getDecl(d);
const canonPath = getCanonDeclPath(d);
 
// collect unconditionally for the first term
if (i == 0) {
declSet.add(d);
} else {
// path intersection for subsequent terms
let found = false;
for (let p of canonPath.declIndexes) {
if (declSet.has(p)) {
found = true;
break;
}
}
if (!found) {
otherDeclSet.add(d);
} else {
termSet.add(d);
}
}
 
if (declScores[d] == undefined) declScores[d] = 0;
 
// scores (lower is better)
let decl_name = decl.name;
if (ignoreCase) decl_name = decl_name.toLowerCase();
 
// shallow paths are preferable
const path_depth = canonPath.declNames.length * 50;
// matching the start of a decl name is good
const match_from_start = decl_name.startsWith(term) ? -term.length * (2 - ignoreCase) : (decl_name.length - term.length) + 1;
// being a perfect match is good
const is_full_match = (decl_name === term) ? -decl_name.length * (1 - ignoreCase) : Math.abs(decl_name.length - term.length);
// matching the end of a decl name is good
const matches_the_end = decl_name.endsWith(term) ? -term.length * (1 - ignoreCase) : (decl_name.length - term.length) + 1;
// explicitly penalizing scream case decls
const decl_is_scream_case = decl.name.toUpperCase() != decl.name ? 0 : decl.name.length;
 
const score = path_depth
+ match_from_start
+ is_full_match
+ matches_the_end
+ decl_is_scream_case;
 
declScores[d] += score;
}
}
if (i != 0) {
for (let d of declSet) {
if (termSet.has(d)) continue;
let found = false;
for (let p of getCanonDeclPath(d).declIndexes) {
if (termSet.has(p) || otherDeclSet.has(p)) {
found = true;
break;
}
}
if (found) {
declScores[d] = declScores[d] / term_list.length;
}
 
termOtherSet.add(d);
}
declSet = termSet;
for (let d of termOtherSet) {
otherDeclSet.add(d);
}
 
}
}
 
let matchedItems = {
high_quality: [],
low_quality: [],
};
for (let idx of declSet) {
matchedItems.high_quality.push({ points: declScores[idx], declIndex: idx })
}
for (let idx of otherDeclSet) {
matchedItems.low_quality.push({ points: declScores[idx], declIndex: idx })
}
 
matchedItems.high_quality.sort(function(a, b) {
let cmp = operatorCompare(a.points, b.points);
return cmp;
});
matchedItems.low_quality.sort(function(a, b) {
let cmp = operatorCompare(a.points, b.points);
return cmp;
});
 
// Build up the list of search results
let matchedItemsHTML = "";
 
for (let list of [matchedItems.high_quality, matchedItems.low_quality]) {
if (list == matchedItems.low_quality && list.length > 0) {
matchedItemsHTML += "<hr class='other-results'>"
}
for (let result of list) {
const points = result.points;
const match = result.declIndex;
 
let canonPath = getCanonDeclPath(match);
if (canonPath == null) continue;
 
let lastModName = canonPath.modNames[canonPath.modNames.length - 1];
let text = lastModName + "." + canonPath.declNames.join(".");
 
 
const href = navLink(canonPath.modNames, canonPath.declNames);
 
matchedItemsHTML += "<li><a href=\"" + href + "\">" + text + "</a></li>";
}
}
 
// Replace the search results using our newly constructed HTML string
domListSearchResults.innerHTML = matchedItemsHTML;
renderSearchCursor();
 
domSectSearchResults.classList.remove("hidden");
}
 
 
function renderSearchCursor() {
const searchResults = domListSearchResults.getElementsByTagName("li");
for (let i = 0; i < searchResults.length; i += 1) {
let liDom = searchResults[i];
if (curSearchIndex === i) {
liDom.classList.add("selected");
} else {
liDom.classList.remove("selected");
}
}
}
 
function scrollGuidesTop(ev) {
document.getElementById("activeGuide").children[0].scrollIntoView({
behavior: "smooth",
});
ev.preventDefault();
ev.stopPropagation();
}
document.scrollGuidesTop = scrollGuidesTop;
 
function scrollToHeading(id, alreadyThere) {
// Don't scroll if the current location has a scrolling history.
if (scrollHistory[location.hash]) return;
 
const c = document.getElementById(id);
if (c && alreadyThere) {
requestAnimationFrame(() => c.scrollIntoView({behavior: "smooth"}));
} else {
requestAnimationFrame(() => c.scrollIntoView());
}
return;
}
// function indexNodesToCalls() {
// let map = {};
// for (let i = 0; i < zigAnalysis.calls.length; i += 1) {
// let call = zigAnalysis.calls[i];
// let fn = zigAnalysis.fns[call.fn];
// if (map[fn.src] == null) {
// map[fn.src] = [i];
// } else {
// map[fn.src].push(i);
// }
// }
// return map;
// }
case 27:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
 
function byNameProperty(a, b) {
return operatorCompare(a.name, b.name);
}
domSearch.value = "";
domSearch.blur();
curSearchIndex = -1;
ev.preventDefault();
ev.stopPropagation();
startSearch();
return;
case 38:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
 
moveSearchCursor(-1);
ev.preventDefault();
ev.stopPropagation();
return;
case 40:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
 
function getDecl(idx) {
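// zigAnalysis stores each decl as a positional array; unpack it into a named
// object. getAstNode, getFile, and getType below do the same for their tables.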
const decl = zigAnalysis.decls[idx];
return {
name: decl[0],
kind: decl[1],
src: decl[2],
value: decl[3],
decltest: decl[4],
is_uns: decl[5],
parent_container: decl[6],
};
}
 
function getAstNode(idx) {
const ast = zigAnalysis.astNodes[idx];
return {
file: ast[0],
line: ast[1],
col: ast[2],
name: ast[3],
code: ast[4],
docs: ast[5],
fields: ast[6],
comptime: ast[7],
};
}
 
function getFile(idx) {
const file = zigAnalysis.files[idx];
return {
name: file[0],
modIndex: file[1],
};
}
 
function getType(idx) {
const ty = zigAnalysis.types[idx];
switch (ty[0]) {
default:
throw "unhandled type kind!";
case typeKinds.Unanalyzed:
throw "unanalyzed type!";
case typeKinds.Type:
case typeKinds.Void:
case typeKinds.Bool:
case typeKinds.NoReturn:
case typeKinds.Int:
case typeKinds.Float:
return { kind: ty[0], name: ty[1] };
case typeKinds.Pointer:
return {
kind: ty[0],
size: ty[1],
child: ty[2],
sentinel: ty[3],
align: ty[4],
address_space: ty[5],
bit_start: ty[6],
host_size: ty[7],
is_ref: ty[8],
is_allowzero: ty[9],
is_mutable: ty[10],
is_volatile: ty[11],
has_sentinel: ty[12],
has_align: ty[13],
has_addrspace: ty[14],
has_bit_range: ty[15],
};
case typeKinds.Array:
return {
kind: ty[0],
len: ty[1],
child: ty[2],
sentinel: ty[3],
};
case typeKinds.Struct:
return {
kind: ty[0],
name: ty[1],
src: ty[2],
privDecls: ty[3],
pubDecls: ty[4],
field_types: ty[5],
field_defaults: ty[6],
backing_int: ty[7],
is_tuple: ty[8],
line_number: ty[9],
parent_container: ty[10],
layout: ty[11],
};
case typeKinds.ComptimeExpr:
case typeKinds.ComptimeFloat:
case typeKinds.ComptimeInt:
case typeKinds.Undefined:
case typeKinds.Null:
return { kind: ty[0], name: ty[1] };
case typeKinds.Optional:
return {
kind: ty[0],
name: ty[1],
child: ty[2],
};
case typeKinds.ErrorUnion:
return {
kind: ty[0],
lhs: ty[1],
rhs: ty[2],
};
case typeKinds.InferredErrorUnion:
return {
kind: ty[0],
payload: ty[1],
};
case typeKinds.ErrorSet:
return {
kind: ty[0],
name: ty[1],
fields: ty[2],
};
case typeKinds.Enum:
return {
kind: ty[0],
name: ty[1],
src: ty[2],
privDecls: ty[3],
pubDecls: ty[4],
tag: ty[5],
values: ty[6],
nonexhaustive: ty[7],
parent_container: ty[8],
};
case typeKinds.Union:
return {
kind: ty[0],
name: ty[1],
src: ty[2],
privDecls: ty[3],
pubDecls: ty[4],
field_types: ty[5],
tag: ty[6],
auto_tag: ty[7],
parent_container: ty[8],
layout: ty[9],
};
case typeKinds.Fn:
return {
kind: ty[0],
name: ty[1],
src: ty[2],
ret: ty[3],
generic_ret: ty[4],
params: ty[5],
lib_name: ty[6],
is_var_args: ty[7],
is_inferred_error: ty[8],
has_lib_name: ty[9],
has_cc: ty[10],
cc: ty[11],
align: ty[12],
has_align: ty[13],
is_test: ty[14],
is_extern: ty[15],
};
case typeKinds.Opaque:
return {
kind: ty[0],
name: ty[1],
src: ty[2],
privDecls: ty[3],
pubDecls: ty[4],
parent_container: ty[5],
};
case typeKinds.Frame:
case typeKinds.AnyFrame:
case typeKinds.Vector:
case typeKinds.EnumLiteral:
return { kind: ty[0], name: ty[1] };
}
}
 
function getLocalStorage() {
if ("localStorage" in window) {
try {
return window.localStorage;
} catch (ignored) {
// localStorage may be disabled (SecurityError)
moveSearchCursor(1);
ev.preventDefault();
ev.stopPropagation();
return;
default:
ev.stopPropagation(); // prevent keyboard shortcuts
return;
}
}
// If localStorage isn't available, persist preferences only for the current session
const sessionPrefs = {};
return {
getItem(key) {
return key in sessionPrefs ? sessionPrefs[key] : null;
},
setItem(key, value) {
sessionPrefs[key] = String(value);
},
};
}
 
function loadPrefs() {
const storedPrefSlashSearch = prefs.getItem("slashSearch");
if (storedPrefSlashSearch === null) {
// Slash search defaults to enabled for all browsers except Firefox
setPrefSlashSearch(navigator.userAgent.indexOf("Firefox") === -1);
} else {
setPrefSlashSearch(storedPrefSlashSearch === "true");
function onSearchChange(ev) {
curSearchIndex = -1;
startAsyncSearch();
}
}
 
function getPrefSlashSearch() {
return prefs.getItem("slashSearch") === "true";
}
function moveSearchCursor(dir) {
if (curSearchIndex < 0 || curSearchIndex >= domListSearchResults.children.length) {
if (dir > 0) {
curSearchIndex = -1 + dir;
} else if (dir < 0) {
curSearchIndex = domListSearchResults.children.length + dir;
}
} else {
curSearchIndex += dir;
}
if (curSearchIndex < 0) {
curSearchIndex = 0;
}
if (curSearchIndex >= domListSearchResults.children.length) {
curSearchIndex = domListSearchResults.children.length - 1;
}
renderSearchCursor();
}
 
function setPrefSlashSearch(enabled) {
prefs.setItem("slashSearch", String(enabled));
domPrefSlashSearch.checked = enabled;
const searchKeys = enabled ? "<kbd>/</kbd> or <kbd>s</kbd>" : "<kbd>s</kbd>";
domSearchKeys.innerHTML = searchKeys;
domSearchPlaceholderText.innerHTML = searchKeys + " to search, <kbd>?</kbd> for more options";
}
function onWindowKeyDown(ev) {
switch (ev.which) {
case 27:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
if (!domHelpModal.classList.contains("hidden")) {
domHelpModal.classList.add("hidden");
ev.preventDefault();
ev.stopPropagation();
}
break;
case 83:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
domSearch.focus();
domSearch.select();
ev.preventDefault();
ev.stopPropagation();
startAsyncSearch();
break;
case 85:
if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
ev.preventDefault();
ev.stopPropagation();
navigateToSource();
break;
case 191:
if (!ev.shiftKey || ev.ctrlKey || ev.altKey) return;
ev.preventDefault();
ev.stopPropagation();
showHelpModal();
break;
}
}
 
function showHelpModal() {
domHelpModal.classList.remove("hidden");
domHelpModal.style.left = (window.innerWidth / 2 - domHelpModal.clientWidth / 2) + "px";
domHelpModal.style.top = (window.innerHeight / 2 - domHelpModal.clientHeight / 2) + "px";
domHelpModal.focus();
}
 
function navigateToSource() {
if (curNav.viewSourceHash != null) {
location.hash = curNav.viewSourceHash;
}
}
 
function clearAsyncSearch() {
if (searchTimer != null) {
clearTimeout(searchTimer);
searchTimer = null;
}
}
 
function startAsyncSearch() {
clearAsyncSearch();
searchTimer = setTimeout(startSearch, 10);
}
function computeSearchHash() {
// How location.hash works:
// 1. http://example.com/ => ""
// 2. http://example.com/# => ""
// 3. http://example.com/#foo => "#foo"
// wat
const oldWatHash = location.hash;
const oldHash = oldWatHash.startsWith("#") ? oldWatHash : "#" + oldWatHash;
const parts = oldHash.split("?");
const newPart2 = (domSearch.value === "") ? "" : ("?" + domSearch.value);
return parts[0] + newPart2;
}
function startSearch() {
clearAsyncSearch();
navigate(computeSearchHash());
}
function renderSearch() {
renderNav(curNav.decl);
 
const ignoreCase = (curNavSearch.toLowerCase() === curNavSearch);
const results = executeQuery(curNavSearch, ignoreCase);
 
if (results.length !== 0) {
resizeDomList(domListSearchResults, results.length, '<li><a href="#"></a></li>');
 
for (let i = 0; i < results.length; i += 1) {
const liDom = domListSearchResults.children[i];
const aDom = liDom.children[0];
const match = results[i];
const full_name = fullyQualifiedName(match);
aDom.textContent = full_name;
aDom.setAttribute('href', navLinkFqn(full_name));
}
renderSearchCursor();
 
domSectSearchResults.classList.remove("hidden");
} else {
domSectSearchNoResults.classList.remove("hidden");
}
}
 
function renderSearchCursor() {
for (let i = 0; i < domListSearchResults.children.length; i += 1) {
var liDom = domListSearchResults.children[i];
if (curSearchIndex === i) {
liDom.classList.add("selected");
} else {
liDom.classList.remove("selected");
}
}
}
 
function updateModuleList() {
moduleList.length = 0;
for (let i = 0;; i += 1) {
const name = unwrapString(wasm_exports.module_name(i));
if (name.length == 0) break;
moduleList.push(name);
}
}
 
function byDeclIndexName(a, b) {
const a_name = declIndexName(a);
const b_name = declIndexName(b);
return operatorCompare(a_name, b_name);
}
 
function byDeclIndexName2(a, b) {
const a_name = declIndexName(a.original);
const b_name = declIndexName(b.original);
return operatorCompare(a_name, b_name);
}
 
function decodeString(ptr, len) {
if (len === 0) return "";
return text_decoder.decode(new Uint8Array(wasm_exports.memory.buffer, ptr, len));
}
 
function unwrapString(bigint) {
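// Wasm exports return strings as a packed u64: pointer in the low 32 bits,
// length in the high 32 bits.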
const ptr = Number(bigint & 0xffffffffn);
const len = Number(bigint >> 32n);
return decodeString(ptr, len);
}
 
function declTypeHtml(decl_index) {
return unwrapString(wasm_exports.decl_type_html(decl_index));
}
 
function declDocsHtmlShort(decl_index) {
return unwrapString(wasm_exports.decl_docs_html(decl_index, true));
}
 
function fullyQualifiedName(decl_index) {
return unwrapString(wasm_exports.decl_fqn(decl_index));
}
 
function declIndexName(decl_index) {
return unwrapString(wasm_exports.decl_name(decl_index));
}
 
function declSourceHtml(decl_index) {
return unwrapString(wasm_exports.decl_source_html(decl_index));
}
 
function declDoctestHtml(decl_index) {
return unwrapString(wasm_exports.decl_doctest_html(decl_index));
}
 
function fnProtoHtml(decl_index, linkify_fn_name) {
return unwrapString(wasm_exports.decl_fn_proto_html(decl_index, linkify_fn_name));
}
 
function setQueryString(s) {
const jsArray = text_encoder.encode(s);
const len = jsArray.length;
const ptr = wasm_exports.query_begin(len);
const wasmArray = new Uint8Array(wasm_exports.memory.buffer, ptr, len);
wasmArray.set(jsArray);
}
 
function executeQuery(query_string, ignore_case) {
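// Run the current query inside the wasm module. The returned pointer refers to
// a length-prefixed array: the first u32 is the match count, followed by that
// many decl indexes.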
setQueryString(query_string);
const ptr = wasm_exports.query_exec(ignore_case);
const head = new Uint32Array(wasm_exports.memory.buffer, ptr, 1);
const len = head[0];
return new Uint32Array(wasm_exports.memory.buffer, ptr + 4, len);
}
 
function namespaceMembers(decl_index, include_private) {
return unwrapSlice32(wasm_exports.namespace_members(decl_index, include_private));
}
 
function declFields(decl_index) {
return unwrapSlice32(wasm_exports.decl_fields(decl_index));
}
 
function declParams(decl_index) {
return unwrapSlice32(wasm_exports.decl_params(decl_index));
}
 
function declErrorSet(decl_index) {
return unwrapSlice64(wasm_exports.decl_error_set(decl_index));
}
 
function errorSetNodeList(base_decl, err_set_node) {
return unwrapSlice64(wasm_exports.error_set_node_list(base_decl, err_set_node));
}
 
function unwrapSlice32(bigint) {
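// Slices use the same packed pointer/length encoding as strings and are viewed
// directly over wasm memory, so callers that keep the data across wasm calls
// must copy it with .slice().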
const ptr = Number(bigint & 0xffffffffn);
const len = Number(bigint >> 32n);
if (len === 0) return [];
return new Uint32Array(wasm_exports.memory.buffer, ptr, len);
}
 
function unwrapSlice64(bigint) {
const ptr = Number(bigint & 0xffffffffn);
const len = Number(bigint >> 32n);
if (len === 0) return [];
return new BigUint64Array(wasm_exports.memory.buffer, ptr, len);
}
 
function findDecl(fqn) {
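// Resolve a fully qualified name to a decl index via the wasm module, or null
// if the name does not resolve.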
setInputString(fqn);
const result = wasm_exports.find_decl();
if (result === -1) return null;
return result;
}
 
function findFileRoot(path) {
setInputString(path);
const result = wasm_exports.find_file_root();
if (result === -1) return null;
return result;
}
 
function declParent(decl_index) {
const result = wasm_exports.decl_parent(decl_index);
if (result === -1) return null;
return result;
}
 
function fnErrorSet(decl_index) {
const result = wasm_exports.fn_error_set(decl_index);
if (result === 0) return null;
return result;
}
 
function setInputString(s) {
const jsArray = text_encoder.encode(s);
const len = jsArray.length;
const ptr = wasm_exports.set_input_string(len);
const wasmArray = new Uint8Array(wasm_exports.memory.buffer, ptr, len);
wasmArray.set(jsArray);
}
})();
 
function toggleExpand(event) {
const parent = event.target.parentElement;
parent.toggleAttribute("open");
 
if (!parent.open && parent.getBoundingClientRect().top < 0) {
parent.parentElement.parentElement.scrollIntoView(true);
}
}
 
function RadixTree() {
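// Radix (compressed prefix) tree used as the decl search index. Names are
// inserted lowercased, along with suffixes starting at '_'/'.' separators and
// camelCase boundaries, so a query like "map" can also match "AutoHashMap".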
this.root = null;
 
RadixTree.prototype.search = function(query) {
return this.root.search(query);
}
 
RadixTree.prototype.add = function(declName, value) {
if (this.root == null) {
this.root = new Node(declName.toLowerCase(), null, [value]);
} else {
this.root.add(declName.toLowerCase(), value);
}
 
const not_scream_case = declName.toUpperCase() != declName;
let found_separator = false;
for (let i = 1; i < declName.length; i += 1) {
if (declName[i] == '_' || declName[i] == '.') {
found_separator = true;
continue;
}
 
 
if (found_separator || (declName[i].toLowerCase() !== declName[i])) {
if (declName.length > i + 1
&& declName[i + 1].toLowerCase() != declName[i + 1]) continue;
let suffix = declName.slice(i);
this.root.add(suffix.toLowerCase(), value);
found_separator = false;
}
}
}
 
function Node(labels, next, values) {
this.labels = labels;
this.next = next;
this.values = values;
}
 
Node.prototype.isCompressed = function() {
return !Array.isArray(this.next);
}
 
Node.prototype.search = function(word) {
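// Walk the tree following `word`. Returns null on a mismatch, otherwise
// { full, partial }: values stored exactly at the end of the word, plus values
// collected from the whole subtree below that point.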
let full_matches = [];
let partial_matches = [];
let subtree_root = null;
 
let cn = this;
char_loop: for (let i = 0; i < word.length;) {
if (cn.isCompressed()) {
for (let j = 0; j < cn.labels.length; j += 1) {
let current_idx = i + j;
 
if (current_idx == word.length) {
partial_matches = cn.values;
subtree_root = cn.next;
break char_loop;
}
 
if (word[current_idx] != cn.labels[j]) return null;
}
 
// the full label matched
let new_idx = i + cn.labels.length;
if (new_idx == word.length) {
full_matches = cn.values;
subtree_root = cn.next;
break char_loop;
}
 
 
i = new_idx;
cn = cn.next;
continue;
} else {
for (let j = 0; j < cn.labels.length; j += 1) {
if (word[i] == cn.labels[j]) {
if (i == word.length - 1) {
full_matches = cn.values[j];
subtree_root = cn.next[j];
break char_loop;
}
 
let next = cn.next[j];
if (next == null) return null;
cn = next;
i += 1;
continue char_loop;
}
}
 
// didn't find a match
return null;
}
}
 
// Match was found, let's collect all other
// partial matches from the subtree
let stack = [subtree_root];
let node;
while (node = stack.pop()) {
if (node.isCompressed()) {
partial_matches = partial_matches.concat(node.values);
if (node.next != null) {
stack.push(node.next);
}
} else {
for (let v of node.values) {
partial_matches = partial_matches.concat(v);
}
 
for (let n of node.next) {
if (n != null) stack.push(n);
}
}
}
 
return { full: full_matches, partial: partial_matches };
}
 
Node.prototype.add = function(word, value) {
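// Insert `word` into the tree, splitting a compressed node where the word
// diverges from its label and turning it into a split node whose labels,
// next, and values are indexed per character.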
let cn = this;
char_loop: for (let i = 0; i < word.length;) {
if (cn.isCompressed()) {
for (let j = 0; j < cn.labels.length; j += 1) {
let current_idx = i + j;
 
if (current_idx == word.length) {
if (j < cn.labels.length - 1) {
let node = new Node(cn.labels.slice(j), cn.next, cn.values);
cn.labels = cn.labels.slice(0, j);
cn.next = node;
cn.values = [];
}
cn.values.push(value);
return;
}
 
if (word[current_idx] == cn.labels[j]) continue;
 
// if we're here, a mismatch was found
if (j != cn.labels.length - 1) {
// create a suffix node
const label_suffix = cn.labels.slice(j + 1);
let node = new Node(label_suffix, cn.next, [...cn.values]);
cn.next = node;
cn.values = [];
}
 
// turn current node into a split node
let node = null;
let word_values = [];
if (current_idx == word.length - 1) {
// mismatch happened in the last character of word
// meaning that the current node should hold its value
word_values.push(value);
} else {
node = new Node(word.slice(current_idx + 1), null, [value]);
}
 
cn.labels = cn.labels[j] + word[current_idx];
cn.next = [cn.next, node];
cn.values = [cn.values, word_values];
 
if (j != 0) {
// current node must be turned into a prefix node
let splitNode = new Node(cn.labels, cn.next, cn.values);
cn.labels = word.slice(i, current_idx);
cn.next = splitNode;
cn.values = [];
}
 
return;
}
// label matched fully with word, are there any more chars?
const new_idx = i + cn.labels.length;
if (new_idx == word.length) {
cn.values.push(value);
return;
} else {
if (cn.next == null) {
let node = new Node(word.slice(new_idx), null, [value]);
cn.next = node;
return;
} else {
cn = cn.next;
i = new_idx;
continue;
}
}
} else { // node is not compressed
let letter = word[i];
for (let j = 0; j < cn.labels.length; j += 1) {
if (letter == cn.labels[j]) {
if (i == word.length - 1) {
cn.values[j].push(value);
return;
}
if (cn.next[j] == null) {
let node = new Node(word.slice(i + 1), null, [value]);
cn.next[j] = node;
return;
} else {
cn = cn.next[j];
i += 1;
continue char_loop;
}
}
}
 
// if we're here we didn't find a match
cn.labels += letter;
if (i == word.length - 1) {
cn.next.push(null);
cn.values.push([value]);
} else {
let node = new Node(word.slice(i + 1), null, [value]);
cn.next.push(node);
cn.values.push([]);
}
return;
}
}
}
}
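// Usage sketch (illustration only; nothing in this file calls it): the tree
// indexes each declaration under its lowercased full name plus every suffix
// starting at a '_'/'.' separator or a camelCase boundary, so a query such as
// "list" can surface a decl named "ArrayList". `someDeclIndex` below is a
// placeholder value.
//
// var tree = new RadixTree();
// tree.add("ArrayList", someDeclIndex);
// var results = tree.search("list"); // null, or { full: [...], partial: [...] }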
 
 
// Turns arbitrary text into a URL/anchor-friendly slug, e.g.
// slugify("Hello, World_Example") === "hello-world-example".
function slugify(str) {
return str.toLowerCase().trim().replace(/[^\w\s-]/g, '').replace(/[\s_-]+/g, '-').replace(/^-+|-+$/g, '');
}
 
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,226 @@
ast_node: Ast.Node.Index,
file: Walk.File.Index,
/// The decl whose namespace contains this decl.
parent: Index,
 
pub const ExtraInfo = struct {
is_pub: bool,
name: []const u8,
/// This might not be a doc_comment token, in which case there are no doc comments.
first_doc_comment: Ast.TokenIndex,
};
 
pub const Index = enum(u32) {
none = std.math.maxInt(u32),
_,
 
pub fn get(i: Index) *Decl {
return &Walk.decls.items[@intFromEnum(i)];
}
};
 
pub fn is_pub(d: *const Decl) bool {
return d.extra_info().is_pub;
}
 
pub fn extra_info(d: *const Decl) ExtraInfo {
const ast = d.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const node_tags = ast.nodes.items(.tag);
switch (node_tags[d.ast_node]) {
.root => return .{
.name = "",
.is_pub = true,
.first_doc_comment = if (token_tags[0] == .container_doc_comment)
0
else
token_tags.len - 1,
},
 
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
const name_token = var_decl.ast.mut_token + 1;
assert(token_tags[name_token] == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
.is_pub = var_decl.visib_token != null,
.first_doc_comment = findFirstDocComment(ast, var_decl.firstToken()),
};
},
 
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fullFnProto(&buf, d.ast_node).?;
const name_token = fn_proto.name_token.?;
assert(token_tags[name_token] == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
.is_pub = fn_proto.visib_token != null,
.first_doc_comment = findFirstDocComment(ast, fn_proto.firstToken()),
};
},
 
else => |t| {
log.debug("hit '{s}'", .{@tagName(t)});
unreachable;
},
}
}
 
pub fn value_node(d: *const Decl) ?Ast.Node.Index {
const ast = d.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const token_tags = ast.tokens.items(.tag);
return switch (node_tags[d.ast_node]) {
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
.root,
=> d.ast_node,
 
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
if (token_tags[var_decl.ast.mut_token] == .keyword_const)
return var_decl.ast.init_node;
 
return null;
},
 
else => null,
};
}
 
pub fn categorize(decl: *const Decl) Walk.Category {
return decl.file.categorize_decl(decl.ast_node);
}
 
/// Looks up a direct child of `decl` by name.
pub fn get_child(decl: *const Decl, name: []const u8) ?Decl.Index {
switch (decl.categorize()) {
.alias => |aliasee| return aliasee.get().get_child(name),
.namespace => |node| {
const file = decl.file.get();
const scope = file.scopes.get(node) orelse return null;
const child_node = scope.get_child(name) orelse return null;
return file.node_decls.get(child_node);
},
else => return null,
}
}
 
/// Looks up a decl by name accessible in `decl`'s namespace.
pub fn lookup(decl: *const Decl, name: []const u8) ?Decl.Index {
const namespace_node = switch (decl.categorize()) {
.namespace => |node| node,
else => decl.parent.get().ast_node,
};
const file = decl.file.get();
const scope = file.scopes.get(namespace_node) orelse return null;
const resolved_node = scope.lookup(&file.ast, name) orelse return null;
return file.node_decls.get(resolved_node);
}
 
/// Appends the fully qualified name to `out`.
pub fn fqn(decl: *const Decl, out: *std.ArrayListUnmanaged(u8)) Oom!void {
try decl.append_path(out);
if (decl.parent != .none) {
try append_parent_ns(out, decl.parent);
try out.appendSlice(gpa, decl.extra_info().name);
} else {
out.items.len -= 1; // remove the trailing '.'
}
}
 
pub fn reset_with_path(decl: *const Decl, list: *std.ArrayListUnmanaged(u8)) Oom!void {
list.clearRetainingCapacity();
try append_path(decl, list);
}
 
pub fn append_path(decl: *const Decl, list: *std.ArrayListUnmanaged(u8)) Oom!void {
const start = list.items.len;
// Prefer the module name alias.
for (Walk.modules.keys(), Walk.modules.values()) |pkg_name, pkg_file| {
if (pkg_file == decl.file) {
try list.ensureUnusedCapacity(gpa, pkg_name.len + 1);
list.appendSliceAssumeCapacity(pkg_name);
list.appendAssumeCapacity('.');
return;
}
}
 
const file_path = decl.file.path();
try list.ensureUnusedCapacity(gpa, file_path.len + 1);
list.appendSliceAssumeCapacity(file_path);
for (list.items[start..]) |*byte| switch (byte.*) {
'/' => byte.* = '.',
else => continue,
};
if (std.mem.endsWith(u8, list.items, ".zig")) {
list.items.len -= 3; // chop off "zig" but keep the '.' as the next separator
} else {
list.appendAssumeCapacity('.');
}
}
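// Illustration (hypothetical paths): a file registered as the "std" module
// contributes the prefix "std.", while an unregistered file at
// "std/fs/File.zig" contributes "std.fs.File.": every '/' becomes '.', the
// trailing "zig" is chopped off, and the '.' that preceded it doubles as the
// separator for the names appended by fqn and append_parent_ns.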
 
pub fn append_parent_ns(list: *std.ArrayListUnmanaged(u8), parent: Decl.Index) Oom!void {
assert(parent != .none);
const decl = parent.get();
if (decl.parent != .none) {
try append_parent_ns(list, decl.parent);
try list.appendSlice(gpa, decl.extra_info().name);
try list.append(gpa, '.');
}
}
 
pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.TokenIndex {
const token_tags = ast.tokens.items(.tag);
var it = token;
while (it > 0) {
it -= 1;
if (token_tags[it] != .doc_comment) {
return it + 1;
}
}
return it;
}
 
/// Successively looks up each component.
pub fn find(search_string: []const u8) Decl.Index {
var path_components = std.mem.splitScalar(u8, search_string, '.');
const file = Walk.modules.get(path_components.first()) orelse return .none;
var current_decl_index = file.findRootDecl();
while (path_components.next()) |component| {
while (true) switch (current_decl_index.get().categorize()) {
.alias => |aliasee| current_decl_index = aliasee,
else => break,
};
current_decl_index = current_decl_index.get().get_child(component) orelse return .none;
}
return current_decl_index;
}
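// Sketch: find("std.builtin.Type") starts at the root decl of the module
// registered as "std", then resolves the children "builtin" and "Type" in
// turn, following aliases (re-exports such as `pub const builtin =
// @import("builtin.zig")`) before each child lookup; any component that
// fails to resolve makes the whole lookup return .none.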
 
const Decl = @This();
const std = @import("std");
const Ast = std.zig.Ast;
const Walk = @import("Walk.zig");
const gpa = std.heap.wasm_allocator;
const assert = std.debug.assert;
const log = std.log;
const Oom = error{OutOfMemory};
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,1122 @@
//! Find and annotate identifiers with links to their declarations.
pub var files: std.StringArrayHashMapUnmanaged(File) = .{};
pub var decls: std.ArrayListUnmanaged(Decl) = .{};
pub var modules: std.StringArrayHashMapUnmanaged(File.Index) = .{};
 
file: File.Index,
 
/// keep in sync with "CAT_" constants in main.js
pub const Category = union(enum(u8)) {
namespace: Ast.Node.Index,
global_variable: Ast.Node.Index,
/// A function that has not been detected as returning a type.
function: Ast.Node.Index,
primitive: Ast.Node.Index,
error_set: Ast.Node.Index,
global_const: Ast.Node.Index,
alias: Decl.Index,
/// A primitive identifier that is also a type.
type,
/// Specifically it is the literal `type`.
type_type,
/// A function that returns a type.
type_function: Ast.Node.Index,
 
pub const Tag = @typeInfo(Category).Union.tag_type.?;
};
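// Rough examples of how declarations land in these categories (the decl
// names are hypothetical): `pub var next_id: u32 = 0` is a .global_variable,
// `pub fn ArrayList(comptime T: type) type` is a .type_function because its
// return type categorizes as type_type, `pub const io = @import("io.zig")`
// is an .alias to that file's root decl (when the import resolves), and
// `pub const Error = error{OutOfMemory}` is an .error_set.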
 
pub const File = struct {
ast: Ast,
/// Maps identifiers to the declarations they point to.
ident_decls: std.AutoArrayHashMapUnmanaged(Ast.TokenIndex, Ast.Node.Index) = .{},
/// Maps field access identifiers to the containing field access node.
token_parents: std.AutoArrayHashMapUnmanaged(Ast.TokenIndex, Ast.Node.Index) = .{},
/// Maps declarations to their global index.
node_decls: std.AutoArrayHashMapUnmanaged(Ast.Node.Index, Decl.Index) = .{},
/// Maps function declarations to doctests.
doctests: std.AutoArrayHashMapUnmanaged(Ast.Node.Index, Ast.Node.Index) = .{},
/// root node => its namespace scope
/// struct/union/enum/opaque decl node => its namespace scope
/// local var decl node => its local variable scope
scopes: std.AutoArrayHashMapUnmanaged(Ast.Node.Index, *Scope) = .{},
 
pub fn lookup_token(file: *File, token: Ast.TokenIndex) Decl.Index {
const decl_node = file.ident_decls.get(token) orelse return .none;
return file.node_decls.get(decl_node) orelse return .none;
}
 
pub fn field_count(file: *const File, node: Ast.Node.Index) u32 {
const scope = file.scopes.get(node) orelse return 0;
if (scope.tag != .namespace) return 0;
const namespace = @fieldParentPtr(Scope.Namespace, "base", scope);
return namespace.field_count;
}
 
pub const Index = enum(u32) {
_,
 
fn add_decl(i: Index, node: Ast.Node.Index, parent_decl: Decl.Index) Oom!Decl.Index {
try decls.append(gpa, .{
.ast_node = node,
.file = i,
.parent = parent_decl,
});
const decl_index: Decl.Index = @enumFromInt(decls.items.len - 1);
try i.get().node_decls.put(gpa, node, decl_index);
return decl_index;
}
 
pub fn get(i: File.Index) *File {
return &files.values()[@intFromEnum(i)];
}
 
pub fn get_ast(i: File.Index) *Ast {
return &i.get().ast;
}
 
pub fn path(i: File.Index) []const u8 {
return files.keys()[@intFromEnum(i)];
}
 
pub fn findRootDecl(file_index: File.Index) Decl.Index {
return file_index.get().node_decls.values()[0];
}
 
pub fn categorize_decl(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
const token_tags = ast.tokens.items(.tag);
switch (node_tags[node]) {
.root => return .{ .namespace = node },
 
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(node).?;
if (token_tags[var_decl.ast.mut_token] == .keyword_var)
return .{ .global_variable = node };
 
return categorize_expr(file_index, var_decl.ast.init_node);
},
 
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullFnProto(&buf, node).?;
return categorize_func(file_index, node, full);
},
 
else => unreachable,
}
}
 
pub fn categorize_func(
file_index: File.Index,
node: Ast.Node.Index,
full: Ast.full.FnProto,
) Category {
return switch (categorize_expr(file_index, full.ast.return_type)) {
.namespace, .error_set, .type_type => .{ .type_function = node },
else => .{ .function = node },
};
}
 
pub fn categorize_expr_deep(file_index: File.Index, node: Ast.Node.Index) Category {
return switch (categorize_expr(file_index, node)) {
.alias => |aliasee| aliasee.get().categorize(),
else => |result| result,
};
}
 
pub fn categorize_expr(file_index: File.Index, node: Ast.Node.Index) Category {
const file = file_index.get();
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
//log.debug("categorize_expr tag {s}", .{@tagName(node_tags[node])});
return switch (node_tags[node]) {
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
=> .{ .namespace = node },
 
.error_set_decl,
.merge_error_sets,
=> .{ .error_set = node },
 
.identifier => {
const name_token = ast.nodes.items(.main_token)[node];
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, "type"))
return .type_type;
 
if (isPrimitiveNonType(ident_name))
return .{ .primitive = node };
 
if (std.zig.primitives.isPrimitive(ident_name))
return .type;
 
if (file.ident_decls.get(name_token)) |decl_node| {
const decl_index = file.node_decls.get(decl_node) orelse .none;
if (decl_index != .none) return .{ .alias = decl_index };
return categorize_decl(file_index, decl_node);
}
 
return .{ .global_const = node };
},
 
.field_access => {
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
const field_name = ast.tokenSlice(field_ident);
 
switch (categorize_expr(file_index, object_node)) {
.alias => |aliasee| if (aliasee.get().get_child(field_name)) |decl_index| {
return .{ .alias = decl_index };
},
else => {},
}
 
return .{ .global_const = node };
},
 
.builtin_call_two, .builtin_call_two_comma => {
if (node_datas[node].lhs == 0) {
const params = [_]Ast.Node.Index{};
return categorize_builtin_call(file_index, node, &params);
} else if (node_datas[node].rhs == 0) {
const params = [_]Ast.Node.Index{node_datas[node].lhs};
return categorize_builtin_call(file_index, node, &params);
} else {
const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
return categorize_builtin_call(file_index, node, &params);
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
return categorize_builtin_call(file_index, node, params);
},
 
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
.call,
.call_comma,
.async_call,
.async_call_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
return categorize_call(file_index, node, ast.fullCall(&buf, node).?);
},
 
.if_simple,
.@"if",
=> {
const if_full = ast.fullIf(node).?;
if (if_full.ast.else_expr != 0) {
const then_cat = categorize_expr_deep(file_index, if_full.ast.then_expr);
const else_cat = categorize_expr_deep(file_index, if_full.ast.else_expr);
if (then_cat == .type_type and else_cat == .type_type) {
return .type_type;
} else if (then_cat == .error_set and else_cat == .error_set) {
return .{ .error_set = node };
} else if (then_cat == .type or else_cat == .type or
then_cat == .namespace or else_cat == .namespace or
then_cat == .error_set or else_cat == .error_set or
then_cat == .type_function or else_cat == .type_function)
{
return .type;
}
}
return .{ .global_const = node };
},
 
.@"switch", .switch_comma => return categorize_switch(file_index, node),
 
.optional_type,
.array_type,
.array_type_sentinel,
.ptr_type_aligned,
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
.anyframe_type,
=> .type,
 
else => .{ .global_const = node },
};
}
 
fn categorize_call(
file_index: File.Index,
node: Ast.Node.Index,
call: Ast.full.Call,
) Category {
return switch (categorize_expr(file_index, call.ast.fn_expr)) {
.type_function => .type,
.alias => |aliasee| categorize_decl_as_callee(aliasee, node),
else => .{ .global_const = node },
};
}
 
fn categorize_decl_as_callee(decl_index: Decl.Index, call_node: Ast.Node.Index) Category {
return switch (decl_index.get().categorize()) {
.type_function => .type,
.alias => |aliasee| categorize_decl_as_callee(aliasee, call_node),
else => .{ .global_const = call_node },
};
}
 
fn categorize_builtin_call(
file_index: File.Index,
node: Ast.Node.Index,
params: []const Ast.Node.Index,
) Category {
const ast = file_index.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@import")) {
const str_lit_token = main_tokens[params[0]];
const str_bytes = ast.tokenSlice(str_lit_token);
const file_path = std.zig.string_literal.parseAlloc(gpa, str_bytes) catch @panic("OOM");
defer gpa.free(file_path);
if (modules.get(file_path)) |imported_file_index| {
return .{ .alias = File.Index.findRootDecl(imported_file_index) };
}
const base_path = file_index.path();
const resolved_path = std.fs.path.resolvePosix(gpa, &.{
base_path, "..", file_path,
}) catch @panic("OOM");
defer gpa.free(resolved_path);
log.debug("from '{s}' @import '{s}' resolved='{s}'", .{
base_path, file_path, resolved_path,
});
if (files.getIndex(resolved_path)) |imported_file_index| {
return .{ .alias = File.Index.findRootDecl(@enumFromInt(imported_file_index)) };
} else {
log.warn("import target '{s}' did not resolve to any file", .{resolved_path});
}
} else if (std.mem.eql(u8, builtin_name, "@This")) {
if (file_index.get().node_decls.get(node)) |decl_index| {
return .{ .alias = decl_index };
} else {
log.warn("@This() is missing link to Decl.Index", .{});
}
}
 
return .{ .global_const = node };
}
 
fn categorize_switch(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
const node_datas = ast.nodes.items(.data);
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
const case_nodes = ast.extra_data[extra.start..extra.end];
var all_type_type = true;
var all_error_set = true;
var any_type = false;
if (case_nodes.len == 0) return .{ .global_const = node };
for (case_nodes) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
switch (categorize_expr_deep(file_index, case.ast.target_expr)) {
.type_type => {
any_type = true;
all_error_set = false;
},
.error_set => {
any_type = true;
all_type_type = false;
},
.type, .namespace, .type_function => {
any_type = true;
all_error_set = false;
all_type_type = false;
},
else => {
all_error_set = false;
all_type_type = false;
},
}
}
if (all_type_type) return .type_type;
if (all_error_set) return .{ .error_set = node };
if (any_type) return .type;
return .{ .global_const = node };
}
}
};
};
 
pub const ModuleIndex = enum(u32) {
_,
};
 
pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
const ast = try parse(bytes);
const file_index: File.Index = @enumFromInt(files.entries.len);
try files.put(gpa, file_name, .{ .ast = ast });
 
if (ast.errors.len > 0) {
log.err("can't index '{s}' because it has syntax errors", .{file_index.path()});
return file_index;
}
 
var w: Walk = .{
.file = file_index,
};
const scope = try gpa.create(Scope);
scope.* = .{ .tag = .top };
 
const decl_index = try file_index.add_decl(0, .none);
try struct_decl(&w, scope, decl_index, 0, ast.containerDeclRoot());
 
const file = file_index.get();
shrinkToFit(&file.ident_decls);
shrinkToFit(&file.token_parents);
shrinkToFit(&file.node_decls);
shrinkToFit(&file.doctests);
shrinkToFit(&file.scopes);
 
return file_index;
}
 
fn parse(source: []u8) Oom!Ast {
// Require every source file to end with a newline so that Zig's tokenizer
// can continue to require null termination and the Autodoc implementation
// can avoid copying source bytes from the decompressed tar file buffer.
const adjusted_source: [:0]const u8 = s: {
if (source.len == 0)
break :s "";
 
assert(source[source.len - 1] == '\n');
source[source.len - 1] = 0;
break :s source[0 .. source.len - 1 :0];
};
 
return Ast.parse(gpa, adjusted_source, .zig);
}
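// Illustration: a buffer holding "const x = 1;\n" is rewritten in place to
// "const x = 1;" followed by a 0 byte (the 0 overwrites the final '\n'), so
// the tokenizer gets the sentinel-terminated slice it requires without the
// bytes ever being copied out of the tar buffer.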
 
pub const Scope = struct {
tag: Tag,
 
const Tag = enum { top, local, namespace };
 
const Local = struct {
base: Scope = .{ .tag = .local },
parent: *Scope,
var_node: Ast.Node.Index,
};
 
const Namespace = struct {
base: Scope = .{ .tag = .namespace },
parent: *Scope,
names: std.StringArrayHashMapUnmanaged(Ast.Node.Index) = .{},
doctests: std.StringArrayHashMapUnmanaged(Ast.Node.Index) = .{},
decl_index: Decl.Index,
field_count: u32,
};
 
fn getNamespaceDecl(start_scope: *Scope) Decl.Index {
var it: *Scope = start_scope;
while (true) switch (it.tag) {
.top => unreachable,
.local => {
const local = @fieldParentPtr(Local, "base", it);
it = local.parent;
},
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", it);
return namespace.decl_index;
},
};
}
 
pub fn get_child(scope: *Scope, name: []const u8) ?Ast.Node.Index {
switch (scope.tag) {
.top, .local => return null,
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", scope);
return namespace.names.get(name);
},
}
}
 
pub fn lookup(start_scope: *Scope, ast: *const Ast, name: []const u8) ?Ast.Node.Index {
const main_tokens = ast.nodes.items(.main_token);
var it: *Scope = start_scope;
while (true) switch (it.tag) {
.top => break,
.local => {
const local = @fieldParentPtr(Local, "base", it);
const name_token = main_tokens[local.var_node] + 1;
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, name)) {
return local.var_node;
}
it = local.parent;
},
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", it);
if (namespace.names.get(name)) |node| {
return node;
}
it = namespace.parent;
},
};
return null;
}
};
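// Resolution sketch: looking up an identifier from a scope chain such as
// local var -> local var -> namespace -> top first checks each Local's
// variable name (the token right after its var/const keyword), then each
// enclosing Namespace's member table, and gives up at the top scope.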
 
fn struct_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
node: Ast.Node.Index,
container_decl: Ast.full.ContainerDecl,
) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
 
const namespace = try gpa.create(Scope.Namespace);
namespace.* = .{
.parent = scope,
.decl_index = parent_decl,
.field_count = 0,
};
try w.file.get().scopes.putNoClobber(gpa, node, &namespace.base);
try w.scanDecls(namespace, container_decl.ast.members);
 
for (container_decl.ast.members) |member| switch (node_tags[member]) {
.container_field_init,
.container_field_align,
.container_field,
=> try w.container_field(&namespace.base, parent_decl, ast.fullContainerField(member).?),
 
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> {
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullFnProto(&buf, member).?;
const fn_name_token = full.ast.fn_token + 1;
const fn_name = ast.tokenSlice(fn_name_token);
if (namespace.doctests.get(fn_name)) |doctest_node| {
try w.file.get().doctests.put(gpa, member, doctest_node);
}
const decl_index = try w.file.add_decl(member, parent_decl);
const body = if (node_tags[member] == .fn_decl) node_datas[member].rhs else 0;
try w.fn_decl(&namespace.base, decl_index, body, full);
},
 
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const decl_index = try w.file.add_decl(member, parent_decl);
try w.global_var_decl(&namespace.base, decl_index, ast.fullVarDecl(member).?);
},
 
.@"comptime",
.@"usingnamespace",
=> try w.expr(&namespace.base, parent_decl, node_datas[member].lhs),
 
.test_decl => try w.expr(&namespace.base, parent_decl, node_datas[member].rhs),
 
else => unreachable,
};
}
 
fn comptime_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
full: Ast.full.VarDecl,
) Oom!void {
try w.expr(scope, parent_decl, full.ast.type_node);
try w.maybe_expr(scope, parent_decl, full.ast.align_node);
try w.maybe_expr(scope, parent_decl, full.ast.addrspace_node);
try w.maybe_expr(scope, parent_decl, full.ast.section_node);
try w.expr(scope, parent_decl, full.ast.init_node);
}
 
fn global_var_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
full: Ast.full.VarDecl,
) Oom!void {
try w.maybe_expr(scope, parent_decl, full.ast.type_node);
try w.maybe_expr(scope, parent_decl, full.ast.align_node);
try w.maybe_expr(scope, parent_decl, full.ast.addrspace_node);
try w.maybe_expr(scope, parent_decl, full.ast.section_node);
try w.maybe_expr(scope, parent_decl, full.ast.init_node);
}
 
fn container_field(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
full: Ast.full.ContainerField,
) Oom!void {
try w.maybe_expr(scope, parent_decl, full.ast.type_expr);
try w.maybe_expr(scope, parent_decl, full.ast.align_expr);
try w.maybe_expr(scope, parent_decl, full.ast.value_expr);
}
 
fn fn_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
body: Ast.Node.Index,
full: Ast.full.FnProto,
) Oom!void {
for (full.ast.params) |param| {
try expr(w, scope, parent_decl, param);
}
try expr(w, scope, parent_decl, full.ast.return_type);
try maybe_expr(w, scope, parent_decl, full.ast.align_expr);
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_expr);
try maybe_expr(w, scope, parent_decl, full.ast.section_expr);
try maybe_expr(w, scope, parent_decl, full.ast.callconv_expr);
try maybe_expr(w, scope, parent_decl, body);
}
 
fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
if (node != 0) return expr(w, scope, parent_decl, node);
}
 
fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
assert(node != 0);
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
switch (node_tags[node]) {
.root => unreachable, // Top-level declaration.
.@"usingnamespace" => unreachable, // Top-level declaration.
.test_decl => unreachable, // Top-level declaration.
.container_field_init => unreachable, // Top-level declaration.
.container_field_align => unreachable, // Top-level declaration.
.container_field => unreachable, // Top-level declaration.
.fn_decl => unreachable, // Top-level declaration.
 
.global_var_decl => unreachable, // Handled in `block`.
.local_var_decl => unreachable, // Handled in `block`.
.simple_var_decl => unreachable, // Handled in `block`.
.aligned_var_decl => unreachable, // Handled in `block`.
.@"defer" => unreachable, // Handled in `block`.
.@"errdefer" => unreachable, // Handled in `block`.
 
.switch_case => unreachable, // Handled in `switchExpr`.
.switch_case_inline => unreachable, // Handled in `switchExpr`.
.switch_case_one => unreachable, // Handled in `switchExpr`.
.switch_case_inline_one => unreachable, // Handled in `switchExpr`.
 
.asm_output => unreachable, // Handled in `asmExpr`.
.asm_input => unreachable, // Handled in `asmExpr`.
 
.for_range => unreachable, // Handled in `forExpr`.
 
.assign,
.assign_shl,
.assign_shl_sat,
.assign_shr,
.assign_bit_and,
.assign_bit_or,
.assign_bit_xor,
.assign_div,
.assign_sub,
.assign_sub_wrap,
.assign_sub_sat,
.assign_mod,
.assign_add,
.assign_add_wrap,
.assign_add_sat,
.assign_mul,
.assign_mul_wrap,
.assign_mul_sat,
.shl,
.shr,
.add,
.add_wrap,
.add_sat,
.sub,
.sub_wrap,
.sub_sat,
.mul,
.mul_wrap,
.mul_sat,
.div,
.mod,
.shl_sat,
 
.bit_and,
.bit_or,
.bit_xor,
.bang_equal,
.equal_equal,
.greater_than,
.greater_or_equal,
.less_than,
.less_or_equal,
.array_cat,
 
.array_mult,
.error_union,
.merge_error_sets,
.bool_and,
.bool_or,
.@"catch",
.@"orelse",
.array_type,
.array_access,
.switch_range,
=> {
try expr(w, scope, parent_decl, node_datas[node].lhs);
try expr(w, scope, parent_decl, node_datas[node].rhs);
},
 
.assign_destructure => {
const extra_index = node_datas[node].lhs;
const lhs_count = ast.extra_data[extra_index];
const lhs_nodes: []const Ast.Node.Index = @ptrCast(ast.extra_data[extra_index + 1 ..][0..lhs_count]);
const rhs = node_datas[node].rhs;
for (lhs_nodes) |lhs_node| try expr(w, scope, parent_decl, lhs_node);
_ = try expr(w, scope, parent_decl, rhs);
},
 
.bool_not,
.bit_not,
.negation,
.negation_wrap,
.@"return",
.deref,
.address_of,
.optional_type,
.unwrap_optional,
.grouped_expression,
.@"comptime",
.@"nosuspend",
.@"suspend",
.@"await",
.@"resume",
.@"try",
=> try maybe_expr(w, scope, parent_decl, node_datas[node].lhs),
 
.anyframe_type,
.@"break",
=> try maybe_expr(w, scope, parent_decl, node_datas[node].rhs),
 
.identifier => {
const ident_token = main_tokens[node];
const ident_name = ast.tokenSlice(ident_token);
if (scope.lookup(ast, ident_name)) |var_node| {
try w.file.get().ident_decls.put(gpa, ident_token, var_node);
}
},
.field_access => {
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
try w.file.get().token_parents.put(gpa, field_ident, node);
// This will populate the left-most field object if it is an
// identifier, allowing rendering code to piece together the link.
try expr(w, scope, parent_decl, object_node);
},
 
.string_literal,
.multiline_string_literal,
.number_literal,
.unreachable_literal,
.enum_literal,
.error_value,
.anyframe_literal,
.@"continue",
.char_literal,
.error_set_decl,
=> {},
 
.asm_simple,
.@"asm",
=> {
const full = ast.fullAsm(node).?;
for (full.ast.items) |n| {
// There is a missing call here to expr() for .asm_input and
// .asm_output nodes.
_ = n;
}
try expr(w, scope, parent_decl, full.ast.template);
},
 
.builtin_call_two, .builtin_call_two_comma => {
if (node_datas[node].lhs == 0) {
const params = [_]Ast.Node.Index{};
return builtin_call(w, scope, parent_decl, node, &params);
} else if (node_datas[node].rhs == 0) {
const params = [_]Ast.Node.Index{node_datas[node].lhs};
return builtin_call(w, scope, parent_decl, node, &params);
} else {
const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
return builtin_call(w, scope, parent_decl, node, &params);
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
return builtin_call(w, scope, parent_decl, node, params);
},
 
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
.call,
.call_comma,
.async_call,
.async_call_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullCall(&buf, node).?;
try expr(w, scope, parent_decl, full.ast.fn_expr);
for (full.ast.params) |param| {
try expr(w, scope, parent_decl, param);
}
},
 
.if_simple,
.@"if",
=> {
const full = ast.fullIf(node).?;
try expr(w, scope, parent_decl, full.ast.cond_expr);
try expr(w, scope, parent_decl, full.ast.then_expr);
try maybe_expr(w, scope, parent_decl, full.ast.else_expr);
},
 
.while_simple,
.while_cont,
.@"while",
=> {
try while_expr(w, scope, parent_decl, ast.fullWhile(node).?);
},
 
.for_simple, .@"for" => {
const full = ast.fullFor(node).?;
for (full.ast.inputs) |input| {
if (node_tags[input] == .for_range) {
try expr(w, scope, parent_decl, node_datas[input].lhs);
try maybe_expr(w, scope, parent_decl, node_datas[input].rhs);
} else {
try expr(w, scope, parent_decl, input);
}
}
try expr(w, scope, parent_decl, full.ast.then_expr);
try maybe_expr(w, scope, parent_decl, full.ast.else_expr);
},
 
.slice => return slice(w, scope, parent_decl, ast.slice(node)),
.slice_open => return slice(w, scope, parent_decl, ast.sliceOpen(node)),
.slice_sentinel => return slice(w, scope, parent_decl, ast.sliceSentinel(node)),
 
.block_two, .block_two_semicolon => {
const statements = [2]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
if (node_datas[node].lhs == 0) {
return block(w, scope, parent_decl, statements[0..0]);
} else if (node_datas[node].rhs == 0) {
return block(w, scope, parent_decl, statements[0..1]);
} else {
return block(w, scope, parent_decl, statements[0..2]);
}
},
.block, .block_semicolon => {
const statements = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
return block(w, scope, parent_decl, statements);
},
 
.ptr_type_aligned,
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
=> {
const full = ast.fullPtrType(node).?;
try maybe_expr(w, scope, parent_decl, full.ast.align_node);
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_node);
try maybe_expr(w, scope, parent_decl, full.ast.sentinel);
try maybe_expr(w, scope, parent_decl, full.ast.bit_range_start);
try maybe_expr(w, scope, parent_decl, full.ast.bit_range_end);
try expr(w, scope, parent_decl, full.ast.child_type);
},
 
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
return struct_decl(w, scope, parent_decl, node, ast.fullContainerDecl(&buf, node).?);
},
 
.array_type_sentinel => {
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
try expr(w, scope, parent_decl, node_datas[node].lhs);
try expr(w, scope, parent_decl, extra.elem_type);
try expr(w, scope, parent_decl, extra.sentinel);
},
.@"switch", .switch_comma => {
const operand_node = node_datas[node].lhs;
try expr(w, scope, parent_decl, operand_node);
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
const case_nodes = ast.extra_data[extra.start..extra.end];
for (case_nodes) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
for (case.ast.values) |value_node| {
try expr(w, scope, parent_decl, value_node);
}
try expr(w, scope, parent_decl, case.ast.target_expr);
}
},
 
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init,
.array_init_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const full = ast.fullArrayInit(&buf, node).?;
try maybe_expr(w, scope, parent_decl, full.ast.type_expr);
for (full.ast.elements) |elem| {
try expr(w, scope, parent_decl, elem);
}
},
 
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init,
.struct_init_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const full = ast.fullStructInit(&buf, node).?;
try maybe_expr(w, scope, parent_decl, full.ast.type_expr);
for (full.ast.fields) |field| {
try expr(w, scope, parent_decl, field);
}
},
 
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
=> {
var buf: [1]Ast.Node.Index = undefined;
return fn_decl(w, scope, parent_decl, 0, ast.fullFnProto(&buf, node).?);
},
}
}
 
fn slice(w: *Walk, scope: *Scope, parent_decl: Decl.Index, full: Ast.full.Slice) Oom!void {
try expr(w, scope, parent_decl, full.ast.sliced);
try expr(w, scope, parent_decl, full.ast.start);
try maybe_expr(w, scope, parent_decl, full.ast.end);
try maybe_expr(w, scope, parent_decl, full.ast.sentinel);
}
 
fn builtin_call(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
node: Ast.Node.Index,
params: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@This")) {
try w.file.get().node_decls.put(gpa, node, scope.getNamespaceDecl());
}
 
for (params) |param| {
try expr(w, scope, parent_decl, param);
}
}
 
fn block(
w: *Walk,
parent_scope: *Scope,
parent_decl: Decl.Index,
statements: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
 
var scope = parent_scope;
 
for (statements) |node| {
switch (node_tags[node]) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const full = ast.fullVarDecl(node).?;
try global_var_decl(w, scope, parent_decl, full);
const local = try gpa.create(Scope.Local);
local.* = .{
.parent = scope,
.var_node = node,
};
try w.file.get().scopes.putNoClobber(gpa, node, &local.base);
scope = &local.base;
},
 
.assign_destructure => {
log.debug("walk assign_destructure not implemented yet", .{});
},
 
.grouped_expression => try expr(w, scope, parent_decl, node_datas[node].lhs),
 
.@"defer",
.@"errdefer",
=> try expr(w, scope, parent_decl, node_datas[node].rhs),
 
else => try expr(w, scope, parent_decl, node),
}
}
}
 
fn while_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, full: Ast.full.While) Oom!void {
try expr(w, scope, parent_decl, full.ast.cond_expr);
try maybe_expr(w, scope, parent_decl, full.ast.cont_expr);
try expr(w, scope, parent_decl, full.ast.then_expr);
try maybe_expr(w, scope, parent_decl, full.ast.else_expr);
}
 
fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.Index) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
const node_datas = ast.nodes.items(.data);
 
for (members) |member_node| {
const name_token = switch (node_tags[member_node]) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> main_tokens[member_node] + 1,
 
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
.fn_decl,
=> blk: {
const ident = main_tokens[member_node] + 1;
if (token_tags[ident] != .identifier) continue;
break :blk ident;
},
 
.test_decl => {
const ident_token = node_datas[member_node].lhs;
const is_doctest = token_tags[ident_token] == .identifier;
if (is_doctest) {
const token_bytes = ast.tokenSlice(ident_token);
try namespace.doctests.put(gpa, token_bytes, member_node);
}
continue;
},
 
.container_field_init,
.container_field_align,
.container_field,
=> {
namespace.field_count += 1;
continue;
},
 
else => continue,
};
 
const token_bytes = ast.tokenSlice(name_token);
try namespace.names.put(gpa, token_bytes, member_node);
}
}
 
pub fn isPrimitiveNonType(name: []const u8) bool {
return std.mem.eql(u8, name, "undefined") or
std.mem.eql(u8, name, "null") or
std.mem.eql(u8, name, "true") or
std.mem.eql(u8, name, "false");
}
 
//test {
// const gpa = std.testing.allocator;
//
// var arena_instance = std.heap.ArenaAllocator.init(gpa);
// defer arena_instance.deinit();
// const arena = arena_instance.allocator();
//
// // example test command:
// // zig test --dep input.zig -Mroot=src/Walk.zig -Minput.zig=/home/andy/dev/zig/lib/std/fs/File/zig
// var ast = try Ast.parse(gpa, @embedFile("input.zig"), .zig);
// defer ast.deinit(gpa);
//
// var w: Walk = .{
// .arena = arena,
// .token_links = .{},
// .ast = &ast,
// };
//
// try w.root();
//}
 
const Walk = @This();
const std = @import("std");
const Ast = std.zig.Ast;
const assert = std.debug.assert;
const Decl = @import("Decl.zig");
const log = std.log;
const gpa = std.heap.wasm_allocator;
const Oom = error{OutOfMemory};
 
fn shrinkToFit(m: anytype) void {
m.shrinkAndFree(gpa, m.entries.len);
}
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,1259 @@
/// Delete this to find out where URL escaping needs to be added.
const missing_feature_url_escape = true;
 
const gpa = std.heap.wasm_allocator;
 
const std = @import("std");
const log = std.log;
const assert = std.debug.assert;
const Ast = std.zig.Ast;
const Walk = @import("Walk.zig");
const markdown = @import("markdown.zig");
const Decl = @import("Decl.zig");
 
const js = struct {
extern "js" fn log(ptr: [*]const u8, len: usize) void;
extern "js" fn panic(ptr: [*]const u8, len: usize) noreturn;
};
 
pub const std_options: std.Options = .{
.logFn = logFn,
//.log_level = .debug,
};
 
pub fn panic(msg: []const u8, st: ?*std.builtin.StackTrace, addr: ?usize) noreturn {
_ = st;
_ = addr;
log.err("panic: {s}", .{msg});
@trap();
}
 
fn logFn(
comptime message_level: log.Level,
comptime scope: @TypeOf(.enum_literal),
comptime format: []const u8,
args: anytype,
) void {
const level_txt = comptime message_level.asText();
const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): ";
var buf: [500]u8 = undefined;
const line = std.fmt.bufPrint(&buf, level_txt ++ prefix2 ++ format, args) catch l: {
buf[buf.len - 3 ..][0..3].* = "...".*;
break :l &buf;
};
js.log(line.ptr, line.len);
}
 
export fn alloc(n: usize) [*]u8 {
const slice = gpa.alloc(u8, n) catch @panic("OOM");
return slice.ptr;
}
 
export fn unpack(tar_ptr: [*]u8, tar_len: usize) void {
const tar_bytes = tar_ptr[0..tar_len];
//log.debug("received {d} bytes of tar file", .{tar_bytes.len});
 
unpack_inner(tar_bytes) catch |err| {
fatal("unable to unpack tar: {s}", .{@errorName(err)});
};
}
 
var query_string: std.ArrayListUnmanaged(u8) = .{};
var query_results: std.ArrayListUnmanaged(Decl.Index) = .{};
 
/// Resizes the query string to be the correct length; returns the pointer to
/// the query string.
export fn query_begin(query_string_len: usize) [*]u8 {
query_string.resize(gpa, query_string_len) catch @panic("OOM");
return query_string.items.ptr;
}
 
/// Executes the query. Returns a pointer to the query results, which is an
/// array of u32.
/// The first element is the length of the array.
/// Subsequent elements are Decl.Index values, all of which are public
/// declarations.
export fn query_exec(ignore_case: bool) [*]Decl.Index {
const query = query_string.items;
log.debug("querying '{s}'", .{query});
query_exec_fallible(query, ignore_case) catch |err| switch (err) {
error.OutOfMemory => @panic("OOM"),
};
query_results.items[0] = @enumFromInt(query_results.items.len - 1);
return query_results.items.ptr;
}
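// Host-side protocol sketch (the JS glue is assumed, not shown here): the
// page calls query_begin(byte_len), copies the UTF-8 query into wasm memory
// at the returned pointer, then calls query_exec(ignore_case) and reads one
// u32 length followed by that many Decl.Index values at the returned pointer.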
 
const max_matched_items = 1000;
 
fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
const Score = packed struct(u32) {
points: u16,
segments: u16,
};
const g = struct {
var full_path_search_text: std.ArrayListUnmanaged(u8) = .{};
var full_path_search_text_lower: std.ArrayListUnmanaged(u8) = .{};
var doc_search_text: std.ArrayListUnmanaged(u8) = .{};
/// Each element matches a corresponding query_results element.
var scores: std.ArrayListUnmanaged(Score) = .{};
};
 
// First element stores the size of the list.
try query_results.resize(gpa, 1);
// Corresponding point value is meaningless and therefore undefined.
try g.scores.resize(gpa, 1);
 
decl_loop: for (Walk.decls.items, 0..) |*decl, decl_index| {
const info = decl.extra_info();
if (!info.is_pub) continue;
 
try decl.reset_with_path(&g.full_path_search_text);
if (decl.parent != .none)
try Decl.append_parent_ns(&g.full_path_search_text, decl.parent);
try g.full_path_search_text.appendSlice(gpa, info.name);
 
try g.full_path_search_text_lower.resize(gpa, g.full_path_search_text.items.len);
@memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);
 
const ast = decl.file.get_ast();
try collect_docs(&g.doc_search_text, ast, info.first_doc_comment);
 
if (ignore_case) {
ascii_lower(g.full_path_search_text_lower.items);
ascii_lower(g.doc_search_text.items);
}
 
var it = std.mem.tokenizeScalar(u8, query, ' ');
var points: u16 = 0;
var bypass_limit = false;
while (it.next()) |term| {
// exact, case sensitive match of full decl path
if (std.mem.eql(u8, g.full_path_search_text.items, term)) {
points += 4;
bypass_limit = true;
continue;
}
// exact, case sensitive match of just decl name
if (std.mem.eql(u8, info.name, term)) {
points += 3;
bypass_limit = true;
continue;
}
// substring, case insensitive match of full decl path
if (std.mem.indexOf(u8, g.full_path_search_text_lower.items, term) != null) {
points += 2;
continue;
}
// substring match of doc comment text
if (std.mem.indexOf(u8, g.doc_search_text.items, term) != null) {
points += 1;
continue;
}
continue :decl_loop;
}
 
if (query_results.items.len < max_matched_items or bypass_limit) {
try query_results.append(gpa, @enumFromInt(decl_index));
try g.scores.append(gpa, .{
.points = points,
.segments = @intCast(count_scalar(g.full_path_search_text.items, '.')),
});
}
}
 
const sort_context: struct {
pub fn swap(sc: @This(), a_index: usize, b_index: usize) void {
_ = sc;
std.mem.swap(Score, &g.scores.items[a_index], &g.scores.items[b_index]);
std.mem.swap(Decl.Index, &query_results.items[a_index], &query_results.items[b_index]);
}
 
pub fn lessThan(sc: @This(), a_index: usize, b_index: usize) bool {
_ = sc;
const a_score = g.scores.items[a_index];
const b_score = g.scores.items[b_index];
if (b_score.points < a_score.points) {
return true;
} else if (b_score.points > a_score.points) {
return false;
} else if (a_score.segments < b_score.segments) {
return true;
} else if (a_score.segments > b_score.segments) {
return false;
} else {
const a_decl = query_results.items[a_index];
const b_decl = query_results.items[b_index];
const a_file_path = a_decl.get().file.path();
const b_file_path = b_decl.get().file.path();
// This neglects to check the local namespace inside the file.
return std.mem.lessThan(u8, b_file_path, a_file_path);
}
}
} = .{};
 
std.mem.sortUnstableContext(1, query_results.items.len, sort_context);
 
if (query_results.items.len > max_matched_items)
query_results.shrinkRetainingCapacity(max_matched_items);
}
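// Worked example (hypothetical decl): with ignore_case set, the query
// "array init" scored against a public decl whose full path is
// "std.ArrayList.init" earns 2 points for "array" (substring of the lowered
// full path) plus 3 points for "init" (exact match of the decl name), so 5
// points total; equal scores are then ordered by fewer '.' segments and
// finally by file path.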
 
const String = Slice(u8);
 
fn Slice(T: type) type {
return packed struct(u64) {
ptr: u32,
len: u32,
 
fn init(s: []const T) @This() {
return .{
.ptr = @intFromPtr(s.ptr),
.len = s.len,
};
}
};
}
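// Layout note: in a packed struct the first field occupies the least
// significant bits of the backing integer, so the host receives a single u64
// whose low 32 bits are the wasm pointer and whose high 32 bits are the
// length; how the JS side splits that value is up to the glue code and is
// not shown here.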
 
const ErrorIdentifier = packed struct(u64) {
token_index: Ast.TokenIndex,
decl_index: Decl.Index,
 
fn hasDocs(ei: ErrorIdentifier) bool {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
const token_tags = ast.tokens.items(.tag);
const token_index = ei.token_index;
if (token_index == 0) return false;
return token_tags[token_index - 1] == .doc_comment;
}
 
fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
const name = ast.tokenSlice(ei.token_index);
const first_doc_comment = Decl.findFirstDocComment(ast, ei.token_index);
const has_docs = ast.tokens.items(.tag)[first_doc_comment] == .doc_comment;
const has_link = base_decl != decl_index;
 
try out.appendSlice(gpa, "<dt>");
try out.appendSlice(gpa, name);
if (has_link) {
try out.appendSlice(gpa, " <a href=\"#");
_ = missing_feature_url_escape;
try decl_index.get().fqn(out);
try out.appendSlice(gpa, "\">");
try out.appendSlice(gpa, decl_index.get().extra_info().name);
try out.appendSlice(gpa, "</a>");
}
try out.appendSlice(gpa, "</dt>");
 
if (has_docs) {
try out.appendSlice(gpa, "<dd>");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</dd>");
}
}
};
 
var string_result: std.ArrayListUnmanaged(u8) = .{};
var error_set_result: std.StringArrayHashMapUnmanaged(ErrorIdentifier) = .{};
 
export fn decl_error_set(decl_index: Decl.Index) Slice(ErrorIdentifier) {
return Slice(ErrorIdentifier).init(decl_error_set_fallible(decl_index) catch @panic("OOM"));
}
 
export fn error_set_node_list(base_decl: Decl.Index, node: Ast.Node.Index) Slice(ErrorIdentifier) {
error_set_result.clearRetainingCapacity();
addErrorsFromExpr(base_decl, &error_set_result, node) catch @panic("OOM");
sort_error_set_result();
return Slice(ErrorIdentifier).init(error_set_result.values());
}
 
export fn fn_error_set_decl(decl_index: Decl.Index, node: Ast.Node.Index) Decl.Index {
return switch (decl_index.get().file.categorize_expr(node)) {
.alias => |aliasee| fn_error_set_decl(aliasee, aliasee.get().ast_node),
else => decl_index,
};
}
 
export fn decl_field_count(decl_index: Decl.Index) u32 {
switch (decl_index.get().categorize()) {
.namespace => |node| return decl_index.get().file.get().field_count(node),
else => return 0,
}
}
 
fn decl_error_set_fallible(decl_index: Decl.Index) Oom![]ErrorIdentifier {
error_set_result.clearRetainingCapacity();
try addErrorsFromDecl(decl_index, &error_set_result);
sort_error_set_result();
return error_set_result.values();
}
 
fn sort_error_set_result() void {
const sort_context: struct {
pub fn lessThan(sc: @This(), a_index: usize, b_index: usize) bool {
_ = sc;
const a_name = error_set_result.keys()[a_index];
const b_name = error_set_result.keys()[b_index];
return std.mem.lessThan(u8, a_name, b_name);
}
} = .{};
error_set_result.sortUnstable(sort_context);
}
 
fn addErrorsFromDecl(
decl_index: Decl.Index,
out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
) Oom!void {
switch (decl_index.get().categorize()) {
.error_set => |node| try addErrorsFromExpr(decl_index, out, node),
.alias => |aliasee| try addErrorsFromDecl(aliasee, out),
else => |cat| log.debug("unable to addErrorsFromDecl: {any}", .{cat}),
}
}
 
fn addErrorsFromExpr(
decl_index: Decl.Index,
out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
node: Ast.Node.Index,
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
 
switch (decl.file.categorize_expr(node)) {
.error_set => |n| switch (node_tags[n]) {
.error_set_decl => {
try addErrorsFromNode(decl_index, out, node);
},
.merge_error_sets => {
try addErrorsFromExpr(decl_index, out, node_datas[node].lhs);
try addErrorsFromExpr(decl_index, out, node_datas[node].rhs);
},
else => unreachable,
},
.alias => |aliasee| {
try addErrorsFromDecl(aliasee, out);
},
else => return,
}
}
 
fn addErrorsFromNode(
decl_index: Decl.Index,
out: *std.StringArrayHashMapUnmanaged(ErrorIdentifier),
node: Ast.Node.Index,
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
const error_token = main_tokens[node];
var tok_i = error_token + 2; // skip the `error` keyword and the opening `{`
while (true) : (tok_i += 1) switch (token_tags[tok_i]) {
.doc_comment, .comma => {},
.identifier => {
const name = ast.tokenSlice(tok_i);
const gop = try out.getOrPut(gpa, name);
// If there is more than one, take the one with doc comments.
// If they both have doc comments, prefer the existing one.
const new: ErrorIdentifier = .{
.token_index = tok_i,
.decl_index = decl_index,
};
if (!gop.found_existing or
(!gop.value_ptr.hasDocs() and new.hasDocs()))
{
gop.value_ptr.* = new;
}
},
.r_brace => break,
else => unreachable,
};
}
 
export fn type_fn_fields(decl_index: Decl.Index) Slice(Ast.Node.Index) {
return decl_fields(decl_index);
}
 
export fn decl_fields(decl_index: Decl.Index) Slice(Ast.Node.Index) {
return Slice(Ast.Node.Index).init(decl_fields_fallible(decl_index) catch @panic("OOM"));
}
 
export fn decl_params(decl_index: Decl.Index) Slice(Ast.Node.Index) {
return Slice(Ast.Node.Index).init(decl_params_fallible(decl_index) catch @panic("OOM"));
}
 
fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
const g = struct {
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .{};
};
g.result.clearRetainingCapacity();
const decl = decl_index.get();
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const value_node = decl.value_node() orelse return &.{};
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, value_node) orelse return &.{};
for (container_decl.ast.members) |member_node| switch (node_tags[member_node]) {
.container_field_init,
.container_field_align,
.container_field,
=> try g.result.append(gpa, member_node),
 
else => continue,
};
return g.result.items;
}
 
fn decl_params_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
const g = struct {
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .{};
};
g.result.clearRetainingCapacity();
const decl = decl_index.get();
const ast = decl.file.get_ast();
const value_node = decl.value_node() orelse return &.{};
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fullFnProto(&buf, value_node) orelse return &.{};
try g.result.appendSlice(gpa, fn_proto.ast.params);
return g.result.items;
}
 
export fn error_html(base_decl: Decl.Index, error_identifier: ErrorIdentifier) String {
string_result.clearRetainingCapacity();
error_identifier.html(base_decl, &string_result) catch @panic("OOM");
return String.init(string_result.items);
}
 
export fn decl_field_html(decl_index: Decl.Index, field_node: Ast.Node.Index) String {
string_result.clearRetainingCapacity();
decl_field_html_fallible(&string_result, decl_index, field_node) catch @panic("OOM");
return String.init(string_result.items);
}
 
export fn decl_param_html(decl_index: Decl.Index, param_node: Ast.Node.Index) String {
string_result.clearRetainingCapacity();
decl_param_html_fallible(&string_result, decl_index, param_node) catch @panic("OOM");
return String.init(string_result.items);
}
 
fn decl_field_html_fallible(
out: *std.ArrayListUnmanaged(u8),
decl_index: Decl.Index,
field_node: Ast.Node.Index,
) !void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
try out.appendSlice(gpa, "<pre><code>");
try file_source_html(decl.file, out, field_node, .{});
try out.appendSlice(gpa, "</code></pre>");
 
const field = ast.fullContainerField(field_node).?;
const first_doc_comment = Decl.findFirstDocComment(ast, field.firstToken());
 
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
}
}
 
fn decl_param_html_fallible(
out: *std.ArrayListUnmanaged(u8),
decl_index: Decl.Index,
param_node: Ast.Node.Index,
) !void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const colon = ast.firstToken(param_node) - 1;
const name_token = colon - 1;
const first_doc_comment = f: {
var it = ast.firstToken(param_node);
while (it > 0) {
it -= 1;
switch (token_tags[it]) {
.doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
else => break,
}
}
break :f it + 1;
};
const name = ast.tokenSlice(name_token);
 
try out.appendSlice(gpa, "<pre><code>");
try appendEscaped(out, name);
try out.appendSlice(gpa, ": ");
try file_source_html(decl.file, out, param_node, .{});
try out.appendSlice(gpa, "</code></pre>");
 
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
}
}
 
export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const proto_node = switch (node_tags[decl.ast_node]) {
.fn_decl => node_datas[decl.ast_node].lhs,
 
.fn_proto,
.fn_proto_one,
.fn_proto_simple,
.fn_proto_multi,
=> decl.ast_node,
 
else => unreachable,
};
 
string_result.clearRetainingCapacity();
file_source_html(decl.file, &string_result, proto_node, .{
.skip_doc_comments = true,
.skip_comments = true,
.collapse_whitespace = true,
.fn_link = if (linkify_fn_name) decl_index else .none,
}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
 
export fn decl_source_html(decl_index: Decl.Index) String {
const decl = decl_index.get();
 
string_result.clearRetainingCapacity();
file_source_html(decl.file, &string_result, decl.ast_node, .{}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
 
export fn decl_doctest_html(decl_index: Decl.Index) String {
const decl = decl_index.get();
const doctest_ast_node = decl.file.get().doctests.get(decl.ast_node) orelse
return String.init("");
 
string_result.clearRetainingCapacity();
file_source_html(decl.file, &string_result, doctest_ast_node, .{}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
 
export fn decl_fqn(decl_index: Decl.Index) String {
const decl = decl_index.get();
string_result.clearRetainingCapacity();
decl.fqn(&string_result) catch @panic("OOM");
return String.init(string_result.items);
}
 
export fn decl_parent(decl_index: Decl.Index) Decl.Index {
const decl = decl_index.get();
return decl.parent;
}
 
export fn fn_error_set(decl_index: Decl.Index) Ast.Node.Index {
const decl = decl_index.get();
const ast = decl.file.get_ast();
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullFnProto(&buf, decl.ast_node).?;
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
return switch (node_tags[full.ast.return_type]) {
.error_set_decl => full.ast.return_type,
.error_union => node_datas[full.ast.return_type].lhs,
else => 0,
};
}
 
export fn decl_file_path(decl_index: Decl.Index) String {
string_result.clearRetainingCapacity();
string_result.appendSlice(gpa, decl_index.get().file.path()) catch @panic("OOM");
return String.init(string_result.items);
}
 
export fn decl_category_name(decl_index: Decl.Index) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const name = switch (decl.categorize()) {
.namespace => |node| {
const node_tags = ast.nodes.items(.tag);
if (node_tags[decl.ast_node] == .root)
return String.init("struct");
string_result.clearRetainingCapacity();
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, node).?;
if (container_decl.layout_token) |t| {
if (token_tags[t] == .keyword_extern) {
string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
}
}
const main_token_tag = token_tags[container_decl.ast.main_token];
string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
return String.init(string_result.items);
},
.global_variable => "Global Variable",
.function => "Function",
.type_function => "Type Function",
.type, .type_type => "Type",
.error_set => "Error Set",
.global_const => "Constant",
.primitive => "Primitive Value",
.alias => "Alias",
};
return String.init(name);
}
 
export fn decl_name(decl_index: Decl.Index) String {
const decl = decl_index.get();
string_result.clearRetainingCapacity();
const name = n: {
if (decl.parent == .none) {
// Then it is the root struct of a file.
break :n std.fs.path.stem(decl.file.path());
}
break :n decl.extra_info().name;
};
string_result.appendSlice(gpa, name) catch @panic("OOM");
return String.init(string_result.items);
}
 
export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
const decl = decl_index.get();
string_result.clearRetainingCapacity();
render_docs(&string_result, decl_index, decl.extra_info().first_doc_comment, short) catch @panic("OOM");
return String.init(string_result.items);
}
 
/// Collects the raw text of the run of doc comments starting at
/// `first_doc_comment` into `list`.
fn collect_docs(
list: *std.ArrayListUnmanaged(u8),
ast: *const Ast,
first_doc_comment: Ast.TokenIndex,
) Oom!void {
const token_tags = ast.tokens.items(.tag);
list.clearRetainingCapacity();
var it = first_doc_comment;
while (true) : (it += 1) switch (token_tags[it]) {
.doc_comment, .container_doc_comment => {
// It is tempting to trim this string, but think carefully about how
// that will affect the markdown parser.
const line = ast.tokenSlice(it)[3..];
try list.appendSlice(gpa, line);
},
else => break,
};
}
 
/// Renders the doc comments starting at `first_doc_comment` as HTML into `out`.
/// When `short` is true, only the first paragraph (up to the first blank doc
/// comment line) is rendered.
fn render_docs(
out: *std.ArrayListUnmanaged(u8),
decl_index: Decl.Index,
first_doc_comment: Ast.TokenIndex,
short: bool,
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
 
var parser = try markdown.Parser.init(gpa);
defer parser.deinit();
var it = first_doc_comment;
while (true) : (it += 1) switch (token_tags[it]) {
.doc_comment, .container_doc_comment => {
const line = ast.tokenSlice(it)[3..];
if (short and line.len == 0) break;
try parser.feedLine(line);
},
else => break,
};
 
var parsed_doc = try parser.endInput();
defer parsed_doc.deinit(gpa);
 
const g = struct {
var link_buffer: std.ArrayListUnmanaged(u8) = .{};
};
 
const Writer = std.ArrayListUnmanaged(u8).Writer;
const Renderer = markdown.Renderer(Writer, Decl.Index);
const renderer: Renderer = .{
.context = decl_index,
.renderFn = struct {
fn render(
r: Renderer,
doc: markdown.Document,
node: markdown.Document.Node.Index,
writer: Writer,
) !void {
const data = doc.nodes.items(.data)[@intFromEnum(node)];
switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
.code_span => {
try writer.writeAll("<code>");
const content = doc.string(data.text.content);
if (resolve_decl_path(r.context, content)) |resolved_decl_index| {
g.link_buffer.clearRetainingCapacity();
try resolve_decl_link(resolved_decl_index, &g.link_buffer);
 
try writer.writeAll("<a href=\"#");
_ = missing_feature_url_escape;
try writer.writeAll(g.link_buffer.items);
try writer.print("\">{}</a>", .{markdown.fmtHtml(content)});
} else {
try writer.print("{}", .{markdown.fmtHtml(content)});
}
 
try writer.writeAll("</code>");
},
 
else => try Renderer.renderDefault(r, doc, node, writer),
}
}
}.render,
};
try renderer.render(parsed_doc, out.writer(gpa));
}
 
/// Resolves a `.`-separated declaration path relative to `decl_index`,
/// following aliases along the way. Returns null if any component cannot be
/// resolved.
fn resolve_decl_path(decl_index: Decl.Index, path: []const u8) ?Decl.Index {
var path_components = std.mem.splitScalar(u8, path, '.');
var current_decl_index = decl_index.get().lookup(path_components.first()) orelse return null;
while (path_components.next()) |component| {
switch (current_decl_index.get().categorize()) {
.alias => |aliasee| current_decl_index = aliasee,
else => {},
}
current_decl_index = current_decl_index.get().get_child(component) orelse return null;
}
return current_decl_index;
}
 
/// Returns the explicitly declared type of a variable or constant declaration
/// as HTML, or an empty string if there is no explicit type annotation.
export fn decl_type_html(decl_index: Decl.Index) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
string_result.clearRetainingCapacity();
t: {
// If there is an explicit type, use it.
if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
if (var_decl.ast.type_node != 0) {
string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
file_source_html(decl.file, &string_result, var_decl.ast.type_node, .{
.skip_comments = true,
.collapse_whitespace = true,
}) catch |e| {
fatal("unable to render html: {s}", .{@errorName(e)});
};
string_result.appendSlice(gpa, "</code>") catch @panic("OOM");
break :t;
}
}
}
return String.init(string_result.items);
}
 
const Oom = error{OutOfMemory};
 
/// Registers every `.zig` file found in `tar_bytes` with `Walk`, recording a
/// root file for each module along the way.
fn unpack_inner(tar_bytes: []u8) !void {
var fbs = std.io.fixedBufferStream(tar_bytes);
var file_name_buffer: [1024]u8 = undefined;
var link_name_buffer: [1024]u8 = undefined;
var it = std.tar.iterator(fbs.reader(), .{
.file_name_buffer = &file_name_buffer,
.link_name_buffer = &link_name_buffer,
});
while (try it.next()) |tar_file| {
switch (tar_file.kind) {
.normal => {
if (tar_file.size == 0 and tar_file.name.len == 0) break;
if (std.mem.endsWith(u8, tar_file.name, ".zig")) {
log.debug("found file: '{s}'", .{tar_file.name});
const file_name = try gpa.dupe(u8, tar_file.name);
if (std.mem.indexOfScalar(u8, file_name, '/')) |pkg_name_end| {
const pkg_name = file_name[0..pkg_name_end];
const gop = try Walk.modules.getOrPut(gpa, pkg_name);
const file: Walk.File.Index = @enumFromInt(Walk.files.entries.len);
if (!gop.found_existing or
std.mem.eql(u8, file_name[pkg_name_end..], "/root.zig") or
std.mem.eql(u8, file_name[pkg_name_end + 1 .. file_name.len - ".zig".len], pkg_name))
{
gop.value_ptr.* = file;
}
const file_bytes = tar_bytes[fbs.pos..][0..@intCast(tar_file.size)];
assert(file == try Walk.add_file(file_name, file_bytes));
}
} else {
log.warn("skipping: '{s}' - the tar creation should have done that", .{
tar_file.name,
});
}
try tar_file.skip();
},
else => continue,
}
}
}
 
/// Formats the message into a fixed-size buffer, truncating with "..." if
/// necessary, and reports it to the host environment via `js.panic`.
fn fatal(comptime format: []const u8, args: anytype) noreturn {
var buf: [500]u8 = undefined;
const line = std.fmt.bufPrint(&buf, format, args) catch l: {
buf[buf.len - 3 ..][0..3].* = "...".*;
break :l &buf;
};
js.panic(line.ptr, line.len);
}
 
fn ascii_lower(bytes: []u8) void {
for (bytes) |*b| b.* = std.ascii.toLower(b.*);
}
 
export fn module_name(index: u32) String {
const names = Walk.modules.keys();
return String.init(if (index >= names.len) "" else names[index]);
}
 
export fn find_module_root(pkg: Walk.ModuleIndex) Decl.Index {
const root_file = Walk.modules.values()[@intFromEnum(pkg)];
const result = root_file.findRootDecl();
assert(result != .none);
return result;
}
 
/// Set by `set_input_string`.
var input_string: std.ArrayListUnmanaged(u8) = .{};
 
export fn set_input_string(len: usize) [*]u8 {
input_string.resize(gpa, len) catch @panic("OOM");
return input_string.items.ptr;
}
 
/// Looks up the root struct decl corresponding to a file by path.
/// Uses `input_string`.
export fn find_file_root() Decl.Index {
const file: Walk.File.Index = @enumFromInt(Walk.files.getIndex(input_string.items) orelse return .none);
return file.findRootDecl();
}
 
/// Uses `input_string`.
/// Tries to look up the Decl component-wise, then falls back to a linear scan
/// over fully qualified names.
export fn find_decl() Decl.Index {
const result = Decl.find(input_string.items);
if (result != .none) return result;
 
const g = struct {
var match_fqn: std.ArrayListUnmanaged(u8) = .{};
};
for (Walk.decls.items, 0..) |*decl, decl_index| {
g.match_fqn.clearRetainingCapacity();
decl.fqn(&g.match_fqn) catch @panic("OOM");
if (std.mem.eql(u8, g.match_fqn.items, input_string.items)) {
//const path = @as(Decl.Index, @enumFromInt(decl_index)).get().file.path();
//log.debug("find_decl '{s}' found in {s}", .{ input_string.items, path });
return @enumFromInt(decl_index);
}
}
return .none;
}
 
/// Set only by `categorize_decl`; read only by `get_aliasee`, valid only
/// when `categorize_decl` returns `.alias`.
var global_aliasee: Decl.Index = .none;
 
export fn get_aliasee() Decl.Index {
return global_aliasee;
}

/// Categorizes `decl_index`, following at most `resolve_alias_count` aliases.
/// If the returned category is `.alias`, the final aliasee can be read via
/// `get_aliasee`.
export fn categorize_decl(decl_index: Decl.Index, resolve_alias_count: usize) Walk.Category.Tag {
global_aliasee = .none;
var chase_alias_n = resolve_alias_count;
var decl = decl_index.get();
while (true) {
const result = decl.categorize();
switch (result) {
.alias => |new_index| {
assert(new_index != .none);
global_aliasee = new_index;
if (chase_alias_n > 0) {
chase_alias_n -= 1;
decl = new_index.get();
continue;
}
},
else => {},
}
return result;
}
}
 
export fn type_fn_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
return namespace_members(parent, include_private);
}
 
/// Returns the declarations whose parent is `parent`. Private (non-`pub`)
/// declarations are included only when `include_private` is set.
export fn namespace_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
const g = struct {
var members: std.ArrayListUnmanaged(Decl.Index) = .{};
};
 
g.members.clearRetainingCapacity();
 
for (Walk.decls.items, 0..) |*decl, i| {
if (decl.parent == parent) {
if (include_private or decl.is_pub()) {
g.members.append(gpa, @enumFromInt(i)) catch @panic("OOM");
}
}
}
 
return Slice(Decl.Index).init(g.members.items);
}
 
const RenderSourceOptions = struct {
skip_doc_comments: bool = false,
skip_comments: bool = false,
collapse_whitespace: bool = false,
fn_link: Decl.Index = .none,
};
 
/// Renders the source of `root_node` from `file_index` as syntax-highlighted
/// HTML, appending the result to `out`.
fn file_source_html(
file_index: Walk.File.Index,
out: *std.ArrayListUnmanaged(u8),
root_node: Ast.Node.Index,
options: RenderSourceOptions,
) !void {
const ast = file_index.get_ast();
const file = file_index.get();
 
const g = struct {
var field_access_buffer: std.ArrayListUnmanaged(u8) = .{};
};
 
const token_tags = ast.tokens.items(.tag);
const token_starts = ast.tokens.items(.start);
const main_tokens = ast.nodes.items(.main_token);
 
const start_token = ast.firstToken(root_node);
const end_token = ast.lastToken(root_node) + 1;
 
var cursor: usize = token_starts[start_token];
 
for (
token_tags[start_token..end_token],
token_starts[start_token..end_token],
start_token..,
) |tag, start, token_index| {
const between = ast.source[cursor..start];
if (std.mem.trim(u8, between, " \t\r\n").len > 0) {
if (!options.skip_comments) {
try out.appendSlice(gpa, "<span class=\"tok-comment\">");
try appendEscaped(out, between);
try out.appendSlice(gpa, "</span>");
}
} else if (between.len > 0) {
if (options.collapse_whitespace) {
if (out.items.len > 0 and out.items[out.items.len - 1] != ' ')
try out.append(gpa, ' ');
} else {
try out.appendSlice(gpa, between);
}
}
if (tag == .eof) break;
const slice = ast.tokenSlice(token_index);
cursor = start + slice.len;
switch (tag) {
.eof => unreachable,
 
.keyword_addrspace,
.keyword_align,
.keyword_and,
.keyword_asm,
.keyword_async,
.keyword_await,
.keyword_break,
.keyword_catch,
.keyword_comptime,
.keyword_const,
.keyword_continue,
.keyword_defer,
.keyword_else,
.keyword_enum,
.keyword_errdefer,
.keyword_error,
.keyword_export,
.keyword_extern,
.keyword_for,
.keyword_if,
.keyword_inline,
.keyword_noalias,
.keyword_noinline,
.keyword_nosuspend,
.keyword_opaque,
.keyword_or,
.keyword_orelse,
.keyword_packed,
.keyword_anyframe,
.keyword_pub,
.keyword_resume,
.keyword_return,
.keyword_linksection,
.keyword_callconv,
.keyword_struct,
.keyword_suspend,
.keyword_switch,
.keyword_test,
.keyword_threadlocal,
.keyword_try,
.keyword_union,
.keyword_unreachable,
.keyword_usingnamespace,
.keyword_var,
.keyword_volatile,
.keyword_allowzero,
.keyword_while,
.keyword_anytype,
.keyword_fn,
=> {
try out.appendSlice(gpa, "<span class=\"tok-kw\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
},
 
.string_literal,
.char_literal,
.multiline_string_literal_line,
=> {
try out.appendSlice(gpa, "<span class=\"tok-str\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
},
 
.builtin => {
try out.appendSlice(gpa, "<span class=\"tok-builtin\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
},
 
.doc_comment,
.container_doc_comment,
=> {
if (!options.skip_doc_comments) {
try out.appendSlice(gpa, "<span class=\"tok-comment\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
}
},
 
.identifier => i: {
if (options.fn_link != .none) {
const fn_link = options.fn_link.get();
const fn_token = main_tokens[fn_link.ast_node];
if (token_index == fn_token + 1) {
try out.appendSlice(gpa, "<a class=\"tok-fn\" href=\"#");
_ = missing_feature_url_escape;
try fn_link.fqn(out);
try out.appendSlice(gpa, "\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</a>");
break :i;
}
}
 
if (token_index > 0 and token_tags[token_index - 1] == .keyword_fn) {
try out.appendSlice(gpa, "<span class=\"tok-fn\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
break :i;
}
 
if (Walk.isPrimitiveNonType(slice)) {
try out.appendSlice(gpa, "<span class=\"tok-null\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
break :i;
}
 
if (std.zig.primitives.isPrimitive(slice)) {
try out.appendSlice(gpa, "<span class=\"tok-type\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
break :i;
}
 
if (file.token_parents.get(token_index)) |field_access_node| {
g.field_access_buffer.clearRetainingCapacity();
try walk_field_accesses(file_index, &g.field_access_buffer, field_access_node);
if (g.field_access_buffer.items.len > 0) {
try out.appendSlice(gpa, "<a href=\"#");
_ = missing_feature_url_escape;
try out.appendSlice(gpa, g.field_access_buffer.items);
try out.appendSlice(gpa, "\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</a>");
} else {
try appendEscaped(out, slice);
}
break :i;
}
 
{
g.field_access_buffer.clearRetainingCapacity();
try resolve_ident_link(file_index, &g.field_access_buffer, token_index);
if (g.field_access_buffer.items.len > 0) {
try out.appendSlice(gpa, "<a href=\"#");
_ = missing_feature_url_escape;
try out.appendSlice(gpa, g.field_access_buffer.items);
try out.appendSlice(gpa, "\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</a>");
break :i;
}
}
 
try appendEscaped(out, slice);
},
 
.number_literal => {
try out.appendSlice(gpa, "<span class=\"tok-number\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
},
 
.bang,
.pipe,
.pipe_pipe,
.pipe_equal,
.equal,
.equal_equal,
.equal_angle_bracket_right,
.bang_equal,
.l_paren,
.r_paren,
.semicolon,
.percent,
.percent_equal,
.l_brace,
.r_brace,
.l_bracket,
.r_bracket,
.period,
.period_asterisk,
.ellipsis2,
.ellipsis3,
.caret,
.caret_equal,
.plus,
.plus_plus,
.plus_equal,
.plus_percent,
.plus_percent_equal,
.plus_pipe,
.plus_pipe_equal,
.minus,
.minus_equal,
.minus_percent,
.minus_percent_equal,
.minus_pipe,
.minus_pipe_equal,
.asterisk,
.asterisk_equal,
.asterisk_asterisk,
.asterisk_percent,
.asterisk_percent_equal,
.asterisk_pipe,
.asterisk_pipe_equal,
.arrow,
.colon,
.slash,
.slash_equal,
.comma,
.ampersand,
.ampersand_equal,
.question_mark,
.angle_bracket_left,
.angle_bracket_left_equal,
.angle_bracket_angle_bracket_left,
.angle_bracket_angle_bracket_left_equal,
.angle_bracket_angle_bracket_left_pipe,
.angle_bracket_angle_bracket_left_pipe_equal,
.angle_bracket_right,
.angle_bracket_right_equal,
.angle_bracket_angle_bracket_right,
.angle_bracket_angle_bracket_right_equal,
.tilde,
=> try appendEscaped(out, slice),
 
.invalid, .invalid_periodasterisks => return error.InvalidToken,
}
}
}
 
/// Appends the link target for `ident_token` to `out` if the identifier
/// resolves to a declaration; otherwise appends nothing.
fn resolve_ident_link(
file_index: Walk.File.Index,
out: *std.ArrayListUnmanaged(u8),
ident_token: Ast.TokenIndex,
) Oom!void {
const decl_index = file_index.get().lookup_token(ident_token);
if (decl_index == .none) return;
try resolve_decl_link(decl_index, out);
}
 
/// Appends the fully qualified name of `decl_index` to `out`, following an
/// alias to its aliasee first if necessary.
fn resolve_decl_link(decl_index: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
const decl = decl_index.get();
switch (decl.categorize()) {
.alias => |alias_decl| try alias_decl.get().fqn(out),
else => try decl.fqn(out),
}
}
 
/// Builds a link target for a field access chain (e.g. `a.b.c`): the leftmost
/// identifier is resolved to a declaration and each field name is appended to
/// `out`.
fn walk_field_accesses(
file_index: Walk.File.Index,
out: *std.ArrayListUnmanaged(u8),
node: Ast.Node.Index,
) Oom!void {
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
assert(node_tags[node] == .field_access);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
switch (node_tags[object_node]) {
.identifier => {
const lhs_ident = main_tokens[object_node];
try resolve_ident_link(file_index, out, lhs_ident);
},
.field_access => {
try walk_field_accesses(file_index, out, object_node);
},
else => {},
}
if (out.items.len > 0) {
try out.append(gpa, '.');
try out.appendSlice(gpa, ast.tokenSlice(field_ident));
}
}
 
/// Appends `s` to `out`, escaping the HTML special characters `&`, `<`, `>`,
/// and `"`.
fn appendEscaped(out: *std.ArrayListUnmanaged(u8), s: []const u8) !void {
for (s) |c| {
try out.ensureUnusedCapacity(gpa, 6);
switch (c) {
'&' => out.appendSliceAssumeCapacity("&amp;"),
'<' => out.appendSliceAssumeCapacity("&lt;"),
'>' => out.appendSliceAssumeCapacity("&gt;"),
'"' => out.appendSliceAssumeCapacity("&quot;"),
else => out.appendAssumeCapacity(c),
}
}
}
 
/// Returns the number of occurrences of `needle` in `haystack`.
fn count_scalar(haystack: []const u8, needle: u8) usize {
var total: usize = 0;
for (haystack) |elem| {
if (elem == needle)
total += 1;
}
return total;
}
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,940 @@
//! Markdown parsing and rendering support.
//!
//! A Markdown document consists of a series of blocks. Depending on its type,
//! each block may contain other blocks, inline content, or nothing. The
//! supported blocks are as follows:
//!
//! - **List** - a sequence of list items of the same type.
//!
//! - **List item** - unordered list items start with `-`, `*`, or `+` followed
//! by a space. Ordered list items start with a number between 0 and
//! 999,999,999, followed by a `.` or `)` and a space. The number of an
//! ordered list item only matters for the first item in the list (to
//! determine the starting number of the list). All subsequent ordered list
//! items will have sequentially increasing numbers.
//!
//! All list items may contain block content. Any content indented at least as
//! far as the end of the list item marker (including the space after it) is
//! considered part of the list item.
//!
//! Lists which have no blank lines between items or between direct children
//! of items are considered _tight_, and direct child paragraphs of tight list
//! items are rendered without `<p>` tags.
//!
//! - **Table** - a sequence of adjacent table row lines, where each line starts
//! and ends with a `|`, and cells within the row are delimited by `|`s.
//!
//! The first or second row of a table may be a _header delimiter row_, which
//! is a row consisting of cells of the pattern `---` (for unset column
//! alignment), `:--` (for left alignment), `:-:` (for center alignment), or
//! `--:` (for right alignment). The number of `-`s must be at least one, but
//! is otherwise arbitrary. If there is a row just before the header delimiter
//! row, it becomes the header row for the table (a table need not have a
//! header row at all).
//!
//! - **Heading** - a sequence of between 1 and 6 `#` characters, followed by a
//! space and further inline content on the same line.
//!
//! - **Code block** - a sequence of at least 3 `` ` `` characters (a _fence_),
//! optionally followed by a "tag" on the same line, and continuing until a
//! line consisting only of a closing fence whose length matches the opening
//! fence, or until the end of the containing block.
//!
//! The content of a code block is not parsed as inline content. It is
//! included verbatim in the output document (minus leading indentation up to
//! the position of the opening fence).
//!
//! - **Blockquote** - a sequence of lines preceded by `>` characters.
//!
//! - **Paragraph** - ordinary text, parsed as inline content, ending with a
//! blank line or the end of the containing block.
//!
//! Paragraphs which are part of another block may be "lazily" continued by
//! subsequent paragraph lines even if those lines would not ordinarily be
//! considered part of the containing block. For example, this is a single
//! list item, not a list item followed by a paragraph:
//!
//! ```markdown
//! - First line of content.
//! This content is still part of the paragraph,
//! even though it isn't indented far enough.
//! ```
//!
//! - **Thematic break** - a line consisting of at least three matching `-`,
//! `_`, or `*` characters and, optionally, spaces.
//!
//! Indentation may consist of spaces and tabs. The use of tabs is not
//! recommended: a tab is treated the same as a single space for the purpose of
//! determining the indentation level, and is not recognized as a space for
//! block starters which require one (for example, `-` followed by a tab is not
//! a valid list item).
//!
//! The supported inlines are as follows:
//!
//! - **Link** - of the format `[text](target)`. `text` may contain inline
//! content. `target` may contain `\`-escaped characters and balanced
//! parentheses.
//!
//! - **Image** - a link directly preceded by a `!`. The link text is
//! interpreted as the alt text of the image.
//!
//! - **Emphasis** - a run of `*` or `_` characters may be an emphasis opener,
//! closer, or both. For `*` characters, the run may be an opener as long as
//! it is not directly followed by a whitespace character (or the end of the
//! inline content) and a closer as long as it is not directly preceded by
//! one. For `_` characters, this rule is strengthened by requiring that the
//! run also be preceded by a whitespace or punctuation character (for
//! openers) or followed by one (for closers), to avoid mangling `snake_case`
//! words.
//!
//! The rule for emphasis handling is greedy: any run that can close existing
//! emphasis will do so, otherwise it will open emphasis. A single run may
//! serve both functions: the middle `**` in the following example both closes
//! the initial emphasis and opens a new one:
//!
//! ```markdown
//! *one**two*
//! ```
//!
//! A single `*` or `_` is used for normal emphasis (HTML `<em>`), and a
//! double `**` or `__` is used for strong emphasis (HTML `<strong>`). Even
//! longer runs may be used to produce further nested emphasis (though only
//! `***` and `___` to produce `<em><strong>` is really useful).
//!
//! - **Code span** - a run of `` ` `` characters, terminated by a matching run
//! or the end of inline content. The content of a code span is not parsed
//! further.
//!
//! - **Text** - normal text is interpreted as-is, except that `\` may be used
//! to escape any punctuation character, preventing it from being interpreted
//! according to other syntax rules. A `\` followed by a line break within a
//! paragraph is interpreted as a hard line break.
//!
//! Any null bytes or invalid UTF-8 bytes within text are replaced with Unicode
//! replacement characters, `U+FFFD`.
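
// A small illustrative test of the greedy emphasis rule described above: the
// middle `**` closes the first emphasis and opens a second one. (Relies on
// the `testRender` helper defined at the end of this file.)
test "greedy emphasis rule" {
    try testRender("*one**two*", "<p><em>one</em><em>two</em></p>\n");
}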
 
const std = @import("std");
const testing = std.testing;
 
pub const Document = @import("markdown/Document.zig");
pub const Parser = @import("markdown/Parser.zig");
pub const Renderer = @import("markdown/renderer.zig").Renderer;
pub const renderNodeInlineText = @import("markdown/renderer.zig").renderNodeInlineText;
pub const fmtHtml = @import("markdown/renderer.zig").fmtHtml;
 
// Avoid exposing main to other files merely importing this one.
pub const main = if (@import("root") == @This())
mainImpl
else
@compileError("only available as root source file");
 
fn mainImpl() !void {
const gpa = std.heap.c_allocator;
 
var parser = try Parser.init(gpa);
defer parser.deinit();
 
var stdin_buf = std.io.bufferedReader(std.io.getStdIn().reader());
var line_buf = std.ArrayList(u8).init(gpa);
defer line_buf.deinit();
while (stdin_buf.reader().streamUntilDelimiter(line_buf.writer(), '\n', null)) {
if (line_buf.getLastOrNull() == '\r') _ = line_buf.pop();
try parser.feedLine(line_buf.items);
line_buf.clearRetainingCapacity();
} else |err| switch (err) {
error.EndOfStream => {},
else => |e| return e,
}
 
var doc = try parser.endInput();
defer doc.deinit(gpa);
 
var stdout_buf = std.io.bufferedWriter(std.io.getStdOut().writer());
try doc.render(stdout_buf.writer());
try stdout_buf.flush();
}
 
test "empty document" {
try testRender("", "");
try testRender(" ", "");
try testRender("\n \n\t\n \n", "");
}
 
test "unordered lists" {
try testRender(
\\- Spam
\\- Spam
\\- Spam
\\- Eggs
\\- Bacon
\\- Spam
\\
\\* Spam
\\* Spam
\\* Spam
\\* Eggs
\\* Bacon
\\* Spam
\\
\\+ Spam
\\+ Spam
\\+ Spam
\\+ Eggs
\\+ Bacon
\\+ Spam
\\
,
\\<ul>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Eggs</li>
\\<li>Bacon</li>
\\<li>Spam</li>
\\</ul>
\\<ul>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Eggs</li>
\\<li>Bacon</li>
\\<li>Spam</li>
\\</ul>
\\<ul>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Spam</li>
\\<li>Eggs</li>
\\<li>Bacon</li>
\\<li>Spam</li>
\\</ul>
\\
);
}
 
test "ordered lists" {
try testRender(
\\1. Breakfast
\\2. Second breakfast
\\3. Lunch
\\2. Afternoon snack
\\1. Dinner
\\6. Dessert
\\7. Midnight snack
\\
\\1) Breakfast
\\2) Second breakfast
\\3) Lunch
\\2) Afternoon snack
\\1) Dinner
\\6) Dessert
\\7) Midnight snack
\\
\\1001. Breakfast
\\2. Second breakfast
\\3. Lunch
\\2. Afternoon snack
\\1. Dinner
\\6. Dessert
\\7. Midnight snack
\\
\\1001) Breakfast
\\2) Second breakfast
\\3) Lunch
\\2) Afternoon snack
\\1) Dinner
\\6) Dessert
\\7) Midnight snack
\\
,
\\<ol>
\\<li>Breakfast</li>
\\<li>Second breakfast</li>
\\<li>Lunch</li>
\\<li>Afternoon snack</li>
\\<li>Dinner</li>
\\<li>Dessert</li>
\\<li>Midnight snack</li>
\\</ol>
\\<ol>
\\<li>Breakfast</li>
\\<li>Second breakfast</li>
\\<li>Lunch</li>
\\<li>Afternoon snack</li>
\\<li>Dinner</li>
\\<li>Dessert</li>
\\<li>Midnight snack</li>
\\</ol>
\\<ol start="1001">
\\<li>Breakfast</li>
\\<li>Second breakfast</li>
\\<li>Lunch</li>
\\<li>Afternoon snack</li>
\\<li>Dinner</li>
\\<li>Dessert</li>
\\<li>Midnight snack</li>
\\</ol>
\\<ol start="1001">
\\<li>Breakfast</li>
\\<li>Second breakfast</li>
\\<li>Lunch</li>
\\<li>Afternoon snack</li>
\\<li>Dinner</li>
\\<li>Dessert</li>
\\<li>Midnight snack</li>
\\</ol>
\\
);
}
 
test "nested lists" {
try testRender(
\\- - Item 1.
\\ - Item 2.
\\Item 2 continued.
\\ * New list.
\\
,
\\<ul>
\\<li><ul>
\\<li>Item 1.</li>
\\<li>Item 2.
\\Item 2 continued.</li>
\\</ul>
\\<ul>
\\<li>New list.</li>
\\</ul>
\\</li>
\\</ul>
\\
);
}
 
test "lists with block content" {
try testRender(
\\1. Item 1.
\\2. Item 2.
\\
\\ This one has another paragraph.
\\3. Item 3.
\\
\\- > Blockquote.
\\- - Sub-list.
\\ - Sub-list continued.
\\ * Different sub-list.
\\- ## Heading.
\\
\\ Some contents below the heading.
\\ 1. Item 1.
\\ 2. Item 2.
\\ 3. Item 3.
\\
,
\\<ol>
\\<li><p>Item 1.</p>
\\</li>
\\<li><p>Item 2.</p>
\\<p>This one has another paragraph.</p>
\\</li>
\\<li><p>Item 3.</p>
\\</li>
\\</ol>
\\<ul>
\\<li><blockquote>
\\<p>Blockquote.</p>
\\</blockquote>
\\</li>
\\<li><ul>
\\<li>Sub-list.</li>
\\<li>Sub-list continued.</li>
\\</ul>
\\<ul>
\\<li>Different sub-list.</li>
\\</ul>
\\</li>
\\<li><h2>Heading.</h2>
\\<p>Some contents below the heading.</p>
\\<ol>
\\<li>Item 1.</li>
\\<li>Item 2.</li>
\\<li>Item 3.</li>
\\</ol>
\\</li>
\\</ul>
\\
);
}
 
test "tables" {
try testRender(
\\| Operator | Meaning |
\\| :------: | ---------------- |
\\| `+` | Add |
\\| `-` | Subtract |
\\| `*` | Multiply |
\\| `/` | Divide |
\\| `??` | **Not sure yet** |
\\
\\| Item 1 | Value 1 |
\\| Item 2 | Value 2 |
\\| Item 3 | Value 3 |
\\| Item 4 | Value 4 |
\\
\\| :--- | :----: | ----: |
\\| Left | Center | Right |
\\
,
\\<table>
\\<tr>
\\<th style="text-align: center">Operator</th>
\\<th>Meaning</th>
\\</tr>
\\<tr>
\\<td style="text-align: center"><code>+</code></td>
\\<td>Add</td>
\\</tr>
\\<tr>
\\<td style="text-align: center"><code>-</code></td>
\\<td>Subtract</td>
\\</tr>
\\<tr>
\\<td style="text-align: center"><code>*</code></td>
\\<td>Multiply</td>
\\</tr>
\\<tr>
\\<td style="text-align: center"><code>/</code></td>
\\<td>Divide</td>
\\</tr>
\\<tr>
\\<td style="text-align: center"><code>??</code></td>
\\<td><strong>Not sure yet</strong></td>
\\</tr>
\\</table>
\\<table>
\\<tr>
\\<td>Item 1</td>
\\<td>Value 1</td>
\\</tr>
\\<tr>
\\<td>Item 2</td>
\\<td>Value 2</td>
\\</tr>
\\<tr>
\\<td>Item 3</td>
\\<td>Value 3</td>
\\</tr>
\\<tr>
\\<td>Item 4</td>
\\<td>Value 4</td>
\\</tr>
\\</table>
\\<table>
\\<tr>
\\<td style="text-align: left">Left</td>
\\<td style="text-align: center">Center</td>
\\<td style="text-align: right">Right</td>
\\</tr>
\\</table>
\\
);
}
 
test "table with uneven number of columns" {
try testRender(
\\| One |
\\| :-- | :--: |
\\| One | Two | Three |
\\
,
\\<table>
\\<tr>
\\<th style="text-align: left">One</th>
\\</tr>
\\<tr>
\\<td style="text-align: left">One</td>
\\<td style="text-align: center">Two</td>
\\<td>Three</td>
\\</tr>
\\</table>
\\
);
}
 
test "table with escaped pipes" {
try testRender(
\\| One \| Two |
\\| --- | --- |
\\| One \| Two |
\\
,
\\<table>
\\<tr>
\\<th>One | Two</th>
\\</tr>
\\<tr>
\\<td>One | Two</td>
\\</tr>
\\</table>
\\
);
}
 
test "table with pipes in code spans" {
try testRender(
\\| `|` | Bitwise _OR_ |
\\| `||` | Combines error sets |
\\| `` `||` `` | Escaped version |
\\| ` ``||`` ` | Another escaped version |
\\| `Oops unterminated code span |
\\
,
\\<table>
\\<tr>
\\<td><code>|</code></td>
\\<td>Bitwise <em>OR</em></td>
\\</tr>
\\<tr>
\\<td><code>||</code></td>
\\<td>Combines error sets</td>
\\</tr>
\\<tr>
\\<td><code>`||`</code></td>
\\<td>Escaped version</td>
\\</tr>
\\<tr>
\\<td><code>``||``</code></td>
\\<td>Another escaped version</td>
\\</tr>
\\</table>
\\<p>| <code>Oops unterminated code span |</code></p>
\\
);
}
 
test "tables require leading and trailing pipes" {
try testRender(
\\Not | a | table
\\
\\| But | this | is |
\\
\\Also not a table:
\\|
\\ |
\\
,
\\<p>Not | a | table</p>
\\<table>
\\<tr>
\\<td>But</td>
\\<td>this</td>
\\<td>is</td>
\\</tr>
\\</table>
\\<p>Also not a table:
\\|
\\|</p>
\\
);
}
 
test "headings" {
try testRender(
\\# Level one
\\## Level two
\\### Level three
\\#### Level four
\\##### Level five
\\###### Level six
\\####### Not a heading
\\
,
\\<h1>Level one</h1>
\\<h2>Level two</h2>
\\<h3>Level three</h3>
\\<h4>Level four</h4>
\\<h5>Level five</h5>
\\<h6>Level six</h6>
\\<p>####### Not a heading</p>
\\
);
}
 
test "headings with inline content" {
try testRender(
\\# Outline of `std.zig`
\\## **Important** notes
\\### ***Nested* inline content**
\\
,
\\<h1>Outline of <code>std.zig</code></h1>
\\<h2><strong>Important</strong> notes</h2>
\\<h3><strong><em>Nested</em> inline content</strong></h3>
\\
);
}
 
test "code blocks" {
try testRender(
\\```
\\Hello, world!
\\This is some code.
\\```
\\``` zig test
\\const std = @import("std");
\\
\\test {
\\ try std.testing.expect(2 + 2 == 4);
\\}
\\```
\\
,
\\<pre><code>Hello, world!
\\This is some code.
\\</code></pre>
\\<pre><code>const std = @import(&quot;std&quot;);
\\
\\test {
\\ try std.testing.expect(2 + 2 == 4);
\\}
\\</code></pre>
\\
);
}
 
test "blockquotes" {
try testRender(
\\> > You miss 100% of the shots you don't take.
\\> >
\\> > ~ Wayne Gretzky
\\>
\\> ~ Michael Scott
\\
,
\\<blockquote>
\\<blockquote>
\\<p>You miss 100% of the shots you don't take.</p>
\\<p>~ Wayne Gretzky</p>
\\</blockquote>
\\<p>~ Michael Scott</p>
\\</blockquote>
\\
);
}
 
test "blockquote lazy continuation lines" {
try testRender(
\\>>>>Deeply nested blockquote
\\>>which continues on another line
\\and then yet another one.
\\>>
\\>> But now two of them have been closed.
\\
\\And then there were none.
\\
,
\\<blockquote>
\\<blockquote>
\\<blockquote>
\\<blockquote>
\\<p>Deeply nested blockquote
\\which continues on another line
\\and then yet another one.</p>
\\</blockquote>
\\</blockquote>
\\<p>But now two of them have been closed.</p>
\\</blockquote>
\\</blockquote>
\\<p>And then there were none.</p>
\\
);
}
 
test "paragraphs" {
try testRender(
\\Paragraph one.
\\
\\Paragraph two.
\\Still in the paragraph.
\\ So is this.
\\
\\
\\
\\
\\ Last paragraph.
\\
,
\\<p>Paragraph one.</p>
\\<p>Paragraph two.
\\Still in the paragraph.
\\So is this.</p>
\\<p>Last paragraph.</p>
\\
);
}
 
test "thematic breaks" {
try testRender(
\\---
\\***
\\___
\\ ---
\\ - - - - - - - - - - -
\\
,
\\<hr />
\\<hr />
\\<hr />
\\<hr />
\\<hr />
\\
);
}
 
test "links" {
try testRender(
\\[Link](https://example.com)
\\[Link *with inlines*](https://example.com)
\\[Nested parens](https://example.com/nested(parens(inside)))
\\[Escaped parens](https://example.com/\)escaped\()
\\[Line break in target](test\
\\target)
\\
,
\\<p><a href="https://example.com">Link</a>
\\<a href="https://example.com">Link <em>with inlines</em></a>
\\<a href="https://example.com/nested(parens(inside))">Nested parens</a>
\\<a href="https://example.com/)escaped(">Escaped parens</a>
\\<a href="test\
\\target">Line break in target</a></p>
\\
);
}
 
test "images" {
try testRender(
\\![Alt text](https://example.com/image.png)
\\![Alt text *with inlines*](https://example.com/image.png)
\\![Nested parens](https://example.com/nested(parens(inside)).png)
\\![Escaped parens](https://example.com/\)escaped\(.png)
\\![Line break in target](test\
\\target)
\\
,
\\<p><img src="https://example.com/image.png" alt="Alt text" />
\\<img src="https://example.com/image.png" alt="Alt text with inlines" />
\\<img src="https://example.com/nested(parens(inside)).png" alt="Nested parens" />
\\<img src="https://example.com/)escaped(.png" alt="Escaped parens" />
\\<img src="test\
\\target" alt="Line break in target" /></p>
\\
);
}
 
test "emphasis" {
try testRender(
\\*Emphasis.*
\\**Strong.**
\\***Strong emphasis.***
\\****More...****
\\*****MORE...*****
\\******Even more...******
\\*******OK, this is enough.*******
\\
,
\\<p><em>Emphasis.</em>
\\<strong>Strong.</strong>
\\<em><strong>Strong emphasis.</strong></em>
\\<em><strong><em>More...</em></strong></em>
\\<em><strong><strong>MORE...</strong></strong></em>
\\<em><strong><em><strong>Even more...</strong></em></strong></em>
\\<em><strong><em><strong><em>OK, this is enough.</em></strong></em></strong></em></p>
\\
);
try testRender(
\\_Emphasis._
\\__Strong.__
\\___Strong emphasis.___
\\____More...____
\\_____MORE..._____
\\______Even more...______
\\_______OK, this is enough._______
\\
,
\\<p><em>Emphasis.</em>
\\<strong>Strong.</strong>
\\<em><strong>Strong emphasis.</strong></em>
\\<em><strong><em>More...</em></strong></em>
\\<em><strong><strong>MORE...</strong></strong></em>
\\<em><strong><em><strong>Even more...</strong></em></strong></em>
\\<em><strong><em><strong><em>OK, this is enough.</em></strong></em></strong></em></p>
\\
);
}
 
test "nested emphasis" {
try testRender(
\\**Hello, *world!***
\\*Hello, **world!***
\\**Hello, _world!_**
\\_Hello, **world!**_
\\*Hello, **nested** *world!**
\\***Hello,* world!**
\\__**Hello, world!**__
\\****Hello,** world!**
\\__Hello,_ world!_
\\*Test**123*
\\__Test____123__
\\
,
\\<p><strong>Hello, <em>world!</em></strong>
\\<em>Hello, <strong>world!</strong></em>
\\<strong>Hello, <em>world!</em></strong>
\\<em>Hello, <strong>world!</strong></em>
\\<em>Hello, <strong>nested</strong> <em>world!</em></em>
\\<strong><em>Hello,</em> world!</strong>
\\<strong><strong>Hello, world!</strong></strong>
\\<strong><strong>Hello,</strong> world!</strong>
\\<em><em>Hello,</em> world!</em>
\\<em>Test</em><em>123</em>
\\<strong>Test____123</strong></p>
\\
);
}
 
test "emphasis precedence" {
try testRender(
\\*First one _wins*_.
\\_*No other __rule matters.*_
\\
,
\\<p><em>First one _wins</em>_.
\\<em><em>No other __rule matters.</em></em></p>
\\
);
}
 
test "emphasis open and close" {
try testRender(
\\Cannot open: *
\\Cannot open: _
\\*Cannot close: *
\\_Cannot close: _
\\
\\foo*bar*baz
\\foo_bar_baz
\\foo**bar**baz
\\foo__bar__baz
\\
,
\\<p>Cannot open: *
\\Cannot open: _
\\*Cannot close: *
\\_Cannot close: _</p>
\\<p>foo<em>bar</em>baz
\\foo_bar_baz
\\foo<strong>bar</strong>baz
\\foo__bar__baz</p>
\\
);
}
 
test "code spans" {
try testRender(
\\`Hello, world!`
\\```Multiple `backticks` can be used.```
\\`**This** does not produce emphasis.`
\\`` `Backtick enclosed string.` ``
\\`Delimiter lengths ```must``` match.`
\\
\\Unterminated ``code...
\\
\\Weird empty code span: `
\\
\\**Very important code: `hi`**
\\
,
\\<p><code>Hello, world!</code>
\\<code>Multiple `backticks` can be used.</code>
\\<code>**This** does not produce emphasis.</code>
\\<code>`Backtick enclosed string.`</code>
\\<code>Delimiter lengths ```must``` match.</code></p>
\\<p>Unterminated <code>code...</code></p>
\\<p>Weird empty code span: <code></code></p>
\\<p><strong>Very important code: <code>hi</code></strong></p>
\\
);
}
 
test "backslash escapes" {
try testRender(
\\Not \*emphasized\*.
\\Literal \\backslashes\\.
\\Not code: \`hi\`.
\\\# Not a title.
\\#\# Also not a title.
\\\> Not a blockquote.
\\\- Not a list item.
\\\| Not a table. |
\\| Also not a table. \|
\\Any \punctuation\ characte\r can be escaped:
\\\!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\`\{\|\}\~
\\
,
\\<p>Not *emphasized*.
\\Literal \backslashes\.
\\Not code: `hi`.
\\# Not a title.
\\## Also not a title.
\\&gt; Not a blockquote.
\\- Not a list item.
\\| Not a table. |
\\| Also not a table. |
\\Any \punctuation\ characte\r can be escaped:
\\!&quot;#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
\\
);
}
 
test "hard line breaks" {
try testRender(
\\The iguana sits\
\\Perched atop a short desk chair\
\\Writing code in Zig
\\
,
\\<p>The iguana sits<br />
\\Perched atop a short desk chair<br />
\\Writing code in Zig</p>
\\
);
}
 
test "Unicode handling" {
// Null bytes must be replaced.
try testRender("\x00\x00\x00", "<p>\u{FFFD}\u{FFFD}\u{FFFD}</p>\n");
 
// Invalid UTF-8 must be replaced.
try testRender("\xC0\x80\xE0\x80\x80\xF0\x80\x80\x80", "<p>\u{FFFD}\u{FFFD}\u{FFFD}</p>\n");
try testRender("\xED\xA0\x80\xED\xBF\xBF", "<p>\u{FFFD}\u{FFFD}</p>\n");
 
// Incomplete UTF-8 must be replaced.
try testRender("\xE2\x82", "<p>\u{FFFD}</p>\n");
}
 
fn testRender(input: []const u8, expected: []const u8) !void {
var parser = try Parser.init(testing.allocator);
defer parser.deinit();
 
var lines = std.mem.split(u8, input, "\n");
while (lines.next()) |line| {
try parser.feedLine(line);
}
var doc = try parser.endInput();
defer doc.deinit(testing.allocator);
 
var actual = std.ArrayList(u8).init(testing.allocator);
defer actual.deinit();
try doc.render(actual.writer());
 
try testing.expectEqualStrings(expected, actual.items);
}
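
// A minimal sketch of customizing `Renderer` via `renderFn`, mirroring the
// pattern used by the docs frontend: code spans are wrapped with an extra
// (arbitrarily named) CSS class, and every other node falls through to
// `renderDefault`.
test "custom renderer" {
    var parser = try Parser.init(testing.allocator);
    defer parser.deinit();
    try parser.feedLine("Some `code` here.");
    var doc = try parser.endInput();
    defer doc.deinit(testing.allocator);

    var actual = std.ArrayList(u8).init(testing.allocator);
    defer actual.deinit();

    const Writer = std.ArrayList(u8).Writer;
    const CustomRenderer = Renderer(Writer, void);
    const renderer: CustomRenderer = .{
        .context = {},
        .renderFn = struct {
            fn render(
                r: CustomRenderer,
                d: Document,
                node: Document.Node.Index,
                writer: Writer,
            ) !void {
                switch (d.nodes.items(.tag)[@intFromEnum(node)]) {
                    .code_span => {
                        const data = d.nodes.items(.data)[@intFromEnum(node)];
                        try writer.print("<code class=\"inline\">{}</code>", .{
                            fmtHtml(d.string(data.text.content)),
                        });
                    },
                    else => try CustomRenderer.renderDefault(r, d, node, writer),
                }
            }
        }.render,
    };
    try renderer.render(doc, actual.writer());

    try testing.expectEqualStrings(
        "<p>Some <code class=\"inline\">code</code> here.</p>\n",
        actual.items,
    );
}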
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,192 @@
//! An abstract tree representation of a Markdown document.
 
const std = @import("std");
const builtin = @import("builtin");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Renderer = @import("renderer.zig").Renderer;
 
nodes: Node.List.Slice,
extra: []u32,
string_bytes: []u8,
 
const Document = @This();
 
pub const Node = struct {
tag: Tag,
data: Data,
 
pub const Index = enum(u32) {
root = 0,
_,
};
pub const List = std.MultiArrayList(Node);
 
pub const Tag = enum {
/// Data is `container`.
root,
 
// Blocks
/// Data is `list`.
list,
/// Data is `list_item`.
list_item,
/// Data is `container`.
table,
/// Data is `container`.
table_row,
/// Data is `table_cell`.
table_cell,
/// Data is `heading`.
heading,
/// Data is `code_block`.
code_block,
/// Data is `container`.
blockquote,
/// Data is `container`.
paragraph,
/// Data is `none`.
thematic_break,
 
// Inlines
/// Data is `link`.
link,
/// Data is `link`.
image,
/// Data is `container`.
strong,
/// Data is `container`.
emphasis,
/// Data is `text`.
code_span,
/// Data is `text`.
text,
/// Data is `none`.
line_break,
};
 
pub const Data = union {
none: void,
container: struct {
children: ExtraIndex,
},
text: struct {
content: StringIndex,
},
list: struct {
start: ListStart,
children: ExtraIndex,
},
list_item: struct {
tight: bool,
children: ExtraIndex,
},
table_cell: struct {
info: packed struct {
alignment: TableCellAlignment,
header: bool,
},
children: ExtraIndex,
},
heading: struct {
/// Between 1 and 6, inclusive.
level: u3,
children: ExtraIndex,
},
code_block: struct {
tag: StringIndex,
content: StringIndex,
},
link: struct {
target: StringIndex,
children: ExtraIndex,
},
 
comptime {
// In Debug and ReleaseSafe builds, there may be hidden extra fields
// included for safety checks. Without such safety checks enabled,
// we always want this union to be 8 bytes.
if (builtin.mode != .Debug and builtin.mode != .ReleaseSafe) {
assert(@sizeOf(Data) == 8);
}
}
};
 
/// The starting number of a list. This is either a number between 0 and
/// 999,999,999, inclusive, or `unordered` to indicate an unordered list.
pub const ListStart = enum(u30) {
// When https://github.com/ziglang/zig/issues/104 is implemented, this
// type can be more naturally expressed as ?u30. As it is, we want
// values to fit within 4 bytes, so ?u30 does not yet suffice for
// storage.
unordered = std.math.maxInt(u30),
_,
 
pub fn asNumber(start: ListStart) ?u30 {
if (start == .unordered) return null;
assert(@intFromEnum(start) <= 999_999_999);
return @intFromEnum(start);
}
};
 
pub const TableCellAlignment = enum {
unset,
left,
center,
right,
};
 
/// Trailing: `len` times `Node.Index`
pub const Children = struct {
len: u32,
};
};
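
// A small illustrative test of the sentinel encoding used by `Node.ListStart`
// above: `unordered` maps to null, any other value is the literal start number.
test "ListStart.asNumber" {
    try std.testing.expectEqual(@as(?u30, null), Node.ListStart.unordered.asNumber());
    try std.testing.expectEqual(@as(?u30, 7), @as(Node.ListStart, @enumFromInt(7)).asNumber());
}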
 
pub const ExtraIndex = enum(u32) { _ };
 
/// The index of a null-terminated string in `string_bytes`.
pub const StringIndex = enum(u32) {
empty = 0,
_,
};
 
pub fn deinit(doc: *Document, allocator: Allocator) void {
doc.nodes.deinit(allocator);
allocator.free(doc.extra);
allocator.free(doc.string_bytes);
doc.* = undefined;
}
 
/// Renders a document directly to a writer using the default renderer.
pub fn render(doc: Document, writer: anytype) @TypeOf(writer).Error!void {
const renderer: Renderer(@TypeOf(writer), void) = .{ .context = {} };
try renderer.render(doc, writer);
}
 
pub fn ExtraData(comptime T: type) type {
return struct { data: T, end: usize };
}
 
pub fn extraData(doc: Document, comptime T: type, index: ExtraIndex) ExtraData(T) {
const fields = @typeInfo(T).Struct.fields;
var i: usize = @intFromEnum(index);
var result: T = undefined;
inline for (fields) |field| {
@field(result, field.name) = switch (field.type) {
u32 => doc.extra[i],
else => @compileError("bad field type"),
};
i += 1;
}
return .{ .data = result, .end = i };
}
 
pub fn extraChildren(doc: Document, index: ExtraIndex) []const Node.Index {
const children = doc.extraData(Node.Children, index);
return @ptrCast(doc.extra[children.end..][0..children.data.len]);
}
 
pub fn string(doc: Document, index: StringIndex) [:0]const u8 {
const start = @intFromEnum(index);
return std.mem.span(@as([*:0]u8, @ptrCast(doc.string_bytes[start..].ptr)));
}
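
/// An illustrative sketch (hypothetical helper, not used by the renderer) of
/// walking this representation: collect the plain text under `node` by
/// following `container.children` through `extraChildren` and reading string
/// data via `string`. Only a handful of tags are handled here.
fn collectTextSketch(doc: Document, node: Node.Index, writer: anytype) @TypeOf(writer).Error!void {
    const data = doc.nodes.items(.data)[@intFromEnum(node)];
    switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
        .text, .code_span => try writer.writeAll(doc.string(data.text.content)),
        .root, .paragraph, .blockquote, .strong, .emphasis => {
            for (doc.extraChildren(data.container.children)) |child| {
                try collectTextSketch(doc, child, writer);
            }
        },
        else => {},
    }
}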
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,1501 @@
//! A Markdown parser producing `Document`s.
//!
//! The parser operates at two levels: at the outer level, the parser accepts
//! the content of an input document line by line and begins building the _block
//! structure_ of the document. This creates a stack of currently open blocks.
//!
//! When the parser detects the end of a block, it closes the block, popping it
//! from the open block stack and completing any additional parsing of the
//! block's content. For blocks which contain parseable inline content, this
//! invokes the inner level of the parser, handling the _inline structure_ of
//! the block.
//!
//! Inline parsing scans through the collected inline content of a block. When
//! it encounters a character that could indicate the beginning of an inline, it
//! either handles the inline right away (if possible) or adds it to a pending
//! inlines stack. When an inline is completed, it is added to a list of
//! completed inlines, which (along with any surrounding text nodes) will become
//! the children of the parent inline or the block whose inline content is being
//! parsed.
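
/// An illustrative sketch (hypothetical helper) of driving the two levels
/// described above: the caller feeds the block structure one line at a time
/// via `feedLine`, and `endInput` closes the remaining open blocks, which
/// performs inline parsing and yields the finished `Document`.
fn parseAllSketch(allocator: Allocator, source: []const u8) Allocator.Error!Document {
    var p = try Parser.init(allocator);
    defer p.deinit();
    var lines = mem.splitScalar(u8, source, '\n');
    while (lines.next()) |line| try p.feedLine(line);
    return p.endInput();
}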
 
const std = @import("std");
const mem = std.mem;
const assert = std.debug.assert;
const isWhitespace = std.ascii.isWhitespace;
const Allocator = mem.Allocator;
const expectEqual = std.testing.expectEqual;
const Document = @import("Document.zig");
const Node = Document.Node;
const ExtraIndex = Document.ExtraIndex;
const ExtraData = Document.ExtraData;
const StringIndex = Document.StringIndex;
 
nodes: Node.List = .{},
extra: std.ArrayListUnmanaged(u32) = .{},
scratch_extra: std.ArrayListUnmanaged(u32) = .{},
string_bytes: std.ArrayListUnmanaged(u8) = .{},
scratch_string: std.ArrayListUnmanaged(u8) = .{},
pending_blocks: std.ArrayListUnmanaged(Block) = .{},
allocator: Allocator,
 
const Parser = @This();
 
/// An arbitrary limit on the maximum number of columns in a table so that
/// table-related metadata maintained by the parser does not require dynamic
/// memory allocation.
const max_table_columns = 128;
 
/// A block element which is still receiving children.
const Block = struct {
tag: Tag,
data: Data,
extra_start: usize,
string_start: usize,
 
const Tag = enum {
/// Data is `list`.
list,
/// Data is `list_item`.
list_item,
/// Data is `table`.
table,
/// Data is `none`.
table_row,
/// Data is `heading`.
heading,
/// Data is `code_block`.
code_block,
/// Data is `none`.
blockquote,
/// Data is `none`.
paragraph,
/// Data is `none`.
thematic_break,
};
 
const Data = union {
none: void,
list: struct {
marker: ListMarker,
/// Between 0 and 999,999,999, inclusive.
start: u30,
tight: bool,
last_line_blank: bool = false,
},
list_item: struct {
continuation_indent: usize,
},
table: struct {
column_alignments: std.BoundedArray(Node.TableCellAlignment, max_table_columns) = .{},
},
heading: struct {
/// Between 1 and 6, inclusive.
level: u3,
},
code_block: struct {
tag: StringIndex,
fence_len: usize,
indent: usize,
},
 
const ListMarker = enum {
@"-",
@"*",
@"+",
number_dot,
number_paren,
};
};
 
const ContentType = enum {
blocks,
inlines,
raw_inlines,
nothing,
};
 
fn canAccept(b: Block) ContentType {
return switch (b.tag) {
.list,
.list_item,
.table,
.blockquote,
=> .blocks,
 
.heading,
.paragraph,
=> .inlines,
 
.code_block,
=> .raw_inlines,
 
.table_row,
.thematic_break,
=> .nothing,
};
}
 
/// Attempts to continue `b` using the contents of `line`. If successful,
/// returns the remaining portion of `line` to be considered part of `b`
/// (e.g. for a blockquote, this would be everything except the leading
/// `>`). If unsuccessful, returns null.
fn match(b: Block, line: []const u8) ?[]const u8 {
const unindented = mem.trimLeft(u8, line, " \t");
const indent = line.len - unindented.len;
return switch (b.tag) {
.list => line,
.list_item => if (indent >= b.data.list_item.continuation_indent)
line[b.data.list_item.continuation_indent..]
else if (unindented.len == 0)
// Blank lines should not close list items, since more indented
// content may follow after the blank line.
""
else
null,
.table => if (unindented.len > 0) unindented else null,
.table_row => null,
.heading => null,
.code_block => code_block: {
const trimmed = mem.trimRight(u8, unindented, " \t");
if (mem.indexOfNone(u8, trimmed, "`") != null or trimmed.len != b.data.code_block.fence_len) {
const effective_indent = @min(indent, b.data.code_block.indent);
break :code_block line[effective_indent..];
} else {
break :code_block null;
}
},
.blockquote => if (mem.startsWith(u8, unindented, ">"))
unindented[1..]
else
null,
.paragraph => if (unindented.len > 0) unindented else null,
.thematic_break => null,
};
}
};
 
pub fn init(allocator: Allocator) Allocator.Error!Parser {
var p: Parser = .{ .allocator = allocator };
try p.nodes.append(allocator, .{
.tag = .root,
.data = undefined,
});
try p.string_bytes.append(allocator, 0);
return p;
}
 
pub fn deinit(p: *Parser) void {
p.nodes.deinit(p.allocator);
p.extra.deinit(p.allocator);
p.scratch_extra.deinit(p.allocator);
p.string_bytes.deinit(p.allocator);
p.scratch_string.deinit(p.allocator);
p.pending_blocks.deinit(p.allocator);
p.* = undefined;
}
 
/// Accepts a single line of content. `line` should not have a trailing line
/// ending character.
pub fn feedLine(p: *Parser, line: []const u8) Allocator.Error!void {
var rest_line = line;
const first_unmatched = for (p.pending_blocks.items, 0..) |b, i| {
if (b.match(rest_line)) |rest| {
rest_line = rest;
} else {
break i;
}
} else p.pending_blocks.items.len;
 
const in_code_block = p.pending_blocks.items.len > 0 and
p.pending_blocks.getLast().tag == .code_block;
const code_block_end = in_code_block and
first_unmatched + 1 == p.pending_blocks.items.len;
// New blocks cannot be started if we are actively inside a code block or
// are just closing one (to avoid interpreting the closing ``` as a new code
// block start).
var maybe_block_start = if (!in_code_block or first_unmatched + 2 <= p.pending_blocks.items.len)
try p.startBlock(rest_line)
else
null;
 
// This is a lazy continuation line if there are no new blocks to open and
// the last open block is a paragraph.
if (maybe_block_start == null and
!isBlank(rest_line) and
p.pending_blocks.items.len > 0 and
p.pending_blocks.getLast().tag == .paragraph)
{
try p.addScratchStringLine(rest_line);
return;
}
 
// If a new block needs to be started, any paragraph needs to be closed,
// even though this isn't detected as part of the closing condition for
// paragraphs.
if (maybe_block_start != null and
p.pending_blocks.items.len > 0 and
p.pending_blocks.getLast().tag == .paragraph)
{
try p.closeLastBlock();
}
 
while (p.pending_blocks.items.len > first_unmatched) {
try p.closeLastBlock();
}
 
while (maybe_block_start) |block_start| : (maybe_block_start = try p.startBlock(rest_line)) {
try p.appendBlockStart(block_start);
// There may be more blocks to start within the same line.
rest_line = block_start.rest;
// Headings may only contain inline content.
if (block_start.tag == .heading) break;
// An opening code fence does not contain any additional block or inline
// content to process.
if (block_start.tag == .code_block) return;
}
 
// Do not append the end of a code block (```) as textual content.
if (code_block_end) return;
 
const can_accept = if (p.pending_blocks.getLastOrNull()) |last_pending_block|
last_pending_block.canAccept()
else
.blocks;
const rest_line_trimmed = mem.trimLeft(u8, rest_line, " \t");
switch (can_accept) {
.blocks => {
// If we're inside a list item and the rest of the line is blank, it
// means that any subsequent child of the list item (or subsequent
// item in the list) will cause the containing list to be considered
// loose. However, we can't immediately declare that the list is
// loose, since we might just be looking at a blank line after the
// end of the last item in the list. The final determination will be
// made when appending the next child of the list or list item.
const maybe_containing_list = if (p.pending_blocks.items.len > 0 and p.pending_blocks.getLast().tag == .list_item)
&p.pending_blocks.items[p.pending_blocks.items.len - 2]
else
null;
 
if (rest_line_trimmed.len > 0) {
try p.appendBlockStart(.{
.tag = .paragraph,
.data = .{ .none = {} },
.rest = undefined,
});
try p.addScratchStringLine(rest_line_trimmed);
}
 
if (maybe_containing_list) |containing_list| {
containing_list.data.list.last_line_blank = rest_line_trimmed.len == 0;
}
},
.inlines => try p.addScratchStringLine(rest_line_trimmed),
.raw_inlines => try p.addScratchStringLine(rest_line),
.nothing => {},
}
}
 
/// Completes processing of the input and returns the parsed document.
pub fn endInput(p: *Parser) Allocator.Error!Document {
while (p.pending_blocks.items.len > 0) {
try p.closeLastBlock();
}
// There should be no inline content pending after closing the last open
// block.
assert(p.scratch_string.items.len == 0);
 
const children = try p.addExtraChildren(@ptrCast(p.scratch_extra.items));
p.nodes.items(.data)[0] = .{ .container = .{ .children = children } };
p.scratch_string.items.len = 0;
p.scratch_extra.items.len = 0;
 
var nodes = p.nodes.toOwnedSlice();
errdefer nodes.deinit(p.allocator);
const extra = try p.extra.toOwnedSlice(p.allocator);
errdefer p.allocator.free(extra);
const string_bytes = try p.string_bytes.toOwnedSlice(p.allocator);
errdefer p.allocator.free(string_bytes);
 
return .{
.nodes = nodes,
.extra = extra,
.string_bytes = string_bytes,
};
}
 
/// Data describing the start of a new block element.
const BlockStart = struct {
tag: Tag,
data: Data,
rest: []const u8,
 
const Tag = enum {
/// Data is `list_item`.
list_item,
/// Data is `table_row`.
table_row,
/// Data is `heading`.
heading,
/// Data is `code_block`.
code_block,
/// Data is `none`.
blockquote,
/// Data is `none`.
paragraph,
/// Data is `none`.
thematic_break,
};
 
const Data = union {
none: void,
list_item: struct {
marker: Block.Data.ListMarker,
number: u30,
continuation_indent: usize,
},
table_row: struct {
cells: std.BoundedArray([]const u8, max_table_columns),
},
heading: struct {
/// Between 1 and 6, inclusive.
level: u3,
},
code_block: struct {
tag: StringIndex,
fence_len: usize,
indent: usize,
},
};
};
 
/// Opens the block described by `block_start` as a new pending block, closing
/// or implicitly opening surrounding blocks (lists and tables) as needed.
fn appendBlockStart(p: *Parser, block_start: BlockStart) !void {
if (p.pending_blocks.getLastOrNull()) |last_pending_block| {
// Close the last block if it is a list and the new block is not a list item
// or not of the same marker type.
const should_close_list = last_pending_block.tag == .list and
(block_start.tag != .list_item or
block_start.data.list_item.marker != last_pending_block.data.list.marker);
// The last block should also be closed if the new block is not a table
// row, which is the only allowed child of a table.
const should_close_table = last_pending_block.tag == .table and
block_start.tag != .table_row;
if (should_close_list or should_close_table) {
try p.closeLastBlock();
}
}
 
if (p.pending_blocks.getLastOrNull()) |last_pending_block| {
// If the last block is a list or list item, check for tightness based
// on the last line.
const maybe_containing_list = switch (last_pending_block.tag) {
.list => &p.pending_blocks.items[p.pending_blocks.items.len - 1],
.list_item => &p.pending_blocks.items[p.pending_blocks.items.len - 2],
else => null,
};
if (maybe_containing_list) |containing_list| {
if (containing_list.data.list.last_line_blank) {
containing_list.data.list.tight = false;
}
}
}
 
// Start a new list if the new block is a list item and there is no
// containing list yet.
if (block_start.tag == .list_item and
(p.pending_blocks.items.len == 0 or p.pending_blocks.getLast().tag != .list))
{
try p.pending_blocks.append(p.allocator, .{
.tag = .list,
.data = .{ .list = .{
.marker = block_start.data.list_item.marker,
.start = block_start.data.list_item.number,
.tight = true,
} },
.string_start = p.scratch_string.items.len,
.extra_start = p.scratch_extra.items.len,
});
}
 
if (block_start.tag == .table_row) {
// Likewise, table rows start a table implicitly.
if (p.pending_blocks.items.len == 0 or p.pending_blocks.getLast().tag != .table) {
try p.pending_blocks.append(p.allocator, .{
.tag = .table,
.data = .{ .table = .{
.column_alignments = .{},
} },
.string_start = p.scratch_string.items.len,
.extra_start = p.scratch_extra.items.len,
});
}
 
const current_row = p.scratch_extra.items.len - p.pending_blocks.getLast().extra_start;
if (current_row <= 1) {
if (parseTableHeaderDelimiter(block_start.data.table_row.cells)) |alignments| {
p.pending_blocks.items[p.pending_blocks.items.len - 1].data.table.column_alignments = alignments;
if (current_row == 1) {
// We need to go back and mark the header row and its column
// alignments.
const datas = p.nodes.items(.data);
const header_data = datas[p.scratch_extra.getLast()];
for (p.extraChildren(header_data.container.children), 0..) |header_cell, i| {
const alignment = if (i < alignments.len) alignments.buffer[i] else .unset;
const cell_data = &datas[@intFromEnum(header_cell)].table_cell;
cell_data.info.alignment = alignment;
cell_data.info.header = true;
}
}
return;
}
}
}
 
const tag: Block.Tag, const data: Block.Data = switch (block_start.tag) {
.list_item => .{ .list_item, .{ .list_item = .{
.continuation_indent = block_start.data.list_item.continuation_indent,
} } },
.table_row => .{ .table_row, .{ .none = {} } },
.heading => .{ .heading, .{ .heading = .{
.level = block_start.data.heading.level,
} } },
.code_block => .{ .code_block, .{ .code_block = .{
.tag = block_start.data.code_block.tag,
.fence_len = block_start.data.code_block.fence_len,
.indent = block_start.data.code_block.indent,
} } },
.blockquote => .{ .blockquote, .{ .none = {} } },
.paragraph => .{ .paragraph, .{ .none = {} } },
.thematic_break => .{ .thematic_break, .{ .none = {} } },
};
 
try p.pending_blocks.append(p.allocator, .{
.tag = tag,
.data = data,
.string_start = p.scratch_string.items.len,
.extra_start = p.scratch_extra.items.len,
});
 
if (tag == .table_row) {
// Table rows are unique, since we already have all the children
// available in the BlockStart. We can immediately parse and append
// these children now.
const containing_table = p.pending_blocks.items[p.pending_blocks.items.len - 2];
const column_alignments = containing_table.data.table.column_alignments.slice();
for (block_start.data.table_row.cells.slice(), 0..) |cell_content, i| {
const cell_children = try p.parseInlines(cell_content);
const alignment = if (i < column_alignments.len) column_alignments[i] else .unset;
const cell = try p.addNode(.{
.tag = .table_cell,
.data = .{ .table_cell = .{
.info = .{
.alignment = alignment,
.header = false,
},
.children = cell_children,
} },
});
try p.addScratchExtraNode(cell);
}
}
}
 
fn startBlock(p: *Parser, line: []const u8) !?BlockStart {
const unindented = mem.trimLeft(u8, line, " \t");
const indent = line.len - unindented.len;
if (isThematicBreak(line)) {
// Thematic breaks take precedence over list items.
return .{
.tag = .thematic_break,
.data = .{ .none = {} },
.rest = "",
};
} else if (startListItem(unindented)) |list_item| {
return .{
.tag = .list_item,
.data = .{ .list_item = .{
.marker = list_item.marker,
.number = list_item.number,
.continuation_indent = list_item.continuation_indent,
} },
.rest = list_item.rest,
};
} else if (startTableRow(unindented)) |table_row| {
return .{
.tag = .table_row,
.data = .{ .table_row = .{
.cells = table_row.cells,
} },
.rest = "",
};
} else if (startHeading(unindented)) |heading| {
return .{
.tag = .heading,
.data = .{ .heading = .{
.level = heading.level,
} },
.rest = heading.rest,
};
} else if (try p.startCodeBlock(unindented)) |code_block| {
return .{
.tag = .code_block,
.data = .{ .code_block = .{
.tag = code_block.tag,
.fence_len = code_block.fence_len,
.indent = indent,
} },
.rest = "",
};
} else if (startBlockquote(unindented)) |rest| {
return .{
.tag = .blockquote,
.data = .{ .none = {} },
.rest = rest,
};
} else {
return null;
}
}
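
// A hedged illustration of the precedence note in startBlock: a line like
// "- - -" is matched by both isThematicBreak and startListItem, so thematic
// breaks must be checked first. This test is a sketch, not part of the
// original suite.
test "thematic breaks take precedence over list items" {
    try expectEqual(true, isThematicBreak("- - -"));
    try expectEqual(.@"-", startListItem("- - -").?.marker);
}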
 
const ListItemStart = struct {
marker: Block.Data.ListMarker,
number: u30,
continuation_indent: usize,
rest: []const u8,
};
 
fn startListItem(unindented_line: []const u8) ?ListItemStart {
if (mem.startsWith(u8, unindented_line, "- ")) {
return .{
.marker = .@"-",
.number = undefined,
.continuation_indent = 2,
.rest = unindented_line[2..],
};
} else if (mem.startsWith(u8, unindented_line, "* ")) {
return .{
.marker = .@"*",
.number = undefined,
.continuation_indent = 2,
.rest = unindented_line[2..],
};
} else if (mem.startsWith(u8, unindented_line, "+ ")) {
return .{
.marker = .@"+",
.number = undefined,
.continuation_indent = 2,
.rest = unindented_line[2..],
};
}
 
const number_end = mem.indexOfNone(u8, unindented_line, "0123456789") orelse return null;
const after_number = unindented_line[number_end..];
const marker: Block.Data.ListMarker = if (mem.startsWith(u8, after_number, ". "))
.number_dot
else if (mem.startsWith(u8, after_number, ") "))
.number_paren
else
return null;
const number = std.fmt.parseInt(u30, unindented_line[0..number_end], 10) catch return null;
if (number > 999_999_999) return null;
return .{
.marker = marker,
.number = number,
.continuation_indent = number_end + 2,
.rest = after_number[2..],
};
}
 
const TableRowStart = struct {
cells: std.BoundedArray([]const u8, max_table_columns),
};
 
fn startTableRow(unindented_line: []const u8) ?TableRowStart {
if (unindented_line.len < 2 or
!mem.startsWith(u8, unindented_line, "|") or
mem.endsWith(u8, unindented_line, "\\|") or
!mem.endsWith(u8, unindented_line, "|")) return null;
 
var cells: std.BoundedArray([]const u8, max_table_columns) = .{};
const table_row_content = unindented_line[1 .. unindented_line.len - 1];
var cell_start: usize = 0;
var i: usize = 0;
while (i < table_row_content.len) : (i += 1) {
switch (table_row_content[i]) {
'\\' => i += 1,
'|' => {
cells.append(table_row_content[cell_start..i]) catch return null;
cell_start = i + 1;
},
'`' => {
// Ignoring pipes in code spans allows table cells to contain
// code using ||, for example.
const open_start = i;
i = mem.indexOfNonePos(u8, table_row_content, i, "`") orelse return null;
const open_len = i - open_start;
while (mem.indexOfScalarPos(u8, table_row_content, i, '`')) |close_start| {
i = mem.indexOfNonePos(u8, table_row_content, close_start, "`") orelse return null;
const close_len = i - close_start;
if (close_len == open_len) break;
} else return null;
},
else => {},
}
}
cells.append(table_row_content[cell_start..]) catch return null;
 
return .{ .cells = cells };
}
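
// A hedged sanity check of the code-span rule described in startTableRow:
// pipes inside backtick code spans do not split cells. This test is a sketch,
// not part of the original suite.
test "startTableRow ignores pipes inside code spans" {
    try expectEqual(null, startTableRow("| not a row"));
    const row = startTableRow("| `a || b` | c |").?;
    try expectEqual(2, row.cells.slice().len);
}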
 
fn parseTableHeaderDelimiter(
row_cells: std.BoundedArray([]const u8, max_table_columns),
) ?std.BoundedArray(Node.TableCellAlignment, max_table_columns) {
var alignments: std.BoundedArray(Node.TableCellAlignment, max_table_columns) = .{};
for (row_cells.slice()) |content| {
const alignment = parseTableHeaderDelimiterCell(content) orelse return null;
alignments.appendAssumeCapacity(alignment);
}
return alignments;
}
 
fn parseTableHeaderDelimiterCell(content: []const u8) ?Node.TableCellAlignment {
var state: enum {
before_rule,
after_left_anchor,
in_rule,
after_right_anchor,
after_rule,
} = .before_rule;
var left_anchor = false;
var right_anchor = false;
for (content) |c| {
switch (state) {
.before_rule => switch (c) {
' ' => {},
':' => {
left_anchor = true;
state = .after_left_anchor;
},
'-' => state = .in_rule,
else => return null,
},
.after_left_anchor => switch (c) {
'-' => state = .in_rule,
else => return null,
},
.in_rule => switch (c) {
'-' => {},
':' => {
right_anchor = true;
state = .after_right_anchor;
},
' ' => state = .after_rule,
else => return null,
},
.after_right_anchor => switch (c) {
' ' => state = .after_rule,
else => return null,
},
.after_rule => switch (c) {
' ' => {},
else => return null,
},
}
}
 
switch (state) {
.before_rule,
.after_left_anchor,
=> return null,
 
.in_rule,
.after_right_anchor,
.after_rule,
=> {},
}
 
return if (left_anchor and right_anchor)
.center
else if (left_anchor)
.left
else if (right_anchor)
.right
else
.unset;
}
 
test parseTableHeaderDelimiterCell {
try expectEqual(null, parseTableHeaderDelimiterCell(""));
try expectEqual(null, parseTableHeaderDelimiterCell(" "));
try expectEqual(.unset, parseTableHeaderDelimiterCell("-"));
try expectEqual(.unset, parseTableHeaderDelimiterCell(" - "));
try expectEqual(.unset, parseTableHeaderDelimiterCell("----"));
try expectEqual(.unset, parseTableHeaderDelimiterCell(" ---- "));
try expectEqual(null, parseTableHeaderDelimiterCell(":"));
try expectEqual(null, parseTableHeaderDelimiterCell("::"));
try expectEqual(.left, parseTableHeaderDelimiterCell(":-"));
try expectEqual(.left, parseTableHeaderDelimiterCell(" :----"));
try expectEqual(.center, parseTableHeaderDelimiterCell(":-:"));
try expectEqual(.center, parseTableHeaderDelimiterCell(":----:"));
try expectEqual(.center, parseTableHeaderDelimiterCell(" :----: "));
try expectEqual(.right, parseTableHeaderDelimiterCell("-:"));
try expectEqual(.right, parseTableHeaderDelimiterCell("----:"));
try expectEqual(.right, parseTableHeaderDelimiterCell(" ----: "));
}
 
const HeadingStart = struct {
level: u3,
rest: []const u8,
};
 
fn startHeading(unindented_line: []const u8) ?HeadingStart {
var level: u3 = 0;
return for (unindented_line, 0..) |c, i| {
switch (c) {
'#' => {
if (level == 6) break null;
level += 1;
},
' ' => {
// We must have seen at least one # by this point, since
// unindented_line has no leading spaces.
assert(level > 0);
break .{
.level = level,
.rest = unindented_line[i + 1 ..],
};
},
else => break null,
}
} else null;
}
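
// A hedged sketch of startHeading's behavior: up to six '#' characters
// followed by a space begin a heading. Not part of the original test suite.
test startHeading {
    try expectEqual(null, startHeading("plain text"));
    try expectEqual(2, startHeading("## Section").?.level);
    try expectEqual(null, startHeading("####### too deep"));
}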
 
const CodeBlockStart = struct {
tag: StringIndex,
fence_len: usize,
};
 
fn startCodeBlock(p: *Parser, unindented_line: []const u8) !?CodeBlockStart {
var fence_len: usize = 0;
const tag_bytes = for (unindented_line, 0..) |c, i| {
switch (c) {
'`' => fence_len += 1,
else => break unindented_line[i..],
}
} else "";
// Code block tags may not contain backticks, since that would create
// potential confusion with inline code spans.
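// For example, a fence line of ```zig opens a code block tagged "zig", while a
// line like ```a`b is rejected here and ends up treated as ordinary text.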
if (fence_len < 3 or mem.indexOfScalar(u8, tag_bytes, '`') != null) return null;
return .{
.tag = try p.addString(mem.trim(u8, tag_bytes, " ")),
.fence_len = fence_len,
};
}
 
fn startBlockquote(unindented_line: []const u8) ?[]const u8 {
return if (mem.startsWith(u8, unindented_line, ">"))
unindented_line[1..]
else
null;
}
 
fn isThematicBreak(line: []const u8) bool {
var char: ?u8 = null;
var count: usize = 0;
for (line) |c| {
switch (c) {
' ' => {},
'-', '_', '*' => {
if (char != null and c != char.?) return false;
char = c;
count += 1;
},
else => return false,
}
}
return count >= 3;
}
 
fn closeLastBlock(p: *Parser) !void {
const b = p.pending_blocks.pop();
const node = switch (b.tag) {
.list => list: {
assert(b.string_start == p.scratch_string.items.len);
 
// Although tightness is parsed as a property of the list, it is
// stored at the list item level to make it possible to render each
// node without any context from its parents.
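// (The HTML renderer relies on this: it checks `list_item.tight` to decide
// whether to unwrap an item's paragraph children, without consulting the
// containing list.)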
const list_items = p.scratch_extra.items[b.extra_start..];
const node_datas = p.nodes.items(.data);
if (!b.data.list.tight) {
for (list_items) |list_item| {
node_datas[list_item].list_item.tight = false;
}
}
 
const children = try p.addExtraChildren(@ptrCast(list_items));
break :list try p.addNode(.{
.tag = .list,
.data = .{ .list = .{
.start = switch (b.data.list.marker) {
.number_dot, .number_paren => @enumFromInt(b.data.list.start),
.@"-", .@"*", .@"+" => .unordered,
},
.children = children,
} },
});
},
.list_item => list_item: {
assert(b.string_start == p.scratch_string.items.len);
const children = try p.addExtraChildren(@ptrCast(p.scratch_extra.items[b.extra_start..]));
break :list_item try p.addNode(.{
.tag = .list_item,
.data = .{ .list_item = .{
.tight = true,
.children = children,
} },
});
},
.table => table: {
assert(b.string_start == p.scratch_string.items.len);
const children = try p.addExtraChildren(@ptrCast(p.scratch_extra.items[b.extra_start..]));
break :table try p.addNode(.{
.tag = .table,
.data = .{ .container = .{
.children = children,
} },
});
},
.table_row => table_row: {
assert(b.string_start == p.scratch_string.items.len);
const children = try p.addExtraChildren(@ptrCast(p.scratch_extra.items[b.extra_start..]));
break :table_row try p.addNode(.{
.tag = .table_row,
.data = .{ .container = .{
.children = children,
} },
});
},
.heading => heading: {
const children = try p.parseInlines(p.scratch_string.items[b.string_start..]);
break :heading try p.addNode(.{
.tag = .heading,
.data = .{ .heading = .{
.level = b.data.heading.level,
.children = children,
} },
});
},
.code_block => code_block: {
const content = try p.addString(p.scratch_string.items[b.string_start..]);
break :code_block try p.addNode(.{
.tag = .code_block,
.data = .{ .code_block = .{
.tag = b.data.code_block.tag,
.content = content,
} },
});
},
.blockquote => blockquote: {
assert(b.string_start == p.scratch_string.items.len);
const children = try p.addExtraChildren(@ptrCast(p.scratch_extra.items[b.extra_start..]));
break :blockquote try p.addNode(.{
.tag = .blockquote,
.data = .{ .container = .{
.children = children,
} },
});
},
.paragraph => paragraph: {
const children = try p.parseInlines(p.scratch_string.items[b.string_start..]);
break :paragraph try p.addNode(.{
.tag = .paragraph,
.data = .{ .container = .{
.children = children,
} },
});
},
.thematic_break => try p.addNode(.{
.tag = .thematic_break,
.data = .{ .none = {} },
}),
};
p.scratch_string.items.len = b.string_start;
p.scratch_extra.items.len = b.extra_start;
try p.addScratchExtraNode(node);
}
 
const InlineParser = struct {
parent: *Parser,
content: []const u8,
pos: usize = 0,
pending_inlines: std.ArrayListUnmanaged(PendingInline) = .{},
completed_inlines: std.ArrayListUnmanaged(CompletedInline) = .{},
 
const PendingInline = struct {
tag: Tag,
data: Data,
start: usize,
 
const Tag = enum {
/// Data is `emphasis`.
emphasis,
/// Data is `none`.
link,
/// Data is `none`.
image,
};
 
const Data = union {
none: void,
emphasis: struct {
underscore: bool,
run_len: usize,
},
};
};
 
const CompletedInline = struct {
node: Node.Index,
start: usize,
len: usize,
};
 
fn deinit(ip: *InlineParser) void {
ip.pending_inlines.deinit(ip.parent.allocator);
ip.completed_inlines.deinit(ip.parent.allocator);
}
 
/// Parses all of `ip.content`, returning the children of the node
/// containing the inline content.
fn parse(ip: *InlineParser) Allocator.Error!ExtraIndex {
while (ip.pos < ip.content.len) : (ip.pos += 1) {
switch (ip.content[ip.pos]) {
'\\' => ip.pos += 1,
'[' => try ip.pending_inlines.append(ip.parent.allocator, .{
.tag = .link,
.data = .{ .none = {} },
.start = ip.pos,
}),
'!' => if (ip.pos + 1 < ip.content.len and ip.content[ip.pos + 1] == '[') {
try ip.pending_inlines.append(ip.parent.allocator, .{
.tag = .image,
.data = .{ .none = {} },
.start = ip.pos,
});
ip.pos += 1;
},
']' => try ip.parseLink(),
'*', '_' => try ip.parseEmphasis(),
'`' => try ip.parseCodeSpan(),
else => {},
}
}
 
const children = try ip.encodeChildren(0, ip.content.len);
// There may be pending inlines after parsing (e.g. unclosed emphasis
// runs), but there must not be any completed inlines, since those
// should all be part of `children`.
assert(ip.completed_inlines.items.len == 0);
return children;
}
 
/// Parses a link, starting at the `]` at the end of the link text. `ip.pos`
/// is left at the closing `)` of the link target or at the closing `]` if
/// there is none.
fn parseLink(ip: *InlineParser) !void {
var i = ip.pending_inlines.items.len;
while (i > 0) {
i -= 1;
if (ip.pending_inlines.items[i].tag == .link or
ip.pending_inlines.items[i].tag == .image) break;
} else return;
const opener = ip.pending_inlines.items[i];
ip.pending_inlines.shrinkRetainingCapacity(i);
const text_start = switch (opener.tag) {
.link => opener.start + 1,
.image => opener.start + 2,
else => unreachable,
};
 
if (ip.pos + 1 >= ip.content.len or ip.content[ip.pos + 1] != '(') return;
const text_end = ip.pos;
 
const target_start = text_end + 2;
var target_end = target_start;
var nesting_level: usize = 1;
while (target_end < ip.content.len) : (target_end += 1) {
switch (ip.content[target_end]) {
'\\' => target_end += 1,
'(' => nesting_level += 1,
')' => {
if (nesting_level == 1) break;
nesting_level -= 1;
},
else => {},
}
} else return;
ip.pos = target_end;
 
const children = try ip.encodeChildren(text_start, text_end);
const target = try ip.encodeLinkTarget(target_start, target_end);
 
const link = try ip.parent.addNode(.{
.tag = switch (opener.tag) {
.link => .link,
.image => .image,
else => unreachable,
},
.data = .{ .link = .{
.target = target,
.children = children,
} },
});
try ip.completed_inlines.append(ip.parent.allocator, .{
.node = link,
.start = opener.start,
.len = ip.pos - opener.start + 1,
});
}
 
fn encodeLinkTarget(ip: *InlineParser, start: usize, end: usize) !StringIndex {
// For efficiency, we can encode directly into string_bytes rather than
// creating a temporary string and then encoding it, since this process
// is entirely linear.
const string_top = ip.parent.string_bytes.items.len;
errdefer ip.parent.string_bytes.shrinkRetainingCapacity(string_top);
 
var text_iter: TextIterator = .{ .content = ip.content[start..end] };
while (text_iter.next()) |content| {
switch (content) {
.char => |c| try ip.parent.string_bytes.append(ip.parent.allocator, c),
.text => |s| try ip.parent.string_bytes.appendSlice(ip.parent.allocator, s),
.line_break => try ip.parent.string_bytes.appendSlice(ip.parent.allocator, "\\\n"),
}
}
try ip.parent.string_bytes.append(ip.parent.allocator, 0);
return @enumFromInt(string_top);
}
 
/// Parses emphasis, starting at the beginning of a run of `*` or `_`
/// characters. `ip.pos` is left at the last character in the run after
/// parsing.
fn parseEmphasis(ip: *InlineParser) !void {
const char = ip.content[ip.pos];
var start = ip.pos;
while (ip.pos + 1 < ip.content.len and ip.content[ip.pos + 1] == char) {
ip.pos += 1;
}
var len = ip.pos - start + 1;
const underscore = char == '_';
const space_before = start == 0 or isWhitespace(ip.content[start - 1]);
const space_after = start + len == ip.content.len or isWhitespace(ip.content[start + len]);
const punct_before = start == 0 or isPunctuation(ip.content[start - 1]);
const punct_after = start + len == ip.content.len or isPunctuation(ip.content[start + len]);
// The rules for when emphasis may be closed or opened are stricter for
// underscores to avoid inappropriately interpreting snake_case words as
// containing emphasis markers.
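// For example, the underscores in snake_case neither open nor close emphasis,
// while in _word_ the first underscore may open and the last may close it.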
const can_open = if (underscore)
!space_after and (space_before or punct_before)
else
!space_after;
const can_close = if (underscore)
!space_before and (space_after or punct_after)
else
!space_before;
 
if (can_close and ip.pending_inlines.items.len > 0) {
var i = ip.pending_inlines.items.len;
while (i > 0 and len > 0) {
i -= 1;
const opener = &ip.pending_inlines.items[i];
if (opener.tag != .emphasis or
opener.data.emphasis.underscore != underscore) continue;
 
const close_len = @min(opener.data.emphasis.run_len, len);
const opener_end = opener.start + opener.data.emphasis.run_len;
 
const emphasis = try ip.encodeEmphasis(opener_end, start, close_len);
const emphasis_start = opener_end - close_len;
const emphasis_len = start - emphasis_start + close_len;
try ip.completed_inlines.append(ip.parent.allocator, .{
.node = emphasis,
.start = emphasis_start,
.len = emphasis_len,
});
 
// There may still be other openers further down in the
// stack to close, or part of this run might serve as an
// opener itself.
start += close_len;
len -= close_len;
 
// Remove any pending inlines above this on the stack, since
// closing this emphasis will prevent them from being closed.
// Additionally, if this opener is completely consumed by
// being closed, it can be removed.
opener.data.emphasis.run_len -= close_len;
if (opener.data.emphasis.run_len == 0) {
ip.pending_inlines.shrinkRetainingCapacity(i);
} else {
ip.pending_inlines.shrinkRetainingCapacity(i + 1);
}
}
}
 
if (can_open and len > 0) {
try ip.pending_inlines.append(ip.parent.allocator, .{
.tag = .emphasis,
.data = .{ .emphasis = .{
.underscore = underscore,
.run_len = len,
} },
.start = start,
});
}
}
 
/// Encodes emphasis specified by a run of `run_len` emphasis characters,
/// with `start..end` being the range of content contained within the
/// emphasis.
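/// Roughly: a run of 1 produces an `emphasis` node, 2 a `strong` node, 3 an
/// `emphasis` wrapping a `strong`, and each additional group of 3 adds another
/// nested `emphasis` and `strong` pair.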
fn encodeEmphasis(ip: *InlineParser, start: usize, end: usize, run_len: usize) !Node.Index {
const children = try ip.encodeChildren(start, end);
var inner = switch (run_len % 3) {
1 => try ip.parent.addNode(.{
.tag = .emphasis,
.data = .{ .container = .{
.children = children,
} },
}),
2 => try ip.parent.addNode(.{
.tag = .strong,
.data = .{ .container = .{
.children = children,
} },
}),
0 => strong_emphasis: {
const strong = try ip.parent.addNode(.{
.tag = .strong,
.data = .{ .container = .{
.children = children,
} },
});
break :strong_emphasis try ip.parent.addNode(.{
.tag = .emphasis,
.data = .{ .container = .{
.children = try ip.parent.addExtraChildren(&.{strong}),
} },
});
},
else => unreachable,
};
 
var run_left = run_len;
while (run_left > 3) : (run_left -= 3) {
const strong = try ip.parent.addNode(.{
.tag = .strong,
.data = .{ .container = .{
.children = try ip.parent.addExtraChildren(&.{inner}),
} },
});
inner = try ip.parent.addNode(.{
.tag = .emphasis,
.data = .{ .container = .{
.children = try ip.parent.addExtraChildren(&.{strong}),
} },
});
}
 
return inner;
}
 
/// Parses a code span, starting at the beginning of the opening backtick
/// run. `ip.pos` is left at the last character in the closing run after
/// parsing.
fn parseCodeSpan(ip: *InlineParser) !void {
const opener_start = ip.pos;
ip.pos = mem.indexOfNonePos(u8, ip.content, ip.pos, "`") orelse ip.content.len;
const opener_len = ip.pos - opener_start;
 
const start = ip.pos;
const end = while (mem.indexOfScalarPos(u8, ip.content, ip.pos, '`')) |closer_start| {
ip.pos = mem.indexOfNonePos(u8, ip.content, closer_start, "`") orelse ip.content.len;
const closer_len = ip.pos - closer_start;
 
if (closer_len == opener_len) break closer_start;
} else unterminated: {
ip.pos = ip.content.len;
break :unterminated ip.content.len;
};
 
var content = if (start < ip.content.len)
ip.content[start..end]
else
"";
// This single space removal rule allows code spans to be written which
// start or end with backticks.
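// For example, `` ` `` is parsed with content " ` ", which this rule trims to
// a single backtick.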
if (mem.startsWith(u8, content, " `")) content = content[1..];
if (mem.endsWith(u8, content, "` ")) content = content[0 .. content.len - 1];
 
const text = try ip.parent.addNode(.{
.tag = .code_span,
.data = .{ .text = .{
.content = try ip.parent.addString(content),
} },
});
try ip.completed_inlines.append(ip.parent.allocator, .{
.node = text,
.start = opener_start,
.len = ip.pos - opener_start,
});
// Ensure ip.pos is pointing at the last character of the
// closer, not after it.
ip.pos -= 1;
}
 
/// Encodes children parsed in the content range `start..end`. The children
/// will be text nodes and any completed inlines within the range.
fn encodeChildren(ip: *InlineParser, start: usize, end: usize) !ExtraIndex {
const scratch_extra_top = ip.parent.scratch_extra.items.len;
defer ip.parent.scratch_extra.shrinkRetainingCapacity(scratch_extra_top);
 
var child_index = ip.completed_inlines.items.len;
while (child_index > 0 and ip.completed_inlines.items[child_index - 1].start >= start) {
child_index -= 1;
}
const start_child_index = child_index;
 
var pos = start;
while (child_index < ip.completed_inlines.items.len) : (child_index += 1) {
const child_inline = ip.completed_inlines.items[child_index];
// Completed inlines must be strictly nested within the encodable
// content.
assert(child_inline.start >= pos and child_inline.start + child_inline.len <= end);
 
if (child_inline.start > pos) {
try ip.encodeTextNode(pos, child_inline.start);
}
try ip.parent.addScratchExtraNode(child_inline.node);
 
pos = child_inline.start + child_inline.len;
}
ip.completed_inlines.shrinkRetainingCapacity(start_child_index);
 
if (pos < end) {
try ip.encodeTextNode(pos, end);
}
 
const children = ip.parent.scratch_extra.items[scratch_extra_top..];
return try ip.parent.addExtraChildren(@ptrCast(children));
}
 
/// Encodes textual content `ip.content[start..end]` to `scratch_extra`. The
/// encoded content may include both `text` and `line_break` nodes.
fn encodeTextNode(ip: *InlineParser, start: usize, end: usize) !void {
// For efficiency, we can encode directly into string_bytes rather than
// creating a temporary string and then encoding it, since this process
// is entirely linear.
const string_top = ip.parent.string_bytes.items.len;
errdefer ip.parent.string_bytes.shrinkRetainingCapacity(string_top);
 
var string_start = string_top;
var text_iter: TextIterator = .{ .content = ip.content[start..end] };
while (text_iter.next()) |content| {
switch (content) {
.char => |c| try ip.parent.string_bytes.append(ip.parent.allocator, c),
.text => |s| try ip.parent.string_bytes.appendSlice(ip.parent.allocator, s),
.line_break => {
if (ip.parent.string_bytes.items.len > string_start) {
try ip.parent.string_bytes.append(ip.parent.allocator, 0);
try ip.parent.addScratchExtraNode(try ip.parent.addNode(.{
.tag = .text,
.data = .{ .text = .{
.content = @enumFromInt(string_start),
} },
}));
string_start = ip.parent.string_bytes.items.len;
}
try ip.parent.addScratchExtraNode(try ip.parent.addNode(.{
.tag = .line_break,
.data = .{ .none = {} },
}));
},
}
}
if (ip.parent.string_bytes.items.len > string_start) {
try ip.parent.string_bytes.append(ip.parent.allocator, 0);
try ip.parent.addScratchExtraNode(try ip.parent.addNode(.{
.tag = .text,
.data = .{ .text = .{
.content = @enumFromInt(string_start),
} },
}));
}
}
 
/// An iterator over parts of textual content, handling unescaping of
/// escaped characters and line breaks.
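/// For example, `\*` yields the single character `*`, and a backslash
/// immediately before a newline yields a `line_break`.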
const TextIterator = struct {
content: []const u8,
pos: usize = 0,
 
const Content = union(enum) {
char: u8,
text: []const u8,
line_break,
};
 
const replacement = "\u{FFFD}";
 
fn next(iter: *TextIterator) ?Content {
if (iter.pos >= iter.content.len) return null;
if (iter.content[iter.pos] == '\\') {
iter.pos += 1;
if (iter.pos == iter.content.len) {
return .{ .char = '\\' };
} else if (iter.content[iter.pos] == '\n') {
iter.pos += 1;
return .line_break;
} else if (isPunctuation(iter.content[iter.pos])) {
const c = iter.content[iter.pos];
iter.pos += 1;
return .{ .char = c };
} else {
return .{ .char = '\\' };
}
}
return iter.nextCodepoint();
}
 
fn nextCodepoint(iter: *TextIterator) ?Content {
switch (iter.content[iter.pos]) {
0 => {
iter.pos += 1;
return .{ .text = replacement };
},
1...127 => |c| {
iter.pos += 1;
return .{ .char = c };
},
else => |b| {
const cp_len = std.unicode.utf8ByteSequenceLength(b) catch {
iter.pos += 1;
return .{ .text = replacement };
};
const is_valid = iter.pos + cp_len <= iter.content.len and
std.unicode.utf8ValidateSlice(iter.content[iter.pos..][0..cp_len]);
const cp_encoded = if (is_valid)
iter.content[iter.pos..][0..cp_len]
else
replacement;
iter.pos += cp_len;
return .{ .text = cp_encoded };
},
}
}
};
};
 
fn parseInlines(p: *Parser, content: []const u8) !ExtraIndex {
var ip: InlineParser = .{
.parent = p,
.content = mem.trim(u8, content, " \t\n"),
};
defer ip.deinit();
return try ip.parse();
}
 
pub fn extraData(p: Parser, comptime T: type, index: ExtraIndex) ExtraData(T) {
const fields = @typeInfo(T).Struct.fields;
var i: usize = @intFromEnum(index);
var result: T = undefined;
inline for (fields) |field| {
@field(result, field.name) = switch (field.type) {
u32 => p.extra.items[i],
else => @compileError("bad field type"),
};
i += 1;
}
return .{ .data = result, .end = i };
}
 
pub fn extraChildren(p: Parser, index: ExtraIndex) []const Node.Index {
const children = p.extraData(Node.Children, index);
return @ptrCast(p.extra.items[children.end..][0..children.data.len]);
}
 
fn addNode(p: *Parser, node: Node) !Node.Index {
const index: Node.Index = @enumFromInt(@as(u32, @intCast(p.nodes.len)));
try p.nodes.append(p.allocator, node);
return index;
}
 
fn addString(p: *Parser, s: []const u8) !StringIndex {
if (s.len == 0) return .empty;
 
const index: StringIndex = @enumFromInt(@as(u32, @intCast(p.string_bytes.items.len)));
try p.string_bytes.ensureUnusedCapacity(p.allocator, s.len + 1);
p.string_bytes.appendSliceAssumeCapacity(s);
p.string_bytes.appendAssumeCapacity(0);
return index;
}
 
fn addExtraChildren(p: *Parser, nodes: []const Node.Index) !ExtraIndex {
const index: ExtraIndex = @enumFromInt(@as(u32, @intCast(p.extra.items.len)));
try p.extra.ensureUnusedCapacity(p.allocator, nodes.len + 1);
p.extra.appendAssumeCapacity(@intCast(nodes.len));
p.extra.appendSliceAssumeCapacity(@ptrCast(nodes));
return index;
}
 
fn addScratchExtraNode(p: *Parser, node: Node.Index) !void {
try p.scratch_extra.append(p.allocator, @intFromEnum(node));
}
 
fn addScratchStringLine(p: *Parser, line: []const u8) !void {
try p.scratch_string.ensureUnusedCapacity(p.allocator, line.len + 1);
p.scratch_string.appendSliceAssumeCapacity(line);
p.scratch_string.appendAssumeCapacity('\n');
}
 
fn isBlank(line: []const u8) bool {
return mem.indexOfNone(u8, line, " \t") == null;
}
 
fn isPunctuation(c: u8) bool {
return switch (c) {
'!',
'"',
'#',
'$',
'%',
'&',
'\'',
'(',
')',
'*',
'+',
',',
'-',
'.',
'/',
':',
';',
'<',
'=',
'>',
'?',
'@',
'[',
'\\',
']',
'^',
'_',
'`',
'{',
'|',
'}',
'~',
=> true,
else => false,
};
}
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,249 @@
const std = @import("std");
const Document = @import("Document.zig");
const Node = Document.Node;
 
/// A Markdown document renderer.
///
/// Each concrete `Renderer` type has a `renderDefault` function, with the
/// intention that custom `renderFn` implementations can call `renderDefault`
/// for node types for which they require no special rendering.
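///
/// For example, a custom `renderFn` that overrides only code blocks might be
/// shaped roughly like this (an illustrative sketch, not a prescribed API):
///
///     fn renderCustom(r: R, doc: Document, node: Node.Index, writer: W) W.Error!void {
///         switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
///             .code_block => try writer.writeAll("<pre>...</pre>\n"),
///             else => try R.renderDefault(r, doc, node, writer),
///         }
///     }
///
/// where `R` is `Renderer(W, Context)` for some writer type `W`.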
pub fn Renderer(comptime Writer: type, comptime Context: type) type {
return struct {
renderFn: *const fn (
r: Self,
doc: Document,
node: Node.Index,
writer: Writer,
) Writer.Error!void = renderDefault,
context: Context,
 
const Self = @This();
 
pub fn render(r: Self, doc: Document, writer: Writer) Writer.Error!void {
try r.renderFn(r, doc, .root, writer);
}
 
pub fn renderDefault(
r: Self,
doc: Document,
node: Node.Index,
writer: Writer,
) Writer.Error!void {
const data = doc.nodes.items(.data)[@intFromEnum(node)];
switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
.root => {
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
},
.list => {
if (data.list.start.asNumber()) |start| {
if (start == 1) {
try writer.writeAll("<ol>\n");
} else {
try writer.print("<ol start=\"{}\">\n", .{start});
}
} else {
try writer.writeAll("<ul>\n");
}
for (doc.extraChildren(data.list.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
if (data.list.start.asNumber() != null) {
try writer.writeAll("</ol>\n");
} else {
try writer.writeAll("</ul>\n");
}
},
.list_item => {
try writer.writeAll("<li>");
for (doc.extraChildren(data.list_item.children)) |child| {
if (data.list_item.tight and doc.nodes.items(.tag)[@intFromEnum(child)] == .paragraph) {
const para_data = doc.nodes.items(.data)[@intFromEnum(child)];
for (doc.extraChildren(para_data.container.children)) |para_child| {
try r.renderFn(r, doc, para_child, writer);
}
} else {
try r.renderFn(r, doc, child, writer);
}
}
try writer.writeAll("</li>\n");
},
.table => {
try writer.writeAll("<table>\n");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</table>\n");
},
.table_row => {
try writer.writeAll("<tr>\n");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</tr>\n");
},
.table_cell => {
if (data.table_cell.info.header) {
try writer.writeAll("<th");
} else {
try writer.writeAll("<td");
}
switch (data.table_cell.info.alignment) {
.unset => try writer.writeAll(">"),
else => |a| try writer.print(" style=\"text-align: {s}\">", .{@tagName(a)}),
}
 
for (doc.extraChildren(data.table_cell.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
 
if (data.table_cell.info.header) {
try writer.writeAll("</th>\n");
} else {
try writer.writeAll("</td>\n");
}
},
.heading => {
try writer.print("<h{}>", .{data.heading.level});
for (doc.extraChildren(data.heading.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.print("</h{}>\n", .{data.heading.level});
},
.code_block => {
const content = doc.string(data.code_block.content);
try writer.print("<pre><code>{}</code></pre>\n", .{fmtHtml(content)});
},
.blockquote => {
try writer.writeAll("<blockquote>\n");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</blockquote>\n");
},
.paragraph => {
try writer.writeAll("<p>");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</p>\n");
},
.thematic_break => {
try writer.writeAll("<hr />\n");
},
.link => {
const target = doc.string(data.link.target);
try writer.print("<a href=\"{}\">", .{fmtHtml(target)});
for (doc.extraChildren(data.link.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</a>");
},
.image => {
const target = doc.string(data.link.target);
try writer.print("<img src=\"{}\" alt=\"", .{fmtHtml(target)});
for (doc.extraChildren(data.link.children)) |child| {
try renderInlineNodeText(doc, child, writer);
}
try writer.writeAll("\" />");
},
.strong => {
try writer.writeAll("<strong>");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</strong>");
},
.emphasis => {
try writer.writeAll("<em>");
for (doc.extraChildren(data.container.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.writeAll("</em>");
},
.code_span => {
const content = doc.string(data.text.content);
try writer.print("<code>{}</code>", .{fmtHtml(content)});
},
.text => {
const content = doc.string(data.text.content);
try writer.print("{}", .{fmtHtml(content)});
},
.line_break => {
try writer.writeAll("<br />\n");
},
}
}
};
}
 
/// Renders an inline node as plain text. Asserts that the node is an inline and
/// has no non-inline children.
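/// This is used, for example, to produce the `alt` text of images.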
pub fn renderInlineNodeText(
doc: Document,
node: Node.Index,
writer: anytype,
) @TypeOf(writer).Error!void {
const data = doc.nodes.items(.data)[@intFromEnum(node)];
switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
.root,
.list,
.list_item,
.table,
.table_row,
.table_cell,
.heading,
.code_block,
.blockquote,
.paragraph,
.thematic_break,
=> unreachable, // Blocks
 
.link, .image => {
for (doc.extraChildren(data.link.children)) |child| {
try renderInlineNodeText(doc, child, writer);
}
},
.strong => {
for (doc.extraChildren(data.container.children)) |child| {
try renderInlineNodeText(doc, child, writer);
}
},
.emphasis => {
for (doc.extraChildren(data.container.children)) |child| {
try renderInlineNodeText(doc, child, writer);
}
},
.code_span, .text => {
const content = doc.string(data.text.content);
try writer.print("{}", .{fmtHtml(content)});
},
.line_break => {
try writer.writeAll("\n");
},
}
}
 
pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter(formatHtml) {
return .{ .data = bytes };
}
 
fn formatHtml(
bytes: []const u8,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = fmt;
_ = options;
for (bytes) |b| {
switch (b) {
'<' => try writer.writeAll("&lt;"),
'>' => try writer.writeAll("&gt;"),
'&' => try writer.writeAll("&amp;"),
'"' => try writer.writeAll("&quot;"),
else => try writer.writeByte(b),
}
}
}
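
// A small sanity check of the escaping performed by formatHtml; this test is a
// sketch, not part of the original file.
test fmtHtml {
    try std.testing.expectFmt("a &lt;b&gt; &amp; &quot;c&quot;", "{}", .{fmtHtml("a <b> & \"c\"")});
}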
 
ev/null added: 7501, removed: 25316, total 0
@@ -1,2147 +0,0 @@
'use strict';
 
const Tag = {
whitespace: "whitespace",
invalid: "invalid",
identifier: "identifier",
string_literal: "string_literal",
multiline_string_literal_line: "multiline_string_literal_line",
char_literal: "char_literal",
eof: "eof",
builtin: "builtin",
number_literal: "number_literal",
doc_comment: "doc_comment",
container_doc_comment: "container_doc_comment",
line_comment: "line_comment",
invalid_periodasterisks: "invalid_periodasterisks",
bang: "bang",
pipe: "pipe",
pipe_pipe: "pipe_pipe",
pipe_equal: "pipe_equal",
equal: "equal",
equal_equal: "equal_equal",
equal_angle_bracket_right: "equal_angle_bracket_right",
bang_equal: "bang_equal",
l_paren: "l_paren",
r_paren: "r_paren",
semicolon: "semicolon",
percent: "percent",
percent_equal: "percent_equal",
l_brace: "l_brace",
r_brace: "r_brace",
l_bracket: "l_bracket",
r_bracket: "r_bracket",
period: "period",
period_asterisk: "period_asterisk",
ellipsis2: "ellipsis2",
ellipsis3: "ellipsis3",
caret: "caret",
caret_equal: "caret_equal",
plus: "plus",
plus_plus: "plus_plus",
plus_equal: "plus_equal",
plus_percent: "plus_percent",
plus_percent_equal: "plus_percent_equal",
plus_pipe: "plus_pipe",
plus_pipe_equal: "plus_pipe_equal",
minus: "minus",
minus_equal: "minus_equal",
minus_percent: "minus_percent",
minus_percent_equal: "minus_percent_equal",
minus_pipe: "minus_pipe",
minus_pipe_equal: "minus_pipe_equal",
asterisk: "asterisk",
asterisk_equal: "asterisk_equal",
asterisk_asterisk: "asterisk_asterisk",
asterisk_percent: "asterisk_percent",
asterisk_percent_equal: "asterisk_percent_equal",
asterisk_pipe: "asterisk_pipe",
asterisk_pipe_equal: "asterisk_pipe_equal",
arrow: "arrow",
colon: "colon",
slash: "slash",
slash_equal: "slash_equal",
comma: "comma",
ampersand: "ampersand",
ampersand_equal: "ampersand_equal",
question_mark: "question_mark",
angle_bracket_left: "angle_bracket_left",
angle_bracket_left_equal: "angle_bracket_left_equal",
angle_bracket_angle_bracket_left: "angle_bracket_angle_bracket_left",
angle_bracket_angle_bracket_left_equal: "angle_bracket_angle_bracket_left_equal",
angle_bracket_angle_bracket_left_pipe: "angle_bracket_angle_bracket_left_pipe",
angle_bracket_angle_bracket_left_pipe_equal: "angle_bracket_angle_bracket_left_pipe_equal",
angle_bracket_right: "angle_bracket_right",
angle_bracket_right_equal: "angle_bracket_right_equal",
angle_bracket_angle_bracket_right: "angle_bracket_angle_bracket_right",
angle_bracket_angle_bracket_right_equal: "angle_bracket_angle_bracket_right_equal",
tilde: "tilde",
keyword_addrspace: "keyword_addrspace",
keyword_align: "keyword_align",
keyword_allowzero: "keyword_allowzero",
keyword_and: "keyword_and",
keyword_anyframe: "keyword_anyframe",
keyword_anytype: "keyword_anytype",
keyword_asm: "keyword_asm",
keyword_async: "keyword_async",
keyword_await: "keyword_await",
keyword_break: "keyword_break",
keyword_callconv: "keyword_callconv",
keyword_catch: "keyword_catch",
keyword_comptime: "keyword_comptime",
keyword_const: "keyword_const",
keyword_continue: "keyword_continue",
keyword_defer: "keyword_defer",
keyword_else: "keyword_else",
keyword_enum: "keyword_enum",
keyword_errdefer: "keyword_errdefer",
keyword_error: "keyword_error",
keyword_export: "keyword_export",
keyword_extern: "keyword_extern",
keyword_fn: "keyword_fn",
keyword_for: "keyword_for",
keyword_if: "keyword_if",
keyword_inline: "keyword_inline",
keyword_noalias: "keyword_noalias",
keyword_noinline: "keyword_noinline",
keyword_nosuspend: "keyword_nosuspend",
keyword_opaque: "keyword_opaque",
keyword_or: "keyword_or",
keyword_orelse: "keyword_orelse",
keyword_packed: "keyword_packed",
keyword_pub: "keyword_pub",
keyword_resume: "keyword_resume",
keyword_return: "keyword_return",
keyword_linksection: "keyword_linksection",
keyword_struct: "keyword_struct",
keyword_suspend: "keyword_suspend",
keyword_switch: "keyword_switch",
keyword_test: "keyword_test",
keyword_threadlocal: "keyword_threadlocal",
keyword_try: "keyword_try",
keyword_union: "keyword_union",
keyword_unreachable: "keyword_unreachable",
keyword_usingnamespace: "keyword_usingnamespace",
keyword_var: "keyword_var",
keyword_volatile: "keyword_volatile",
keyword_while: "keyword_while"
}
 
const Tok = {
const: { src: "const", tag: Tag.keyword_const },
var: { src: "var", tag: Tag.keyword_var },
colon: { src: ":", tag: Tag.colon },
eql: { src: "=", tag: Tag.equal },
space: { src: " ", tag: Tag.whitespace },
tab: { src: "\t", tag: Tag.whitespace },
enter: { src: "\n", tag: Tag.whitespace },
semi: { src: ";", tag: Tag.semicolon },
l_bracket: { src: "[", tag: Tag.l_bracket },
r_bracket: { src: "]", tag: Tag.r_bracket },
l_brace: { src: "{", tag: Tag.l_brace },
r_brace: { src: "}", tag: Tag.r_brace },
l_paren: { src: "(", tag: Tag.l_paren },
r_paren: { src: ")", tag: Tag.r_paren },
period: { src: ".", tag: Tag.period },
comma: { src: ",", tag: Tag.comma },
question_mark: { src: "?", tag: Tag.question_mark },
asterisk: { src: "*", tag: Tag.asterisk },
identifier: (name) => { return { src: name, tag: Tag.identifier } },
};
 
 
const State = {
start: 0,
identifier: 1,
builtin: 2,
string_literal: 3,
string_literal_backslash: 4,
multiline_string_literal_line: 5,
char_literal: 6,
char_literal_backslash: 7,
char_literal_hex_escape: 8,
char_literal_unicode_escape_saw_u: 9,
char_literal_unicode_escape: 10,
char_literal_unicode_invalid: 11,
char_literal_unicode: 12,
char_literal_end: 13,
backslash: 14,
equal: 15,
bang: 16,
pipe: 17,
minus: 18,
minus_percent: 19,
minus_pipe: 20,
asterisk: 21,
asterisk_percent: 22,
asterisk_pipe: 23,
slash: 24,
line_comment_start: 25,
line_comment: 26,
doc_comment_start: 27,
doc_comment: 28,
int: 29,
int_exponent: 30,
int_period: 31,
float: 32,
float_exponent: 33,
ampersand: 34,
caret: 35,
percent: 36,
plus: 37,
plus_percent: 38,
plus_pipe: 39,
angle_bracket_left: 40,
angle_bracket_angle_bracket_left: 41,
angle_bracket_angle_bracket_left_pipe: 42,
angle_bracket_right: 43,
angle_bracket_angle_bracket_right: 44,
period: 45,
period_2: 46,
period_asterisk: 47,
saw_at_sign: 48,
whitespace: 49,
}
 
const keywords = {
"addrspace": Tag.keyword_addrspace,
"align": Tag.keyword_align,
"allowzero": Tag.keyword_allowzero,
"and": Tag.keyword_and,
"anyframe": Tag.keyword_anyframe,
"anytype": Tag.keyword_anytype,
"asm": Tag.keyword_asm,
"async": Tag.keyword_async,
"await": Tag.keyword_await,
"break": Tag.keyword_break,
"callconv": Tag.keyword_callconv,
"catch": Tag.keyword_catch,
"comptime": Tag.keyword_comptime,
"const": Tag.keyword_const,
"continue": Tag.keyword_continue,
"defer": Tag.keyword_defer,
"else": Tag.keyword_else,
"enum": Tag.keyword_enum,
"errdefer": Tag.keyword_errdefer,
"error": Tag.keyword_error,
"export": Tag.keyword_export,
"extern": Tag.keyword_extern,
"fn": Tag.keyword_fn,
"for": Tag.keyword_for,
"if": Tag.keyword_if,
"inline": Tag.keyword_inline,
"noalias": Tag.keyword_noalias,
"noinline": Tag.keyword_noinline,
"nosuspend": Tag.keyword_nosuspend,
"opaque": Tag.keyword_opaque,
"or": Tag.keyword_or,
"orelse": Tag.keyword_orelse,
"packed": Tag.keyword_packed,
"pub": Tag.keyword_pub,
"resume": Tag.keyword_resume,
"return": Tag.keyword_return,
"linksection": Tag.keyword_linksection,
"struct": Tag.keyword_struct,
"suspend": Tag.keyword_suspend,
"switch": Tag.keyword_switch,
"test": Tag.keyword_test,
"threadlocal": Tag.keyword_threadlocal,
"try": Tag.keyword_try,
"union": Tag.keyword_union,
"unreachable": Tag.keyword_unreachable,
"usingnamespace": Tag.keyword_usingnamespace,
"var": Tag.keyword_var,
"volatile": Tag.keyword_volatile,
"while": Tag.keyword_while,
};
 
function make_token(tag, start, end) {
return {
tag: tag,
loc: {
start: start,
end: end
}
}
 
}
 
function dump_tokens(tokens, raw_source) {
 
//TODO: this is not very fast
function find_tag_key(tag) {
for (const [key, value] of Object.entries(Tag)) {
if (value == tag) return key;
}
}
 
for (let i = 0; i < tokens.length; i++) {
const tok = tokens[i];
const z = raw_source.substring(tok.loc.start, tok.loc.end).toLowerCase();
console.log(`${find_tag_key(tok.tag)} "${tok.tag}" '${z}'`)
}
}
 
function* Tokenizer(raw_source) {
let tokenizer = new InnerTokenizer(raw_source);
while (true) {
let t = tokenizer.next();
if (t.tag == Tag.eof)
return;
t.src = raw_source.slice(t.loc.start, t.loc.end);
yield t;
}
 
}
function InnerTokenizer(raw_source) {
this.index = 0;
this.flag = false;
 
this.seen_escape_digits = undefined;
this.remaining_code_units = undefined;
 
this.next = () => {
let state = State.start;
 
var result = {
tag: -1,
loc: {
start: this.index,
end: undefined,
},
src: undefined,
};
 
//Having a while (true) loop seems like a bad idea: the loop should never
//take more iterations than twice the length of the source code.
const MAX_ITERATIONS = raw_source.length * 2;
let iterations = 0;
 
while (iterations <= MAX_ITERATIONS) {
 
if (this.flag) {
return make_token(Tag.eof, this.index - 2, this.index - 2);
}
iterations += 1; // guard against runaway loops
 
var c = raw_source[this.index];
 
if (c === undefined) {
c = ' '; // push the last token
this.flag = true;
}
 
switch (state) {
case State.start:
switch (c) {
case 0: {
if (this.index != raw_source.length) {
result.tag = Tag.invalid;
result.loc.start = this.index;
this.index += 1;
result.loc.end = this.index;
return result;
}
result.loc.end = this.index;
return result;
}
case ' ':
case '\n':
case '\t':
case '\r': {
state = State.whitespace;
result.tag = Tag.whitespace;
result.loc.start = this.index;
break;
}
case '"': {
state = State.string_literal;
result.tag = Tag.string_literal;
break;
}
case '\'': {
state = State.char_literal;
break;
}
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '_': {
state = State.identifier;
result.tag = Tag.identifier;
break;
}
case '@': {
state = State.saw_at_sign;
break;
}
case '=': {
state = State.equal;
break;
}
case '!': {
state = State.bang;
break;
}
case '|': {
state = State.pipe;
break;
}
case '(': {
result.tag = Tag.l_paren;
this.index += 1;
result.loc.end = this.index;
 
return result;
}
case ')': {
result.tag = Tag.r_paren;
this.index += 1; result.loc.end = this.index;
return result;
}
case '[': {
result.tag = Tag.l_bracket;
this.index += 1; result.loc.end = this.index;
return result;
}
case ']': {
result.tag = Tag.r_bracket;
this.index += 1; result.loc.end = this.index;
return result;
}
case ';': {
result.tag = Tag.semicolon;
this.index += 1; result.loc.end = this.index;
return result;
}
case ',': {
result.tag = Tag.comma;
this.index += 1; result.loc.end = this.index;
return result;
}
case '?': {
result.tag = Tag.question_mark;
this.index += 1; result.loc.end = this.index;
return result;
}
case ':': {
result.tag = Tag.colon;
this.index += 1; result.loc.end = this.index;
return result;
}
case '%': {
state = State.percent; break;
}
case '*': {
state = State.asterisk; break;
}
case '+': {
state = State.plus; break;
}
case '<': {
state = State.angle_bracket_left; break;
}
case '>': {
state = State.angle_bracket_right; break;
}
case '^': {
state = State.caret; break;
}
case '\\': {
state = State.backslash;
result.tag = Tag.multiline_string_literal_line; break;
}
case '{': {
result.tag = Tag.l_brace;
this.index += 1; result.loc.end = this.index;
return result;
}
case '}': {
result.tag = Tag.r_brace;
this.index += 1; result.loc.end = this.index;
return result;
}
case '~': {
result.tag = Tag.tilde;
this.index += 1; result.loc.end = this.index;
return result;
}
case '.': {
state = State.period; break;
}
case '-': {
state = State.minus; break;
}
case '/': {
state = State.slash; break;
}
case '&': {
state = State.ampersand; break;
}
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
{
state = State.int;
result.tag = Tag.number_literal; break;
}
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
this.index += 1;
return result;
}
}
break;
case State.saw_at_sign:
switch (c) {
case '"': {
result.tag = Tag.identifier;
state = State.string_literal; break;
}
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '_': {
state = State.builtin;
result.tag = Tag.builtin;
break;
}
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
}
break;
case State.ampersand:
switch (c) {
case '=': {
result.tag = Tag.ampersand_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.ampersand; result.loc.end = this.index;
return result;
}
}
break;
case State.asterisk: switch (c) {
case '=': {
result.tag = Tag.asterisk_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
case '*': {
result.tag = Tag.asterisk_asterisk;
this.index += 1; result.loc.end = this.index;
return result;
}
case '%': {
state = State.asterisk_percent; break;
}
case '|': {
state = State.asterisk_pipe; break;
}
default: {
result.tag = Tag.asterisk;
result.loc.end = this.index;
return result;
}
}
break;
case State.asterisk_percent:
switch (c) {
case '=': {
result.tag = Tag.asterisk_percent_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.asterisk_percent;
result.loc.end = this.index;
return result;
}
}
break;
case State.asterisk_pipe:
switch (c) {
case '=': {
result.tag = Tag.asterisk_pipe_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.asterisk_pipe; result.loc.end = this.index;
return result;
}
}
break;
case State.percent:
switch (c) {
case '=': {
result.tag = Tag.percent_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.percent; result.loc.end = this.index;
return result;
}
}
break;
case State.plus:
switch (c) {
case '=': {
result.tag = Tag.plus_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
case '+': {
result.tag = Tag.plus_plus;
this.index += 1; result.loc.end = this.index;
return result;
}
case '%': {
state = State.plus_percent; break;
}
case '|': {
state = State.plus_pipe; break;
}
default: {
result.tag = Tag.plus; result.loc.end = this.index;
return result;
}
}
break;
case State.plus_percent:
switch (c) {
case '=': {
result.tag = Tag.plus_percent_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.plus_percent; result.loc.end = this.index;
return result;
}
}
break;
case State.plus_pipe:
switch (c) {
case '=': {
result.tag = Tag.plus_pipe_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.plus_pipe; result.loc.end = this.index;
return result;
}
}
break;
case State.caret:
switch (c) {
case '=': {
result.tag = Tag.caret_equal;
this.index += 1; result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.caret; result.loc.end = this.index;
return result;
}
}
break;
case State.identifier:
switch (c) {
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '_':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9': break;
default: {
// if (Token.getKeyword(buffer[result.loc.start..this.index])) | tag | {
const z = raw_source.substring(result.loc.start, this.index);
if (z in keywords) {
result.tag = keywords[z];
}
result.loc.end = this.index;
return result;
}
 
 
}
break;
case State.builtin: switch (c) {
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '_':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9': break;
default: result.loc.end = this.index;
return result;
}
break;
case State.backslash:
switch (c) {
case '\\': {
state = State.multiline_string_literal_line;
break;
}
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
}
break;
case State.string_literal:
switch (c) {
case '\\': {
state = State.string_literal_backslash; break;
}
case '"': {
this.index += 1;
result.loc.end = this.index;
 
return result;
}
case 0: {
//TODO: PORT
// if (this.index == buffer.len) {
// result.tag = .invalid;
// break;
// } else {
// checkLiteralCharacter();
// }
result.loc.end = this.index;
return result;
}
case '\n': {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
//TODO: PORT
//default: checkLiteralCharacter(),
}
break;
case State.string_literal_backslash:
switch (c) {
case 0:
case '\n': {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
default: {
state = State.string_literal; break;
}
}
break;
case State.char_literal: switch (c) {
case 0: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
case '\\': {
state = State.char_literal_backslash;
break;
}
//TODO: PORT
// '\'', 0x80...0xbf, 0xf8...0xff => {
// result.tag = .invalid;
// break;
// },
// 0xc0...0xdf => { // 110xxxxx
// this.remaining_code_units = 1;
// state = .char_literal_unicode;
// },
// 0xe0...0xef => { // 1110xxxx
// this.remaining_code_units = 2;
// state = .char_literal_unicode;
// },
// 0xf0...0xf7 => { // 11110xxx
// this.remaining_code_units = 3;
// state = .char_literal_unicode;
// },
 
// case 0x80:
// case 0x81:
// case 0x82:
// case 0x83:
// case 0x84:
// case 0x85:
// case 0x86:
// case 0x87:
// case 0x88:
// case 0x89:
// case 0x8a:
// case 0x8b:
// case 0x8c:
// case 0x8d:
// case 0x8e:
// case 0x8f:
// case 0x90:
// case 0x91:
// case 0x92:
// case 0x93:
// case 0x94:
// case 0x95:
// case 0x96:
// case 0x97:
// case 0x98:
// case 0x99:
// case 0x9a:
// case 0x9b:
// case 0x9c:
// case 0x9d:
// case 0x9e:
// case 0x9f:
// case 0xa0:
// case 0xa1:
// case 0xa2:
// case 0xa3:
// case 0xa4:
// case 0xa5:
// case 0xa6:
// case 0xa7:
// case 0xa8:
// case 0xa9:
// case 0xaa:
// case 0xab:
// case 0xac:
// case 0xad:
// case 0xae:
// case 0xaf:
// case 0xb0:
// case 0xb1:
// case 0xb2:
// case 0xb3:
// case 0xb4:
// case 0xb5:
// case 0xb6:
// case 0xb7:
// case 0xb8:
// case 0xb9:
// case 0xba:
// case 0xbb:
// case 0xbc:
// case 0xbd:
// case 0xbe:
// case 0xbf:
// case 0xf8:
// case 0xf9:
// case 0xfa:
// case 0xfb:
// case 0xfc:
// case 0xfd:
// case 0xfe:
// case 0xff:
// result.tag = .invalid;
// break;
// case 0xc0:
// case 0xc1:
// case 0xc2:
// case 0xc3:
// case 0xc4:
// case 0xc5:
// case 0xc6:
// case 0xc7:
// case 0xc8:
// case 0xc9:
// case 0xca:
// case 0xcb:
// case 0xcc:
// case 0xcd:
// case 0xce:
// case 0xcf:
// case 0xd0:
// case 0xd1:
// case 0xd2:
// case 0xd3:
// case 0xd4:
// case 0xd5:
// case 0xd6:
// case 0xd7:
// case 0xd8:
// case 0xd9:
// case 0xda:
// case 0xdb:
// case 0xdc:
// case 0xdd:
// case 0xde:
// case 0xdf:
// this.remaining_code_units = 1;
// state = .char_literal_unicode;
// case 0xe0:
// case 0xe1:
// case 0xe2:
// case 0xe3:
// case 0xe4:
// case 0xe5:
// case 0xe6:
// case 0xe7:
// case 0xe8:
// case 0xe9:
// case 0xea:
// case 0xeb:
// case 0xec:
// case 0xed:
// case 0xee:
// case 0xef:
// this.remaining_code_units = 2;
// state = .char_literal_unicode;
// case 0xf0:
// case 0xf1:
// case 0xf2:
// case 0xf3:
// case 0xf4:
// case 0xf5:
// case 0xf6:
// case 0xf7:
// this.remaining_code_units = 3;
// state = .char_literal_unicode;
 
case '\n': {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
default: {
state = State.char_literal_end; break;
}
}
break;
case State.char_literal_backslash:
switch (c) {
case 0:
case '\n': {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
case 'x': {
state = State.char_literal_hex_escape;
this.seen_escape_digits = 0; break;
}
case 'u': {
state = State.char_literal_unicode_escape_saw_u; break;
}
default: {
state = State.char_literal_end; break;
}
}
break;
case State.char_literal_hex_escape:
switch (c) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F': {
this.seen_escape_digits += 1;
if (this.seen_escape_digits == 2) {
state = State.char_literal_end;
} break;
}
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
}
break;
case State.char_literal_unicode_escape_saw_u:
switch (c) {
case 0: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
case '{': {
state = State.char_literal_unicode_escape; break;
}
default: {
result.tag = Tag.invalid;
state = State.char_literal_unicode_invalid; break;
}
}
break;
case State.char_literal_unicode_escape:
switch (c) {
case 0: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F': break;
case '}': {
state = State.char_literal_end; // too many/few digits handled later
break;
}
default: {
result.tag = Tag.invalid;
state = State.char_literal_unicode_invalid; break;
}
}
break;
case State.char_literal_unicode_invalid:
switch (c) {
// Keep consuming characters until an obvious stopping point.
// This consolidates e.g. `u{0ab1Q}` into a single invalid token
// instead of creating the tokens `u{0ab1`, `Q`, `}`
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '}':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9': break;
default: break;
}
break;
case State.char_literal_end:
switch (c) {
case '\'': {
result.tag = Tag.char_literal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
}
break;
case State.char_literal_unicode:
switch (c) {
// 0x80...0xbf => {
// this.remaining_code_units -= 1;
// if (this.remaining_code_units == 0) {
// state = .char_literal_end;
// }
// },
default: {
result.tag = Tag.invalid;
result.loc.end = this.index;
return result;
}
}
break;
case State.multiline_string_literal_line:
switch (c) {
case 0:
result.loc.end = this.index;
return result;
case '\n': {
 
this.index += 1;
result.loc.end = this.index;
return result;
}
case '\t': break;
//TODO: PORT
//default: checkLiteralCharacter(),
 
}
break;
case State.bang:
switch (c) {
case '=': {
result.tag = Tag.bang_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.bang;
result.loc.end = this.index;
return result;
}
}
break;
case State.pipe:
switch (c) {
case '=': {
result.tag = Tag.pipe_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
case '|': {
result.tag = Tag.pipe_pipe;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.pipe;
result.loc.end = this.index;
return result;
}
}
break;
case State.equal: switch (c) {
case '=': {
result.tag = Tag.equal_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
case '>': {
result.tag = Tag.equal_angle_bracket_right;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.equal;
result.loc.end = this.index;
return result;
}
}
break;
case State.minus: switch (c) {
case '>': {
result.tag = Tag.arrow;
this.index += 1;
result.loc.end = this.index;
return result;
}
case '=': {
result.tag = Tag.minus_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
case '%': {
state = State.minus_percent; break;
}
case '|': {
state = State.minus_pipe; break;
}
default: {
result.tag = Tag.minus;
result.loc.end = this.index;
return result;
}
}
break;
case State.minus_percent:
switch (c) {
case '=': {
result.tag = Tag.minus_percent_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.minus_percent;
result.loc.end = this.index;
return result;
}
}
break;
case State.minus_pipe:
switch (c) {
case '=': {
result.tag = Tag.minus_pipe_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.minus_pipe;
result.loc.end = this.index;
return result;
}
}
break;
case State.angle_bracket_left:
switch (c) {
case '<': {
state = State.angle_bracket_angle_bracket_left; break;
}
case '=': {
result.tag = Tag.angle_bracket_left_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.angle_bracket_left;
result.loc.end = this.index;
return result;
}
}
break;
case State.angle_bracket_angle_bracket_left:
switch (c) {
case '=': {
result.tag = Tag.angle_bracket_angle_bracket_left_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
case '|': {
state = State.angle_bracket_angle_bracket_left_pipe; break;
}
default: {
result.tag = Tag.angle_bracket_angle_bracket_left;
result.loc.end = this.index;
return result;
}
}
break;
case State.angle_bracket_angle_bracket_left_pipe:
switch (c) {
case '=': {
result.tag = Tag.angle_bracket_angle_bracket_left_pipe_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.angle_bracket_angle_bracket_left_pipe;
result.loc.end = this.index;
return result;
}
}
break;
case State.angle_bracket_right:
switch (c) {
case '>': {
state = State.angle_bracket_angle_bracket_right; break;
}
case '=': {
result.tag = Tag.angle_bracket_right_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.angle_bracket_right;
result.loc.end = this.index;
return result;
}
}
break;
case State.angle_bracket_angle_bracket_right:
switch (c) {
case '=': {
result.tag = Tag.angle_bracket_angle_bracket_right_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.angle_bracket_angle_bracket_right;
result.loc.end = this.index;
return result;
}
}
break;
case State.period:
switch (c) {
case '.': {
state = State.period_2; break;
}
case '*': {
state = State.period_asterisk; break;
}
default: {
result.tag = Tag.period;
result.loc.end = this.index;
return result;
}
}
break;
case State.period_2:
switch (c) {
case '.': {
result.tag = Tag.ellipsis3;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.ellipsis2;
result.loc.end = this.index;
return result;
}
}
break;
case State.period_asterisk:
switch (c) {
case '*': {
result.tag = Tag.invalid_periodasterisks;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.period_asterisk;
result.loc.end = this.index;
return result;
}
}
break;
case State.slash:
switch (c) {
case '/': {
state = State.line_comment_start;
break;
}
case '=': {
result.tag = Tag.slash_equal;
this.index += 1;
result.loc.end = this.index;
return result;
}
default: {
result.tag = Tag.slash;
result.loc.end = this.index;
return result;
}
} break;
case State.line_comment_start:
switch (c) {
case 0: {
if (this.index != raw_source.length) {
result.tag = Tag.invalid;
this.index += 1;
}
result.loc.end = this.index;
return result;
}
case '/': {
state = State.doc_comment_start; break;
}
case '!': {
result.tag = Tag.container_doc_comment;
state = State.doc_comment; break;
}
case '\n': {
state = State.start;
result.loc.start = this.index + 1; break;
}
case '\t':
state = State.line_comment; break;
default: {
state = State.line_comment;
//TODO: PORT
//checkLiteralCharacter();
break;
}
} break;
case State.doc_comment_start:
switch (c) {
case '/': {
state = State.line_comment; break;
}
case 0:
case '\n':
{
result.tag = Tag.doc_comment;
result.loc.end = this.index;
return result;
}
case '\t': {
state = State.doc_comment;
result.tag = Tag.doc_comment; break;
}
default: {
state = State.doc_comment;
result.tag = Tag.doc_comment;
//TODO: PORT
//checkLiteralCharacter();
break;
}
} break;
case State.line_comment:
switch (c) {
case 0: {
if (this.index != raw_source.length) {
result.tag = Tag.invalid;
this.index += 1;
}
result.loc.end = this.index;
return result;
}
case '\n': {
result.tag = Tag.line_comment;
result.loc.end = this.index;
return result;
}
case '\t': break;
//TODO: PORT
//default: checkLiteralCharacter(),
} break;
case State.doc_comment:
switch (c) {
case 0:
case '\n':
result.loc.end = this.index;
return result;
case '\t': break;
//TODO: PORT
// default: checkLiteralCharacter(),
default:
break;
} break;
case State.int:
switch (c) {
case '.':
state = State.int_period;
break;
case '_':
case 'a':
case 'b':
case 'c':
case 'd':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
case 'e':
case 'E':
case 'p':
case 'P':
state = State.int_exponent;
break;
default: result.loc.end = this.index;
return result;
} break;
case State.int_exponent:
switch (c) {
case '-':
case '+':
{
state = State.float; break;
}
default: {
this.index -= 1;
state = State.int; break;
}
} break;
case State.int_period: switch (c) {
case '_':
case 'a':
case 'b':
case 'c':
case 'd':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9': {
state = State.float; break;
}
case 'e':
case 'E':
case 'p':
case 'P':
state = State.float_exponent; break;
default: {
this.index -= 1;
result.loc.end = this.index;
return result;
}
} break;
case State.float:
switch (c) {
case '_':
case 'a':
case 'b':
case 'c':
case 'd':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case 'A':
case 'B':
case 'C':
case 'D':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
 
case 'e':
case 'E':
case 'p':
case 'P':
state = State.float_exponent; break;
default: result.loc.end = this.index;
return result;
} break;
case State.float_exponent:
switch (c) {
case '-':
case '+':
state = State.float; break;
default: {
this.index -= 1;
state = State.float; break;
}
}
break;
 
case State.whitespace:
switch(c) {
case ' ':
case '\n':
case '\t':
case '\r': {
break;
}
default: {
result.loc.end = this.index;
return result;
}
}
}
this.index += 1;
}
 
//TODO: PORT
// if (result.tag == Tag.eof) {
// if (pending_invalid_token) | token | {
// pending_invalid_token = null;
// return token;
// }
// result.loc.start = sindex;
// }
 
result.loc.end = this.index;
return result;
 
}
}
 
 
const builtin_types = [
"f16", "f32", "f64", "f80", "f128",
"c_longdouble", "c_short", "c_ushort", "c_int", "c_uint",
"c_long", "c_ulong", "c_longlong", "c_ulonglong", "c_char",
"anyopaque", "void", "bool", "isize", "usize",
"noreturn", "type", "anyerror", "comptime_int", "comptime_float",
];
 
function isSimpleType(typeName) {
return builtin_types.includes(typeName) || isIntType(typeName);
}
 
function isIntType(typeName) {
if (typeName[0] != 'u' && typeName[0] != 'i') return false;
let i = 1;
if (i == typeName.length) return false;
for (; i < typeName.length; i += 1) {
if (typeName[i] < '0' || typeName[i] > '9') return false;
}
return true;
}
 
function isSpecialIndentifier(identifier) {
return ["null", "true", "false", "undefined"].includes(identifier);
}
 
//const fs = require('fs');
//const src = fs.readFileSync("../std/c.zig", 'utf8');
//console.log(generate_html_for_src(src));
 
 
// gist for zig_lexer_test code: https://gist.github.com/Myvar/2684ba4fb86b975274629d6f21eddc7b
// // Just for testing not to commit in pr
// var isNode = new Function("try {return this===global;}catch(e){return false;}");
// if (isNode()) {
 
 
// //const s = "const std = @import(\"std\");";
// //const toksa = tokenize_zig_source(s);
// //dump_tokens(toksa, s);
// //console.log(JSON.stringify(toksa));
 
// const fs = require('fs');
 
// function testFile(fileName) {
// //console.log(fileName);
// var exec = require('child_process').execFileSync;
// var passed = true;
// const zig_data = exec('./zig_lexer_test', [fileName]);
// const data = fs.readFileSync(fileName, 'utf8');
 
// const toks = tokenize_zig_source(data);
// const a_json = toks;
 
// // dump_tokens(a_json, data);
// // return;
 
// const b_json = JSON.parse(zig_data.toString());
 
// if (a_json.length !== b_json.length) {
// console.log("FAILED a and be is not the same length");
// passed = false;
// //return;
// }
 
// let len = a_json.length;
// if (len >= b_json.length) len = b_json.length;
 
// for (let i = 0; i < len; i++) {
// const a = a_json[i];
// const b = b_json[i];
 
// // console.log(a.tag + " == " + b.tag);
 
// if (a.tag !== b.tag) {
 
// // console.log("Around here:");
// // console.log(
// // data.substring(b_json[i - 2].loc.start, b_json[i - 2].loc.end),
// // data.substring(b_json[i - 1].loc.start, b_json[i - 1].loc.end),
// // data.substring(b_json[i].loc.start, b_json[i].loc.end),
// // data.substring(b_json[i + 1].loc.start, b_json[i + 1].loc.end),
// // data.substring(b_json[i + 2].loc.start, b_json[i + 2].loc.end),
// // );
 
// console.log("TAG: a != b");
// console.log("js", a.tag);
// console.log("zig", b.tag);
// passed = false;
// return;
// }
 
// if (a.tag !== Tag.eof && a.loc.start !== b.loc.start) {
// console.log("START: a != b");
 
// console.log("js", "\"" + data.substring(a_json[i ].loc.start, a_json[i].loc.end) + "\"");
// console.log("zig", "\"" + data.substring(b_json[i ].loc.start, b_json[i].loc.end) + "\"");
 
 
// passed = false;
// return;
// }
 
// // if (a.tag !== Tag.eof && a.loc.end !== b.loc.end) {
// // console.log("END: a != b");
// // // console.log("Around here:");
// // // console.log(
// // // // data.substring(b_json[i - 2].loc.start, b_json[i - 2].loc.end),
// // // // data.substring(b_json[i - 1].loc.start, b_json[i - 1].loc.end),
// // // data.substring(b_json[i ].loc.start, b_json[i].loc.end),
// // // // data.substring(b_json[i + 1].loc.start, b_json[i + 1].loc.end),
// // // // data.substring(b_json[i + 2].loc.start, b_json[i + 2].loc.end),
// // // );
// // console.log("js", "\"" + data.substring(a_json[i ].loc.start, a_json[i].loc.end) + "\"");
// // console.log("zig", "\"" + data.substring(b_json[i ].loc.start, b_json[i].loc.end) + "\"");
// // passed = false;
// // return;
// // }
// }
// return passed;
// }
// var path = require('path');
// function fromDir(startPath, filter) {
// if (!fs.existsSync(startPath)) {
// console.log("no dir ", startPath);
// return;
// }
// var files = fs.readdirSync(startPath);
// for (var i = 0; i < files.length; i++) {
// var filename = path.join(startPath, files[i]);
// var stat = fs.lstatSync(filename);
// if (stat.isDirectory()) {
// fromDir(filename, filter); //recurse
// } else if (filename.endsWith(filter)) {
// try {
// console.log('-- TESTING: ', filename);
// console.log("\t\t", testFile(filename));
// }
// catch {
// }
// };
// };
// };
// fromDir('../std', '.zig');
// //console.log(testFile("/home/myvar/code/zig/lib/std/fmt/errol.zig"));
// //console.log(testFile("test.zig"));
// }
No newline at end of file
 
lib/std/Thread/WaitGroup.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,4 @@
const builtin = @import("builtin");
const std = @import("std");
const assert = std.debug.assert;
const WaitGroup = @This();
@@ -43,3 +44,24 @@ pub fn isDone(wg: *WaitGroup) bool {
 
return (state / one_pending) == 0;
}
 
// Spawns a new thread for the task. This is appropriate when the callee
// delegates all work.
pub fn spawnManager(
wg: *WaitGroup,
comptime func: anytype,
args: anytype,
) void {
if (builtin.single_threaded) {
@call(.auto, func, args);
return;
}
const Manager = struct {
fn run(wg_inner: *WaitGroup, args_inner: @TypeOf(args)) void {
defer wg_inner.finish();
@call(.auto, func, args_inner);
}
};
wg.start();
_ = std.Thread.spawn(.{}, Manager.run, .{ wg, args }) catch Manager.run(wg, args);
}
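For illustration only (not part of this diff): a minimal sketch of how the new spawnManager helper might be called, assuming a hypothetical manage function and a standalone program.

const std = @import("std");
const WaitGroup = std.Thread.WaitGroup;

// Hypothetical manager task; in real use it would delegate work to other
// threads rather than doing it inline.
fn manage(items: []const u32) void {
    for (items) |item| std.debug.print("managing item {d}\n", .{item});
}

pub fn main() void {
    var wg: WaitGroup = .{};
    const items = [_]u32{ 1, 2, 3 };
    // spawnManager calls wg.start() before spawning a thread and wg.finish()
    // when `manage` returns; in single-threaded builds it runs `manage` inline.
    wg.spawnManager(manage, .{&items});
    wg.wait();
}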
 
lib/std/base64.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Base64 encoding/decoding.
 
const std = @import("std.zig");
const assert = std.debug.assert;
const builtin = @import("builtin");
 
lib/std/builtin.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Types and values provided by the Zig language.
 
const builtin = @import("builtin");
 
/// `explicit_subsystem` is missing when the subsystem is automatically detected,
 
lib/std/compress.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Compression algorithms.
 
const std = @import("std.zig");
 
pub const flate = @import("compress/flate.zig");
 
lib/std/crypto.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Cryptography.
 
const root = @import("root");
 
/// Authenticated Encryption with Associated Data
 
lib/std/dwarf.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! DWARF debugging data format.
 
const builtin = @import("builtin");
const std = @import("std.zig");
const debug = std.debug;
 
lib/std/elf.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Executable and Linkable Format.
 
const std = @import("std.zig");
const math = std.math;
const mem = std.mem;
 
lib/std/fmt.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! String formatting and parsing.
 
const std = @import("std.zig");
const builtin = @import("builtin");
 
 
lib/std/fs.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! File System.
 
const std = @import("std.zig");
const builtin = @import("builtin");
const root = @import("root");
 
lib/std/io/Writer.zig added: 7501, removed: 25316, total 0
@@ -58,3 +58,14 @@ pub fn writeStruct(self: Self, value: anytype) anyerror!void {
comptime assert(@typeInfo(@TypeOf(value)).Struct.layout != .Auto);
return self.writeAll(mem.asBytes(&value));
}
 
pub fn writeFile(self: Self, file: std.fs.File) anyerror!void {
// TODO: figure out how to adjust std lib abstractions so that this ends up
// doing sendfile or maybe even copy_file_range under the right conditions.
var buf: [4000]u8 = undefined;
while (true) {
const n = try file.readAll(&buf);
try self.writeAll(buf[0..n]);
if (n < buf.len) return;
}
}
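A usage sketch (not part of this diff) for the writeFile helper above. The `.any()` conversion on the file's generic writer is an assumption about the surrounding std.io API, not something introduced here.

const std = @import("std");

// Stream the contents of `src` into `dest` through the type-erased writer
// interface defined in this file.
fn appendFile(dest: std.fs.File, src: std.fs.File) !void {
    try dest.writer().any().writeFile(src);
}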
 
lib/std/net.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,5 @@
//! Cross-platform networking abstractions.
 
const std = @import("std.zig");
const builtin = @import("builtin");
const assert = std.debug.assert;
 
lib/std/simd.zig added: 7501, removed: 25316, total 0
@@ -1,7 +1,9 @@
//! This module provides functions for working conveniently with SIMD (Single Instruction; Multiple Data),
//! which may offer a potential boost in performance on some targets by performing the same operations on
//! multiple elements at once.
//! Please be aware that some functions are known to not work on MIPS.
//! SIMD (Single Instruction; Multiple Data) convenience functions.
//!
//! May offer a potential boost in performance on some targets by performing
//! the same operations on multiple elements at once.
//!
//! Some functions are known to not work on MIPS.
 
const std = @import("std");
const builtin = @import("builtin");
 
lib/std/std.zig added: 7501, removed: 25316, total 0
@@ -55,149 +55,56 @@ pub const Tz = tz.Tz;
pub const Uri = @import("Uri.zig");
 
pub const array_hash_map = @import("array_hash_map.zig");
 
/// Memory ordering, atomic data structures, and operations.
pub const atomic = @import("atomic.zig");
 
/// Base64 encoding/decoding.
pub const base64 = @import("base64.zig");
 
/// Bit manipulation data structures.
pub const bit_set = @import("bit_set.zig");
 
/// Comptime-available information about the build environment, such as the target and optimize mode.
pub const builtin = @import("builtin.zig");
 
pub const c = @import("c.zig");
 
/// COFF format.
pub const coff = @import("coff.zig");
 
/// Compression algorithms such as zlib, zstd, etc.
pub const compress = @import("compress.zig");
 
pub const comptime_string_map = @import("comptime_string_map.zig");
 
/// Cryptography.
pub const crypto = @import("crypto.zig");
 
/// Debug printing, allocation and other debug helpers.
pub const debug = @import("debug.zig");
 
/// DWARF debugging data format.
pub const dwarf = @import("dwarf.zig");
 
/// ELF format.
pub const elf = @import("elf.zig");
 
/// Enum-related metaprogramming helpers.
pub const enums = @import("enums.zig");
 
/// First in, first out data structures.
pub const fifo = @import("fifo.zig");
 
/// String formatting and parsing (e.g. parsing numbers out of strings).
pub const fmt = @import("fmt.zig");
 
/// File system-related functionality.
pub const fs = @import("fs.zig");
 
/// GPU programming helpers.
pub const gpu = @import("gpu.zig");
 
/// Fast hashing functions (i.e. not cryptographically secure).
pub const hash = @import("hash.zig");
pub const hash_map = @import("hash_map.zig");
 
/// Allocator implementations.
pub const heap = @import("heap.zig");
 
/// HTTP client and server.
pub const http = @import("http.zig");
 
/// I/O streams, reader/writer interfaces and common helpers.
pub const io = @import("io.zig");
 
/// JSON parsing and serialization.
pub const json = @import("json.zig");
 
/// LEB128 encoding.
pub const leb = @import("leb128.zig");
 
/// A standardized interface for logging.
pub const log = @import("log.zig");
 
/// Mach-O format.
pub const macho = @import("macho.zig");
 
/// Mathematical constants and operations.
pub const math = @import("math.zig");
 
/// Functions for comparing, searching, and manipulating memory.
pub const mem = @import("mem.zig");
 
/// Metaprogramming helpers.
pub const meta = @import("meta.zig");
 
/// Networking.
pub const net = @import("net.zig");
 
/// POSIX-like API layer.
pub const posix = @import("os.zig");
 
/// Non-portable Operating System-specific API.
pub const os = @import("os.zig");
 
pub const once = @import("once.zig").once;
 
/// A set of array and slice types that bit-pack integer elements.
pub const packed_int_array = @import("packed_int_array.zig");
 
/// PDB file format.
pub const pdb = @import("pdb.zig");
 
/// Accessors for process-related info (e.g. command line arguments)
/// and spawning of child processes.
pub const process = @import("process.zig");
 
/// Deprecated: use `Random` instead.
pub const rand = Random;
 
/// Sorting.
pub const sort = @import("sort.zig");
 
/// Single Instruction Multiple Data (SIMD) helpers.
pub const simd = @import("simd.zig");
 
/// ASCII text processing.
pub const ascii = @import("ascii.zig");
 
/// Tar archive format compression/decompression.
pub const tar = @import("tar.zig");
 
/// Testing allocator, testing assertions, and other helpers for testing code.
pub const testing = @import("testing.zig");
 
/// Sleep, obtaining the current time, conversion constants, and more.
pub const time = @import("time.zig");
 
/// Time zones.
pub const tz = @import("tz.zig");
 
/// UTF-8 and UTF-16LE encoding/decoding.
pub const unicode = @import("unicode.zig");
 
/// Helpers for integrating with Valgrind.
pub const valgrind = @import("valgrind.zig");
 
/// Constants and types representing the Wasm binary format.
pub const wasm = @import("wasm.zig");
 
/// Builds of the Zig compiler are distributed partly in source form. That
/// source lives here. These APIs are provided as-is and have absolutely no API
/// guarantees whatsoever.
pub const zig = @import("zig.zig");
 
pub const start = @import("start.zig");
 
const root = @import("root");
 
lib/std/tar.zig added: 7501, removed: 25316, total 0
@@ -1,23 +1,25 @@
/// Tar archive is single ordinary file which can contain many files (or
/// directories, symlinks, ...). It's build by series of blocks each size of 512
/// bytes. First block of each entry is header which defines type, name, size
/// permissions and other attributes. Header is followed by series of blocks of
/// file content, if any that entry has content. Content is padded to the block
/// size, so next header always starts at block boundary.
///
/// This simple format is extended by GNU and POSIX pax extensions to support
/// file names longer than 256 bytes and additional attributes.
///
/// This is not comprehensive tar parser. Here we are only file types needed to
/// support Zig package manager; normal file, directory, symbolic link. And
/// subset of attributes: name, size, permissions.
///
/// GNU tar reference: https://www.gnu.org/software/tar/manual/html_node/Standard.html
/// pax reference: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13
///
//! A tar archive is a single ordinary file which can contain many files (or
//! directories, symlinks, ...). It is built from a series of blocks, each 512
//! bytes in size. The first block of each entry is a header which defines the
//! type, name, size, permissions and other attributes. The header is followed
//! by a series of blocks of file content, if the entry has any content. The
//! content is padded to the block size, so the next header always starts at a
//! block boundary.
//!
//! This simple format is extended by GNU and POSIX pax extensions to support
//! file names longer than 256 bytes and additional attributes.
//!
//! This is not a comprehensive tar parser; it handles only the file types
//! needed to support the Zig package manager (normal file, directory, symbolic
//! link) and a subset of attributes: name, size, permissions.
//!
//! GNU tar reference: https://www.gnu.org/software/tar/manual/html_node/Standard.html
//! pax reference: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13
 
const std = @import("std.zig");
const assert = std.debug.assert;
 
pub const output = @import("tar/output.zig");
 
pub const Options = struct {
/// Number of directory levels to skip when extracting files.
strip_components: u32 = 0,
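For illustration (not part of this diff): the 512-byte block layout described in the module doc comment above implies the following arithmetic for how many blocks a single entry occupies.

const std = @import("std");

// One header block, followed by the content padded up to a multiple of 512.
fn entryBlockCount(content_size: u64) u64 {
    const block_size: u64 = 512;
    const content_blocks = std.math.divCeil(u64, content_size, block_size) catch unreachable;
    return 1 + content_blocks;
}

test "content is padded to the block boundary" {
    try std.testing.expectEqual(@as(u64, 1), entryBlockCount(0)); // header only
    try std.testing.expectEqual(@as(u64, 2), entryBlockCount(1));
    try std.testing.expectEqual(@as(u64, 2), entryBlockCount(512));
    try std.testing.expectEqual(@as(u64, 3), entryBlockCount(513));
}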
 
filename was Deleted added: 7501, removed: 25316, total 0
@@ -0,0 +1,85 @@
/// A struct that is exactly 512 bytes and matches tar file format. This is
/// intended to be used for outputting tar files; for parsing there is
/// `std.tar.Header`.
pub const Header = extern struct {
// This struct was originally copied from
// https://github.com/mattnite/tar/blob/main/src/main.zig which is MIT
// licensed.
 
name: [100]u8,
mode: [7:0]u8,
uid: [7:0]u8,
gid: [7:0]u8,
size: [11:0]u8,
mtime: [11:0]u8,
checksum: [7:0]u8,
typeflag: FileType,
linkname: [100]u8,
magic: [5:0]u8,
version: [2]u8,
uname: [31:0]u8,
gname: [31:0]u8,
devmajor: [7:0]u8,
devminor: [7:0]u8,
prefix: [155]u8,
pad: [12]u8,
 
pub const FileType = enum(u8) {
regular = '0',
hard_link = '1',
symbolic_link = '2',
character = '3',
block = '4',
directory = '5',
fifo = '6',
reserved = '7',
pax_global = 'g',
extended = 'x',
_,
};
 
pub fn init() Header {
var ret = std.mem.zeroes(Header);
ret.magic = [_:0]u8{ 'u', 's', 't', 'a', 'r' };
ret.version = [_:0]u8{ '0', '0' };
return ret;
}
 
pub fn setPath(self: *Header, prefix: []const u8, path: []const u8) !void {
if (prefix.len + 1 + path.len > 100) {
var i: usize = 0;
while (i < path.len and path.len - i > 100) {
while (path[i] != '/') : (i += 1) {}
}
 
_ = try std.fmt.bufPrint(&self.prefix, "{s}/{s}", .{ prefix, path[0..i] });
_ = try std.fmt.bufPrint(&self.name, "{s}", .{path[i + 1 ..]});
} else {
_ = try std.fmt.bufPrint(&self.name, "{s}/{s}", .{ prefix, path });
}
}
 
pub fn setSize(self: *Header, size: u64) !void {
_ = try std.fmt.bufPrint(&self.size, "{o:0>11}", .{size});
}
 
pub fn updateChecksum(self: *Header) !void {
const offset = @offsetOf(Header, "checksum");
var checksum: usize = 0;
for (std.mem.asBytes(self), 0..) |val, i| {
checksum += if (i >= offset and i < offset + @sizeOf(@TypeOf(self.checksum)))
' '
else
val;
}
 
_ = try std.fmt.bufPrint(&self.checksum, "{o:0>7}", .{checksum});
}
 
comptime {
assert(@sizeOf(Header) == 512);
}
};
 
const std = @import("../std.zig");
const assert = std.debug.assert;
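A usage sketch (not part of this diff) for the new std.tar.output.Header: fill in one regular-file entry and write it followed by its padded content. The writer is assumed to be a standard std.io writer, and the prefix/path names are purely illustrative.

const std = @import("std");

// Write a single regular-file entry: a 512-byte header, then the content
// padded with zeroes up to the next block boundary.
fn writeEntry(writer: anytype, contents: []const u8) !void {
    var header = std.tar.output.Header.init();
    header.typeflag = .regular;
    try header.setPath("pkg", "build.zig"); // hypothetical prefix and path
    try header.setSize(contents.len);
    try header.updateChecksum();

    try writer.writeAll(std.mem.asBytes(&header));
    try writer.writeAll(contents);

    const padding = (512 - contents.len % 512) % 512;
    try writer.writeByteNTimes(0, padding);
}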
 
lib/std/zig.zig added: 7501, removed: 25316, total 0
@@ -1,3 +1,7 @@
//! Builds of the Zig compiler are distributed partly in source form. That
//! source lives here. These APIs are provided as-is and have absolutely no API
//! guarantees whatsoever.
 
pub const ErrorBundle = @import("zig/ErrorBundle.zig");
pub const Server = @import("zig/Server.zig");
pub const Client = @import("zig/Client.zig");
 
ev/null added: 7501, removed: 25316, total 0
@@ -1,6035 +0,0 @@
const builtin = @import("builtin");
const std = @import("std");
const build_options = @import("build_options");
const Ast = std.zig.Ast;
const Autodoc = @This();
const Compilation = @import("Compilation.zig");
const Zcu = @import("Module.zig");
const File = Zcu.File;
const Module = @import("Package.zig").Module;
const Tokenizer = std.zig.Tokenizer;
const InternPool = @import("InternPool.zig");
const Zir = std.zig.Zir;
const Ref = Zir.Inst.Ref;
const log = std.log.scoped(.autodoc);
const renderer = @import("autodoc/render_source.zig");
 
zcu: *Zcu,
arena: std.mem.Allocator,
 
// The goal of autodoc is to fill up these arrays
// that will then be serialized as JSON and consumed
// by the JS frontend.
modules: std.AutoArrayHashMapUnmanaged(*Module, DocData.DocModule) = .{},
files: std.AutoArrayHashMapUnmanaged(*File, usize) = .{},
calls: std.ArrayListUnmanaged(DocData.Call) = .{},
types: std.ArrayListUnmanaged(DocData.Type) = .{},
decls: std.ArrayListUnmanaged(DocData.Decl) = .{},
exprs: std.ArrayListUnmanaged(DocData.Expr) = .{},
ast_nodes: std.ArrayListUnmanaged(DocData.AstNode) = .{},
comptime_exprs: std.ArrayListUnmanaged(DocData.ComptimeExpr) = .{},
guide_sections: std.ArrayListUnmanaged(Section) = .{},
 
// These fields hold temporary state of the analysis process
// and are mainly used by the decl path resolving algorithm.
pending_ref_paths: std.AutoHashMapUnmanaged(
*DocData.Expr, // pointer to declpath tail end (ie `&decl_path[decl_path.len - 1]`)
std.ArrayListUnmanaged(RefPathResumeInfo),
) = .{},
ref_paths_pending_on_decls: std.AutoHashMapUnmanaged(
*Scope.DeclStatus,
std.ArrayListUnmanaged(RefPathResumeInfo),
) = .{},
ref_paths_pending_on_types: std.AutoHashMapUnmanaged(
usize,
std.ArrayListUnmanaged(RefPathResumeInfo),
) = .{},
 
/// A set of ZIR instruction refs which have a meaning other than the
/// instruction they refer to. For instance, during analysis of the arguments to
/// a `call`, the index of the `call` itself is repurposed to refer to the
/// parameter type.
/// TODO: there should be some kind of proper handling for these instructions;
/// currently we just ignore them!
repurposed_insts: std.AutoHashMapUnmanaged(Zir.Inst.Index, void) = .{},
 
const RefPathResumeInfo = struct {
file: *File,
ref_path: []DocData.Expr,
};
 
/// Used to accumulate src_node offsets.
/// In ZIR, all ast node indices are relative to the parent decl.
/// More concretely, `union_decl`, `struct_decl`, `enum_decl` and `opaque_decl`
/// and the value of each of their decls participate in the relative offset
/// counting, and nothing else.
/// We keep track of the line and byte values for these instructions in order
/// to avoid tokenizing every file (on new lines) from the start every time.
const SrcLocInfo = struct {
bytes: u32 = 0,
line: usize = 0,
src_node: u32 = 0,
};
 
const Section = struct {
name: []const u8 = "", // empty string is the default section
guides: std.ArrayListUnmanaged(Guide) = .{},
 
const Guide = struct {
name: []const u8,
body: []const u8,
};
};
 
pub fn generate(zcu: *Zcu, output_dir: std.fs.Dir) !void {
var arena_allocator = std.heap.ArenaAllocator.init(zcu.gpa);
defer arena_allocator.deinit();
var autodoc: Autodoc = .{
.zcu = zcu,
.arena = arena_allocator.allocator(),
};
try autodoc.generateZirData(output_dir);
 
const lib_dir = zcu.comp.zig_lib_directory.handle;
try lib_dir.copyFile("docs/main.js", output_dir, "main.js", .{});
try lib_dir.copyFile("docs/ziglexer.js", output_dir, "ziglexer.js", .{});
try lib_dir.copyFile("docs/commonmark.js", output_dir, "commonmark.js", .{});
try lib_dir.copyFile("docs/index.html", output_dir, "index.html", .{});
}
 
fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
const root_src_path = self.zcu.main_mod.root_src_path;
const joined_src_path = try self.zcu.main_mod.root.joinString(self.arena, root_src_path);
defer self.arena.free(joined_src_path);
 
const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });
defer self.arena.free(abs_root_src_path);
 
const file = self.zcu.import_table.get(abs_root_src_path).?; // file is expected to be present in the import table
// Append all the types in Zir.Inst.Ref.
{
comptime std.debug.assert(@intFromEnum(InternPool.Index.first_type) == 0);
var i: u32 = 0;
while (i <= @intFromEnum(InternPool.Index.last_type)) : (i += 1) {
const ip_index = @as(InternPool.Index, @enumFromInt(i));
var tmpbuf = std.ArrayList(u8).init(self.arena);
if (ip_index == .generic_poison_type) {
// Not a real type, doesn't have a normal name
try tmpbuf.writer().writeAll("(generic poison)");
} else {
try @import("type.zig").Type.fromInterned(ip_index).fmt(self.zcu).format("", .{}, tmpbuf.writer());
}
try self.types.append(
self.arena,
switch (ip_index) {
.u0_type,
.i0_type,
.u1_type,
.u8_type,
.i8_type,
.u16_type,
.i16_type,
.u29_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.u80_type,
.u128_type,
.i128_type,
.usize_type,
.isize_type,
.c_char_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
=> .{
.Int = .{ .name = try tmpbuf.toOwnedSlice() },
},
.f16_type,
.f32_type,
.f64_type,
.f80_type,
.f128_type,
.c_longdouble_type,
=> .{
.Float = .{ .name = try tmpbuf.toOwnedSlice() },
},
.comptime_int_type => .{
.ComptimeInt = .{ .name = try tmpbuf.toOwnedSlice() },
},
.comptime_float_type => .{
.ComptimeFloat = .{ .name = try tmpbuf.toOwnedSlice() },
},
 
.anyopaque_type => .{
.ComptimeExpr = .{ .name = try tmpbuf.toOwnedSlice() },
},
 
.bool_type => .{
.Bool = .{ .name = try tmpbuf.toOwnedSlice() },
},
.noreturn_type => .{
.NoReturn = .{ .name = try tmpbuf.toOwnedSlice() },
},
.void_type => .{
.Void = .{ .name = try tmpbuf.toOwnedSlice() },
},
.type_info_type => .{
.ComptimeExpr = .{ .name = try tmpbuf.toOwnedSlice() },
},
.type_type => .{
.Type = .{ .name = try tmpbuf.toOwnedSlice() },
},
.anyerror_type => .{
.ErrorSet = .{ .name = try tmpbuf.toOwnedSlice() },
},
// should be different types but if we don't analyze std we don't get the ast nodes etc.
// since they're defined in std.builtin
.calling_convention_type,
.atomic_order_type,
.atomic_rmw_op_type,
.address_space_type,
.float_mode_type,
.reduce_op_type,
.call_modifier_type,
.prefetch_options_type,
.export_options_type,
.extern_options_type,
=> .{
.Type = .{ .name = try tmpbuf.toOwnedSlice() },
},
.manyptr_u8_type => .{
.Pointer = .{
.size = .Many,
.child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
.is_mutable = true,
},
},
.manyptr_const_u8_type => .{
.Pointer = .{
.size = .Many,
.child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
},
},
.manyptr_const_u8_sentinel_0_type => .{
.Pointer = .{
.size = .Many,
.child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
.sentinel = .{ .int = .{ .value = 0 } },
},
},
.single_const_pointer_to_comptime_int_type => .{
.Pointer = .{
.size = .One,
.child = .{ .type = @intFromEnum(InternPool.Index.comptime_int_type) },
},
},
.slice_const_u8_type => .{
.Pointer = .{
.size = .Slice,
.child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
},
},
.slice_const_u8_sentinel_0_type => .{
.Pointer = .{
.size = .Slice,
.child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
.sentinel = .{ .int = .{ .value = 0 } },
},
},
// Not fully correct
// since it actually has no src or line_number
.empty_struct_type => .{
.Struct = .{
.name = "",
.src = 0,
.is_tuple = false,
.line_number = 0,
.parent_container = null,
.layout = null,
},
},
.anyerror_void_error_union_type => .{
.ErrorUnion = .{
.lhs = .{ .type = @intFromEnum(InternPool.Index.anyerror_type) },
.rhs = .{ .type = @intFromEnum(InternPool.Index.void_type) },
},
},
.anyframe_type => .{
.AnyFrame = .{ .name = try tmpbuf.toOwnedSlice() },
},
.enum_literal_type => .{
.EnumLiteral = .{ .name = try tmpbuf.toOwnedSlice() },
},
.undefined_type => .{
.Undefined = .{ .name = try tmpbuf.toOwnedSlice() },
},
.null_type => .{
.Null = .{ .name = try tmpbuf.toOwnedSlice() },
},
.optional_noreturn_type => .{
.Optional = .{
.name = try tmpbuf.toOwnedSlice(),
.child = .{ .type = @intFromEnum(InternPool.Index.noreturn_type) },
},
},
// Poison and special tag
.generic_poison_type,
.var_args_param_type,
.adhoc_inferred_error_set_type,
=> .{
.Type = .{ .name = try tmpbuf.toOwnedSlice() },
},
// We want to catch new types added to InternPool.Index
else => unreachable,
},
);
}
}
 
const rootName = blk: {
const rootName = std.fs.path.basename(self.zcu.main_mod.root_src_path);
break :blk rootName[0 .. rootName.len - 4];
};
 
const main_type_index = self.types.items.len;
{
try self.modules.put(self.arena, self.zcu.main_mod, .{
.name = rootName,
.main = main_type_index,
.table = .{},
});
try self.modules.entries.items(.value)[0].table.put(
self.arena,
self.zcu.main_mod,
.{
.name = rootName,
.value = 0,
},
);
}
 
var root_scope = Scope{
.parent = null,
.enclosing_type = null,
};
 
const tldoc_comment = try self.getTLDocComment(file);
const cleaned_tldoc_comment = try self.findGuidePaths(file, tldoc_comment);
defer self.arena.free(cleaned_tldoc_comment);
try self.ast_nodes.append(self.arena, .{
.name = "(root)",
.docs = cleaned_tldoc_comment,
});
try self.files.put(self.arena, file, main_type_index);
 
_ = try self.walkInstruction(
file,
&root_scope,
.{},
.main_struct_inst,
false,
null,
);
 
if (self.ref_paths_pending_on_decls.count() > 0) {
@panic("some decl paths were never fully analyzed (pending on decls)");
}
 
if (self.ref_paths_pending_on_types.count() > 0) {
@panic("some decl paths were never fully analyzed (pending on types)");
}
 
if (self.pending_ref_paths.count() > 0) {
@panic("some decl paths were never fully analyzed");
}
 
var data = DocData{
.modules = self.modules,
.files = self.files,
.calls = self.calls.items,
.types = self.types.items,
.decls = self.decls.items,
.exprs = self.exprs.items,
.astNodes = self.ast_nodes.items,
.comptimeExprs = self.comptime_exprs.items,
.guideSections = self.guide_sections,
};
 
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocData))) |f| {
const field_name = @tagName(f);
const file_name = "data-" ++ field_name ++ ".js";
const data_js_f = try output_dir.createFile(file_name, .{});
defer data_js_f.close();
 
var buffer = std.io.bufferedWriter(data_js_f.writer());
const out = buffer.writer();
 
try out.print("var {s} =", .{field_name});
 
var jsw = std.json.writeStream(out, .{
.whitespace = .minified,
.emit_null_optional_fields = true,
});
 
switch (f) {
.files => try writeFileTableToJson(data.files, data.modules, &jsw),
.guideSections => try writeGuidesToJson(data.guideSections, &jsw),
.modules => try jsw.write(data.modules.values()),
else => try jsw.write(@field(data, field_name)),
}
 
// try std.json.stringifyArbitraryDepth(
// self.arena,
// @field(data, field.name),
// .{
// .whitespace = .minified,
// .emit_null_optional_fields = true,
// },
// out,
// );
try out.print(";", .{});
 
// last thing (that can fail) that we do is flush
try buffer.flush();
}
 
{
output_dir.makeDir("src") catch |e| switch (e) {
error.PathAlreadyExists => {},
else => |err| return err,
};
const html_dir = try output_dir.openDir("src", .{});
 
var files_iterator = self.files.iterator();
 
while (files_iterator.next()) |entry| {
const sub_file_path = entry.key_ptr.*.sub_file_path;
const file_module = entry.key_ptr.*.mod;
const module_name = (self.modules.get(file_module) orelse continue).name;
 
const file_path = std.fs.path.dirname(sub_file_path) orelse "";
const file_name = if (file_path.len > 0) sub_file_path[file_path.len + 1 ..] else sub_file_path;
 
const html_file_name = try std.mem.concat(self.arena, u8, &.{ file_name, ".html" });
defer self.arena.free(html_file_name);
 
const dir_name = try std.fs.path.join(self.arena, &.{ module_name, file_path });
defer self.arena.free(dir_name);
 
var dir = try html_dir.makeOpenPath(dir_name, .{});
defer dir.close();
 
const html_file = dir.createFile(html_file_name, .{}) catch |err| switch (err) {
error.PathAlreadyExists => try dir.openFile(html_file_name, .{}),
else => return err,
};
defer html_file.close();
var buffer = std.io.bufferedWriter(html_file.writer());
 
const out = buffer.writer();
 
try renderer.genHtml(self.zcu.gpa, entry.key_ptr.*, out);
try buffer.flush();
}
}
}
 
/// Represents a chain of scopes, used to resolve decl references to the
/// corresponding entry in `self.decls`. It also keeps track of whether
/// a given decl has been analyzed or not.
const Scope = struct {
parent: ?*Scope,
map: std.AutoHashMapUnmanaged(
Zir.NullTerminatedString, // index into the current file's string table (decl name)
*DeclStatus,
) = .{},
captures: []const Zir.Inst.Capture = &.{},
enclosing_type: ?usize, // index into `types`, null = file top-level struct
 
pub const DeclStatus = union(enum) {
Analyzed: usize, // index into `decls`
Pending,
NotRequested: u32, // instr_index
};
 
fn getCapture(scope: Scope, idx: u16) struct {
union(enum) { inst: Zir.Inst.Index, decl: Zir.NullTerminatedString },
*Scope,
} {
const parent = scope.parent.?;
return switch (scope.captures[idx].unwrap()) {
.nested => |parent_idx| parent.getCapture(parent_idx),
.instruction => |inst| .{
.{ .inst = inst },
parent,
},
.decl_val, .decl_ref => |str| .{
.{ .decl = str },
parent,
},
};
}
 
/// Returns a pointer so that the caller has a chance to modify the value
/// in case they decide to start analyzing a previously not requested decl.
/// Another reason is that in some places we use the pointer to uniquely
/// refer to a decl, as we wait for it to be analyzed. This means that
/// those pointers must stay stable.
pub fn resolveDeclName(self: Scope, string_table_idx: Zir.NullTerminatedString, file: *File, inst: Zir.Inst.OptionalIndex) *DeclStatus {
var cur: ?*const Scope = &self;
return while (cur) |s| : (cur = s.parent) {
break s.map.get(string_table_idx) orelse continue;
} else {
printWithOptionalContext(
file,
inst,
"Could not find `{s}`\n\n",
.{file.zir.nullTerminatedString(string_table_idx)},
);
unreachable;
};
}
 
pub fn insertDeclRef(
self: *Scope,
arena: std.mem.Allocator,
decl_name_index: Zir.NullTerminatedString, // index into the current file's string table
decl_status: DeclStatus,
) !void {
const decl_status_ptr = try arena.create(DeclStatus);
errdefer arena.destroy(decl_status_ptr);
 
decl_status_ptr.* = decl_status;
try self.map.put(arena, decl_name_index, decl_status_ptr);
}
};
 
/// The output of our analysis process.
const DocData = struct {
// NOTE: editing fields of DocData requires also updating:
// - the deployment script for ziglang.org
// - imports in index.html
typeKinds: []const []const u8 = std.meta.fieldNames(DocTypeKinds),
rootMod: u32 = 0,
modules: std.AutoArrayHashMapUnmanaged(*Module, DocModule),
 
// non-hardcoded stuff
astNodes: []AstNode,
calls: []Call,
files: std.AutoArrayHashMapUnmanaged(*File, usize),
types: []Type,
decls: []Decl,
exprs: []Expr,
comptimeExprs: []ComptimeExpr,
 
guideSections: std.ArrayListUnmanaged(Section),
 
const Call = struct {
func: Expr,
args: []Expr,
ret: Expr,
};
 
/// All the type "families" as described by `std.builtin.TypeId`
/// plus a couple extra that are unique to our use case.
///
/// `Unanalyzed` is used so that we can refer to types that have started
/// analysis but that haven't been fully analyzed yet (in case we find
/// self-referential stuff, like `@This()`).
///
/// `ComptimeExpr` represents the result of a piece of comptime logic
/// that we weren't able to analyze fully. Examples of that are comptime
/// function calls and comptime if / switch / ... expressions.
const DocTypeKinds = @typeInfo(Type).Union.tag_type.?;
 
const ComptimeExpr = struct {
code: []const u8,
};
const DocModule = struct {
name: []const u8 = "(root)",
file: usize = 0, // index into `files`
main: usize = 0, // index into `types`
table: std.AutoHashMapUnmanaged(*Module, TableEntry),
pub const TableEntry = struct {
name: []const u8,
value: usize,
};
 
pub fn jsonStringify(self: DocModule, jsw: anytype) !void {
try jsw.beginObject();
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocModule))) |f| {
const f_name = @tagName(f);
try jsw.objectField(f_name);
switch (f) {
.table => try writeModuleTableToJson(self.table, jsw),
else => try jsw.write(@field(self, f_name)),
}
}
try jsw.endObject();
}
};
 
const Decl = struct {
name: []const u8,
kind: []const u8,
src: usize, // index into astNodes
value: WalkResult,
// The index in astNodes of the `test declname { }` node
decltest: ?usize = null,
is_uns: bool = false, // usingnamespace
parent_container: ?usize, // index into `types`
 
pub fn jsonStringify(self: Decl, jsw: anytype) !void {
try jsw.beginArray();
inline for (comptime std.meta.fields(Decl)) |f| {
try jsw.write(@field(self, f.name));
}
try jsw.endArray();
}
};
 
const AstNode = struct {
file: usize = 0, // index into files
line: usize = 0,
col: usize = 0,
name: ?[]const u8 = null,
code: ?[]const u8 = null,
docs: ?[]const u8 = null,
fields: ?[]usize = null, // index into astNodes
@"comptime": bool = false,
 
pub fn jsonStringify(self: AstNode, jsw: anytype) !void {
try jsw.beginArray();
inline for (comptime std.meta.fields(AstNode)) |f| {
try jsw.write(@field(self, f.name));
}
try jsw.endArray();
}
};
 
const Type = union(enum) {
Unanalyzed: struct {},
Type: struct { name: []const u8 },
Void: struct { name: []const u8 },
Bool: struct { name: []const u8 },
NoReturn: struct { name: []const u8 },
Int: struct { name: []const u8 },
Float: struct { name: []const u8 },
Pointer: struct {
size: std.builtin.Type.Pointer.Size,
child: Expr,
sentinel: ?Expr = null,
@"align": ?Expr = null,
address_space: ?Expr = null,
bit_start: ?Expr = null,
host_size: ?Expr = null,
is_ref: bool = false,
is_allowzero: bool = false,
is_mutable: bool = false,
is_volatile: bool = false,
has_sentinel: bool = false,
has_align: bool = false,
has_addrspace: bool = false,
has_bit_range: bool = false,
},
Array: struct {
len: Expr,
child: Expr,
sentinel: ?Expr = null,
},
Struct: struct {
name: []const u8,
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
field_types: []Expr = &.{}, // (use src->fields to find names)
field_defaults: []?Expr = &.{}, // default values if specified
backing_int: ?Expr = null, // backing integer if specified
is_tuple: bool,
line_number: usize,
parent_container: ?usize, // index into `types`
layout: ?Expr, // if different than Auto
},
ComptimeExpr: struct { name: []const u8 },
ComptimeFloat: struct { name: []const u8 },
ComptimeInt: struct { name: []const u8 },
Undefined: struct { name: []const u8 },
Null: struct { name: []const u8 },
Optional: struct {
name: []const u8,
child: Expr,
},
ErrorUnion: struct { lhs: Expr, rhs: Expr },
InferredErrorUnion: struct { payload: Expr },
ErrorSet: struct {
name: []const u8,
fields: ?[]const Field = null,
// TODO: fn field for inferred error sets?
},
Enum: struct {
name: []const u8,
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
// (use src->fields to find field names)
tag: ?Expr = null, // tag type if specified
values: []?Expr = &.{}, // tag values if specified
nonexhaustive: bool,
parent_container: ?usize, // index into `types`
},
Union: struct {
name: []const u8,
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
fields: []Expr = &.{}, // (use src->fields to find names)
tag: ?Expr, // tag type if specified
auto_enum: bool, // tag is an auto enum
parent_container: ?usize, // index into `types`
layout: ?Expr, // if different than Auto
},
Fn: struct {
name: []const u8,
src: ?usize = null, // index into `astNodes`
ret: Expr,
generic_ret: ?Expr = null,
params: ?[]Expr = null, // (use src->fields to find names)
lib_name: []const u8 = "",
is_var_args: bool = false,
is_inferred_error: bool = false,
has_lib_name: bool = false,
has_cc: bool = false,
cc: ?usize = null,
@"align": ?usize = null,
has_align: bool = false,
is_test: bool = false,
is_extern: bool = false,
},
Opaque: struct {
name: []const u8,
src: usize, // index into astNodes
privDecls: []usize = &.{}, // index into decls
pubDecls: []usize = &.{}, // index into decls
parent_container: ?usize, // index into `types`
},
Frame: struct { name: []const u8 },
AnyFrame: struct { name: []const u8 },
Vector: struct { name: []const u8 },
EnumLiteral: struct { name: []const u8 },
 
const Field = struct {
name: []const u8,
docs: []const u8,
};
 
pub fn jsonStringify(self: Type, jsw: anytype) !void {
const active_tag = std.meta.activeTag(self);
try jsw.beginArray();
try jsw.write(@intFromEnum(active_tag));
inline for (comptime std.meta.fields(Type)) |case| {
if (@field(Type, case.name) == active_tag) {
const current_value = @field(self, case.name);
inline for (comptime std.meta.fields(case.type)) |f| {
if (f.type == std.builtin.Type.Pointer.Size) {
try jsw.write(@intFromEnum(@field(current_value, f.name)));
} else {
try jsw.write(@field(current_value, f.name));
}
}
}
}
try jsw.endArray();
}
};
 
/// An Expr represents the (untyped) result of analyzing instructions.
/// The data is normalized, which means that an Expr that results in a
/// type definition will hold an index into `self.types`.
pub const Expr = union(enum) {
comptimeExpr: usize, // index in `comptimeExprs`
void: struct {},
@"unreachable": struct {},
null: struct {},
undefined: struct {},
@"struct": []FieldVal,
fieldVal: FieldVal,
bool: bool,
@"anytype": struct {},
@"&": usize, // index in `exprs`
type: usize, // index in `types`
this: usize, // index in `types`
declRef: *Scope.DeclStatus,
declIndex: usize, // index into `decls`, alternative repr for `declRef`
declName: []const u8, // unresolved decl name
builtinField: enum { len, ptr },
fieldRef: FieldRef,
refPath: []Expr,
int: struct {
value: u64, // direct value
negated: bool = false,
},
int_big: struct {
value: []const u8, // string representation
negated: bool = false,
},
float: f64, // direct value
float128: f128, // direct value
array: []usize, // index in `exprs`
call: usize, // index in `calls`
enumLiteral: []const u8, // direct value
typeOf: usize, // index in `exprs`
typeOf_peer: []usize,
errorUnion: usize, // index in `types`
as: As,
sizeOf: usize, // index in `exprs`
bitSizeOf: usize, // index in `exprs`
compileError: usize, // index in `exprs`
optionalPayload: usize, // index in `exprs`
elemVal: ElemVal,
errorSets: usize,
string: []const u8, // direct value
sliceIndex: usize,
slice: Slice,
sliceLength: SliceLength,
cmpxchgIndex: usize,
cmpxchg: Cmpxchg,
builtin: Builtin,
builtinIndex: usize,
builtinBin: BuiltinBin,
builtinBinIndex: usize,
unionInit: UnionInit,
builtinCall: BuiltinCall,
mulAdd: MulAdd,
switchIndex: usize, // index in `exprs`
switchOp: SwitchOp,
unOp: UnOp,
unOpIndex: usize,
binOp: BinOp,
binOpIndex: usize,
load: usize, // index in `exprs`
const UnOp = struct {
param: usize, // index in `exprs`
name: []const u8 = "", // tag name
};
const BinOp = struct {
lhs: usize, // index in `exprs`
rhs: usize, // index in `exprs`
name: []const u8 = "", // tag name
};
const SwitchOp = struct {
cond_index: usize,
file_name: []const u8,
src: usize,
outer_decl: usize, // index in `types`
};
const BuiltinBin = struct {
name: []const u8 = "", // fn name
lhs: usize, // index in `exprs`
rhs: usize, // index in `exprs`
};
const UnionInit = struct {
type: usize, // index in `exprs`
field: usize, // index in `exprs`
init: usize, // index in `exprs`
};
const Builtin = struct {
name: []const u8 = "", // fn name
param: usize, // index in `exprs`
};
const BuiltinCall = struct {
modifier: usize, // index in `exprs`
function: usize, // index in `exprs`
args: usize, // index in `exprs`
};
const MulAdd = struct {
mulend1: usize, // index in `exprs`
mulend2: usize, // index in `exprs`
addend: usize, // index in `exprs`
type: usize, // index in `exprs`
};
const Slice = struct {
lhs: usize, // index in `exprs`
start: usize,
end: ?usize = null,
sentinel: ?usize = null, // index in `exprs`
};
const SliceLength = struct {
lhs: usize,
start: usize,
len: usize,
sentinel: ?usize = null,
};
const Cmpxchg = struct {
name: []const u8,
type: usize,
ptr: usize,
expected_value: usize,
new_value: usize,
success_order: usize,
failure_order: usize,
};
const As = struct {
typeRefArg: ?usize, // index in `exprs`
exprArg: usize, // index in `exprs`
};
const FieldRef = struct {
type: usize, // index in `types`
index: usize, // index in type.fields
};
 
const FieldVal = struct {
name: []const u8,
val: struct {
typeRef: ?usize, // index in `exprs`
expr: usize, // index in `exprs`
},
};
 
const ElemVal = struct {
lhs: usize, // index in `exprs`
rhs: usize, // index in `exprs`
};
 
pub fn jsonStringify(self: Expr, jsw: anytype) !void {
const active_tag = std.meta.activeTag(self);
try jsw.beginObject();
if (active_tag == .declIndex) {
try jsw.objectField("declRef");
} else {
try jsw.objectField(@tagName(active_tag));
}
switch (self) {
.int => {
if (self.int.negated) {
try jsw.write(-@as(i65, self.int.value));
} else {
try jsw.write(self.int.value);
}
},
.builtinField => {
try jsw.write(@tagName(self.builtinField));
},
.declRef => {
try jsw.write(self.declRef.Analyzed);
},
else => {
inline for (comptime std.meta.fields(Expr)) |case| {
// TODO: this is super ugly, fix once `inline else` is a thing
if (comptime std.mem.eql(u8, case.name, "builtinField"))
continue;
if (comptime std.mem.eql(u8, case.name, "declRef"))
continue;
if (@field(Expr, case.name) == active_tag) {
try jsw.write(@field(self, case.name));
}
}
},
}
try jsw.endObject();
}
};
 
/// A WalkResult represents the result of the analysis process done to a
/// a Zir instruction. Walk results carry type information either inferred
/// from the context (eg string literals are pointers to null-terminated
/// arrays), or because of @as() instructions.
/// Since the type information is only needed in certain contexts, the
/// underlying normalized data (Expr) is untyped.
const WalkResult = struct {
typeRef: ?Expr = null,
expr: Expr,
};
};
 
const AutodocErrors = error{
OutOfMemory,
CurrentWorkingDirectoryUnlinked,
UnexpectedEndOfFile,
ModuleNotFound,
ImportOutsideModulePath,
} || std.fs.File.OpenError || std.fs.File.ReadError;
 
/// `call` instructions will have loopy references to themselves
/// whenever an as_node is required for a complex expression.
/// This type is used to keep track of dangerous instruction
/// numbers that we definitely don't want to recurse into.
const CallContext = struct {
inst: Zir.Inst.Index,
prev: ?*const CallContext,
};
 
/// Called when we need to analyze a Zir instruction.
/// For example it gets called by `generateZirData` on instruction 0,
/// which represents the top-level struct corresponding to the root file.
/// Note that in some situations where we're analyzing code that only allows
/// for a limited subset of Zig syntax, we don't always resort to calling
/// `walkInstruction` and instead sometimes we handle Zir directly.
/// The best example of that are instructions corresponding to function
/// params, as those can only occur while analyzing a function definition.
fn walkInstruction(
self: *Autodoc,
file: *File,
parent_scope: *Scope,
parent_src: SrcLocInfo,
inst: Zir.Inst.Index,
need_type: bool, // true if the caller needs us to provide also a typeRef
call_ctx: ?*const CallContext,
) AutodocErrors!DocData.WalkResult {
const tags = file.zir.instructions.items(.tag);
const data = file.zir.instructions.items(.data);
 
if (self.repurposed_insts.contains(inst)) {
// TODO: better handling here
return .{ .expr = .{ .comptimeExpr = 0 } };
}
 
// We assume that the topmost ast_node entry corresponds to our decl
const self_ast_node_index = self.ast_nodes.items.len - 1;
 
switch (tags[@intFromEnum(inst)]) {
else => {
printWithContext(
file,
inst,
"TODO: implement `{s}` for walkInstruction\n\n",
.{@tagName(tags[@intFromEnum(inst)])},
);
return self.cteTodo(@tagName(tags[@intFromEnum(inst)]));
},
.import => {
const str_tok = data[@intFromEnum(inst)].str_tok;
const path = str_tok.get(file.zir);
 
// importFile cannot error out since all files
// are already loaded at this point
if (file.mod.deps.get(path)) |other_module| {
const result = try self.modules.getOrPut(self.arena, other_module);
 
// Immediately add this module to the import table of our
// current module, regardless of whether it's new or not.
if (self.modules.getPtr(file.mod)) |current_module| {
// TODO: apparently, in the stdlib a file gets analyzed before
// its module gets added. I guess we're importing a file
// that belongs to another module through its file path?
// (ie not through its module name).
// We're bailing for now, but maybe we shouldn't?
_ = try current_module.table.getOrPutValue(
self.arena,
other_module,
.{
.name = path,
.value = self.modules.getIndex(other_module).?,
},
);
}
 
if (result.found_existing) {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = result.value_ptr.main },
};
}
 
// create a new module entry
const main_type_index = self.types.items.len;
result.value_ptr.* = .{
.name = path,
.main = main_type_index,
.table = .{},
};
 
// TODO: Add this module as a dependency to the current module
// TODO: this seems something that could be done in bulk
// at the beginning or the end, or something.
const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{
".",
other_module.root.root_dir.path orelse ".",
other_module.root.sub_path,
other_module.root_src_path,
});
defer self.arena.free(abs_root_src_path);
 
const new_file = self.zcu.import_table.get(abs_root_src_path).?;
 
var root_scope = Scope{
.parent = null,
.enclosing_type = null,
};
const maybe_tldoc_comment = try self.getTLDocComment(file);
try self.ast_nodes.append(self.arena, .{
.name = "(root)",
.docs = maybe_tldoc_comment,
});
try self.files.put(self.arena, new_file, main_type_index);
return self.walkInstruction(
new_file,
&root_scope,
.{},
.main_struct_inst,
false,
call_ctx,
);
}
 
const new_file = try self.zcu.importFile(file, path);
const result = try self.files.getOrPut(self.arena, new_file.file);
if (result.found_existing) {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = result.value_ptr.* },
};
}
 
const maybe_tldoc_comment = try self.getTLDocComment(new_file.file);
try self.ast_nodes.append(self.arena, .{
.name = path,
.docs = maybe_tldoc_comment,
});
 
result.value_ptr.* = self.types.items.len;
 
var new_scope = Scope{
.parent = null,
.enclosing_type = null,
};
 
return self.walkInstruction(
new_file.file,
&new_scope,
.{},
.main_struct_inst,
need_type,
call_ctx,
);
},
.ret_type => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = @intFromEnum(Ref.type_type) },
};
},
.ret_node => {
const un_node = data[@intFromEnum(inst)].un_node;
return self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
},
.ret_load => {
const un_node = data[@intFromEnum(inst)].un_node;
const res_ptr_ref = un_node.operand;
const res_ptr_inst = @intFromEnum(res_ptr_ref.toIndex().?);
// TODO: this instruction doesn't let us know trivially whether there's
// branching involved or not. For now here's the strategy:
// we search backwards until `ret_ptr` for `store_node` instructions;
// if we find only one, that's our value, and if we find more
// than one, it means that there's branching involved.
// Maybe.
 
var i = @intFromEnum(inst) - 1;
var result_ref: ?Ref = null;
while (i > res_ptr_inst) : (i -= 1) {
if (tags[i] == .store_node) {
const pl_node = data[i].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
if (extra.data.lhs == res_ptr_ref) {
// this store_node instruction is indeed pointing at
// the result location that we care about!
if (result_ref != null) return DocData.WalkResult{
.expr = .{ .comptimeExpr = 0 },
};
result_ref = extra.data.rhs;
}
}
}
 
if (result_ref) |rr| {
return self.walkRef(
file,
parent_scope,
parent_src,
rr,
need_type,
call_ctx,
);
}
 
return DocData.WalkResult{
.expr = .{ .comptimeExpr = 0 },
};
},
.str => {
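// A string literal, e.g. `"hi"`, whose type is `*const [2:0]u8`: when the
// caller wants a typeRef we synthesize the 0-terminated array type and a
// const single-item pointer to it.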
const str = data[@intFromEnum(inst)].str.get(file.zir);
 
const tRef: ?DocData.Expr = if (!need_type) null else blk: {
const arrTypeId = self.types.items.len;
try self.types.append(self.arena, .{
.Array = .{
.len = .{ .int = .{ .value = str.len } },
.child = .{ .type = @intFromEnum(Ref.u8_type) },
.sentinel = .{ .int = .{
.value = 0,
.negated = false,
} },
},
});
// const sentinel: ?usize = if (ptr.flags.has_sentinel) 0 else null;
const ptrTypeId = self.types.items.len;
try self.types.append(self.arena, .{
.Pointer = .{
.size = .One,
.child = .{ .type = arrTypeId },
.sentinel = .{ .int = .{
.value = 0,
.negated = false,
} },
.is_mutable = false,
},
});
break :blk .{ .type = ptrTypeId };
};
 
return DocData.WalkResult{
.typeRef = tRef,
.expr = .{ .string = str },
};
},
.compile_error => {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
return DocData.WalkResult{
.expr = .{ .compileError = operand_index },
};
},
.enum_literal => {
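// An enum literal, e.g. `.foo`.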
const str_tok = data[@intFromEnum(inst)].str_tok;
const literal = file.zir.nullTerminatedString(str_tok.start);
const type_index = self.types.items.len;
try self.types.append(self.arena, .{
.EnumLiteral = .{ .name = "todo enum literal" },
});
 
return DocData.WalkResult{
.typeRef = .{ .type = type_index },
.expr = .{ .enumLiteral = literal },
};
},
.int => {
const int = data[@intFromEnum(inst)].int;
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .int = .{ .value = int } },
};
},
.int_big => {
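// An integer literal too large for the compact `int` encoding; its limbs
// are stored in `string_bytes`, so we reassemble the big integer and
// render it in base 10.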
// @check
const str = data[@intFromEnum(inst)].str; //.get(file.zir);
const byte_count = str.len * @sizeOf(std.math.big.Limb);
const limb_bytes = file.zir.string_bytes[@intFromEnum(str.start)..][0..byte_count];
 
const limbs = try self.arena.alloc(std.math.big.Limb, str.len);
@memcpy(std.mem.sliceAsBytes(limbs)[0..limb_bytes.len], limb_bytes);
 
const big_int = std.math.big.int.Const{
.limbs = limbs,
.positive = true,
};
 
const as_string = try big_int.toStringAlloc(self.arena, 10, .lower);
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .int_big = .{ .value = as_string } },
};
},
.@"unreachable" => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.noreturn_type) },
.expr = .{ .@"unreachable" = .{} },
};
},
 
.slice_start => {
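// Slicing with only a start index, e.g. `lhs[start..]`. The slice_end,
// slice_sentinel and slice_length cases below handle `lhs[a..b]`,
// `lhs[a..b :s]`, and (presumably) the start-plus-length form respectively.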
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.SliceStart, pl_node.payload_index);
 
const slice_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const start: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.start,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const start_index = self.exprs.items.len;
try self.exprs.append(self.arena, start.expr);
self.exprs.items[slice_index] = .{ .slice = .{ .lhs = lhs_index, .start = start_index } };
 
const typeRef = switch (lhs.expr) {
.declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
else => null,
};
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .sliceIndex = slice_index },
};
},
.slice_end => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.SliceEnd, pl_node.payload_index);
 
const slice_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const start: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.start,
false,
call_ctx,
);
const end: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.end,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const start_index = self.exprs.items.len;
try self.exprs.append(self.arena, start.expr);
const end_index = self.exprs.items.len;
try self.exprs.append(self.arena, end.expr);
self.exprs.items[slice_index] = .{ .slice = .{ .lhs = lhs_index, .start = start_index, .end = end_index } };
 
const typeRef = switch (lhs.expr) {
.declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
else => null,
};
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .sliceIndex = slice_index },
};
},
.slice_sentinel => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.SliceSentinel, pl_node.payload_index);
 
const slice_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const start: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.start,
false,
call_ctx,
);
const end: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.end,
false,
call_ctx,
);
const sentinel: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.sentinel,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const start_index = self.exprs.items.len;
try self.exprs.append(self.arena, start.expr);
const end_index = self.exprs.items.len;
try self.exprs.append(self.arena, end.expr);
const sentinel_index = self.exprs.items.len;
try self.exprs.append(self.arena, sentinel.expr);
self.exprs.items[slice_index] = .{ .slice = .{
.lhs = lhs_index,
.start = start_index,
.end = end_index,
.sentinel = sentinel_index,
} };
 
const typeRef = switch (lhs.expr) {
.declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
else => null,
};
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .sliceIndex = slice_index },
};
},
.slice_length => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.SliceLength, pl_node.payload_index);
 
const slice_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const start: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.start,
false,
call_ctx,
);
const len: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.len,
false,
call_ctx,
);
const sentinel_opt: ?DocData.WalkResult = if (extra.data.sentinel != .none)
try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.sentinel,
false,
call_ctx,
)
else
null;
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const start_index = self.exprs.items.len;
try self.exprs.append(self.arena, start.expr);
const len_index = self.exprs.items.len;
try self.exprs.append(self.arena, len.expr);
const sentinel_index = if (sentinel_opt) |sentinel| sentinel_index: {
const index = self.exprs.items.len;
try self.exprs.append(self.arena, sentinel.expr);
break :sentinel_index index;
} else null;
self.exprs.items[slice_index] = .{ .sliceLength = .{
.lhs = lhs_index,
.start = start_index,
.len = len_index,
.sentinel = sentinel_index,
} };
 
const typeRef = switch (lhs.expr) {
.declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
else => null,
};
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .sliceIndex = slice_index },
};
},
 
.load => {
const un_node = data[@intFromEnum(inst)].un_node;
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
need_type,
call_ctx,
);
const load_idx = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
var typeRef: ?DocData.Expr = null;
if (operand.typeRef) |ref| {
switch (ref) {
.type => |t_index| {
switch (self.types.items[t_index]) {
.Pointer => |p| typeRef = p.child,
else => {},
}
},
else => {},
}
}
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .load = load_idx },
};
},
.ref => {
const un_tok = data[@intFromEnum(inst)].un_tok;
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_tok.operand,
need_type,
call_ctx,
);
const ref_idx = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
return DocData.WalkResult{
.expr = .{ .@"&" = ref_idx },
};
},
 
.add,
.addwrap,
.add_sat,
.sub,
.subwrap,
.sub_sat,
.mul,
.mulwrap,
.mul_sat,
.div,
.shl,
.shl_sat,
.shr,
.bit_or,
.bit_and,
.xor,
.array_cat,
=> {
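// Plain binary operators plus `++` (array_cat), e.g. `a + b`, `a <<| b`,
// `lhs ++ rhs`. The binOp slot is reserved up front because walking the
// operands can append further expressions; the operand indices get
// patched in afterwards.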
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const binop_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
self.exprs.items[binop_index] = .{ .binOp = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.lhs = lhs_index,
.rhs = rhs_index,
} };
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .binOpIndex = binop_index },
};
},
.array_mul => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.ArrayMul, pl_node.payload_index);
 
const binop_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
const res_ty: ?DocData.WalkResult = if (extra.data.res_ty != .none)
try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.res_ty,
false,
call_ctx,
)
else
null;
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
self.exprs.items[binop_index] = .{ .binOp = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.lhs = lhs_index,
.rhs = rhs_index,
} };
 
return DocData.WalkResult{
.typeRef = if (res_ty) |rt| rt.expr else null,
.expr = .{ .binOpIndex = binop_index },
};
},
// compare operators
.cmp_eq,
.cmp_neq,
.cmp_gt,
.cmp_gte,
.cmp_lt,
.cmp_lte,
=> {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const binop_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
self.exprs.items[binop_index] = .{ .binOp = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.lhs = lhs_index,
.rhs = rhs_index,
} };
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
.expr = .{ .binOpIndex = binop_index },
};
},
 
// builtin functions
.align_of,
.int_from_bool,
.embed_file,
.error_name,
.panic,
.set_runtime_safety, // @check
.sqrt,
.sin,
.cos,
.tan,
.exp,
.exp2,
.log,
.log2,
.log10,
.abs,
.floor,
.ceil,
.trunc,
.round,
.tag_name,
.type_name,
.frame_type,
.frame_size,
.int_from_ptr,
.type_info,
// @check
.clz,
.ctz,
.pop_count,
.byte_swap,
.bit_reverse,
=> {
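// Builtins that take a single operand, e.g. `@alignOf(T)`, `@tagName(v)`,
// `@sqrt(x)`, `@clz(x)`.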
const un_node = data[@intFromEnum(inst)].un_node;
const bin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
const param = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
const param_index = self.exprs.items.len;
try self.exprs.append(self.arena, param.expr);
 
self.exprs.items[bin_index] = .{
.builtin = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.param = param_index,
},
};
 
return DocData.WalkResult{
.typeRef = param.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .builtinIndex = bin_index },
};
},
.bit_not,
.bool_not,
.negate_wrap,
=> {
const un_node = data[@intFromEnum(inst)].un_node;
const un_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .unOp = .{ .param = 0 } });
const param = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
const param_index = self.exprs.items.len;
try self.exprs.append(self.arena, param.expr);
 
self.exprs.items[un_index] = .{
.unOp = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.param = param_index,
},
};
 
return DocData.WalkResult{
.typeRef = param.typeRef,
.expr = .{ .unOpIndex = un_index },
};
},
.bool_br_and, .bool_br_or => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.BoolBr, pl_node.payload_index);
 
const bin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
 
const lhs = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
 
const rhs = try self.walkInstruction(
file,
parent_scope,
parent_src,
@enumFromInt(file.zir.extra[extra.end..][extra.data.body_len - 1]),
false,
call_ctx,
);
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
 
self.exprs.items[bin_index] = .{ .binOp = .{ .name = @tagName(tags[@intFromEnum(inst)]), .lhs = lhs_index, .rhs = rhs_index } };
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
.expr = .{ .binOpIndex = bin_index },
};
},
.truncate => {
// in the ZIR this node is a builtin `bin`, but we want to send it as a `un` builtin
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
 
const bin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
 
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
 
self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(tags[@intFromEnum(inst)]), .param = rhs_index } };
 
return DocData.WalkResult{
.typeRef = lhs.expr,
.expr = .{ .builtinIndex = bin_index },
};
},
.int_from_float,
.float_from_int,
.ptr_from_int,
.enum_from_int,
.float_cast,
.int_cast,
.ptr_cast,
.has_decl,
.has_field,
.div_exact,
.div_floor,
.div_trunc,
.mod,
.rem,
.mod_rem,
.shl_exact,
.shr_exact,
.bitcast,
.vector_type,
// @check
.bit_offset_of,
.offset_of,
.splat,
.reduce,
.min,
.max,
=> {
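// Builtins whose ZIR encoding is a `bin` with two operands,
// e.g. `@hasDecl(T, "decl")`, `@divExact(a, b)`, `@offsetOf(T, "field")`, `@min(a, b)`.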
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const binop_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtinBin = .{ .lhs = 0, .rhs = 0 } });
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
 
const lhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const rhs_index = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
self.exprs.items[binop_index] = .{ .builtinBin = .{ .name = @tagName(tags[@intFromEnum(inst)]), .lhs = lhs_index, .rhs = rhs_index } };
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .builtinBinIndex = binop_index },
};
},
.mul_add => {
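// `@mulAdd(T, a, b, c)`: the three value operands are walked; the displayed
// type is taken from the addend's typeRef.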
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.MulAdd, pl_node.payload_index);
 
const mul1: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.mulend1,
false,
call_ctx,
);
const mul2: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.mulend2,
false,
call_ctx,
);
const add: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.addend,
false,
call_ctx,
);
 
const mul1_index = self.exprs.items.len;
try self.exprs.append(self.arena, mul1.expr);
const mul2_index = self.exprs.items.len;
try self.exprs.append(self.arena, mul2.expr);
const add_index = self.exprs.items.len;
try self.exprs.append(self.arena, add.expr);
 
const type_index: usize = self.exprs.items.len;
try self.exprs.append(self.arena, add.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) });
 
return DocData.WalkResult{
.typeRef = add.typeRef,
.expr = .{
.mulAdd = .{
.mulend1 = mul1_index,
.mulend2 = mul2_index,
.addend = add_index,
.type = type_index,
},
},
};
},
.union_init => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.UnionInit, pl_node.payload_index);
 
const union_type: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.union_type,
false,
call_ctx,
);
const field_name: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.field_name,
false,
call_ctx,
);
const init: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.init,
false,
call_ctx,
);
 
const union_type_index = self.exprs.items.len;
try self.exprs.append(self.arena, union_type.expr);
const field_name_index = self.exprs.items.len;
try self.exprs.append(self.arena, field_name.expr);
const init_index = self.exprs.items.len;
try self.exprs.append(self.arena, init.expr);
 
return DocData.WalkResult{
.typeRef = union_type.expr,
.expr = .{
.unionInit = .{
.type = union_type_index,
.field = field_name_index,
.init = init_index,
},
},
};
},
.builtin_call => {
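// `@call(modifier, function, args)`.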
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.BuiltinCall, pl_node.payload_index);
 
const modifier: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.modifier,
false,
call_ctx,
);
 
const callee: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.callee,
false,
call_ctx,
);
 
const args: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.args,
false,
call_ctx,
);
 
const modifier_index = self.exprs.items.len;
try self.exprs.append(self.arena, modifier.expr);
const function_index = self.exprs.items.len;
try self.exprs.append(self.arena, callee.expr);
const args_index = self.exprs.items.len;
try self.exprs.append(self.arena, args.expr);
 
return DocData.WalkResult{
.expr = .{
.builtinCall = .{
.modifier = modifier_index,
.function = function_index,
.args = args_index,
},
},
};
},
.error_union_type => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .ErrorUnion = .{
.lhs = lhs.expr,
.rhs = rhs.expr,
} });
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .errorUnion = type_slot_index },
};
},
.merge_error_sets => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
 
const lhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
false,
call_ctx,
);
const rhs: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
false,
call_ctx,
);
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .ErrorUnion = .{
.lhs = lhs.expr,
.rhs = rhs.expr,
} });
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .errorSets = type_slot_index },
};
},
// .elem_type => {
// const un_node = data[@intFromEnum(inst)].un_node;
 
// const operand: DocData.WalkResult = try self.walkRef(
// file,
// parent_scope, parent_src,
// un_node.operand,
// false,
// );
 
// return operand;
// },
.ptr_type => {
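// Pointer type syntax, e.g. `*const u8`, `[*:0]const u8`, `[]align(4) T`.
// The optional operands (sentinel, align, addrspace, bit range) follow in
// `extra` gated by `ptr.flags`, so they must be decoded in this exact order.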
const ptr = data[@intFromEnum(inst)].ptr_type;
const extra = file.zir.extraData(Zir.Inst.PtrType, ptr.payload_index);
var extra_index = extra.end;
 
const elem_type_ref = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.elem_type,
false,
call_ctx,
);
 
// @check if `addrspace`, `bit_start` and `host_size` really need to be
// present in the JSON
var sentinel: ?DocData.Expr = null;
if (ptr.flags.has_sentinel) {
const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const ref_result = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
call_ctx,
);
sentinel = ref_result.expr;
extra_index += 1;
}
 
var @"align": ?DocData.Expr = null;
if (ptr.flags.has_align) {
const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const ref_result = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
call_ctx,
);
@"align" = ref_result.expr;
extra_index += 1;
}
var address_space: ?DocData.Expr = null;
if (ptr.flags.has_addrspace) {
const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const ref_result = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
call_ctx,
);
address_space = ref_result.expr;
extra_index += 1;
}
var bit_start: ?DocData.Expr = null;
if (ptr.flags.has_bit_range) {
const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const ref_result = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
call_ctx,
);
bit_start = ref_result.expr;
extra_index += 1;
}
 
var host_size: ?DocData.Expr = null;
if (ptr.flags.has_bit_range) {
const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const ref_result = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
call_ctx,
);
host_size = ref_result.expr;
}
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.Pointer = .{
.size = ptr.size,
.child = elem_type_ref.expr,
.has_align = ptr.flags.has_align,
.@"align" = @"align",
.has_addrspace = ptr.flags.has_addrspace,
.address_space = address_space,
.has_sentinel = ptr.flags.has_sentinel,
.sentinel = sentinel,
.is_mutable = ptr.flags.is_mutable,
.is_volatile = ptr.flags.is_volatile,
.has_bit_range = ptr.flags.has_bit_range,
.bit_start = bit_start,
.host_size = host_size,
},
});
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.array_type => {
const pl_node = data[@intFromEnum(inst)].pl_node;
 
const bin = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index).data;
const len = try self.walkRef(
file,
parent_scope,
parent_src,
bin.lhs,
false,
call_ctx,
);
const child = try self.walkRef(
file,
parent_scope,
parent_src,
bin.rhs,
false,
call_ctx,
);
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.Array = .{
.len = len.expr,
.child = child.expr,
},
});
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.array_type_sentinel => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.ArrayTypeSentinel, pl_node.payload_index);
const len = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.len,
false,
call_ctx,
);
const sentinel = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.sentinel,
false,
call_ctx,
);
const elem_type = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.elem_type,
false,
call_ctx,
);
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.Array = .{
.len = len.expr,
.child = elem_type.expr,
.sentinel = sentinel.expr,
},
});
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.array_init => {
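// A typed array initializer, e.g. `[3]u8{ 1, 2, 3 }`: operands[0] is the
// array type, the remaining operands are the element values.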
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
const array_data = try self.arena.alloc(usize, operands.len - 1);
 
std.debug.assert(operands.len > 0);
const array_type = try self.walkRef(
file,
parent_scope,
parent_src,
operands[0],
false,
call_ctx,
);
 
for (operands[1..], 0..) |op, idx| {
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
op,
false,
call_ctx,
);
const expr_index = self.exprs.items.len;
try self.exprs.append(self.arena, wr.expr);
array_data[idx] = expr_index;
}
 
return DocData.WalkResult{
.typeRef = array_type.expr,
.expr = .{ .array = array_data },
};
},
.array_init_anon => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
const array_data = try self.arena.alloc(usize, operands.len);
 
for (operands, 0..) |op, idx| {
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
op,
false,
call_ctx,
);
const expr_index = self.exprs.items.len;
try self.exprs.append(self.arena, wr.expr);
array_data[idx] = expr_index;
}
 
return DocData.WalkResult{
.typeRef = null,
.expr = .{ .array = array_data },
};
},
.array_init_ref => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
const array_data = try self.arena.alloc(usize, operands.len - 1);
 
std.debug.assert(operands.len > 0);
const array_type = try self.walkRef(
file,
parent_scope,
parent_src,
operands[0],
false,
call_ctx,
);
 
for (operands[1..], 0..) |op, idx| {
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
op,
false,
call_ctx,
);
const expr_index = self.exprs.items.len;
try self.exprs.append(self.arena, wr.expr);
array_data[idx] = expr_index;
}
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.Pointer = .{
.size = .One,
.child = array_type.expr,
},
});
 
const expr_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .array = array_data });
 
return DocData.WalkResult{
.typeRef = .{ .type = type_slot_index },
.expr = .{ .@"&" = expr_index },
};
},
.float => {
const float = data[@intFromEnum(inst)].float;
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_float_type) },
.expr = .{ .float = float },
};
},
// @check: In frontend I'm handling float128 with `.toFixed(2)`
.float128 => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Float128, pl_node.payload_index);
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_float_type) },
.expr = .{ .float128 = extra.data.get() },
};
},
.negate => {
const un_node = data[@intFromEnum(inst)].un_node;
 
var operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
need_type,
call_ctx,
);
switch (operand.expr) {
.int => |*int| int.negated = true,
.int_big => |*int_big| int_big.negated = true,
else => {
const un_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .unOp = .{ .param = 0 } });
const param_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
self.exprs.items[un_index] = .{
.unOp = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.param = param_index,
},
};
return DocData.WalkResult{
.typeRef = operand.typeRef,
.expr = .{ .unOpIndex = un_index },
};
},
}
return operand;
},
.size_of => {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .sizeOf = operand_index },
};
},
.bit_size_of => {
// not working correctly with `align()`
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
need_type,
call_ctx,
);
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
return DocData.WalkResult{
.typeRef = operand.typeRef,
.expr = .{ .bitSizeOf = operand_index },
};
},
.int_from_enum => {
// not working correctly with `align()`
const un_node = data[@intFromEnum(inst)].un_node;
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
const builtin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
self.exprs.items[builtin_index] = .{
.builtin = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.param = operand_index,
},
};
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .builtinIndex = builtin_index },
};
},
.switch_block => {
// WIP
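// Switch bodies are not walked; instead we capture the source text of the
// whole `switch` expression below and expose it as a comptimeExpr.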
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.SwitchBlock, pl_node.payload_index);
 
const switch_cond = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.operand,
false,
call_ctx,
);
const cond_index = self.exprs.items.len;
try self.exprs.append(self.arena, switch_cond.expr);
_ = cond_index;
 
// const ast_index = self.ast_nodes.items.len;
// const type_index = self.types.items.len - 1;
 
// const ast_line = self.ast_nodes.items[ast_index - 1];
 
// const sep = "=" ** 200;
// log.debug("{s}", .{sep});
// log.debug("SWITCH BLOCK", .{});
// log.debug("extra = {any}", .{extra});
// log.debug("outer_decl = {any}", .{self.types.items[type_index]});
// log.debug("ast_lines = {}", .{ast_line});
// log.debug("{s}", .{sep});
 
const switch_index = self.exprs.items.len;
 
// const src_loc = try self.srcLocInfo(file, pl_node.src_node, parent_src);
 
const switch_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
try self.exprs.append(self.arena, .{ .comptimeExpr = self.comptime_exprs.items.len });
try self.comptime_exprs.append(self.arena, .{ .code = switch_expr });
// try self.exprs.append(self.arena, .{ .switchOp = .{
// .cond_index = cond_index,
// .file_name = file.sub_file_path,
// .src = ast_index,
// .outer_decl = type_index,
// } });
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .switchIndex = switch_index },
};
},
 
.typeof => {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
need_type,
call_ctx,
);
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
return DocData.WalkResult{
.typeRef = operand.typeRef,
.expr = .{ .typeOf = operand_index },
};
},
.typeof_builtin => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Block, pl_node.payload_index);
const body = file.zir.extra[extra.end..][extra.data.body_len - 1];
const operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
data[body].@"break".operand,
false,
call_ctx,
);
 
const operand_index = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
return DocData.WalkResult{
.typeRef = operand.typeRef,
.expr = .{ .typeOf = operand_index },
};
},
.as_node, .as_shift_operand => {
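// A type coercion, e.g. `@as(T, x)` or a coercion driven by a result location.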
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.As, pl_node.payload_index);
 
// Skip the as_node if the destination type is a call instruction
if (extra.data.dest_type.toIndex()) |dti| {
var maybe_cc = call_ctx;
while (maybe_cc) |cc| : (maybe_cc = cc.prev) {
if (cc.inst == dti) {
return try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.operand,
false,
call_ctx,
);
}
}
}
 
const dest_type_walk = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.dest_type,
false,
call_ctx,
);
 
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.operand,
false,
call_ctx,
);
 
const operand_idx = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
const dest_type_idx = self.exprs.items.len;
try self.exprs.append(self.arena, dest_type_walk.expr);
 
// TODO: there's something wrong with how both `as` and `WalkResult`
// try to store type information.
return DocData.WalkResult{
.typeRef = dest_type_walk.expr,
.expr = .{
.as = .{
.typeRefArg = dest_type_idx,
.exprArg = operand_idx,
},
},
};
},
.optional_type => {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
const operand_idx = self.types.items.len;
try self.types.append(self.arena, .{
.Optional = .{ .name = "?TODO", .child = operand.expr },
});
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = operand_idx },
};
},
.decl_val, .decl_ref => {
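// An identifier that resolves to a decl in an enclosing scope, e.g. `foo` in `const x = foo;`.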
const str_tok = data[@intFromEnum(inst)].str_tok;
const decl_status = parent_scope.resolveDeclName(str_tok.start, file, inst.toOptional());
return DocData.WalkResult{
.expr = .{ .declRef = decl_status },
};
},
.field_val, .field_ptr => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Field, pl_node.payload_index);
 
var path: std.ArrayListUnmanaged(DocData.Expr) = .{};
try path.append(self.arena, .{
.declName = file.zir.nullTerminatedString(extra.data.field_name_start),
});
 
// Collect into `path` the name of each decl that we encounter
// as we navigate through all the field_* instructions
const lhs_ref = blk: {
var lhs_extra = extra;
while (true) {
const lhs = @intFromEnum(lhs_extra.data.lhs.toIndex() orelse {
break :blk lhs_extra.data.lhs;
});
 
if (tags[lhs] != .field_val and
tags[lhs] != .field_ptr)
{
break :blk lhs_extra.data.lhs;
}
 
lhs_extra = file.zir.extraData(
Zir.Inst.Field,
data[lhs].pl_node.payload_index,
);
 
try path.append(self.arena, .{
.declName = file.zir.nullTerminatedString(lhs_extra.data.field_name_start),
});
}
};
 
// If the lhs is a `call` instruction, it means that we're inside
// a function call and we're referring to one of its arguments.
// We can't just blindly analyze the instruction or we will
// start recursing forever.
// TODO: add proper resolution of the container type for `calls`
// TODO: we're testing lhs as an instruction twice
// (above and below this todo); maybe a cleaner solution would
// avoid that.
// TODO: double check that we really don't need type info here
 
const wr = blk: {
if (lhs_ref.toIndex()) |lhs_inst| switch (tags[@intFromEnum(lhs_inst)]) {
.call, .field_call => {
break :blk DocData.WalkResult{
.expr = .{
.comptimeExpr = 0,
},
};
},
else => {},
};
 
break :blk try self.walkRef(
file,
parent_scope,
parent_src,
lhs_ref,
false,
call_ctx,
);
};
try path.append(self.arena, wr.expr);
 
// This way the data in `path` has the same ordering as the ref
// path has in the source text: most general component first.
std.mem.reverse(DocData.Expr, path.items);
 
// Right now, every element of `path` is a string except its first
// element (at index 0). We're now going to attempt to resolve each
// string. If one or more components in this path are not yet fully
// analyzed, the path will only be solved partially, but we expect
// to eventually solve it fully (or give up in the case of a
// comptimeExpr). This means that:
// - (1) Paths can temporarily be only partially analyzed, so any code
// that needs to know where a ref path leads to needs to
// implement support for laziness (see self.pending_ref_paths)
// - (2) Paths can sometimes never resolve fully. This means that
// any value that depends on them will have to become a
// comptimeExpr.
try self.tryResolveRefPath(file, inst, path.items);
return DocData.WalkResult{ .expr = .{ .refPath = path.items } };
},
.int_type => {
const int_type = data[@intFromEnum(inst)].int_type;
const sign = if (int_type.signedness == .unsigned) "u" else "i";
const bits = int_type.bit_count;
const name = try std.fmt.allocPrint(self.arena, "{s}{}", .{ sign, bits });
 
try self.types.append(self.arena, .{
.Int = .{ .name = name },
});
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = self.types.items.len - 1 },
};
},
.block => {
const res = DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .comptimeExpr = self.comptime_exprs.items.len },
};
const pl_node = data[@intFromEnum(inst)].pl_node;
const block_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
try self.comptime_exprs.append(self.arena, .{
.code = block_expr,
});
return res;
},
.block_inline => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Block, pl_node.payload_index);
return self.walkInlineBody(
file,
parent_scope,
try self.srcLocInfo(file, pl_node.src_node, parent_src),
parent_src,
file.zir.bodySlice(extra.end, extra.data.body_len),
need_type,
call_ctx,
);
},
.break_inline => {
const @"break" = data[@intFromEnum(inst)].@"break";
return try self.walkRef(
file,
parent_scope,
parent_src,
@"break".operand,
need_type,
call_ctx,
);
},
.struct_init => {
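// A typed struct initializer, e.g. `T{ .a = 1 }`. The container type is
// recovered from the first field's `struct_init_field_type` instruction.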
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.StructInit, pl_node.payload_index);
const field_vals = try self.arena.alloc(
DocData.Expr.FieldVal,
extra.data.fields_len,
);
 
var type_ref: DocData.Expr = undefined;
var idx = extra.end;
for (field_vals) |*fv| {
const init_extra = file.zir.extraData(Zir.Inst.StructInit.Item, idx);
defer idx = init_extra.end;
 
const field_name = blk: {
const field_inst_index = @intFromEnum(init_extra.data.field_type);
if (tags[field_inst_index] != .struct_init_field_type) unreachable;
const field_pl_node = data[field_inst_index].pl_node;
const field_extra = file.zir.extraData(
Zir.Inst.FieldType,
field_pl_node.payload_index,
);
const field_src = try self.srcLocInfo(
file,
field_pl_node.src_node,
parent_src,
);
 
// On the first iteration, use the field info to find out the struct type
if (idx == extra.end) {
const wr = try self.walkRef(
file,
parent_scope,
field_src,
field_extra.data.container_type,
false,
call_ctx,
);
type_ref = wr.expr;
}
break :blk file.zir.nullTerminatedString(field_extra.data.name_start);
};
const value = try self.walkRef(
file,
parent_scope,
parent_src,
init_extra.data.init,
need_type,
call_ctx,
);
const exprIdx = self.exprs.items.len;
try self.exprs.append(self.arena, value.expr);
var typeRefIdx: ?usize = null;
if (value.typeRef) |ref| {
typeRefIdx = self.exprs.items.len;
try self.exprs.append(self.arena, ref);
}
fv.* = .{
.name = field_name,
.val = .{
.typeRef = typeRefIdx,
.expr = exprIdx,
},
};
}
 
return DocData.WalkResult{
.typeRef = type_ref,
.expr = .{ .@"struct" = field_vals },
};
},
.struct_init_empty,
.struct_init_empty_result,
=> {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
return DocData.WalkResult{
.typeRef = operand.expr,
.expr = .{ .@"struct" = &.{} },
};
},
.struct_init_empty_ref_result => {
const un_node = data[@intFromEnum(inst)].un_node;
 
const operand: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
false,
call_ctx,
);
 
const struct_init_idx = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .@"struct" = &.{} });
 
return DocData.WalkResult{
.typeRef = operand.expr,
.expr = .{ .@"&" = struct_init_idx },
};
},
.struct_init_anon => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.StructInitAnon, pl_node.payload_index);
 
const field_vals = try self.arena.alloc(
DocData.Expr.FieldVal,
extra.data.fields_len,
);
 
var idx = extra.end;
for (field_vals) |*fv| {
const init_extra = file.zir.extraData(Zir.Inst.StructInitAnon.Item, idx);
const field_name = file.zir.nullTerminatedString(init_extra.data.field_name);
const value = try self.walkRef(
file,
parent_scope,
parent_src,
init_extra.data.init,
need_type,
call_ctx,
);
 
const exprIdx = self.exprs.items.len;
try self.exprs.append(self.arena, value.expr);
var typeRefIdx: ?usize = null;
if (value.typeRef) |ref| {
typeRefIdx = self.exprs.items.len;
try self.exprs.append(self.arena, ref);
}
 
fv.* = .{
.name = field_name,
.val = .{
.typeRef = typeRefIdx,
.expr = exprIdx,
},
};
 
idx = init_extra.end;
}
 
return DocData.WalkResult{
.expr = .{ .@"struct" = field_vals },
};
},
.error_set_decl => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.ErrorSetDecl, pl_node.payload_index);
const fields = try self.arena.alloc(
DocData.Type.Field,
extra.data.fields_len,
);
var idx = extra.end;
for (fields) |*f| {
const name = file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[idx]));
idx += 1;
 
const docs = file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[idx]));
idx += 1;
 
f.* = .{
.name = name,
.docs = docs,
};
}
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.ErrorSet = .{
.name = "todo errset",
.fields = fields,
},
});
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.param_anytype, .param_anytype_comptime => {
// @check if .param_anytype_comptime can be here
// Analysis of anytype function params happens in `.func`.
// This switch case handles the case where an expression depends
// on an anytype parameter. E.g.: `fn foo(bar: anytype) @TypeOf(bar)`.
// This means that we're looking at a generic expression.
const str_tok = data[@intFromEnum(inst)].str_tok;
const name = str_tok.get(file.zir);
const cte_slot_index = self.comptime_exprs.items.len;
try self.comptime_exprs.append(self.arena, .{
.code = name,
});
return DocData.WalkResult{ .expr = .{ .comptimeExpr = cte_slot_index } };
},
.param, .param_comptime => {
// See .param_anytype for more information.
const pl_tok = data[@intFromEnum(inst)].pl_tok;
const extra = file.zir.extraData(Zir.Inst.Param, pl_tok.payload_index);
const name = file.zir.nullTerminatedString(extra.data.name);
 
const cte_slot_index = self.comptime_exprs.items.len;
try self.comptime_exprs.append(self.arena, .{
.code = name,
});
return DocData.WalkResult{ .expr = .{ .comptimeExpr = cte_slot_index } };
},
.call => {
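// A function call, e.g. `foo(a, b)`. While the arguments are walked, this
// instruction is marked as repurposed so that `as_node`s pointing back at
// the call (see `CallContext`) don't recurse forever.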
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Call, pl_node.payload_index);
 
const callee = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.callee,
need_type,
call_ctx,
);
 
const args_len = extra.data.flags.args_len;
var args = try self.arena.alloc(DocData.Expr, args_len);
const body = file.zir.extra[extra.end..];
 
try self.repurposed_insts.put(self.arena, inst, {});
defer _ = self.repurposed_insts.remove(inst);
 
var i: usize = 0;
while (i < args_len) : (i += 1) {
const arg_end = file.zir.extra[extra.end + i];
const break_index = body[arg_end - 1];
const ref = data[break_index].@"break".operand;
// TODO: consider toggling need_type to true if we ever want
// to show discrepancies between the types of provided
// arguments and the types declared in the function
// signature for its parameters.
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
&.{
.inst = inst,
.prev = call_ctx,
},
);
args[i] = wr.expr;
}
 
const cte_slot_index = self.comptime_exprs.items.len;
try self.comptime_exprs.append(self.arena, .{
.code = "func call",
});
 
const call_slot_index = self.calls.items.len;
try self.calls.append(self.arena, .{
.func = callee.expr,
.args = args,
.ret = .{ .comptimeExpr = cte_slot_index },
});
 
return DocData.WalkResult{
.typeRef = if (callee.typeRef) |tr| switch (tr) {
.type => |func_type_idx| switch (self.types.items[func_type_idx]) {
.Fn => |func| func.ret,
else => blk: {
printWithContext(
file,
inst,
"unexpected callee type in walkInstruction.call: `{s}`\n",
.{@tagName(self.types.items[func_type_idx])},
);
 
break :blk null;
},
},
else => null,
} else null,
.expr = .{ .call = call_slot_index },
};
},
.field_call => {
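// A method-style call, e.g. `obj.method(a, b)`. The callee is modeled as a
// ref path: the object (or its type, when known) followed by the field name.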
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.FieldCall, pl_node.payload_index);
 
const obj_ptr = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.obj_ptr,
need_type,
call_ctx,
);
 
var field_call = try self.arena.alloc(DocData.Expr, 2);
 
if (obj_ptr.typeRef) |ref| {
field_call[0] = ref;
} else {
field_call[0] = obj_ptr.expr;
}
field_call[1] = .{ .declName = file.zir.nullTerminatedString(extra.data.field_name_start) };
try self.tryResolveRefPath(file, inst, field_call);
 
const args_len = extra.data.flags.args_len;
var args = try self.arena.alloc(DocData.Expr, args_len);
const body = file.zir.extra[extra.end..];
 
try self.repurposed_insts.put(self.arena, inst, {});
defer _ = self.repurposed_insts.remove(inst);
 
var i: usize = 0;
while (i < args_len) : (i += 1) {
const arg_end = file.zir.extra[extra.end + i];
const break_index = body[arg_end - 1];
const ref = data[break_index].@"break".operand;
// TODO: consider toggling need_type to true if we ever want
// to show discrepancies between the types of provided
// arguments and the types declared in the function
// signature for its parameters.
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
ref,
false,
&.{
.inst = inst,
.prev = call_ctx,
},
);
args[i] = wr.expr;
}
 
const cte_slot_index = self.comptime_exprs.items.len;
try self.comptime_exprs.append(self.arena, .{
.code = "field call",
});
 
const call_slot_index = self.calls.items.len;
try self.calls.append(self.arena, .{
.func = .{ .refPath = field_call },
.args = args,
.ret = .{ .comptimeExpr = cte_slot_index },
});
 
return DocData.WalkResult{
.expr = .{ .call = call_slot_index },
};
},
.func, .func_inferred => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
const result = self.analyzeFunction(
file,
parent_scope,
parent_src,
inst,
self_ast_node_index,
type_slot_index,
tags[@intFromEnum(inst)] == .func_inferred,
call_ctx,
);
 
return result;
},
.func_fancy => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
const result = self.analyzeFancyFunction(
file,
parent_scope,
parent_src,
inst,
self_ast_node_index,
type_slot_index,
call_ctx,
);
 
return result;
},
.optional_payload_safe, .optional_payload_unsafe => {
const un_node = data[@intFromEnum(inst)].un_node;
const operand = try self.walkRef(
file,
parent_scope,
parent_src,
un_node.operand,
need_type,
call_ctx,
);
const optional_idx = self.exprs.items.len;
try self.exprs.append(self.arena, operand.expr);
 
var typeRef: ?DocData.Expr = null;
if (operand.typeRef) |ref| {
switch (ref) {
.type => |t_index| {
const t = self.types.items[t_index];
switch (t) {
.Optional => |opt| typeRef = opt.child,
else => {
printWithContext(file, inst, "Invalid type for optional_payload_*: {}\n", .{t});
},
}
},
else => {},
}
}
 
return DocData.WalkResult{
.typeRef = typeRef,
.expr = .{ .optionalPayload = optional_idx },
};
},
.elem_val_node => {
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
const lhs = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.lhs,
need_type,
call_ctx,
);
const rhs = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.rhs,
need_type,
call_ctx,
);
const lhs_idx = self.exprs.items.len;
try self.exprs.append(self.arena, lhs.expr);
const rhs_idx = self.exprs.items.len;
try self.exprs.append(self.arena, rhs.expr);
return DocData.WalkResult{
.expr = .{
.elemVal = .{
.lhs = lhs_idx,
.rhs = rhs_idx,
},
},
};
},
.extended => {
const extended = data[@intFromEnum(inst)].extended;
switch (extended.opcode) {
else => {
printWithContext(
file,
inst,
"TODO: implement `walkInstruction.extended` for {s}",
.{@tagName(extended.opcode)},
);
return self.cteTodo(@tagName(extended.opcode));
},
.typeof_peer => {
// Zir says it's a NodeMultiOp but in this case it's TypeOfPeer
const extra = file.zir.extraData(Zir.Inst.TypeOfPeer, extended.operand);
const args = file.zir.refSlice(extra.end, extended.small);
const array_data = try self.arena.alloc(usize, args.len);
 
var array_type: ?DocData.Expr = null;
for (args, 0..) |arg, idx| {
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
arg,
idx == 0,
call_ctx,
);
if (idx == 0) {
array_type = wr.typeRef;
}
 
const expr_index = self.exprs.items.len;
try self.exprs.append(self.arena, wr.expr);
array_data[idx] = expr_index;
}
 
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.Array = .{
.len = .{
.int = .{
.value = args.len,
.negated = false,
},
},
.child = .{ .type = 0 },
},
});
const result = DocData.WalkResult{
.typeRef = .{ .type = type_slot_index },
.expr = .{ .typeOf_peer = array_data },
};
 
return result;
},
.opaque_decl => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
var scope: Scope = .{
.parent = parent_scope,
.enclosing_type = type_slot_index,
};
 
const small: Zir.Inst.OpaqueDecl.Small = @bitCast(extended.small);
const extra = file.zir.extraData(Zir.Inst.OpaqueDecl, extended.operand);
var extra_index: usize = extra.end;
 
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
 
const captures_len = if (small.has_captures_len) blk: {
const captures_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk captures_len;
} else 0;
 
if (small.has_decls_len) extra_index += 1;
 
scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
extra_index += captures_len;
 
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
 
extra_index = try self.analyzeAllDecls(
file,
&scope,
inst,
src_info,
&decl_indexes,
&priv_decl_indexes,
call_ctx,
);
 
self.types.items[type_slot_index] = .{
.Opaque = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.parent_container = parent_scope.enclosing_type,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {
try self.tryResolveRefPath(
resume_info.file,
inst,
resume_info.ref_path,
);
}
 
_ = self.ref_paths_pending_on_types.remove(type_slot_index);
// TODO: we should deallocate the arraylist that holds all the
// decl paths. not doing it now since it's arena-allocated
// anyway, but maybe we should put it elsewhere.
}
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.variable => {
const extra = file.zir.extraData(Zir.Inst.ExtendedVar, extended.operand);
 
const small = @as(Zir.Inst.ExtendedVar.Small, @bitCast(extended.small));
var extra_index: usize = extra.end;
if (small.has_lib_name) extra_index += 1;
if (small.has_align) extra_index += 1;
 
const var_type = try self.walkRef(
file,
parent_scope,
parent_src,
extra.data.var_type,
need_type,
call_ctx,
);
 
var value: DocData.WalkResult = .{
.typeRef = var_type.expr,
.expr = .{ .undefined = .{} },
};
 
if (small.has_init) {
const var_init_ref = @as(Ref, @enumFromInt(file.zir.extra[extra_index]));
const var_init = try self.walkRef(
file,
parent_scope,
parent_src,
var_init_ref,
need_type,
call_ctx,
);
value.expr = var_init.expr;
value.typeRef = var_init.typeRef;
}
 
return value;
},
.union_decl => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
var scope: Scope = .{
.parent = parent_scope,
.enclosing_type = type_slot_index,
};
 
const small = @as(Zir.Inst.UnionDecl.Small, @bitCast(extended.small));
const extra = file.zir.extraData(Zir.Inst.UnionDecl, extended.operand);
var extra_index: usize = extra.end;
 
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
 
// We delay analysis because union tags can refer to
// decls defined inside the union itself.
const tag_type_ref: ?Ref = if (small.has_tag_type) blk: {
const tag_type = file.zir.extra[extra_index];
extra_index += 1;
const tag_ref = @as(Ref, @enumFromInt(tag_type));
break :blk tag_ref;
} else null;
 
const captures_len = if (small.has_captures_len) blk: {
const captures_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk captures_len;
} else 0;
 
const body_len = if (small.has_body_len) blk: {
const body_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk body_len;
} else 0;
 
const fields_len = if (small.has_fields_len) blk: {
const fields_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk fields_len;
} else 0;
 
const layout_expr: ?DocData.Expr = switch (small.layout) {
.Auto => null,
else => .{ .enumLiteral = @tagName(small.layout) },
};
 
if (small.has_decls_len) extra_index += 1;
 
scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
extra_index += captures_len;
 
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
 
extra_index = try self.analyzeAllDecls(
file,
&scope,
inst,
src_info,
&decl_indexes,
&priv_decl_indexes,
call_ctx,
);
 
// Analyze the tag once all decls have been analyzed
const tag_type = if (tag_type_ref) |tt_ref| (try self.walkRef(
file,
&scope,
parent_src,
tt_ref,
false,
call_ctx,
)).expr else null;
 
// Fields
extra_index += body_len;
 
var field_type_refs = try std.ArrayListUnmanaged(DocData.Expr).initCapacity(
self.arena,
fields_len,
);
var field_name_indexes = try std.ArrayListUnmanaged(usize).initCapacity(
self.arena,
fields_len,
);
try self.collectUnionFieldInfo(
file,
&scope,
src_info,
fields_len,
&field_type_refs,
&field_name_indexes,
extra_index,
call_ctx,
);
 
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
 
self.types.items[type_slot_index] = .{
.Union = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.fields = field_type_refs.items,
.tag = tag_type,
.auto_enum = small.auto_enum_tag,
.parent_container = parent_scope.enclosing_type,
.layout = layout_expr,
},
};
 
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {
try self.tryResolveRefPath(
resume_info.file,
inst,
resume_info.ref_path,
);
}
 
_ = self.ref_paths_pending_on_types.remove(type_slot_index);
// TODO: we should deallocate the arraylist that holds all the
// decl paths. not doing it now since it's arena-allocated
// anyway, but maybe we should put it elsewhere.
}
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.enum_decl => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
var scope: Scope = .{
.parent = parent_scope,
.enclosing_type = type_slot_index,
};
 
const small = @as(Zir.Inst.EnumDecl.Small, @bitCast(extended.small));
const extra = file.zir.extraData(Zir.Inst.EnumDecl, extended.operand);
var extra_index: usize = extra.end;
 
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
 
const tag_type: ?DocData.Expr = if (small.has_tag_type) blk: {
const tag_type = file.zir.extra[extra_index];
extra_index += 1;
const tag_ref = @as(Ref, @enumFromInt(tag_type));
const wr = try self.walkRef(
file,
parent_scope,
parent_src,
tag_ref,
false,
call_ctx,
);
break :blk wr.expr;
} else null;
 
const captures_len = if (small.has_captures_len) blk: {
const captures_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk captures_len;
} else 0;
 
const body_len = if (small.has_body_len) blk: {
const body_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk body_len;
} else 0;
 
const fields_len = if (small.has_fields_len) blk: {
const fields_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk fields_len;
} else 0;
 
if (small.has_decls_len) extra_index += 1;
 
scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
extra_index += captures_len;
 
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
 
extra_index = try self.analyzeAllDecls(
file,
&scope,
inst,
src_info,
&decl_indexes,
&priv_decl_indexes,
call_ctx,
);
 
// const body = file.zir.extra[extra_index..][0..body_len];
extra_index += body_len;
 
var field_name_indexes: std.ArrayListUnmanaged(usize) = .{};
var field_values: std.ArrayListUnmanaged(?DocData.Expr) = .{};
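// Decode the enum fields: a bit bag (one `has_value` bit per field,
// 32 fields per u32) is followed, for each field, by its name index,
// doc comment index, and, when present, its value ref.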
{
var bit_bag_idx = extra_index;
var cur_bit_bag: u32 = undefined;
extra_index += std.math.divCeil(usize, fields_len, 32) catch unreachable;
 
var idx: usize = 0;
while (idx < fields_len) : (idx += 1) {
if (idx % 32 == 0) {
cur_bit_bag = file.zir.extra[bit_bag_idx];
bit_bag_idx += 1;
}
 
const has_value = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
 
const field_name_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
extra_index += 1;
 
const doc_comment_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
extra_index += 1;
 
const value_expr: ?DocData.Expr = if (has_value) blk: {
const value_ref = file.zir.extra[extra_index];
extra_index += 1;
const value = try self.walkRef(
file,
&scope,
src_info,
@as(Ref, @enumFromInt(value_ref)),
false,
call_ctx,
);
break :blk value.expr;
} else null;
try field_values.append(self.arena, value_expr);
 
const field_name = file.zir.nullTerminatedString(field_name_index);
 
try field_name_indexes.append(self.arena, self.ast_nodes.items.len);
const doc_comment: ?[]const u8 = if (doc_comment_index != .empty)
file.zir.nullTerminatedString(doc_comment_index)
else
null;
try self.ast_nodes.append(self.arena, .{
.name = field_name,
.docs = doc_comment,
});
}
}
 
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
 
self.types.items[type_slot_index] = .{
.Enum = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.tag = tag_type,
.values = field_values.items,
.nonexhaustive = small.nonexhaustive,
.parent_container = parent_scope.enclosing_type,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {
try self.tryResolveRefPath(
resume_info.file,
inst,
resume_info.ref_path,
);
}
 
_ = self.ref_paths_pending_on_types.remove(type_slot_index);
// TODO: we should deallocate the arraylist that holds all the
// decl paths. not doing it now since it's arena-allocated
// anyway, but maybe we should put it elsewhere.
}
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.struct_decl => {
const type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{ .Unanalyzed = .{} });
 
var scope: Scope = .{
.parent = parent_scope,
.enclosing_type = type_slot_index,
};
 
const small = @as(Zir.Inst.StructDecl.Small, @bitCast(extended.small));
const extra = file.zir.extraData(Zir.Inst.StructDecl, extended.operand);
var extra_index: usize = extra.end;
 
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
 
const captures_len = if (small.has_captures_len) blk: {
const captures_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk captures_len;
} else 0;
 
const fields_len = if (small.has_fields_len) blk: {
const fields_len = file.zir.extra[extra_index];
extra_index += 1;
break :blk fields_len;
} else 0;
 
// We don't care about decls yet
if (small.has_decls_len) extra_index += 1;
 
scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
extra_index += captures_len;
 
var backing_int: ?DocData.Expr = null;
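// A (packed) struct may declare a backing integer, encoded either as a
// direct ref (body length 0) or as an inline body ending in a break.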
if (small.has_backing_int) {
const backing_int_body_len = file.zir.extra[extra_index];
extra_index += 1; // backing_int_body_len
if (backing_int_body_len == 0) {
const backing_int_ref = @as(Ref, @enumFromInt(file.zir.extra[extra_index]));
const backing_int_res = try self.walkRef(
file,
&scope,
src_info,
backing_int_ref,
true,
call_ctx,
);
backing_int = backing_int_res.expr;
extra_index += 1; // backing_int_ref
} else {
const backing_int_body = file.zir.bodySlice(extra_index, backing_int_body_len);
const break_inst = backing_int_body[backing_int_body.len - 1];
const operand = data[@intFromEnum(break_inst)].@"break".operand;
const backing_int_res = try self.walkRef(
file,
&scope,
src_info,
operand,
true,
call_ctx,
);
backing_int = backing_int_res.expr;
extra_index += backing_int_body_len; // backing_int_body_inst
}
}
 
const layout_expr: ?DocData.Expr = switch (small.layout) {
.Auto => null,
else => .{ .enumLiteral = @tagName(small.layout) },
};
 
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
 
extra_index = try self.analyzeAllDecls(
file,
&scope,
inst,
src_info,
&decl_indexes,
&priv_decl_indexes,
call_ctx,
);
 
// Inside field init bodies, the struct decl instruction is used to refer to the
// field type during the second pass of analysis.
try self.repurposed_insts.put(self.arena, inst, {});
defer _ = self.repurposed_insts.remove(inst);
 
var field_type_refs: std.ArrayListUnmanaged(DocData.Expr) = .{};
var field_default_refs: std.ArrayListUnmanaged(?DocData.Expr) = .{};
var field_name_indexes: std.ArrayListUnmanaged(usize) = .{};
try self.collectStructFieldInfo(
file,
&scope,
src_info,
fields_len,
&field_type_refs,
&field_default_refs,
&field_name_indexes,
extra_index,
small.is_tuple,
call_ctx,
);
 
self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
 
self.types.items[type_slot_index] = .{
.Struct = .{
.name = "todo_name",
.src = self_ast_node_index,
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.field_types = field_type_refs.items,
.field_defaults = field_default_refs.items,
.is_tuple = small.is_tuple,
.backing_int = backing_int,
.line_number = self.ast_nodes.items[self_ast_node_index].line,
.parent_container = parent_scope.enclosing_type,
.layout = layout_expr,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
for (paths.items) |resume_info| {
try self.tryResolveRefPath(
resume_info.file,
inst,
resume_info.ref_path,
);
}
 
_ = self.ref_paths_pending_on_types.remove(type_slot_index);
// TODO: we should deallocate the arraylist that holds all the
// decl paths. not doing it now since it's arena-allocated
// anyway, but maybe we should put it elsewhere.
}
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
},
.this => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{
.this = parent_scope.enclosing_type.?,
// We know enclosing_type is always present
// because it's only null for the top-level
// struct instruction of a file.
},
};
},
.int_from_error,
.error_from_int,
.reify,
=> {
const extra = file.zir.extraData(Zir.Inst.UnNode, extended.operand).data;
const bin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
const param = try self.walkRef(
file,
parent_scope,
parent_src,
extra.operand,
false,
call_ctx,
);
 
const param_index = self.exprs.items.len;
try self.exprs.append(self.arena, param.expr);
 
self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(extended.opcode), .param = param_index } };
 
return DocData.WalkResult{
.typeRef = param.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .builtinIndex = bin_index },
};
},
.work_item_id,
.work_group_size,
.work_group_id,
=> {
const extra = file.zir.extraData(Zir.Inst.UnNode, extended.operand).data;
const bin_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
const param = try self.walkRef(
file,
parent_scope,
parent_src,
extra.operand,
false,
call_ctx,
);
 
const param_index = self.exprs.items.len;
try self.exprs.append(self.arena, param.expr);
 
self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(extended.opcode), .param = param_index } };
 
return DocData.WalkResult{
// from docs we know they return u32
.typeRef = .{ .type = @intFromEnum(Ref.u32_type) },
.expr = .{ .builtinIndex = bin_index },
};
},
.cmpxchg => {
const extra = file.zir.extraData(Zir.Inst.Cmpxchg, extended.operand).data;
 
const last_type_index = self.exprs.items.len;
const last_type = self.exprs.items[last_type_index - 1];
const type_index = self.exprs.items.len;
try self.exprs.append(self.arena, last_type);
 
const ptr_index = self.exprs.items.len;
const ptr: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.ptr,
false,
call_ctx,
);
try self.exprs.append(self.arena, ptr.expr);
 
const expected_value_index = self.exprs.items.len;
const expected_value: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.expected_value,
false,
call_ctx,
);
try self.exprs.append(self.arena, expected_value.expr);
 
const new_value_index = self.exprs.items.len;
const new_value: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.new_value,
false,
call_ctx,
);
try self.exprs.append(self.arena, new_value.expr);
 
const success_order_index = self.exprs.items.len;
const success_order: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.success_order,
false,
call_ctx,
);
try self.exprs.append(self.arena, success_order.expr);
 
const failure_order_index = self.exprs.items.len;
const failure_order: DocData.WalkResult = try self.walkRef(
file,
parent_scope,
parent_src,
extra.failure_order,
false,
call_ctx,
);
try self.exprs.append(self.arena, failure_order.expr);
 
const cmpxchg_index = self.exprs.items.len;
try self.exprs.append(self.arena, .{ .cmpxchg = .{
.name = @tagName(tags[@intFromEnum(inst)]),
.type = type_index,
.ptr = ptr_index,
.expected_value = expected_value_index,
.new_value = new_value_index,
.success_order = success_order_index,
.failure_order = failure_order_index,
} });
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .cmpxchgIndex = cmpxchg_index },
};
},
.closure_get => {
const captured, const scope = parent_scope.getCapture(extended.small);
switch (captured) {
.inst => |cap_inst| return self.walkInstruction(file, scope, parent_src, cap_inst, need_type, call_ctx),
.decl => |str| {
const decl_status = parent_scope.resolveDeclName(str, file, inst.toOptional());
return .{ .expr = .{ .declRef = decl_status } };
},
}
},
}
},
}
}
 
/// Called by `walkInstruction` when encountering a container type.
/// Iterates over all decl definitions in its body and also analyzes each
/// decl's body recursively by calling into `walkInstruction`.
///
/// Does not append to `self.decls` directly because `walkInstruction`
/// is expected to scan ahead over all decls and reserve `body_len`
/// slots in `self.decls`, which are then filled out by this function.
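///
/// Decls are processed in four passes: first all decl names are registered
/// in the scope as `.Pending`, then `usingnamespace` decls are analyzed,
/// then all remaining named decls, and finally `decltest`s are attached to
/// the decls they document.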
fn analyzeAllDecls(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_inst: Zir.Inst.Index,
parent_src: SrcLocInfo,
decl_indexes: *std.ArrayListUnmanaged(usize),
priv_decl_indexes: *std.ArrayListUnmanaged(usize),
call_ctx: ?*const CallContext,
) AutodocErrors!usize {
const first_decl_indexes_slot = decl_indexes.items.len;
const original_it = file.zir.declIterator(parent_inst);
 
// First loop to discover decl names
{
var it = original_it;
while (it.next()) |zir_index| {
const declaration, _ = file.zir.getDeclaration(zir_index);
if (declaration.name.isNamedTest(file.zir)) continue;
const decl_name = declaration.name.toString(file.zir) orelse continue;
try scope.insertDeclRef(self.arena, decl_name, .Pending);
}
}
 
// Second loop to analyze `usingnamespace` decls
{
var it = original_it;
var decl_indexes_slot = first_decl_indexes_slot;
while (it.next()) |zir_index| : (decl_indexes_slot += 1) {
const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
if (extra.data.name != .@"usingnamespace") continue;
try self.analyzeUsingnamespaceDecl(
file,
scope,
try self.srcLocInfo(file, pl_node.src_node, parent_src),
decl_indexes,
priv_decl_indexes,
extra.data,
@intCast(extra.end),
call_ctx,
);
}
}
 
// Third loop to analyze all remaining decls
{
var it = original_it;
while (it.next()) |zir_index| {
const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
switch (extra.data.name) {
.@"comptime", .@"usingnamespace", .unnamed_test, .decltest => continue,
_ => if (extra.data.name.isNamedTest(file.zir)) continue,
}
try self.analyzeDecl(
file,
scope,
try self.srcLocInfo(file, pl_node.src_node, parent_src),
decl_indexes,
priv_decl_indexes,
zir_index,
extra.data,
@intCast(extra.end),
call_ctx,
);
}
}
 
// Fourth loop to analyze decltests
var it = original_it;
while (it.next()) |zir_index| {
const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
if (extra.data.name != .decltest) continue;
try self.analyzeDecltest(
file,
scope,
try self.srcLocInfo(file, pl_node.src_node, parent_src),
extra.data,
@intCast(extra.end),
);
}
 
return it.extra_index;
}
 
fn walkInlineBody(
autodoc: *Autodoc,
file: *File,
scope: *Scope,
block_src: SrcLocInfo,
parent_src: SrcLocInfo,
body: []const Zir.Inst.Index,
need_type: bool,
call_ctx: ?*const CallContext,
) AutodocErrors!DocData.WalkResult {
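// Walks the operand of the body's final `break_inline`. If the body instead
// ends in `condbr_inline`, the result is not statically resolvable, so we
// record the block's source text as a comptimeExpr and return that.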
const tags = file.zir.instructions.items(.tag);
const break_inst = switch (tags[@intFromEnum(body[body.len - 1])]) {
.condbr_inline => {
// Unresolvable.
const res: DocData.WalkResult = .{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .comptimeExpr = autodoc.comptime_exprs.items.len },
};
const source = (try file.getTree(autodoc.zcu.gpa)).getNodeSource(block_src.src_node);
try autodoc.comptime_exprs.append(autodoc.arena, .{
.code = source,
});
return res;
},
.break_inline => body[body.len - 1],
else => unreachable,
};
const break_data = file.zir.instructions.items(.data)[@intFromEnum(break_inst)].@"break";
return autodoc.walkRef(file, scope, parent_src, break_data.operand, need_type, call_ctx);
}
 
// Analyzes a single named decl (public or private) and records it in `self.decls`.
fn analyzeDecl(
self: *Autodoc,
file: *File,
scope: *Scope,
decl_src: SrcLocInfo,
decl_indexes: *std.ArrayListUnmanaged(usize),
priv_decl_indexes: *std.ArrayListUnmanaged(usize),
decl_inst: Zir.Inst.Index,
declaration: Zir.Inst.Declaration,
extra_index: u32,
call_ctx: ?*const CallContext,
) AutodocErrors!void {
const bodies = declaration.getBodies(extra_index, file.zir);
const name = file.zir.nullTerminatedString(declaration.name.toString(file.zir).?);
 
const doc_comment: ?[]const u8 = if (declaration.flags.has_doc_comment)
file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[extra_index]))
else
null;
 
// astnode
const ast_node_index = idx: {
const idx = self.ast_nodes.items.len;
try self.ast_nodes.append(self.arena, .{
.file = self.files.getIndex(file).?,
.line = decl_src.line,
.col = 0,
.docs = doc_comment,
.fields = null, // walkInstruction will fill `fields` if necessary
});
break :idx idx;
};
 
const walk_result = try self.walkInlineBody(
file,
scope,
decl_src,
decl_src,
bodies.value_body,
true,
call_ctx,
);
 
const tree = try file.getTree(self.zcu.gpa);
const kind_token = tree.nodes.items(.main_token)[decl_src.src_node];
const kind: []const u8 = switch (tree.tokens.items(.tag)[kind_token]) {
.keyword_var => "var",
else => "const",
};
 
const decls_slot_index = self.decls.items.len;
try self.decls.append(self.arena, .{
.name = name,
.src = ast_node_index,
.value = walk_result,
.kind = kind,
.parent_container = scope.enclosing_type,
});
 
if (declaration.flags.is_pub) {
try decl_indexes.append(self.arena, decls_slot_index);
} else {
try priv_decl_indexes.append(self.arena, decls_slot_index);
}
 
const decl_status_ptr = scope.resolveDeclName(declaration.name.toString(file.zir).?, file, .none);
std.debug.assert(decl_status_ptr.* == .Pending);
decl_status_ptr.* = .{ .Analyzed = decls_slot_index };
 
// Unblock any pending decl path that was waiting for this decl.
if (self.ref_paths_pending_on_decls.get(decl_status_ptr)) |paths| {
for (paths.items) |resume_info| {
try self.tryResolveRefPath(
resume_info.file,
decl_inst,
resume_info.ref_path,
);
}
 
_ = self.ref_paths_pending_on_decls.remove(decl_status_ptr);
// TODO: we should deallocate the arraylist that holds all the
// ref paths. not doing it now since it's arena-allocated
// anyway, but maybe we should put it elsewhere.
}
}
 
fn analyzeUsingnamespaceDecl(
self: *Autodoc,
file: *File,
scope: *Scope,
decl_src: SrcLocInfo,
decl_indexes: *std.ArrayListUnmanaged(usize),
priv_decl_indexes: *std.ArrayListUnmanaged(usize),
declaration: Zir.Inst.Declaration,
extra_index: u32,
call_ctx: ?*const CallContext,
) AutodocErrors!void {
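// A `usingnamespace` decl is recorded as an unnamed decl with `is_uns`
// set, so that later name lookups (see `findNameInUnsDecls`) can traverse
// into the namespace it re-exports.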
const bodies = declaration.getBodies(extra_index, file.zir);
 
const doc_comment: ?[]const u8 = if (declaration.flags.has_doc_comment)
file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[extra_index]))
else
null;
 
// astnode
const ast_node_index = idx: {
const idx = self.ast_nodes.items.len;
try self.ast_nodes.append(self.arena, .{
.file = self.files.getIndex(file).?,
.line = decl_src.line,
.col = 0,
.docs = doc_comment,
.fields = null, // walkInstruction will fill `fields` if necessary
});
break :idx idx;
};
 
const walk_result = try self.walkInlineBody(
file,
scope,
decl_src,
decl_src,
bodies.value_body,
true,
call_ctx,
);
 
const decl_slot_index = self.decls.items.len;
try self.decls.append(self.arena, .{
.name = "",
.kind = "",
.src = ast_node_index,
.value = walk_result,
.is_uns = true,
.parent_container = scope.enclosing_type,
});
 
if (declaration.flags.is_pub) {
try decl_indexes.append(self.arena, decl_slot_index);
} else {
try priv_decl_indexes.append(self.arena, decl_slot_index);
}
}
 
fn analyzeDecltest(
self: *Autodoc,
file: *File,
scope: *Scope,
decl_src: SrcLocInfo,
declaration: Zir.Inst.Declaration,
extra_index: u32,
) AutodocErrors!void {
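// A decltest documents another decl: we record its source code as an ast
// node and attach it to the decl it names, which `analyzeAllDecls` has
// already analyzed by the time this runs.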
std.debug.assert(declaration.flags.has_doc_comment);
const decl_name_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
 
const test_source_code = (try file.getTree(self.zcu.gpa)).getNodeSource(decl_src.src_node);
 
const decl_name: ?[]const u8 = if (decl_name_index != .empty)
file.zir.nullTerminatedString(decl_name_index)
else
null;
 
// astnode
const ast_node_index = idx: {
const idx = self.ast_nodes.items.len;
try self.ast_nodes.append(self.arena, .{
.file = self.files.getIndex(file).?,
.line = decl_src.line,
.col = 0,
.name = decl_name,
.code = test_source_code,
});
break :idx idx;
};
 
const decl_status = scope.resolveDeclName(decl_name_index, file, .none);
 
switch (decl_status.*) {
.Analyzed => |idx| {
self.decls.items[idx].decltest = ast_node_index;
},
else => unreachable, // we assume analyzeAllDecls analyzed other decls by this point
}
}
 
/// An unresolved path has a non-string WalkResult at its beginning, while every
/// other element is a string WalkResult. Resolving means iteratively mapping each
/// string to a Decl / Type / Call / etc.
///
/// If we encounter an unanalyzed decl during the process, we append the
/// unsolved sub-path to `self.ref_paths_pending_on_decls` and bail out.
/// The same happens when a decl holds a type definition that hasn't been fully
/// analyzed yet (except that we append to `self.ref_paths_pending_on_types`).
///
/// When analyzeAllDecls / walkInstruction finishes analyzing a decl / type, it will
/// then check if there's any pending ref path blocked on it and, if any, it
/// will progress their resolution by calling tryResolveRefPath again.
///
/// Ref paths can also depend on other ref paths. See
/// `self.pending_ref_paths` for more info.
///
/// A ref path that has a component that resolves into a comptimeExpr will
/// give up its resolution process entirely, leaving the remaining components
/// as strings.
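///
/// For example, given a ref path equivalent to `foo.bar.baz`, the first
/// element is the WalkResult for `foo` and the following elements are the
/// strings "bar" and "baz"; each iteration replaces the next string with
/// the decl, field, or type it resolves to.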
fn tryResolveRefPath(
self: *Autodoc,
/// File from which the decl path originates.
file: *File,
inst: Zir.Inst.Index, // used only for panicWithContext
path: []DocData.Expr,
) AutodocErrors!void {
var i: usize = 0;
outer: while (i < path.len - 1) : (i += 1) {
const parent = path[i];
const child_string = path[i + 1].declName; // we expect to find an unsolved decl
 
var resolved_parent = parent;
var j: usize = 0;
while (j < 10_000) : (j += 1) {
switch (resolved_parent) {
else => break,
.this => |t| resolved_parent = .{ .type = t },
.declIndex => |decl_index| {
const decl = self.decls.items[decl_index];
resolved_parent = decl.value.expr;
continue;
},
.declRef => |decl_status_ptr| {
// NOTE: must be kept in sync with `findNameInUnsDecls`
switch (decl_status_ptr.*) {
// The use of unreachable here is conservative.
// It might be that it truly should be up to us to
// request the analysis of this decl, but it's not clear
// at the moment of writing.
.NotRequested => unreachable,
.Analyzed => |decl_index| {
const decl = self.decls.items[decl_index];
resolved_parent = decl.value.expr;
continue;
},
.Pending => {
// This decl path is pending completion
{
const res = try self.pending_ref_paths.getOrPut(
self.arena,
&path[path.len - 1],
);
if (!res.found_existing) res.value_ptr.* = .{};
}
 
const res = try self.ref_paths_pending_on_decls.getOrPut(
self.arena,
decl_status_ptr,
);
if (!res.found_existing) res.value_ptr.* = .{};
try res.value_ptr.*.append(self.arena, .{
.file = file,
.ref_path = path[i..path.len],
});
 
// We return instead of doing `break :outer` to prevent the
// code after the `:outer` while loop from running, as it assumes
// that the path will have been fully analyzed (or we
// have given up because of a comptimeExpr).
return;
},
}
},
.refPath => |rp| {
if (self.pending_ref_paths.getPtr(&rp[rp.len - 1])) |waiter_list| {
try waiter_list.append(self.arena, .{
.file = file,
.ref_path = path[i..path.len],
});
 
// This decl path is pending completion
{
const res = try self.pending_ref_paths.getOrPut(
self.arena,
&path[path.len - 1],
);
if (!res.found_existing) res.value_ptr.* = .{};
}
 
return;
}
 
// If the last element is a declName or a CTE, then we give up,
// otherwise we resolve the parent to it and loop again.
// NOTE: we assume that if we find a string, it's because of
// a CTE component somewhere in the path. We know that the path
// is not pending further evaluation because we just checked!
const last = rp[rp.len - 1];
switch (last) {
.comptimeExpr, .declName => break :outer,
else => {
resolved_parent = last;
continue;
},
}
},
.fieldVal => |fv| {
resolved_parent = self.exprs.items[fv.val.expr];
},
}
} else {
panicWithContext(
file,
inst,
"exhausted eval quota for `{}` in tryResolveRefPath\n",
.{resolved_parent},
);
}
 
switch (resolved_parent) {
else => {
// NOTE: indirect references to types / decls should be handled
// in the switch above this one!
printWithContext(
file,
inst,
"TODO: handle `{s}` in tryResolveRefPath\nInfo: {}",
.{ @tagName(resolved_parent), resolved_parent },
);
// path[i + 1] = (try self.cteTodo("<match failure>")).expr;
continue :outer;
},
.comptimeExpr, .call, .typeOf => {
// Since we hit a cte, we leave the remaining strings unresolved
// and completely give up on resolving this decl path.
//decl_path.hasCte = true;
break :outer;
},
.type => |t_index| switch (self.types.items[t_index]) {
else => {
panicWithContext(
file,
inst,
"TODO: handle `{s}` in tryResolveRefPath.type\nInfo: {}",
.{ @tagName(self.types.items[t_index]), resolved_parent },
);
},
.ComptimeExpr => {
// Same as the comptimeExpr branch above
break :outer;
},
.Unanalyzed => {
// This decl path is pending completion
{
const res = try self.pending_ref_paths.getOrPut(
self.arena,
&path[path.len - 1],
);
if (!res.found_existing) res.value_ptr.* = .{};
}
 
const res = try self.ref_paths_pending_on_types.getOrPut(
self.arena,
t_index,
);
if (!res.found_existing) res.value_ptr.* = .{};
try res.value_ptr.*.append(self.arena, .{
.file = file,
.ref_path = path[i..path.len],
});
 
return;
},
.Array => {
if (std.mem.eql(u8, child_string, "len")) {
path[i + 1] = .{
.builtinField = .len,
};
} else {
panicWithContext(
file,
inst,
"TODO: handle `{s}` in tryResolveRefPath.type.Array\nInfo: {}",
.{ child_string, resolved_parent },
);
}
},
// TODO: the following searches could probably
// be performed more efficiently on the corresponding
// scope
.Enum => |t_enum| { // foo.bar.baz
// Look into locally-defined pub decls
for (t_enum.pubDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
// Look into locally-defined priv decls
for (t_enum.privDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
switch (try self.findNameInUnsDecls(file, path[i..path.len], resolved_parent, child_string)) {
.Pending => return,
.NotFound => {},
.Found => |match| {
path[i + 1] = match;
continue :outer;
},
}
 
for (self.ast_nodes.items[t_enum.src].fields.?, 0..) |ast_node, idx| {
const name = self.ast_nodes.items[ast_node].name.?;
if (std.mem.eql(u8, name, child_string)) {
// TODO: should we really create an artificial
// decl for this type? Probably not.
 
path[i + 1] = .{
.fieldRef = .{
.type = t_index,
.index = idx,
},
};
continue :outer;
}
}
 
// if we got here, our search failed
printWithContext(
file,
inst,
"failed to match `{s}` in enum",
.{child_string},
);
 
path[i + 1] = (try self.cteTodo("match failure")).expr;
continue :outer;
},
.Union => |t_union| {
// Look into locally-defined pub decls
for (t_union.pubDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
// Look into locally-defined priv decls
for (t_union.privDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
switch (try self.findNameInUnsDecls(file, path[i..path.len], resolved_parent, child_string)) {
.Pending => return,
.NotFound => {},
.Found => |match| {
path[i + 1] = match;
continue :outer;
},
}
 
for (self.ast_nodes.items[t_union.src].fields.?, 0..) |ast_node, idx| {
const name = self.ast_nodes.items[ast_node].name.?;
if (std.mem.eql(u8, name, child_string)) {
// TODO: should we really create an artificial
// decl for this type? Probably not.
 
path[i + 1] = .{
.fieldRef = .{
.type = t_index,
.index = idx,
},
};
continue :outer;
}
}
 
// if we got here, our search failed
printWithContext(
file,
inst,
"failed to match `{s}` in union",
.{child_string},
);
path[i + 1] = (try self.cteTodo("match failure")).expr;
continue :outer;
},
 
.Struct => |t_struct| {
// Look into locally-defined pub decls
for (t_struct.pubDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
// Look into locally-defined priv decls
for (t_struct.privDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
switch (try self.findNameInUnsDecls(file, path[i..path.len], resolved_parent, child_string)) {
.Pending => return,
.NotFound => {},
.Found => |match| {
path[i + 1] = match;
continue :outer;
},
}
 
for (self.ast_nodes.items[t_struct.src].fields.?, 0..) |ast_node, idx| {
const name = self.ast_nodes.items[ast_node].name.?;
if (std.mem.eql(u8, name, child_string)) {
// TODO: should we really create an artificial
// decl for this type? Probably not.
 
path[i + 1] = .{
.fieldRef = .{
.type = t_index,
.index = idx,
},
};
continue :outer;
}
}
 
// If we got here, our search failed. Unlike the other container kinds,
// we don't report a match failure; instead we treat the unresolved name
// itself as a comptime expression and continue.
path[i + 1] = (try self.cteTodo(child_string)).expr;
continue :outer;
},
.Opaque => |t_opaque| {
// Look into locally-defined pub decls
for (t_opaque.pubDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
// Look into locally-defined priv decls
for (t_opaque.privDecls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) continue;
if (std.mem.eql(u8, d.name, child_string)) {
path[i + 1] = .{ .declIndex = idx };
continue :outer;
}
}
 
// We delay looking into Uns decls since they might not be
// fully analyzed yet.
switch (try self.findNameInUnsDecls(file, path[i..path.len], resolved_parent, child_string)) {
.Pending => return,
.NotFound => {},
.Found => |match| {
path[i + 1] = match;
continue :outer;
},
}
 
// if we got here, our search failed
printWithContext(
file,
inst,
"failed to match `{s}` in opaque",
.{child_string},
);
 
path[i + 1] = (try self.cteTodo("match failure")).expr;
continue :outer;
},
},
.@"struct" => |st| {
for (st) |field| {
if (std.mem.eql(u8, field.name, child_string)) {
path[i + 1] = .{ .fieldVal = field };
continue :outer;
}
}
 
// if we got here, our search failed
printWithContext(
file,
inst,
"failed to match `{s}` in struct",
.{child_string},
);
 
path[i + 1] = (try self.cteTodo("match failure")).expr;
continue :outer;
},
}
}
 
if (self.pending_ref_paths.get(&path[path.len - 1])) |waiter_list| {
// It's important to de-register ourselves as pending before
// attempting to resolve any other decl.
_ = self.pending_ref_paths.remove(&path[path.len - 1]);
 
for (waiter_list.items) |resume_info| {
try self.tryResolveRefPath(resume_info.file, inst, resume_info.ref_path);
}
// TODO: this is where we should free waiter_list, but it's in the arena
// that said, we might want to store it elsewhere and reclaim memory asap
}
}
 
const UnsSearchResult = union(enum) {
Found: DocData.Expr,
Pending,
NotFound,
};
 
fn findNameInUnsDecls(
self: *Autodoc,
file: *File,
tail: []DocData.Expr,
uns_expr: DocData.Expr,
name: []const u8,
) !UnsSearchResult {
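// Worklist search over `usingnamespace` decls: starting from `uns_expr`,
// resolve each queued expression to a container type, look for `name`
// among its pub decls, and queue any nested `usingnamespace` decls found
// along the way.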
var to_analyze = std.SegmentedList(DocData.Expr, 1){};
// TODO: make this an appendAssumeCapacity
try to_analyze.append(self.arena, uns_expr);
 
while (to_analyze.pop()) |cte| {
var container_expression = cte;
for (0..10_000) |_| {
// TODO: handle other types of indirection, like @import
const type_index = switch (container_expression) {
.type => |t| t,
.declRef => |decl_status_ptr| {
switch (decl_status_ptr.*) {
// The use of unreachable here is conservative.
// It might be that it truly should be up to us to
// request the analysis of this decl, but it's not clear
// at the moment of writing.
.NotRequested => unreachable,
.Analyzed => |decl_index| {
const decl = self.decls.items[decl_index];
container_expression = decl.value.expr;
continue;
},
.Pending => {
// This decl path is pending completion
{
const res = try self.pending_ref_paths.getOrPut(
self.arena,
&tail[tail.len - 1],
);
if (!res.found_existing) res.value_ptr.* = .{};
}
 
const res = try self.ref_paths_pending_on_decls.getOrPut(
self.arena,
decl_status_ptr,
);
if (!res.found_existing) res.value_ptr.* = .{};
try res.value_ptr.*.append(self.arena, .{
.file = file,
.ref_path = tail,
});
 
// TODO: save some state that keeps track of our
// progress because, as things stand, we
// always re-start the search from scratch
return .Pending;
},
}
},
else => {
log.debug(
"Handle `{s}` in findNameInUnsDecls (first switch)",
.{@tagName(cte)},
);
return .{ .Found = .{ .comptimeExpr = 0 } };
},
};
 
const t = self.types.items[type_index];
const decls = switch (t) {
else => {
log.debug(
"Handle `{s}` in findNameInUnsDecls (second switch)",
.{@tagName(cte)},
);
return .{ .Found = .{ .comptimeExpr = 0 } };
},
inline .Struct, .Union, .Opaque, .Enum => |c| c.pubDecls,
};
 
for (decls) |idx| {
const d = self.decls.items[idx];
if (d.is_uns) {
try to_analyze.append(self.arena, d.value.expr);
} else if (std.mem.eql(u8, d.name, name)) {
return .{ .Found = .{ .declIndex = idx } };
}
}
}
}
 
return .NotFound;
}
 
fn analyzeFancyFunction(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_src: SrcLocInfo,
inst: Zir.Inst.Index,
self_ast_node_index: usize,
type_slot_index: usize,
call_ctx: ?*const CallContext,
) AutodocErrors!DocData.WalkResult {
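// Like `analyzeFunction`, but additionally decodes the `Zir.Inst.FuncFancy`
// payload: lib name, alignment, address space, section, and calling
// convention.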
const tags = file.zir.instructions.items(.tag);
const data = file.zir.instructions.items(.data);
const fn_info = file.zir.getFnInfo(inst);
 
try self.ast_nodes.ensureUnusedCapacity(self.arena, fn_info.total_params_len);
var param_type_refs = try std.ArrayListUnmanaged(DocData.Expr).initCapacity(
self.arena,
fn_info.total_params_len,
);
var param_ast_indexes = try std.ArrayListUnmanaged(usize).initCapacity(
self.arena,
fn_info.total_params_len,
);
 
// TODO: handle scope rules for fn parameters
for (fn_info.param_body[0..fn_info.total_params_len]) |param_index| {
switch (tags[@intFromEnum(param_index)]) {
else => {
panicWithContext(
file,
param_index,
"TODO: handle `{s}` in walkInstruction.func\n",
.{@tagName(tags[@intFromEnum(param_index)])},
);
},
.param_anytype, .param_anytype_comptime => {
// TODO: where are the doc comments?
const str_tok = data[@intFromEnum(param_index)].str_tok;
 
const name = str_tok.get(file.zir);
 
param_ast_indexes.appendAssumeCapacity(self.ast_nodes.items.len);
self.ast_nodes.appendAssumeCapacity(.{
.name = name,
.docs = "",
.@"comptime" = tags[@intFromEnum(param_index)] == .param_anytype_comptime,
});
 
param_type_refs.appendAssumeCapacity(
DocData.Expr{ .@"anytype" = .{} },
);
},
.param, .param_comptime => {
const pl_tok = data[@intFromEnum(param_index)].pl_tok;
const extra = file.zir.extraData(Zir.Inst.Param, pl_tok.payload_index);
const doc_comment = if (extra.data.doc_comment != .empty)
file.zir.nullTerminatedString(extra.data.doc_comment)
else
"";
const name = file.zir.nullTerminatedString(extra.data.name);
 
param_ast_indexes.appendAssumeCapacity(self.ast_nodes.items.len);
try self.ast_nodes.append(self.arena, .{
.name = name,
.docs = doc_comment,
.@"comptime" = tags[@intFromEnum(param_index)] == .param_comptime,
});
 
const break_index = file.zir.extra[extra.end..][extra.data.body_len - 1];
const break_operand = data[break_index].@"break".operand;
const param_type_ref = try self.walkRef(
file,
scope,
parent_src,
break_operand,
false,
call_ctx,
);
 
param_type_refs.appendAssumeCapacity(param_type_ref.expr);
},
}
}
 
self.ast_nodes.items[self_ast_node_index].fields = param_ast_indexes.items;
 
const pl_node = data[@intFromEnum(inst)].pl_node;
const extra = file.zir.extraData(Zir.Inst.FuncFancy, pl_node.payload_index);
 
var extra_index: usize = extra.end;
 
var lib_name: []const u8 = "";
if (extra.data.bits.has_lib_name) {
const lib_name_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
lib_name = file.zir.nullTerminatedString(lib_name_index);
extra_index += 1;
}
 
var align_index: ?usize = null;
if (extra.data.bits.has_align_ref) {
const align_ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
align_index = self.exprs.items.len;
_ = try self.walkRef(
file,
scope,
parent_src,
align_ref,
false,
call_ctx,
);
extra_index += 1;
} else if (extra.data.bits.has_align_body) {
const align_body_len = file.zir.extra[extra_index];
extra_index += 1;
const align_body = file.zir.extra[extra_index .. extra_index + align_body_len];
_ = align_body;
// TODO: analyze the block (or bail with a comptimeExpr)
extra_index += align_body_len;
} else {
// default alignment
}
 
var addrspace_index: ?usize = null;
if (extra.data.bits.has_addrspace_ref) {
const addrspace_ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
addrspace_index = self.exprs.items.len;
_ = try self.walkRef(
file,
scope,
parent_src,
addrspace_ref,
false,
call_ctx,
);
extra_index += 1;
} else if (extra.data.bits.has_addrspace_body) {
const addrspace_body_len = file.zir.extra[extra_index];
extra_index += 1;
const addrspace_body = file.zir.extra[extra_index .. extra_index + addrspace_body_len];
_ = addrspace_body;
// TODO: analyze the block (or bail with a comptimeExpr)
extra_index += addrspace_body_len;
} else {
// default address space
}
 
var section_index: ?usize = null;
if (extra.data.bits.has_section_ref) {
const section_ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
section_index = self.exprs.items.len;
_ = try self.walkRef(
file,
scope,
parent_src,
section_ref,
false,
call_ctx,
);
extra_index += 1;
} else if (extra.data.bits.has_section_body) {
const section_body_len = file.zir.extra[extra_index];
extra_index += 1;
const section_body = file.zir.extra[extra_index .. extra_index + section_body_len];
_ = section_body;
// TODO: analyze the block (or bail with a comptimeExpr)
extra_index += section_body_len;
} else {
// default section
}
 
var cc_index: ?usize = null;
if (extra.data.bits.has_cc_ref and !extra.data.bits.has_cc_body) {
const cc_ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
const cc_expr = try self.walkRef(
file,
scope,
parent_src,
cc_ref,
false,
call_ctx,
);
 
cc_index = self.exprs.items.len;
try self.exprs.append(self.arena, cc_expr.expr);
 
extra_index += 1;
} else if (extra.data.bits.has_cc_body) {
const cc_body_len = file.zir.extra[extra_index];
extra_index += 1;
const cc_body = file.zir.bodySlice(extra_index, cc_body_len);
 
// We assume the body ends with a break_inline
const break_index = cc_body[cc_body.len - 1];
const break_operand = data[@intFromEnum(break_index)].@"break".operand;
const cc_expr = try self.walkRef(
file,
scope,
parent_src,
break_operand,
false,
call_ctx,
);
 
cc_index = self.exprs.items.len;
try self.exprs.append(self.arena, cc_expr.expr);
 
extra_index += cc_body_len;
} else {
// auto calling convention
}
 
// ret
const ret_type_ref: DocData.Expr = switch (fn_info.ret_ty_body.len) {
0 => switch (fn_info.ret_ty_ref) {
.none => DocData.Expr{ .void = .{} },
else => blk: {
const ref = fn_info.ret_ty_ref;
const wr = try self.walkRef(
file,
scope,
parent_src,
ref,
false,
call_ctx,
);
break :blk wr.expr;
},
},
else => blk: {
const last_instr_index = fn_info.ret_ty_body[fn_info.ret_ty_body.len - 1];
const break_operand = data[@intFromEnum(last_instr_index)].@"break".operand;
const wr = try self.walkRef(
file,
scope,
parent_src,
break_operand,
false,
call_ctx,
);
break :blk wr.expr;
},
};
 
// TODO: a complete version of this will probably need a scope
// in order to correctly evaluate closures around function
// parameters etc.
const generic_ret: ?DocData.Expr = switch (ret_type_ref) {
.type => |t| blk: {
if (fn_info.body.len == 0) break :blk null;
if (t == @intFromEnum(Ref.type_type)) {
break :blk try self.getGenericReturnType(
file,
scope,
parent_src,
fn_info.body,
call_ctx,
);
} else {
break :blk null;
}
},
else => null,
};
 
// if we're analyzing a function signature (i.e. without a body), we
// actually don't have an ast_node reserved for us, but since
// we don't have a name, we don't need it.
const src = if (fn_info.body.len == 0) 0 else self_ast_node_index;
 
self.types.items[type_slot_index] = .{
.Fn = .{
.name = "todo_name func",
.src = src,
.params = param_type_refs.items,
.ret = ret_type_ref,
.generic_ret = generic_ret,
.is_extern = extra.data.bits.is_extern,
.has_cc = cc_index != null,
.has_align = align_index != null,
.has_lib_name = extra.data.bits.has_lib_name,
.lib_name = lib_name,
.is_inferred_error = extra.data.bits.is_inferred_error,
.cc = cc_index,
.@"align" = align_index,
},
};
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
}
fn analyzeFunction(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_src: SrcLocInfo,
inst: Zir.Inst.Index,
self_ast_node_index: usize,
type_slot_index: usize,
ret_is_inferred_error_set: bool,
call_ctx: ?*const CallContext,
) AutodocErrors!DocData.WalkResult {
const tags = file.zir.instructions.items(.tag);
const data = file.zir.instructions.items(.data);
const fn_info = file.zir.getFnInfo(inst);
 
try self.ast_nodes.ensureUnusedCapacity(self.arena, fn_info.total_params_len);
var param_type_refs = try std.ArrayListUnmanaged(DocData.Expr).initCapacity(
self.arena,
fn_info.total_params_len,
);
var param_ast_indexes = try std.ArrayListUnmanaged(usize).initCapacity(
self.arena,
fn_info.total_params_len,
);
 
// TODO: handle scope rules for fn parameters
for (fn_info.param_body[0..fn_info.total_params_len]) |param_index| {
switch (tags[@intFromEnum(param_index)]) {
else => {
panicWithContext(
file,
param_index,
"TODO: handle `{s}` in walkInstruction.func\n",
.{@tagName(tags[@intFromEnum(param_index)])},
);
},
.param_anytype, .param_anytype_comptime => {
// TODO: where are the doc comments?
const str_tok = data[@intFromEnum(param_index)].str_tok;
 
const name = str_tok.get(file.zir);
 
param_ast_indexes.appendAssumeCapacity(self.ast_nodes.items.len);
self.ast_nodes.appendAssumeCapacity(.{
.name = name,
.docs = "",
.@"comptime" = tags[@intFromEnum(param_index)] == .param_anytype_comptime,
});
 
param_type_refs.appendAssumeCapacity(
DocData.Expr{ .@"anytype" = .{} },
);
},
.param, .param_comptime => {
const pl_tok = data[@intFromEnum(param_index)].pl_tok;
const extra = file.zir.extraData(Zir.Inst.Param, pl_tok.payload_index);
const doc_comment = if (extra.data.doc_comment != .empty)
file.zir.nullTerminatedString(extra.data.doc_comment)
else
"";
const name = file.zir.nullTerminatedString(extra.data.name);
 
param_ast_indexes.appendAssumeCapacity(self.ast_nodes.items.len);
try self.ast_nodes.append(self.arena, .{
.name = name,
.docs = doc_comment,
.@"comptime" = tags[@intFromEnum(param_index)] == .param_comptime,
});
 
const break_index = file.zir.extra[extra.end..][extra.data.body_len - 1];
const break_operand = data[break_index].@"break".operand;
const param_type_ref = try self.walkRef(
file,
scope,
parent_src,
break_operand,
false,
call_ctx,
);
 
param_type_refs.appendAssumeCapacity(param_type_ref.expr);
},
}
}
 
// ret
const ret_type_ref: DocData.Expr = switch (fn_info.ret_ty_body.len) {
0 => switch (fn_info.ret_ty_ref) {
.none => DocData.Expr{ .void = .{} },
else => blk: {
const ref = fn_info.ret_ty_ref;
const wr = try self.walkRef(
file,
scope,
parent_src,
ref,
false,
call_ctx,
);
break :blk wr.expr;
},
},
else => blk: {
const last_instr_index = fn_info.ret_ty_body[fn_info.ret_ty_body.len - 1];
const break_operand = data[@intFromEnum(last_instr_index)].@"break".operand;
const wr = try self.walkRef(
file,
scope,
parent_src,
break_operand,
false,
call_ctx,
);
break :blk wr.expr;
},
};
 
// TODO: a complete version of this will probably need a scope
// in order to correctly evaluate closures around function
// parameters etc.
const generic_ret: ?DocData.Expr = switch (ret_type_ref) {
.type => |t| blk: {
if (fn_info.body.len == 0) break :blk null;
if (t == @intFromEnum(Ref.type_type)) {
break :blk try self.getGenericReturnType(
file,
scope,
parent_src,
fn_info.body,
call_ctx,
);
} else {
break :blk null;
}
},
else => null,
};
 
const ret_type: DocData.Expr = blk: {
if (ret_is_inferred_error_set) {
const ret_type_slot_index = self.types.items.len;
try self.types.append(self.arena, .{
.InferredErrorUnion = .{ .payload = ret_type_ref },
});
break :blk .{ .type = ret_type_slot_index };
} else break :blk ret_type_ref;
};
 
// if we're analyzing a function signature (i.e. without a body), we
// actually don't have an ast_node reserved for us, but since
// we don't have a name, we don't need it.
const src = if (fn_info.body.len == 0) 0 else self_ast_node_index;
 
self.ast_nodes.items[self_ast_node_index].fields = param_ast_indexes.items;
self.types.items[type_slot_index] = .{
.Fn = .{
.name = "todo_name func",
.src = src,
.params = param_type_refs.items,
.ret = ret_type,
.generic_ret = generic_ret,
},
};
 
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = type_slot_index },
};
}
 
fn getGenericReturnType(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_src: SrcLocInfo, // function decl line
body: []const Zir.Inst.Index,
call_ctx: ?*const CallContext,
) !DocData.Expr {
const tags = file.zir.instructions.items(.tag);
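// Heuristic: when the body is long enough, the instruction four slots from
// the end is expected to be the `ret` whose operand is the (generic) return
// type; anything else falls back to a comptimeExpr.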
if (body.len >= 4) {
const maybe_ret_inst = body[body.len - 4];
switch (tags[@intFromEnum(maybe_ret_inst)]) {
.ret_node, .ret_load => {
const wr = try self.walkInstruction(
file,
scope,
parent_src,
maybe_ret_inst,
false,
call_ctx,
);
return wr.expr;
},
else => {},
}
}
return DocData.Expr{ .comptimeExpr = 0 };
}
 
fn collectUnionFieldInfo(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_src: SrcLocInfo,
fields_len: usize,
field_type_refs: *std.ArrayListUnmanaged(DocData.Expr),
field_name_indexes: *std.ArrayListUnmanaged(usize),
ei: usize,
call_ctx: ?*const CallContext,
) !void {
if (fields_len == 0) return;
var extra_index = ei;
 
const bits_per_field = 4;
const fields_per_u32 = 32 / bits_per_field;
const bit_bags_count = std.math.divCeil(usize, fields_len, fields_per_u32) catch unreachable;
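// Each field contributes `bits_per_field` flag bits (has_type, has_align,
// has_tag, plus one unused bit), packed 8 fields per u32 bit bag.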
var bit_bag_index: usize = extra_index;
extra_index += bit_bags_count;
 
var cur_bit_bag: u32 = undefined;
var field_i: u32 = 0;
while (field_i < fields_len) : (field_i += 1) {
if (field_i % fields_per_u32 == 0) {
cur_bit_bag = file.zir.extra[bit_bag_index];
bit_bag_index += 1;
}
const has_type = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
const has_align = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
const has_tag = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
const unused = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
_ = unused;
 
const field_name = file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[extra_index]));
extra_index += 1;
const doc_comment_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
extra_index += 1;
const field_type: Zir.Inst.Ref = if (has_type) @enumFromInt(file.zir.extra[extra_index]) else .void_type;
if (has_type) extra_index += 1;
 
if (has_align) extra_index += 1;
if (has_tag) extra_index += 1;
 
// type
{
const walk_result = try self.walkRef(
file,
scope,
parent_src,
field_type,
false,
call_ctx,
);
try field_type_refs.append(self.arena, walk_result.expr);
}
 
// ast node
{
try field_name_indexes.append(self.arena, self.ast_nodes.items.len);
const doc_comment: ?[]const u8 = if (doc_comment_index != .empty)
file.zir.nullTerminatedString(doc_comment_index)
else
null;
try self.ast_nodes.append(self.arena, .{
.name = field_name,
.docs = doc_comment,
});
}
}
}
 
fn collectStructFieldInfo(
self: *Autodoc,
file: *File,
scope: *Scope,
parent_src: SrcLocInfo,
fields_len: usize,
field_type_refs: *std.ArrayListUnmanaged(DocData.Expr),
field_default_refs: *std.ArrayListUnmanaged(?DocData.Expr),
field_name_indexes: *std.ArrayListUnmanaged(usize),
ei: usize,
is_tuple: bool,
call_ctx: ?*const CallContext,
) !void {
if (fields_len == 0) return;
var extra_index = ei;
 
const bits_per_field = 4;
const fields_per_u32 = 32 / bits_per_field;
const bit_bags_count = std.math.divCeil(usize, fields_len, fields_per_u32) catch unreachable;
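// Each field contributes `bits_per_field` flag bits (has_align, has_default,
// is_comptime, has_type_body), packed 8 fields per u32 bit bag.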
 
const Field = struct {
field_name: Zir.NullTerminatedString,
doc_comment_index: Zir.NullTerminatedString,
type_body_len: u32 = 0,
align_body_len: u32 = 0,
init_body_len: u32 = 0,
type_ref: Zir.Inst.Ref = .none,
};
const fields = try self.arena.alloc(Field, fields_len);
 
var bit_bag_index: usize = extra_index;
extra_index += bit_bags_count;
 
var cur_bit_bag: u32 = undefined;
var field_i: u32 = 0;
while (field_i < fields_len) : (field_i += 1) {
if (field_i % fields_per_u32 == 0) {
cur_bit_bag = file.zir.extra[bit_bag_index];
bit_bag_index += 1;
}
const has_align = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
const has_default = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
// const is_comptime = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;
const has_type_body = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
 
const field_name: Zir.NullTerminatedString = if (!is_tuple) blk: {
const fname = file.zir.extra[extra_index];
extra_index += 1;
break :blk @enumFromInt(fname);
} else .empty;
 
const doc_comment_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
extra_index += 1;
 
fields[field_i] = .{
.field_name = field_name,
.doc_comment_index = doc_comment_index,
};
 
if (has_type_body) {
fields[field_i].type_body_len = file.zir.extra[extra_index];
} else {
fields[field_i].type_ref = @enumFromInt(file.zir.extra[extra_index]);
}
extra_index += 1;
 
if (has_align) {
fields[field_i].align_body_len = file.zir.extra[extra_index];
extra_index += 1;
}
if (has_default) {
fields[field_i].init_body_len = file.zir.extra[extra_index];
extra_index += 1;
}
}
 
const data = file.zir.instructions.items(.data);
 
for (fields) |field| {
const type_expr = expr: {
if (field.type_ref != .none) {
const walk_result = try self.walkRef(
file,
scope,
parent_src,
field.type_ref,
false,
call_ctx,
);
break :expr walk_result.expr;
}
 
std.debug.assert(field.type_body_len != 0);
const body = file.zir.bodySlice(extra_index, field.type_body_len);
extra_index += body.len;
 
const break_inst = body[body.len - 1];
const operand = data[@intFromEnum(break_inst)].@"break".operand;
try self.ast_nodes.append(self.arena, .{
.file = self.files.getIndex(file).?,
.line = parent_src.line,
.col = 0,
.fields = null, // walkInstruction will fill `fields` if necessary
});
const walk_result = try self.walkRef(
file,
scope,
parent_src,
operand,
false,
call_ctx,
);
break :expr walk_result.expr;
};
 
extra_index += field.align_body_len;
 
const default_expr: ?DocData.Expr = def: {
if (field.init_body_len == 0) {
break :def null;
}
 
const body = file.zir.bodySlice(extra_index, field.init_body_len);
extra_index += body.len;
 
const break_inst = body[body.len - 1];
const operand = data[@intFromEnum(break_inst)].@"break".operand;
const walk_result = try self.walkRef(
file,
scope,
parent_src,
operand,
false,
call_ctx,
);
break :def walk_result.expr;
};
 
try field_type_refs.append(self.arena, type_expr);
try field_default_refs.append(self.arena, default_expr);
 
// ast node
{
try field_name_indexes.append(self.arena, self.ast_nodes.items.len);
const doc_comment: ?[]const u8 = if (field.doc_comment_index != .empty)
file.zir.nullTerminatedString(field.doc_comment_index)
else
null;
const field_name: []const u8 = if (field.field_name != .empty)
file.zir.nullTerminatedString(field.field_name)
else
"";
 
try self.ast_nodes.append(self.arena, .{
.name = field_name,
.docs = doc_comment,
});
}
}
}
 
/// A Zir Ref can either refer to common types and values, or to a Zir index.
/// `walkRef` resolves common cases and delegates to `walkInstruction` otherwise.
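/// For example, `.bool_true` maps directly to a `bool` WalkResult, refs at or
/// below `InternPool.Index.last_type` map to pre-filled entries in `types`,
/// and refs that carry an instruction index are forwarded to `walkInstruction`.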
fn walkRef(
self: *Autodoc,
file: *File,
parent_scope: *Scope,
parent_src: SrcLocInfo,
ref: Ref,
need_type: bool, // true when the caller also needs a typeRef for the return value
call_ctx: ?*const CallContext,
) AutodocErrors!DocData.WalkResult {
if (ref == .none) {
return .{ .expr = .{ .comptimeExpr = 0 } };
} else if (@intFromEnum(ref) <= @intFromEnum(InternPool.Index.last_type)) {
// We can just return a type that indexes into `types` with the
// enum value because in the beginning we pre-filled `types` with
// the types that are listed in `Ref`.
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(std.builtin.TypeId.Type) },
.expr = .{ .type = @intFromEnum(ref) },
};
} else if (ref.toIndex()) |zir_index| {
return self.walkInstruction(
file,
parent_scope,
parent_src,
zir_index,
need_type,
call_ctx,
);
} else {
switch (ref) {
else => {
panicWithOptionalContext(
file,
.none,
"TODO: handle {s} in walkRef",
.{@tagName(ref)},
);
},
.undef => {
return DocData.WalkResult{ .expr = .undefined };
},
.zero => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .int = .{ .value = 0 } },
};
},
.one => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .int = .{ .value = 1 } },
};
},
.negative_one => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
.expr = .{ .int = .{ .value = 1, .negated = true } },
};
},
.zero_usize => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.usize_type) },
.expr = .{ .int = .{ .value = 0 } },
};
},
.one_usize => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.usize_type) },
.expr = .{ .int = .{ .value = 1 } },
};
},
.zero_u8 => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.u8_type) },
.expr = .{ .int = .{ .value = 0 } },
};
},
.one_u8 => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.u8_type) },
.expr = .{ .int = .{ .value = 1 } },
};
},
.four_u8 => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.u8_type) },
.expr = .{ .int = .{ .value = 4 } },
};
},
 
.void_value => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.void_type) },
.expr = .{ .void = .{} },
};
},
.unreachable_value => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.noreturn_type) },
.expr = .{ .@"unreachable" = .{} },
};
},
.null_value => {
return DocData.WalkResult{ .expr = .null };
},
.bool_true => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
.expr = .{ .bool = true },
};
},
.bool_false => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
.expr = .{ .bool = false },
};
},
.empty_struct => {
return DocData.WalkResult{ .expr = .{ .@"struct" = &.{} } };
},
.calling_convention_type => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.type_type) },
.expr = .{ .type = @intFromEnum(Ref.calling_convention_type) },
};
},
.calling_convention_c => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.calling_convention_type) },
.expr = .{ .enumLiteral = "C" },
};
},
.calling_convention_inline => {
return DocData.WalkResult{
.typeRef = .{ .type = @intFromEnum(Ref.calling_convention_type) },
.expr = .{ .enumLiteral = "Inline" },
};
},
// .generic_poison => {
// return DocData.WalkResult{ .int = .{
// .type = @intFromEnum(Ref.comptime_int_type),
// .value = 1,
// } };
// },
}
}
}
 
fn printWithContext(
file: *File,
inst: Zir.Inst.Index,
comptime fmt: []const u8,
args: anytype,
) void {
return printWithOptionalContext(file, inst.toOptional(), fmt, args);
}
 
fn printWithOptionalContext(file: *File, inst: Zir.Inst.OptionalIndex, comptime fmt: []const u8, args: anytype) void {
log.debug("Context [{s}] % {} \n " ++ fmt, .{ file.sub_file_path, inst } ++ args);
}
 
fn panicWithContext(
file: *File,
inst: Zir.Inst.Index,
comptime fmt: []const u8,
args: anytype,
) noreturn {
printWithOptionalContext(file, inst.toOptional(), fmt, args);
unreachable;
}
 
fn panicWithOptionalContext(
file: *File,
inst: Zir.Inst.OptionalIndex,
comptime fmt: []const u8,
args: anytype,
) noreturn {
printWithOptionalContext(file, inst, fmt, args);
unreachable;
}
 
fn cteTodo(self: *Autodoc, msg: []const u8) error{OutOfMemory}!DocData.WalkResult {
const cte_slot_index = self.comptime_exprs.items.len;
try self.comptime_exprs.append(self.arena, .{
.code = msg,
});
return DocData.WalkResult{ .expr = .{ .comptimeExpr = cte_slot_index } };
}
 
fn writeFileTableToJson(
map: std.AutoArrayHashMapUnmanaged(*File, usize),
mods: std.AutoArrayHashMapUnmanaged(*Module, DocData.DocModule),
jsw: anytype,
) !void {
try jsw.beginArray();
var it = map.iterator();
while (it.next()) |entry| {
try jsw.beginArray();
try jsw.write(entry.key_ptr.*.sub_file_path);
try jsw.write(mods.getIndex(entry.key_ptr.*.mod) orelse 0);
try jsw.endArray();
}
try jsw.endArray();
}
 
/// Writes the data like so:
/// ```
/// [
///   { "name": "<section name>", "guides": [{ "name": "<guide name>", "body": "<guide contents>" }] },
/// ]
/// ```
fn writeGuidesToJson(sections: std.ArrayListUnmanaged(Section), jsw: anytype) !void {
try jsw.beginArray();
 
for (sections.items) |s| {
// section name
try jsw.beginObject();
try jsw.objectField("name");
try jsw.write(s.name);
try jsw.objectField("guides");
 
// section value
try jsw.beginArray();
for (s.guides.items) |g| {
try jsw.beginObject();
try jsw.objectField("name");
try jsw.write(g.name);
try jsw.objectField("body");
try jsw.write(g.body);
try jsw.endObject();
}
try jsw.endArray();
try jsw.endObject();
}
 
try jsw.endArray();
}
 
fn writeModuleTableToJson(
map: std.AutoHashMapUnmanaged(*Module, DocData.DocModule.TableEntry),
jsw: anytype,
) !void {
try jsw.beginObject();
var it = map.valueIterator();
while (it.next()) |entry| {
try jsw.objectField(entry.name);
try jsw.write(entry.value);
}
try jsw.endObject();
}
 
fn srcLocInfo(
self: Autodoc,
file: *File,
src_node: i32,
parent_src: SrcLocInfo,
) !SrcLocInfo {
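// A note on the arithmetic below (inferred from the surrounding code, not
// original commentary): `src_node` is a signed AST node offset relative to
// `parent_src.src_node`, so adding the two recovers the absolute node index.
// The returned line count accumulates relative to the parent location, while
// `bytes` is the absolute byte offset of the node's main token.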
const sn = @as(u32, @intCast(@as(i32, @intCast(parent_src.src_node)) + src_node));
const tree = try file.getTree(self.zcu.gpa);
const node_idx = @as(Ast.Node.Index, @bitCast(sn));
const tokens = tree.nodes.items(.main_token);
 
const tok_idx = tokens[node_idx];
const start = tree.tokens.items(.start)[tok_idx];
const loc = tree.tokenLocation(parent_src.bytes, tok_idx);
return SrcLocInfo{
.line = parent_src.line + loc.line,
.bytes = start,
.src_node = sn,
};
}
 
fn declIsVar(
self: Autodoc,
file: *File,
src_node: i32,
parent_src: SrcLocInfo,
) !bool {
const sn = @as(u32, @intCast(@as(i32, @intCast(parent_src.src_node)) + src_node));
const tree = try file.getTree(self.zcu.gpa);
const node_idx = @as(Ast.Node.Index, @bitCast(sn));
const tokens = tree.nodes.items(.main_token);
const tags = tree.tokens.items(.tag);
 
const tok_idx = tokens[node_idx];
 
// tags[tok_idx] is the token called 'mut token' in AstGen
return (tags[tok_idx] == .keyword_var);
}
 
fn getBlockSource(
self: Autodoc,
file: *File,
parent_src: SrcLocInfo,
block_src_node: i32,
) AutodocErrors![]const u8 {
const tree = try file.getTree(self.zcu.gpa);
const block_src = try self.srcLocInfo(file, block_src_node, parent_src);
return tree.getNodeSource(block_src.src_node);
}
 
fn getTLDocComment(self: *Autodoc, file: *File) ![]const u8 {
const source = (try file.getSource(self.zcu.gpa)).bytes;
var tokenizer = Tokenizer.init(source);
var tok = tokenizer.next();
var comment = std.ArrayList(u8).init(self.arena);
while (tok.tag == .container_doc_comment) : (tok = tokenizer.next()) {
try comment.appendSlice(source[tok.loc.start + "//!".len .. tok.loc.end + 1]);
}
 
return comment.items;
}
 
/// Returns the doc comment cleared of autodoc directives.
fn findGuidePaths(self: *Autodoc, file: *File, str: []const u8) ![]const u8 {
const guide_prefix = "zig-autodoc-guide:";
const section_prefix = "zig-autodoc-section:";
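 
// Purely illustrative (not from the original source): a root file's container
// doc comment could drive this parser like so, assuming guide files named
// "intro.md" and "advanced.md" exist next to the root source file:
//
//   //! Module-level documentation text, which ends up in `clean_docs`.
//   //! zig-autodoc-section: Getting Started
//   //! zig-autodoc-guide: intro.md
//   //! zig-autodoc-section: Advanced
//   //! zig-autodoc-guide: advanced.md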
 
try self.guide_sections.append(self.arena, .{}); // add a default section
var current_section = &self.guide_sections.items[self.guide_sections.items.len - 1];
 
var clean_docs: std.ArrayListUnmanaged(u8) = .{};
errdefer clean_docs.deinit(self.arena);
 
// TODO: this algo is kinda inefficient
 
var it = std.mem.splitScalar(u8, str, '\n');
while (it.next()) |line| {
const trimmed_line = std.mem.trim(u8, line, " ");
if (std.mem.startsWith(u8, trimmed_line, guide_prefix)) {
const path = trimmed_line[guide_prefix.len..];
const trimmed_path = std.mem.trim(u8, path, " ");
try self.addGuide(file, trimmed_path, current_section);
} else if (std.mem.startsWith(u8, trimmed_line, section_prefix)) {
const section_name = trimmed_line[section_prefix.len..];
const trimmed_section_name = std.mem.trim(u8, section_name, " ");
try self.guide_sections.append(self.arena, .{
.name = trimmed_section_name,
});
current_section = &self.guide_sections.items[self.guide_sections.items.len - 1];
} else {
try clean_docs.appendSlice(self.arena, line);
try clean_docs.append(self.arena, '\n');
}
}
 
return clean_docs.toOwnedSlice(self.arena);
}
 
fn addGuide(self: *Autodoc, file: *File, guide_path: []const u8, section: *Section) !void {
if (guide_path.len == 0) return error.MissingAutodocGuideName;
 
const resolved_path = try std.fs.path.resolve(self.arena, &[_][]const u8{
file.sub_file_path, "..", guide_path,
});
 
var guide_file = try file.mod.root.openFile(resolved_path, .{});
defer guide_file.close();
 
const guide = guide_file.reader().readAllAlloc(self.arena, 1 * 1024 * 1024) catch |err| switch (err) {
error.StreamTooLong => @panic("stream too long"),
else => |e| return e,
};
 
try section.guides.append(self.arena, .{
.name = resolved_path,
.body = guide,
});
}
 
src/Compilation.zig added: 7501, removed: 25316, total 0
@@ -36,7 +36,6 @@ const Cache = std.Build.Cache;
const c_codegen = @import("codegen/c.zig");
const libtsan = @import("libtsan.zig");
const Zir = std.zig.Zir;
const Autodoc = @import("Autodoc.zig");
const resinator = @import("resinator.zig");
const Builtin = @import("Builtin.zig");
const LlvmObject = @import("codegen/llvm.zig").Object;
@@ -734,6 +733,8 @@ pub const MiscTask = enum {
compiler_rt,
zig_libc,
analyze_mod,
docs_copy,
docs_wasm,
 
@"musl crti.o",
@"musl crtn.o",
@@ -2347,10 +2348,6 @@ fn flush(comp: *Compilation, arena: Allocator, prog_node: *std.Progress.Node) !v
try emitLlvmObject(comp, arena, default_emit, null, llvm_object, prog_node);
}
}
 
if (comp.totalErrorCount() == 0) {
try maybeGenerateAutodocs(comp, prog_node);
}
}
 
/// This function is called by the frontend before flush(). It communicates that
@@ -2401,26 +2398,6 @@ fn renameTmpIntoCache(
}
}
 
fn maybeGenerateAutodocs(comp: *Compilation, prog_node: *std.Progress.Node) !void {
const mod = comp.module orelse return;
// TODO: do this in a separate job during performAllTheWork(). The
// file copies at the end of generate() can also be extracted to
// separate jobs
if (!build_options.only_c and !build_options.only_core_functionality) {
if (comp.docs_emit) |emit| {
var dir = try emit.directory.handle.makeOpenPath(emit.sub_path, .{});
defer dir.close();
 
var sub_prog_node = prog_node.start("Generating documentation", 0);
sub_prog_node.activate();
sub_prog_node.context.refresh();
defer sub_prog_node.end();
 
try Autodoc.generate(mod, dir);
}
}
}
 
/// Communicate the output binary location to parent Compilations.
fn wholeCacheModeSetBinFilePath(
comp: *Compilation,
@@ -3346,6 +3323,9 @@ pub fn performAllTheWork(
var zir_prog_node = main_progress_node.start("AST Lowering", 0);
defer zir_prog_node.end();
 
var wasm_prog_node = main_progress_node.start("Compile Autodocs", 0);
defer wasm_prog_node.end();
 
var c_obj_prog_node = main_progress_node.start("Compile C Objects", comp.c_source_files.len);
defer c_obj_prog_node.end();
 
@@ -3355,6 +3335,13 @@ pub fn performAllTheWork(
comp.work_queue_wait_group.reset();
defer comp.work_queue_wait_group.wait();
 
if (!build_options.only_c and !build_options.only_core_functionality) {
if (comp.docs_emit != null) {
try taskDocsCopy(comp, &comp.work_queue_wait_group);
comp.work_queue_wait_group.spawnManager(workerDocsWasm, .{ comp, &wasm_prog_node });
}
}
 
{
const astgen_frame = tracy.namedFrame("astgen");
defer astgen_frame.end();
@@ -3769,6 +3756,255 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
}
}
 
fn taskDocsCopy(comp: *Compilation, wg: *WaitGroup) !void {
wg.start();
errdefer wg.finish();
try comp.thread_pool.spawn(workerDocsCopy, .{ comp, wg });
}
 
fn workerDocsCopy(comp: *Compilation, wg: *WaitGroup) void {
defer wg.finish();
docsCopyFallible(comp) catch |err| {
return comp.lockAndSetMiscFailure(
.docs_copy,
"unable to copy autodocs artifacts: {s}",
.{@errorName(err)},
);
};
}
 
fn docsCopyFallible(comp: *Compilation) anyerror!void {
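// A sketch of the resulting docs output directory, based on the copies
// performed here and in workerDocsWasmFallible below (the emit location
// itself depends on how `docs_emit` was configured):
//
//   index.html    copied from <zig lib dir>/docs/index.html
//   main.js       copied from <zig lib dir>/docs/main.js
//   main.wasm     built by workerDocsWasm from <zig lib dir>/docs/wasm/main.zig
//   sources.tar   the root module's .zig sources in 512-byte tar blocks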
const emit = comp.docs_emit.?;
var out_dir = emit.directory.handle.makeOpenPath(emit.sub_path, .{}) catch |err| {
return comp.lockAndSetMiscFailure(
.docs_copy,
"unable to create output directory '{}{s}': {s}",
.{ emit.directory, emit.sub_path, @errorName(err) },
);
};
defer out_dir.close();
 
for (&[_][]const u8{ "docs/main.js", "docs/index.html" }) |sub_path| {
const basename = std.fs.path.basename(sub_path);
comp.zig_lib_directory.handle.copyFile(sub_path, out_dir, basename, .{}) catch |err| {
comp.lockAndSetMiscFailure(.docs_copy, "unable to copy {s}: {s}", .{
sub_path,
@errorName(err),
});
return;
};
}
 
var tar_file = out_dir.createFile("sources.tar", .{}) catch |err| {
return comp.lockAndSetMiscFailure(
.docs_copy,
"unable to create '{}{s}/sources.tar': {s}",
.{ emit.directory, emit.sub_path, @errorName(err) },
);
};
defer tar_file.close();
 
const root = comp.root_mod.root;
const sub_path = if (root.sub_path.len == 0) "." else root.sub_path;
var mod_dir = root.root_dir.handle.openDir(sub_path, .{ .iterate = true }) catch |err| {
return comp.lockAndSetMiscFailure(.docs_copy, "unable to open directory '{}': {s}", .{
root, @errorName(err),
});
};
defer mod_dir.close();
 
var walker = try mod_dir.walk(comp.gpa);
defer walker.deinit();
 
const padding_buffer = [1]u8{0} ** 512;
 
while (try walker.next()) |entry| {
switch (entry.kind) {
.file => {
if (!std.mem.endsWith(u8, entry.basename, ".zig")) continue;
if (std.mem.eql(u8, entry.basename, "test.zig")) continue;
if (std.mem.endsWith(u8, entry.basename, "_test.zig")) continue;
},
else => continue,
}
 
var file = mod_dir.openFile(entry.path, .{}) catch |err| {
return comp.lockAndSetMiscFailure(.docs_copy, "unable to open '{}{s}': {s}", .{
root, entry.path, @errorName(err),
});
};
defer file.close();
 
const stat = file.stat() catch |err| {
return comp.lockAndSetMiscFailure(.docs_copy, "unable to stat '{}{s}': {s}", .{
root, entry.path, @errorName(err),
});
};
 
var file_header = std.tar.output.Header.init();
file_header.typeflag = .regular;
try file_header.setPath(comp.root_name, entry.path);
try file_header.setSize(stat.size);
try file_header.updateChecksum();
 
const header_bytes = std.mem.asBytes(&file_header);
const padding = p: {
const remainder: u16 = @intCast(stat.size % 512);
const n = if (remainder > 0) 512 - remainder else 0;
break :p padding_buffer[0..n];
};
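// For example (illustrative numbers only): a 700-byte source file leaves a
// remainder of 700 % 512 == 188, so 324 zero bytes of padding are appended
// and the contents occupy exactly two 512-byte tar blocks.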
 
var header_and_trailer: [2]std.os.iovec_const = .{
.{ .iov_base = header_bytes.ptr, .iov_len = header_bytes.len },
.{ .iov_base = padding.ptr, .iov_len = padding.len },
};
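// Presumably the first `header_count` entries are written before the file
// contents and the remaining entries after it, so the 512-byte tar header
// precedes the file data and the zero padding follows it.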
 
try tar_file.writeFileAll(file, .{
.in_len = stat.size,
.headers_and_trailers = &header_and_trailer,
.header_count = 1,
});
}
}
 
fn workerDocsWasm(comp: *Compilation, prog_node: *std.Progress.Node) void {
workerDocsWasmFallible(comp, prog_node) catch |err| {
comp.lockAndSetMiscFailure(.docs_wasm, "unable to build autodocs: {s}", .{
@errorName(err),
});
};
}
 
fn workerDocsWasmFallible(comp: *Compilation, prog_node: *std.Progress.Node) anyerror!void {
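// In outline: build <zig lib dir>/docs/wasm/main.zig as a wasm32-freestanding
// ReleaseSmall executable (rdynamic, no libc) via a whole-cache-mode
// sub-compilation, then copy the produced binary into the docs output
// directory as "main.wasm".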
const gpa = comp.gpa;
 
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
 
const optimize_mode = std.builtin.OptimizeMode.ReleaseSmall;
const output_mode = std.builtin.OutputMode.Exe;
const resolved_target: Package.Module.ResolvedTarget = .{
.result = std.zig.system.resolveTargetQuery(.{
.cpu_arch = .wasm32,
.os_tag = .freestanding,
.cpu_features_add = std.Target.wasm.featureSet(&.{
.atomics,
.bulk_memory,
// .extended_const, not supported by Safari
.multivalue,
.mutable_globals,
.nontrapping_fptoint,
.reference_types,
//.relaxed_simd, not supported by Firefox or Safari
.sign_ext,
// observed to cause Error occured during wast conversion :
// Unknown operator: 0xfd058 in Firefox 117
//.simd128,
// .tail_call, not supported by Safari
}),
}) catch unreachable,
 
.is_native_os = false,
.is_native_abi = false,
};
 
const config = try Config.resolve(.{
.output_mode = output_mode,
.resolved_target = resolved_target,
.is_test = false,
.have_zcu = true,
.emit_bin = true,
.root_optimize_mode = optimize_mode,
.link_libc = false,
.rdynamic = true,
});
 
const src_basename = "main.zig";
const root_name = std.fs.path.stem(src_basename);
 
const root_mod = try Package.Module.create(arena, .{
.global_cache_directory = comp.global_cache_directory,
.paths = .{
.root = .{
.root_dir = comp.zig_lib_directory,
.sub_path = "docs/wasm",
},
.root_src_path = src_basename,
},
.fully_qualified_name = root_name,
.inherited = .{
.resolved_target = resolved_target,
.optimize_mode = optimize_mode,
},
.global = config,
.cc_argv = &.{},
.parent = null,
.builtin_mod = null,
.builtin_modules = null, // there is only one module in this compilation
});
const bin_basename = try std.zig.binNameAlloc(arena, .{
.root_name = root_name,
.target = resolved_target.result,
.output_mode = output_mode,
});
 
const sub_compilation = try Compilation.create(gpa, arena, .{
.global_cache_directory = comp.global_cache_directory,
.local_cache_directory = comp.global_cache_directory,
.zig_lib_directory = comp.zig_lib_directory,
.self_exe_path = comp.self_exe_path,
.config = config,
.root_mod = root_mod,
.entry = .disabled,
.cache_mode = .whole,
.root_name = root_name,
.thread_pool = comp.thread_pool,
.libc_installation = comp.libc_installation,
.emit_bin = .{
.directory = null, // Put it in the cache directory.
.basename = bin_basename,
},
.verbose_cc = comp.verbose_cc,
.verbose_link = comp.verbose_link,
.verbose_air = comp.verbose_air,
.verbose_intern_pool = comp.verbose_intern_pool,
.verbose_generic_instances = comp.verbose_generic_instances,
.verbose_llvm_ir = comp.verbose_llvm_ir,
.verbose_llvm_bc = comp.verbose_llvm_bc,
.verbose_cimport = comp.verbose_cimport,
.verbose_llvm_cpu_features = comp.verbose_llvm_cpu_features,
});
defer sub_compilation.destroy();
 
try comp.updateSubCompilation(sub_compilation, .docs_wasm, prog_node);
 
const emit = comp.docs_emit.?;
var out_dir = emit.directory.handle.makeOpenPath(emit.sub_path, .{}) catch |err| {
return comp.lockAndSetMiscFailure(
.docs_copy,
"unable to create output directory '{}{s}': {s}",
.{ emit.directory, emit.sub_path, @errorName(err) },
);
};
defer out_dir.close();
 
sub_compilation.local_cache_directory.handle.copyFile(
sub_compilation.cache_use.whole.bin_sub_path.?,
out_dir,
"main.wasm",
.{},
) catch |err| {
return comp.lockAndSetMiscFailure(.docs_copy, "unable to copy '{}{s}' to '{}{s}': {s}", .{
sub_compilation.local_cache_directory,
sub_compilation.cache_use.whole.bin_sub_path.?,
emit.directory,
emit.sub_path,
@errorName(err),
});
};
}
 
const AstGenSrc = union(enum) {
root,
import: struct {
 
ev/null added: 7501, removed: 25316, total 0
@@ -1,435 +0,0 @@
const std = @import("std");
const builtin = @import("builtin");
const io = std.io;
const fs = std.fs;
const process = std.process;
const ChildProcess = std.ChildProcess;
const Progress = std.Progress;
const print = std.debug.print;
const mem = std.mem;
const testing = std.testing;
const Allocator = std.mem.Allocator;
const Module = @import("../Module.zig");
 
pub fn genHtml(
allocator: Allocator,
src: *Module.File,
out: anytype,
) !void {
try out.writeAll(
\\<!doctype html>
\\<html lang="en">
\\<head>
\\ <meta charset="utf-8">
\\ <meta name="viewport" content="width=device-width, initial-scale=1.0">
);
try out.print(" <title>{s} - source view</title>\n", .{src.sub_file_path});
try out.writeAll(
\\ <link rel="icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAgklEQVR4AWMYWuD7EllJIM4G4g4g5oIJ/odhOJ8wToOxSTXgNxDHoeiBMfA4+wGShjyYOCkG/IGqWQziEzYAoUAeiF9D5U+DxEg14DRU7jWIT5IBIOdCxf+A+CQZAAoopEB7QJwBCBwHiip8UYmRdrAlDpIMgApwQZNnNii5Dq0MBgCxxycBnwEd+wAAAABJRU5ErkJggg==">
\\ <link rel="icon" href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNTMgMTQwIj48ZyBmaWxsPSIjRjdBNDFEIj48Zz48cG9seWdvbiBwb2ludHM9IjQ2LDIyIDI4LDQ0IDE5LDMwIi8+PHBvbHlnb24gcG9pbnRzPSI0NiwyMiAzMywzMyAyOCw0NCAyMiw0NCAyMiw5NSAzMSw5NSAyMCwxMDAgMTIsMTE3IDAsMTE3IDAsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMzEsOTUgMTIsMTE3IDQsMTA2Ii8+PC9nPjxnPjxwb2x5Z29uIHBvaW50cz0iNTYsMjIgNjIsMzYgMzcsNDQiLz48cG9seWdvbiBwb2ludHM9IjU2LDIyIDExMSwyMiAxMTEsNDQgMzcsNDQgNTYsMzIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTE2LDk1IDk3LDExNyA5MCwxMDQiLz48cG9seWdvbiBwb2ludHM9IjExNiw5NSAxMDAsMTA0IDk3LDExNyA0MiwxMTcgNDIsOTUiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTUwLDAgNTIsMTE3IDMsMTQwIDEwMSwyMiIvPjwvZz48Zz48cG9seWdvbiBwb2ludHM9IjE0MSwyMiAxNDAsNDAgMTIyLDQ1Ii8+PHBvbHlnb24gcG9pbnRzPSIxNTMsMjIgMTUzLDExNyAxMDYsMTE3IDEyMCwxMDUgMTI1LDk1IDEzMSw5NSAxMzEsNDUgMTIyLDQ1IDEzMiwzNiAxNDEsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTI1LDk1IDEzMCwxMTAgMTA2LDExNyIvPjwvZz48L2c+PC9zdmc+">
\\ <style>
\\ body{
\\ font-family: system-ui, -apple-system, Roboto, "Segoe UI", sans-serif;
\\ margin: 0;
\\ line-height: 1.5;
\\ }
\\
\\ pre > code {
\\ display: block;
\\ overflow: auto;
\\ line-height: normal;
\\ margin: 0em;
\\ }
\\ .tok-kw {
\\ color: #333;
\\ font-weight: bold;
\\ }
\\ .tok-str {
\\ color: #d14;
\\ }
\\ .tok-builtin {
\\ color: #005C7A;
\\ }
\\ .tok-comment {
\\ color: #545454;
\\ font-style: italic;
\\ }
\\ .tok-fn {
\\ color: #900;
\\ font-weight: bold;
\\ }
\\ .tok-null {
\\ color: #005C5C;
\\ }
\\ .tok-number {
\\ color: #005C5C;
\\ }
\\ .tok-type {
\\ color: #458;
\\ font-weight: bold;
\\ }
\\ pre {
\\ counter-reset: line;
\\ }
\\ pre .line:before {
\\ counter-increment: line;
\\ content: counter(line);
\\ display: inline-block;
\\ padding-right: 1em;
\\ width: 2em;
\\ text-align: right;
\\ color: #999;
\\ }
\\
\\ .line {
\\ width: 100%;
\\ display: inline-block;
\\ }
\\ .line:target {
\\ border-top: 1px solid #ccc;
\\ border-bottom: 1px solid #ccc;
\\ background: #fafafa;
\\ }
\\
\\ @media (prefers-color-scheme: dark) {
\\ body{
\\ background:#222;
\\ color: #ccc;
\\ }
\\ pre > code {
\\ color: #ccc;
\\ background: #222;
\\ border: unset;
\\ }
\\ .line:target {
\\ border-top: 1px solid #444;
\\ border-bottom: 1px solid #444;
\\ background: #333;
\\ }
\\ .tok-kw {
\\ color: #eee;
\\ }
\\ .tok-str {
\\ color: #2e5;
\\ }
\\ .tok-builtin {
\\ color: #ff894c;
\\ }
\\ .tok-comment {
\\ color: #aa7;
\\ }
\\ .tok-fn {
\\ color: #B1A0F8;
\\ }
\\ .tok-null {
\\ color: #ff8080;
\\ }
\\ .tok-number {
\\ color: #ff8080;
\\ }
\\ .tok-type {
\\ color: #68f;
\\ }
\\ }
\\ </style>
\\</head>
\\<body>
\\
);
 
const source = try src.getSource(allocator);
try tokenizeAndPrintRaw(out, source.bytes);
try out.writeAll(
\\</body>
\\</html>
);
}
 
const start_line = "<span class=\"line\" id=\"L{d}\">";
const end_line = "</span>\n";
 
var line_counter: usize = 1;
 
pub fn tokenizeAndPrintRaw(
out: anytype,
src: [:0]const u8,
) !void {
line_counter = 1;
 
try out.print("<pre><code>" ++ start_line, .{line_counter});
var tokenizer = std.zig.Tokenizer.init(src);
var index: usize = 0;
var next_tok_is_fn = false;
while (true) {
const prev_tok_was_fn = next_tok_is_fn;
next_tok_is_fn = false;
 
const token = tokenizer.next();
if (mem.indexOf(u8, src[index..token.loc.start], "//")) |comment_start_off| {
// render one comment
const comment_start = index + comment_start_off;
const comment_end_off = mem.indexOf(u8, src[comment_start..token.loc.start], "\n");
const comment_end = if (comment_end_off) |o| comment_start + o else token.loc.start;
 
try writeEscapedLines(out, src[index..comment_start]);
try out.writeAll("<span class=\"tok-comment\">");
try writeEscaped(out, src[comment_start..comment_end]);
try out.writeAll("</span>\n");
index = comment_end;
tokenizer.index = index;
continue;
}
 
try writeEscapedLines(out, src[index..token.loc.start]);
switch (token.tag) {
.eof => break,
 
.keyword_addrspace,
.keyword_align,
.keyword_and,
.keyword_asm,
.keyword_async,
.keyword_await,
.keyword_break,
.keyword_catch,
.keyword_comptime,
.keyword_const,
.keyword_continue,
.keyword_defer,
.keyword_else,
.keyword_enum,
.keyword_errdefer,
.keyword_error,
.keyword_export,
.keyword_extern,
.keyword_for,
.keyword_if,
.keyword_inline,
.keyword_noalias,
.keyword_noinline,
.keyword_nosuspend,
.keyword_opaque,
.keyword_or,
.keyword_orelse,
.keyword_packed,
.keyword_anyframe,
.keyword_pub,
.keyword_resume,
.keyword_return,
.keyword_linksection,
.keyword_callconv,
.keyword_struct,
.keyword_suspend,
.keyword_switch,
.keyword_test,
.keyword_threadlocal,
.keyword_try,
.keyword_union,
.keyword_unreachable,
.keyword_usingnamespace,
.keyword_var,
.keyword_volatile,
.keyword_allowzero,
.keyword_while,
.keyword_anytype,
=> {
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
 
.keyword_fn => {
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
next_tok_is_fn = true;
},
 
.string_literal,
.char_literal,
=> {
try out.writeAll("<span class=\"tok-str\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
 
.multiline_string_literal_line => {
if (src[token.loc.end - 1] == '\n') {
try out.writeAll("<span class=\"tok-str\">");
try writeEscaped(out, src[token.loc.start .. token.loc.end - 1]);
line_counter += 1;
try out.print("</span>" ++ end_line ++ "\n" ++ start_line, .{line_counter});
} else {
try out.writeAll("<span class=\"tok-str\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
}
},
 
.builtin => {
try out.writeAll("<span class=\"tok-builtin\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
 
.doc_comment,
.container_doc_comment,
=> {
try out.writeAll("<span class=\"tok-comment\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
 
.identifier => {
const tok_bytes = src[token.loc.start..token.loc.end];
if (mem.eql(u8, tok_bytes, "undefined") or
mem.eql(u8, tok_bytes, "null") or
mem.eql(u8, tok_bytes, "true") or
mem.eql(u8, tok_bytes, "false"))
{
try out.writeAll("<span class=\"tok-null\">");
try writeEscaped(out, tok_bytes);
try out.writeAll("</span>");
} else if (prev_tok_was_fn) {
try out.writeAll("<span class=\"tok-fn\">");
try writeEscaped(out, tok_bytes);
try out.writeAll("</span>");
} else {
const is_int = blk: {
if (src[token.loc.start] != 'i' and src[token.loc.start] != 'u')
break :blk false;
var i = token.loc.start + 1;
if (i == token.loc.end)
break :blk false;
while (i != token.loc.end) : (i += 1) {
if (src[i] < '0' or src[i] > '9')
break :blk false;
}
break :blk true;
};
if (is_int or isType(tok_bytes)) {
try out.writeAll("<span class=\"tok-type\">");
try writeEscaped(out, tok_bytes);
try out.writeAll("</span>");
} else {
try writeEscaped(out, tok_bytes);
}
}
},
 
.number_literal => {
try out.writeAll("<span class=\"tok-number\">");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
 
.bang,
.pipe,
.pipe_pipe,
.pipe_equal,
.equal,
.equal_equal,
.equal_angle_bracket_right,
.bang_equal,
.l_paren,
.r_paren,
.semicolon,
.percent,
.percent_equal,
.l_brace,
.r_brace,
.l_bracket,
.r_bracket,
.period,
.period_asterisk,
.ellipsis2,
.ellipsis3,
.caret,
.caret_equal,
.plus,
.plus_plus,
.plus_equal,
.plus_percent,
.plus_percent_equal,
.plus_pipe,
.plus_pipe_equal,
.minus,
.minus_equal,
.minus_percent,
.minus_percent_equal,
.minus_pipe,
.minus_pipe_equal,
.asterisk,
.asterisk_equal,
.asterisk_asterisk,
.asterisk_percent,
.asterisk_percent_equal,
.asterisk_pipe,
.asterisk_pipe_equal,
.arrow,
.colon,
.slash,
.slash_equal,
.comma,
.ampersand,
.ampersand_equal,
.question_mark,
.angle_bracket_left,
.angle_bracket_left_equal,
.angle_bracket_angle_bracket_left,
.angle_bracket_angle_bracket_left_equal,
.angle_bracket_angle_bracket_left_pipe,
.angle_bracket_angle_bracket_left_pipe_equal,
.angle_bracket_right,
.angle_bracket_right_equal,
.angle_bracket_angle_bracket_right,
.angle_bracket_angle_bracket_right_equal,
.tilde,
=> try writeEscaped(out, src[token.loc.start..token.loc.end]),
 
.invalid, .invalid_periodasterisks => return error.ParseError,
}
index = token.loc.end;
}
try out.writeAll(end_line ++ "</code></pre>");
}
 
fn writeEscapedLines(out: anytype, text: []const u8) !void {
for (text) |char| {
if (char == '\n') {
try out.writeAll(end_line);
line_counter += 1;
try out.print(start_line, .{line_counter});
} else {
try writeEscaped(out, &[_]u8{char});
}
}
}
 
fn writeEscaped(out: anytype, input: []const u8) !void {
for (input) |c| {
try switch (c) {
'&' => out.writeAll("&amp;"),
'<' => out.writeAll("&lt;"),
'>' => out.writeAll("&gt;"),
'"' => out.writeAll("&quot;"),
else => out.writeByte(c),
};
}
}
 
const builtin_types = [_][]const u8{
"f16", "f32", "f64", "f80", "f128",
"c_longdouble", "c_short", "c_ushort", "c_int", "c_uint",
"c_long", "c_ulong", "c_longlong", "c_ulonglong", "c_char",
"anyopaque", "void", "bool", "isize", "usize",
"noreturn", "type", "anyerror", "comptime_int", "comptime_float",
};
 
fn isType(name: []const u8) bool {
for (builtin_types) |t| {
if (mem.eql(u8, t, name))
return true;
}
return false;
}
 
src/main.zig added: 7501, removed: 25316, total 0
@@ -98,6 +98,7 @@ const normal_usage =
\\
\\ env Print lib path, std path, cache directory, and version
\\ help Print this help and exit
\\ std View standard library documentation in a browser
\\ libc Display native libc paths file or validate one
\\ targets List available compilation targets
\\ version Print version number and exit
@@ -309,6 +310,14 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
.root_src_path = "libc.zig",
.prepend_zig_lib_dir_path = true,
});
} else if (mem.eql(u8, cmd, "std")) {
return jitCmd(gpa, arena, cmd_args, .{
.cmd_name = "std",
.root_src_path = "std-docs.zig",
.prepend_zig_lib_dir_path = true,
.prepend_zig_exe_path = true,
.prepend_global_cache_path = true,
});
} else if (mem.eql(u8, cmd, "init")) {
return cmdInit(gpa, arena, cmd_args);
} else if (mem.eql(u8, cmd, "targets")) {
@@ -5556,6 +5565,8 @@ const JitCmdOptions = struct {
cmd_name: []const u8,
root_src_path: []const u8,
prepend_zig_lib_dir_path: bool = false,
prepend_global_cache_path: bool = false,
prepend_zig_exe_path: bool = false,
depend_on_aro: bool = false,
capture: ?*[]u8 = null,
};
@@ -5714,6 +5725,10 @@ fn jitCmd(
 
if (options.prepend_zig_lib_dir_path)
child_argv.appendAssumeCapacity(zig_lib_directory.path.?);
if (options.prepend_zig_exe_path)
child_argv.appendAssumeCapacity(self_exe_path);
if (options.prepend_global_cache_path)
child_argv.appendAssumeCapacity(global_cache_directory.path.?);
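// With all three prepend options enabled, as the `std` subcommand above
// requests, the JIT-compiled program therefore receives (after its own
// argv[0]): the zig lib directory, the zig executable path, and the global
// cache path, followed by any user-supplied arguments.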
 
child_argv.appendSliceAssumeCapacity(args);