forked from mirror/vulkan-zig
move generator/ to src/
A long time ago there was the idea to use src/ for something else, but that is no longer relevant. It's nice to switch to a more conventional repo layout.
src/build_integration.zig (new file, 232 lines)
@@ -0,0 +1,232 @@
const std = @import("std");
const Build = std.Build;

/// Utility functionality to help with compiling shaders from build.zig.
/// Invokes a shader compile command (e.g., glslc ...) for each shader
/// added via `add`.
pub const ShaderCompileStep = struct {
    /// The directory within the zig-cache directory that is used to store
    /// shader artifacts.
    pub const cache_dir = "shaders";

    /// This structure contains additional options that pertain to specific shaders only.
    pub const ShaderOptions = struct {
        /// Additional arguments that should be passed to the shader compiler.
        args: []const []const u8 = &.{},

        /// Paths of additional files that should be watched for changes to
        /// trigger recompilation.
        watched_files: []const []const u8 = &.{},

        /// To ensure that if compilation options change, the shader is recompiled
        /// properly.
        fn hash(self: ShaderOptions, b: *Build, hasher: anytype) !void {
            for (self.args) |arg| {
                hasher.update(arg);
            }
            for (self.watched_files) |file_path| {
                const full_path = b.build_root.join(b.allocator, &.{file_path}) catch unreachable;

                const source = std.fs.cwd().readFileAlloc(
                    b.allocator,
                    full_path,
                    std.math.maxInt(usize),
                ) catch |err| switch (err) {
                    error.FileNotFound => {
                        std.log.err("could not open file '{s}'", .{file_path});
                        return error.FileNotFound;
                    },
                    else => |e| return e,
                };
                hasher.update(source);
            }
        }
    };

    /// Structure representing a shader to be compiled.
    const Shader = struct {
        /// The name of the shader in the generated file.
        /// Must be unique for all shaders added to this ShaderCompileStep.
        name: []const u8,

        /// The path to the shader, relative to the current build root.
        source_path: []const u8,

        /// The final hash of the shader.
        hash: [64]u8,

        /// Miscellaneous options to pass when compiling the shader.
        options: ShaderOptions,
    };

    step: Build.Step,

    /// The command and optional arguments used to invoke the shader compiler.
    compile_command: []const []const u8,

    /// The compiler flag used to specify the output path, `-o` most of the time.
    output_flag: []u8,

    /// List of shaders that are to be compiled.
    shaders: std.ArrayList(Shader),

    /// The main Zig file that contains all the shaders. Each shader is included as
    /// `pub const ${name} align(@alignOf(u32)) = @embedFile("${path}").*;`
    generated_file: Build.GeneratedFile,

    /// Create a ShaderCompileStep for `builder`. When this step is invoked by the build
    /// system, `<compile_command...> <shader_source> <output_flag> <path>` is invoked for each shader.
    /// For example, if one calls this with `create(b, &.{"glslc"}, "-o")` and then
    /// `c.add("vertex", "vertex.glsl", .{})`, the command will be `glslc vertex.glsl -o <path>`.
    pub fn create(builder: *Build, compile_command: []const []const u8, output_flag: []const u8) *ShaderCompileStep {
        const self = builder.allocator.create(ShaderCompileStep) catch unreachable;
        self.* = .{
            .step = Build.Step.init(.{
                .id = .custom,
                .name = "shaders",
                .owner = builder,
                .makeFn = make,
            }),
            .compile_command = builder.dupeStrings(compile_command),
            .output_flag = builder.dupe(output_flag),
            .shaders = std.ArrayList(Shader).init(builder.allocator),
            .generated_file = undefined,
        };
        self.generated_file = .{ .step = &self.step };
        return self;
    }

    /// Returns a module containing the compiled shaders.
    pub fn getModule(self: *ShaderCompileStep) *Build.Module {
        return self.step.owner.createModule(.{
            .root_source_file = self.getSource(),
        });
    }

    /// Returns the file source for the generated shader resource code.
    pub fn getSource(self: *ShaderCompileStep) Build.LazyPath {
        return .{ .generated = &self.generated_file };
    }

    /// Add a shader to be compiled. `src` is the shader source path, relative to the project root.
    /// The compiled binary is stored in the zig-cache and exposed as `pub const <name>` in the
    /// generated module, from where it can be embedded into an executable.
    pub fn add(self: *ShaderCompileStep, name: []const u8, src: []const u8, options: ShaderOptions) void {
        const b = self.step.owner;
        const full_source_path = b.build_root.join(b.allocator, &.{src}) catch unreachable;
        self.shaders.append(.{
            .name = name,
            .source_path = full_source_path,
            .hash = undefined,
            .options = options,
        }) catch unreachable;
    }

    /// Create a hash of a shader's source contents.
    fn hashShaderToFileName(self: *ShaderCompileStep, shader: Shader) ![64]u8 {
        const b = self.step.owner;
        const source = std.fs.cwd().readFileAlloc(
            b.allocator,
            shader.source_path,
            std.math.maxInt(usize),
        ) catch |err| switch (err) {
            error.FileNotFound => {
                std.log.err("could not open shader '{s}'", .{shader.source_path});
                return error.FileNotFound;
            },
            else => |e| return e,
        };

        var hasher = std.crypto.hash.blake2.Blake2b384.init(.{});
        // Random bytes to make ShaderCompileStep unique. Refresh with new random
        // bytes when the implementation is changed in a non-backwards-compatible way.
        hasher.update("Pw7Z*9Q8r!fLY8&!");
        // Make sure that there is no cache hit if the shader's source has changed.
        hasher.update(source);
        // Not only the shader source must be the same to ensure uniqueness -
        // the compilation options must be the same as well!
        try shader.options.hash(b, &hasher);
        // And the compile command, too.
        for (self.compile_command) |cmd| {
            hasher.update(cmd);
        }

        return digest(&hasher);
    }

    /// Create a base-64 hash digest from a hasher, which we can use as file name.
    fn digest(hasher: anytype) [64]u8 {
        var hash_digest: [48]u8 = undefined;
        hasher.final(&hash_digest);
        var hash: [64]u8 = undefined;
        _ = std.fs.base64_encoder.encode(&hash, &hash_digest);
        return hash;
    }

    /// Internal build function.
    fn make(step: *Build.Step, progress: *std.Progress.Node) !void {
        _ = progress;
        const b = step.owner;
        const self: *ShaderCompileStep = @fieldParentPtr("step", step);
        const cwd = std.fs.cwd();

        var cmd = std.ArrayList([]const u8).init(b.allocator);
        try cmd.appendSlice(self.compile_command);
        const base_cmd_len = cmd.items.len;

        var shaders_file_contents = std.ArrayList(u8).init(b.allocator);
        const shaders_out = shaders_file_contents.writer();

        const shaders_dir = try b.cache_root.join(
            b.allocator,
            &.{cache_dir},
        );
        try cwd.makePath(shaders_dir);

        for (self.shaders.items) |*shader| {
            shader.hash = try self.hashShaderToFileName(shader.*);
            const shader_out_path = try std.fs.path.join(b.allocator, &.{
                shaders_dir,
                &shader.hash,
            });

            // This path must be relative to the shaders zig file - which is in the same directory.
            try shaders_out.print("pub const {s} align(@alignOf(u32)) = @embedFile(\"{s}\").*;\n", .{
                shader.name,
                &shader.hash,
            });

            // If we have a cache hit, we can save some compile time by not invoking the compile command.
            compile_shader: {
                std.fs.accessAbsolute(shader_out_path, .{}) catch |err| switch (err) {
                    error.FileNotFound => break :compile_shader,
                    else => |e| return e,
                };

                continue;
            }

            cmd.items.len = base_cmd_len;

            try cmd.appendSlice(shader.options.args);
            try cmd.appendSlice(&.{ shader.source_path, self.output_flag, shader_out_path });
            try step.evalChildProcess(cmd.items);
        }

        // Generate a file name for the shaders zig source based on the contents of shaders_file_contents.
        // In this case we don't need to skip writing an unchanged file - Zig already does this check for us.
        var hasher = std.crypto.hash.blake2.Blake2b384.init(.{});
        // Note: we don't need to seed the hasher - it transitively contains the seed from
        // hashShaderToFileName. Change that if the implementation changes.
        hasher.update(shaders_file_contents.items);

        const shaders_path = try std.fs.path.join(
            b.allocator,
            &.{ shaders_dir, &digest(&hasher) },
        );

        try cwd.writeFile(shaders_path, shaders_file_contents.items);
        self.generated_file.path = shaders_path;
    }
};
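For orientation, here is a minimal build.zig usage sketch for this step. The shader names and paths, and the way this package's build module is imported (`vkgen`, `b`, `exe`), are illustrative assumptions, not part of this commit:

// Hypothetical build.zig snippet; `vkgen` stands in for however this package
// is imported into the consumer's build script.
const shaders = vkgen.ShaderCompileStep.create(
    b,
    &.{ "glslc", "--target-env=vulkan1.2" }, // compile command, passed through verbatim
    "-o", // flag the compiler uses to specify the output path
);
shaders.add("triangle_vert", "shaders/triangle.vert", .{});
shaders.add("triangle_frag", "shaders/triangle.frag", .{ .args = &.{"-O"} });
// The compiled SPIR-V is exposed as `pub const triangle_vert` / `triangle_frag`
// in this module, already aligned for u32 access.
exe.root_module.addImport("shaders", shaders.getModule());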
src/id_render.zig (new file, 239 lines)
@@ -0,0 +1,239 @@
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;

pub fn isZigPrimitiveType(name: []const u8) bool {
    if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) {
        for (name[1..]) |c| {
            switch (c) {
                '0'...'9' => {},
                else => break,
            }
        } else return true;
    }

    const primitives = [_][]const u8{
        "void",
        "comptime_float",
        "comptime_int",
        "bool",
        "isize",
        "usize",
        "f16",
        "f32",
        "f64",
        "f128",
        "noreturn",
        "type",
        "anyerror",
        "c_short",
        "c_ushort",
        "c_int",
        "c_uint",
        "c_long",
        "c_ulong",
        "c_longlong",
        "c_ulonglong",
        "c_longdouble",
        // Removed in stage 2 in https://github.com/ziglang/zig/commit/05cf44933d753f7a5a53ab289ea60fd43761de57,
        // but these are still invalid identifiers in stage 1.
        "undefined",
        "true",
        "false",
        "null",
    };

    for (primitives) |reserved| {
        if (mem.eql(u8, reserved, name)) {
            return true;
        }
    }

    return false;
}

pub fn writeIdentifier(writer: anytype, id: []const u8) !void {
    // https://github.com/ziglang/zig/issues/2897
    if (isZigPrimitiveType(id)) {
        try writer.print("@\"{}\"", .{std.zig.fmtEscapes(id)});
    } else {
        try writer.print("{}", .{std.zig.fmtId(id)});
    }
}

pub const CaseStyle = enum {
    snake,
    screaming_snake,
    title,
    camel,
};

pub const SegmentIterator = struct {
    text: []const u8,
    offset: usize,

    pub fn init(text: []const u8) SegmentIterator {
        return .{
            .text = text,
            .offset = 0,
        };
    }

    fn nextBoundary(self: SegmentIterator) usize {
        var i = self.offset + 1;

        while (true) {
            if (i == self.text.len or self.text[i] == '_') {
                return i;
            }

            const prev_lower = std.ascii.isLower(self.text[i - 1]);
            const next_lower = std.ascii.isLower(self.text[i]);

            if (prev_lower and !next_lower) {
                return i;
            } else if (i != self.offset + 1 and !prev_lower and next_lower) {
                return i - 1;
            }

            i += 1;
        }
    }

    pub fn next(self: *SegmentIterator) ?[]const u8 {
        while (self.offset < self.text.len and self.text[self.offset] == '_') {
            self.offset += 1;
        }

        if (self.offset == self.text.len) {
            return null;
        }

        const end = self.nextBoundary();
        const word = self.text[self.offset..end];
        self.offset = end;
        return word;
    }

    pub fn rest(self: SegmentIterator) []const u8 {
        if (self.offset >= self.text.len) {
            return &[_]u8{};
        } else {
            return self.text[self.offset..];
        }
    }
};

pub const IdRenderer = struct {
    tags: []const []const u8,
    text_cache: std.ArrayList(u8),

    pub fn init(allocator: Allocator, tags: []const []const u8) IdRenderer {
        return .{
            .tags = tags,
            .text_cache = std.ArrayList(u8).init(allocator),
        };
    }

    pub fn deinit(self: IdRenderer) void {
        self.text_cache.deinit();
    }

    fn renderSnake(self: *IdRenderer, screaming: bool, id: []const u8, tag: ?[]const u8) !void {
        var it = SegmentIterator.init(id);
        var first = true;

        while (it.next()) |segment| {
            if (first) {
                first = false;
            } else {
                try self.text_cache.append('_');
            }

            for (segment) |c| {
                try self.text_cache.append(if (screaming) std.ascii.toUpper(c) else std.ascii.toLower(c));
            }
        }

        if (tag) |name| {
            try self.text_cache.append('_');

            for (name) |c| {
                try self.text_cache.append(if (screaming) std.ascii.toUpper(c) else std.ascii.toLower(c));
            }
        }
    }

    fn renderCamel(self: *IdRenderer, title: bool, id: []const u8, tag: ?[]const u8) !void {
        var it = SegmentIterator.init(id);
        var lower_first = !title;

        while (it.next()) |segment| {
            var i: usize = 0;
            while (i < segment.len and std.ascii.isDigit(segment[i])) {
                try self.text_cache.append(segment[i]);
                i += 1;
            }

            if (i == segment.len) {
                continue;
            }

            if (i == 0 and lower_first) {
                try self.text_cache.append(std.ascii.toLower(segment[i]));
            } else {
                try self.text_cache.append(std.ascii.toUpper(segment[i]));
            }
            lower_first = false;

            for (segment[i + 1 ..]) |c| {
                try self.text_cache.append(std.ascii.toLower(c));
            }
        }

        if (tag) |name| {
            try self.text_cache.appendSlice(name);
        }
    }

    pub fn renderFmt(self: *IdRenderer, out: anytype, comptime fmt: []const u8, args: anytype) !void {
        self.text_cache.items.len = 0;
        try std.fmt.format(self.text_cache.writer(), fmt, args);
        try writeIdentifier(out, self.text_cache.items);
    }

    pub fn renderWithCase(self: *IdRenderer, out: anytype, case_style: CaseStyle, id: []const u8) !void {
        const tag = self.getAuthorTag(id);
        // The trailing underscore doesn't need to be removed here as it's removed by the SegmentIterator.
        const adjusted_id = if (tag) |name| id[0 .. id.len - name.len] else id;

        self.text_cache.items.len = 0;

        switch (case_style) {
            .snake => try self.renderSnake(false, adjusted_id, tag),
            .screaming_snake => try self.renderSnake(true, adjusted_id, tag),
            .title => try self.renderCamel(true, adjusted_id, tag),
            .camel => try self.renderCamel(false, adjusted_id, tag),
        }

        try writeIdentifier(out, self.text_cache.items);
    }

    pub fn getAuthorTag(self: IdRenderer, id: []const u8) ?[]const u8 {
        for (self.tags) |tag| {
            if (mem.endsWith(u8, id, tag)) {
                return tag;
            }
        }

        return null;
    }

    pub fn stripAuthorTag(self: IdRenderer, id: []const u8) []const u8 {
        if (self.getAuthorTag(id)) |tag| {
            return mem.trimRight(u8, id[0 .. id.len - tag.len], "_");
        }

        return id;
    }
};
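As a quick illustration of how SegmentIterator, the case renderers, and author tags compose, a sketch of a test (the identifier and tag list are example inputs, not part of this commit):

test "IdRenderer: snake case with author tag" {
    var renderer = IdRenderer.init(std.testing.allocator, &.{"KHR"});
    defer renderer.deinit();

    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();

    // "VkSurfaceFormatKHR" splits into the segments Vk/Surface/Format plus the
    // KHR author tag, so snake case renders as "vk_surface_format_khr".
    try renderer.renderWithCase(buf.writer(), .snake, "VkSurfaceFormatKHR");
    try std.testing.expectEqualStrings("vk_surface_format_khr", buf.items);
}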
src/index.zig (new file, 8 lines)
@@ -0,0 +1,8 @@
pub const generateVk = @import("vulkan/generator.zig").generate;
pub const VkGenerateStep = @import("vulkan/build_integration.zig").GenerateStep;
pub const ShaderCompileStep = @import("build_integration.zig").ShaderCompileStep;

test "main" {
    _ = @import("xml.zig");
    _ = @import("vulkan/c_parse.zig");
}
src/main.zig (new file, 129 lines)
@@ -0,0 +1,129 @@
const std = @import("std");
const generator = @import("vulkan/generator.zig");

fn invalidUsage(prog_name: []const u8, comptime fmt: []const u8, args: anytype) noreturn {
    std.log.err(fmt, args);
    std.log.err("see {s} --help for usage", .{prog_name});
    std.process.exit(1);
}

pub fn main() void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    var args = std.process.argsWithAllocator(allocator) catch |err| switch (err) {
        error.OutOfMemory => @panic("OOM"),
    };
    const prog_name = args.next() orelse "vulkan-zig-generator";

    var maybe_xml_path: ?[]const u8 = null;
    var maybe_out_path: ?[]const u8 = null;
    var api = generator.Api.vulkan;

    while (args.next()) |arg| {
        if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) {
            @setEvalBranchQuota(2000);
            std.io.getStdOut().writer().print(
                \\Utility to generate a Zig binding from the Vulkan XML API registry.
                \\
                \\The most recent Vulkan XML API registry can be obtained from
                \\https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml/vk.xml,
                \\and the most recent LunarG Vulkan SDK version can be found at
                \\$VULKAN_SDK/x86_64/share/vulkan/registry/vk.xml.
                \\
                \\Usage: {s} [options] <spec xml path> <output zig source>
                \\Options:
                \\-h --help       show this message and exit.
                \\-a --api <api>  Generate API for 'vulkan' or 'vulkansc'. Defaults to 'vulkan'.
                \\
            ,
                .{prog_name},
            ) catch |err| {
                std.log.err("failed to write to stdout: {s}", .{@errorName(err)});
                std.process.exit(1);
            };
            return;
        } else if (std.mem.eql(u8, arg, "-a") or std.mem.eql(u8, arg, "--api")) {
            const api_str = args.next() orelse {
                invalidUsage(prog_name, "{s} expects argument <api>", .{arg});
            };
            api = std.meta.stringToEnum(generator.Api, api_str) orelse {
                invalidUsage(prog_name, "invalid api '{s}'", .{api_str});
            };
        } else if (maybe_xml_path == null) {
            maybe_xml_path = arg;
        } else if (maybe_out_path == null) {
            maybe_out_path = arg;
        } else {
            invalidUsage(prog_name, "superfluous argument '{s}'", .{arg});
        }
    }

    const xml_path = maybe_xml_path orelse {
        invalidUsage(prog_name, "missing required argument <spec xml path>", .{});
    };

    const out_path = maybe_out_path orelse {
        invalidUsage(prog_name, "missing required argument <output zig source>", .{});
    };

    const cwd = std.fs.cwd();
    const xml_src = cwd.readFileAlloc(allocator, xml_path, std.math.maxInt(usize)) catch |err| {
        std.log.err("failed to open input file '{s}' ({s})", .{ xml_path, @errorName(err) });
        std.process.exit(1);
    };

    var out_buffer = std.ArrayList(u8).init(allocator);
    generator.generate(allocator, api, xml_src, out_buffer.writer()) catch |err| switch (err) {
        error.InvalidXml => {
            std.log.err("invalid vulkan registry - invalid xml", .{});
            std.log.err("please check that the correct vk.xml file is passed", .{});
            std.process.exit(1);
        },
        error.InvalidRegistry => {
            std.log.err("invalid vulkan registry - registry is valid xml but contents are invalid", .{});
            std.log.err("please check that the correct vk.xml file is passed", .{});
            std.process.exit(1);
        },
        error.UnhandledBitfieldStruct => {
            std.log.err("unhandled struct with bit fields detected in vk.xml", .{});
            std.log.err("this is a bug in vulkan-zig", .{});
            std.log.err("please make a bug report at https://github.com/Snektron/vulkan-zig/issues/", .{});
            std.process.exit(1);
        },
        error.OutOfMemory => @panic("oom"),
    };

    out_buffer.append(0) catch @panic("oom");

    const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
    const tree = std.zig.Ast.parse(allocator, src, .zig) catch |err| switch (err) {
        error.OutOfMemory => @panic("oom"),
    };

    if (tree.errors.len > 0) {
        std.log.err("generated invalid zig code", .{});
        std.log.err("this is a bug in vulkan-zig", .{});
        std.log.err("please make a bug report at https://github.com/Snektron/vulkan-zig/issues/", .{});

        std.process.exit(1);
    }

    const formatted = tree.render(allocator) catch |err| switch (err) {
        error.OutOfMemory => @panic("oom"),
    };
    defer allocator.free(formatted);

    if (std.fs.path.dirname(out_path)) |dir| {
        cwd.makePath(dir) catch |err| {
            std.log.err("failed to create output directory '{s}' ({s})", .{ dir, @errorName(err) });
            std.process.exit(1);
        };
    }

    cwd.writeFile(out_path, formatted) catch |err| {
        std.log.err("failed to write to output file '{s}' ({s})", .{ out_path, @errorName(err) });
        std.process.exit(1);
    };
}
src/vulkan/build_integration.zig (new file, 202 lines)
@@ -0,0 +1,202 @@
const std = @import("std");
const generator = @import("generator.zig");
const Build = std.Build;

/// build.zig integration for Vulkan binding generation. This step can be used to generate
/// Vulkan bindings at compile time from vk.xml, by providing the path to vk.xml. The final
/// module can then be obtained with `getModule()`, the result of which can be added to the
/// project using `std.Build.addModule`.
pub const GenerateStep = struct {
    step: Build.Step,
    generated_file: Build.GeneratedFile,
    /// The path to vk.xml
    spec_path: []const u8,
    /// The API to generate for.
    /// Defaults to Vulkan.
    // Note: VulkanSC is experimental.
    api: generator.Api = .vulkan,

    /// Initialize a Vulkan generation step for `builder`. `spec_path` is the path to
    /// vk.xml, relative to the project root. The generated bindings are placed in the
    /// zig-cache directory.
    pub fn create(builder: *Build, spec_path: []const u8) *GenerateStep {
        const self = builder.allocator.create(GenerateStep) catch unreachable;
        self.* = .{
            .step = Build.Step.init(.{
                .id = .custom,
                .name = "vulkan-generate",
                .owner = builder,
                .makeFn = make,
            }),
            .generated_file = .{
                .step = &self.step,
            },
            .spec_path = spec_path,
        };
        return self;
    }

    /// Initialize a Vulkan generation step for `builder`, by extracting vk.xml from the LunarG installation
    /// root. Typically, the location of the LunarG SDK root can be retrieved by querying for the VULKAN_SDK
    /// environment variable, set by activating the environment setup script located in the SDK root.
    /// `builder` is used in the same manner as `create`.
    pub fn createFromSdk(builder: *Build, sdk_path: []const u8) *GenerateStep {
        const spec_path = std.fs.path.join(
            builder.allocator,
            &[_][]const u8{ sdk_path, "share/vulkan/registry/vk.xml" },
        ) catch unreachable;

        return create(builder, spec_path);
    }

    /// Set the API to generate for.
    pub fn setApi(self: *GenerateStep, api_to_generate: generator.Api) void {
        self.api = api_to_generate;
    }

    /// Returns the module with the generated bindings.
    pub fn getModule(self: *GenerateStep) *Build.Module {
        return self.step.owner.createModule(.{
            .root_source_file = self.getSource(),
        });
    }

    /// Returns the file source for the generated bindings.
    pub fn getSource(self: *GenerateStep) Build.LazyPath {
        return .{ .generated = &self.generated_file };
    }

    /// Internal build function. This reads `vk.xml`, and passes it to `generate`, which then generates
    /// the final bindings. The resulting generated bindings are not formatted, which is why an ArrayList
    /// writer is passed instead of a file writer. The output is then brought into standard formatting
    /// by parsing it with `std.zig.Ast.parse` and rendering it with `Ast.render`.
    fn make(step: *Build.Step, progress: *std.Progress.Node) !void {
        _ = progress;

        const b = step.owner;
        const self: *GenerateStep = @fieldParentPtr("step", step);
        const cwd = std.fs.cwd();

        var man = b.graph.cache.obtain();
        defer man.deinit();

        const spec = try cwd.readFileAlloc(b.allocator, self.spec_path, std.math.maxInt(usize));
        // TODO: Look into whether this is the right way to be doing
        // this - maybe the file-level caching API has some benefits I
        // don't understand.
        man.hash.addBytes(spec);

        const already_exists = try step.cacheHit(&man);
        const digest = man.final();
        const output_file_path = try b.cache_root.join(b.allocator, &.{ "o", &digest, "vk.zig" });
        if (already_exists) {
            self.generated_file.path = output_file_path;
            return;
        }

        var out_buffer = std.ArrayList(u8).init(b.allocator);
        generator.generate(b.allocator, self.api, spec, out_buffer.writer()) catch |err| switch (err) {
            error.InvalidXml => {
                std.log.err("invalid vulkan registry - invalid xml", .{});
                std.log.err("please check that the correct vk.xml file is passed", .{});
                return err;
            },
            error.InvalidRegistry => {
                std.log.err("invalid vulkan registry - registry is valid xml but contents are invalid", .{});
                std.log.err("please check that the correct vk.xml file is passed", .{});
                return err;
            },
            error.UnhandledBitfieldStruct => {
                std.log.err("unhandled struct with bit fields detected in vk.xml", .{});
                std.log.err("this is a bug in vulkan-zig", .{});
                std.log.err("please make a bug report at https://github.com/Snektron/vulkan-zig/issues/", .{});
                return err;
            },
            error.OutOfMemory => return error.OutOfMemory,
        };
        try out_buffer.append(0);

        const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
        const tree = try std.zig.Ast.parse(b.allocator, src, .zig);

        if (tree.errors.len > 0) {
            var start: usize = undefined;
            var end: usize = undefined;
            var index: usize = undefined;
            var repeat: usize = undefined;
            var spaces: []const u8 = undefined;
            var carets: []const u8 = undefined;
            var current_token: std.zig.Token = undefined;

            std.debug.print("{s}\n", .{
                src,
            });

            var tokens = try std.ArrayList(std.zig.Ast.Error).initCapacity(b.allocator, tree.errors.len);
            try tokens.appendSlice(tree.errors);

            std.mem.sort(std.zig.Ast.Error, tokens.items, {}, struct {
                pub fn desc(_: void, l_err: std.zig.Ast.Error, r_err: std.zig.Ast.Error) bool {
                    return l_err.token > r_err.token;
                }
            }.desc);

            var iterator = std.zig.Tokenizer.init(src);

            index = 1;
            current_token = iterator.next();

            while (current_token.tag != std.zig.Token.Tag.eof and tokens.items.len > 0) {
                if (tokens.items[tokens.items.len - 1].token == index) {
                    start = std.mem.lastIndexOf(u8, src[0..current_token.loc.start], "\n") orelse 0;
                    end = (std.mem.indexOf(u8, src[current_token.loc.end..], "\n") orelse src.len - current_token.loc.end) + current_token.loc.end;

                    repeat = 1;
                    spaces = "";
                    while (repeat < current_token.loc.start - start) {
                        spaces = try std.fmt.allocPrint(b.allocator, "{s} ", .{
                            spaces,
                        });
                        repeat += 1;
                    }

                    repeat = 1;
                    carets = "";
                    while (repeat < current_token.loc.end + 1 - current_token.loc.start) {
                        carets = try std.fmt.allocPrint(b.allocator, "{s}^", .{
                            carets,
                        });
                        repeat += 1;
                    }

                    std.debug.print("ERROR: {}\nTOKEN: {}\n\n{s}\n{s}{s}\n", .{
                        tokens.items[tokens.items.len - 1],
                        current_token,
                        if (src[start] == '\n') src[start + 1 .. end] else src[start..end],
                        spaces,
                        carets,
                    });

                    _ = tokens.pop();
                }

                current_token = iterator.next();
                index += 1;
            }
        }

        std.debug.assert(tree.errors.len == 0); // If this triggers, vulkan-zig produced invalid code.

        const formatted = try tree.render(b.allocator);

        const output_dir_path = std.fs.path.dirname(output_file_path).?;
        cwd.makePath(output_dir_path) catch |err| {
            std.debug.print("unable to make path {s}: {s}\n", .{ output_dir_path, @errorName(err) });
            return err;
        };

        try cwd.writeFile(output_file_path, formatted);
        self.generated_file.path = output_file_path;
        try step.writeManifest(&man);
    }
};
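Again for orientation, a minimal build.zig usage sketch; `vkgen`, `b`, `exe`, and the "vulkan" import name are illustrative assumptions, not part of this commit:

// Hypothetical build.zig snippet.
const gen = vkgen.VkGenerateStep.create(b, "vk.xml");
// Alternatively, resolve vk.xml from a LunarG SDK root:
// const gen = vkgen.VkGenerateStep.createFromSdk(b, sdk_path);
exe.root_module.addImport("vulkan", gen.getModule());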
src/vulkan/c_parse.zig (new file, 648 lines)
@@ -0,0 +1,648 @@
const std = @import("std");
const registry = @import("registry.zig");
const xml = @import("../xml.zig");
const mem = std.mem;
const Allocator = mem.Allocator;
const testing = std.testing;
const ArraySize = registry.Array.ArraySize;
const TypeInfo = registry.TypeInfo;

pub const Token = struct {
    kind: Kind,
    text: []const u8,

    const Kind = enum {
        id, // Any id that's not a keyword
        name, // Vulkan <name>...</name>
        type_name, // Vulkan <type>...</type>
        enum_name, // Vulkan <enum>...</enum>
        int,
        star,
        comma,
        semicolon,
        colon,
        minus,
        tilde,
        dot,
        hash,
        lparen,
        rparen,
        lbracket,
        rbracket,
        kw_typedef,
        kw_const,
        kw_vkapi_ptr,
        kw_struct,
    };
};

pub const CTokenizer = struct {
    source: []const u8,
    offset: usize = 0,
    in_comment: bool = false,

    fn peek(self: CTokenizer) ?u8 {
        return if (self.offset < self.source.len) self.source[self.offset] else null;
    }

    fn consumeNoEof(self: *CTokenizer) u8 {
        const c = self.peek().?;
        self.offset += 1;
        return c;
    }

    fn consume(self: *CTokenizer) !u8 {
        return if (self.offset < self.source.len)
            return self.consumeNoEof()
        else
            return error.UnexpectedEof;
    }

    fn keyword(self: *CTokenizer) Token {
        const start = self.offset;
        _ = self.consumeNoEof();

        while (true) {
            const c = self.peek() orelse break;
            switch (c) {
                'A'...'Z', 'a'...'z', '_', '0'...'9' => _ = self.consumeNoEof(),
                else => break,
            }
        }

        const token_text = self.source[start..self.offset];

        const kind = if (mem.eql(u8, token_text, "typedef"))
            Token.Kind.kw_typedef
        else if (mem.eql(u8, token_text, "const"))
            Token.Kind.kw_const
        else if (mem.eql(u8, token_text, "VKAPI_PTR"))
            Token.Kind.kw_vkapi_ptr
        else if (mem.eql(u8, token_text, "struct"))
            Token.Kind.kw_struct
        else
            Token.Kind.id;

        return .{ .kind = kind, .text = token_text };
    }

    fn int(self: *CTokenizer) Token {
        const start = self.offset;
        _ = self.consumeNoEof();

        while (true) {
            const c = self.peek() orelse break;
            switch (c) {
                '0'...'9' => _ = self.consumeNoEof(),
                else => break,
            }
        }

        return .{
            .kind = .int,
            .text = self.source[start..self.offset],
        };
    }

    fn skipws(self: *CTokenizer) void {
        while (true) {
            switch (self.peek() orelse break) {
                ' ', '\t', '\n', '\r' => _ = self.consumeNoEof(),
                else => break,
            }
        }
    }

    pub fn next(self: *CTokenizer) !?Token {
        self.skipws();
        if (mem.startsWith(u8, self.source[self.offset..], "//") or self.in_comment) {
            const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse {
                self.offset = self.source.len;
                self.in_comment = true;
                return null;
            };
            self.in_comment = false;
            self.offset = end + 1;
        }
        self.skipws();

        const c = self.peek() orelse return null;
        var kind: Token.Kind = undefined;
        switch (c) {
            'A'...'Z', 'a'...'z', '_' => return self.keyword(),
            '0'...'9' => return self.int(),
            '*' => kind = .star,
            ',' => kind = .comma,
            ';' => kind = .semicolon,
            ':' => kind = .colon,
            '-' => kind = .minus,
            '~' => kind = .tilde,
            '.' => kind = .dot,
            '#' => kind = .hash,
            '[' => kind = .lbracket,
            ']' => kind = .rbracket,
            '(' => kind = .lparen,
            ')' => kind = .rparen,
            else => return error.UnexpectedCharacter,
        }

        const start = self.offset;
        _ = self.consumeNoEof();
        return Token{ .kind = kind, .text = self.source[start..self.offset] };
    }
};

pub const XmlCTokenizer = struct {
    it: xml.Element.ChildIterator,
    ctok: ?CTokenizer = null,
    current: ?Token = null,

    pub fn init(elem: *xml.Element) XmlCTokenizer {
        return .{
            .it = elem.iterator(),
        };
    }

    fn elemToToken(elem: *xml.Element) !?Token {
        if (elem.children.len != 1 or elem.children[0] != .char_data) {
            return error.InvalidXml;
        }

        const text = elem.children[0].char_data;
        if (mem.eql(u8, elem.tag, "type")) {
            return Token{ .kind = .type_name, .text = text };
        } else if (mem.eql(u8, elem.tag, "enum")) {
            return Token{ .kind = .enum_name, .text = text };
        } else if (mem.eql(u8, elem.tag, "name")) {
            return Token{ .kind = .name, .text = text };
        } else if (mem.eql(u8, elem.tag, "comment")) {
            return null;
        } else {
            return error.InvalidTag;
        }
    }

    fn next(self: *XmlCTokenizer) !?Token {
        if (self.current) |current| {
            const token = current;
            self.current = null;
            return token;
        }

        var in_comment: bool = false;

        while (true) {
            if (self.ctok) |*ctok| {
                if (try ctok.next()) |tok| {
                    return tok;
                }
                in_comment = ctok.in_comment;
            }

            self.ctok = null;

            if (self.it.next()) |child| {
                switch (child.*) {
                    .char_data => |cdata| self.ctok = CTokenizer{ .source = cdata, .in_comment = in_comment },
                    .comment => {}, // xml comment
                    .element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
                }
            } else {
                return null;
            }
        }
    }

    fn nextNoEof(self: *XmlCTokenizer) !Token {
        return (try self.next()) orelse return error.UnexpectedEof;
    }

    fn peek(self: *XmlCTokenizer) !?Token {
        if (self.current) |current| {
            return current;
        }

        self.current = try self.next();
        return self.current;
    }

    fn peekNoEof(self: *XmlCTokenizer) !Token {
        return (try self.peek()) orelse return error.UnexpectedEof;
    }

    fn expect(self: *XmlCTokenizer, kind: Token.Kind) !Token {
        const tok = (try self.next()) orelse return error.UnexpectedEof;
        if (tok.kind != kind) {
            return error.UnexpectedToken;
        }

        return tok;
    }
};

// TYPEDEF = kw_typedef DECLARATION ';'
pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
    _ = try xctok.expect(.kw_typedef);
    const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
    _ = try xctok.expect(.semicolon);
    if (try xctok.peek()) |_| {
        return error.InvalidSyntax;
    }

    return registry.Declaration{
        .name = decl.name orelse return error.MissingTypeIdentifier,
        .decl_type = .{ .typedef = decl.decl_type },
    };
}

// MEMBER = DECLARATION (':' int)?
pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Container.Field {
    const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
    var field = registry.Container.Field{
        .name = decl.name orelse return error.MissingTypeIdentifier,
        .field_type = decl.decl_type,
        .bits = null,
        .is_buffer_len = false,
        .is_optional = false,
    };

    if (try xctok.peek()) |tok| {
        if (tok.kind != .colon) {
            return error.InvalidSyntax;
        }

        _ = try xctok.nextNoEof();
        const bits = try xctok.expect(.int);
        field.bits = try std.fmt.parseInt(usize, bits.text, 10);

        // Assume for now that there won't be any invalid C types like `char char* x : 4`.

        if (try xctok.peek()) |_| {
            return error.InvalidSyntax;
        }
    }

    return field;
}

pub fn parseParamOrProto(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
    var decl = try parseDeclaration(allocator, xctok, ptrs_optional);
    if (try xctok.peek()) |_| {
        return error.InvalidSyntax;
    }

    // Decay pointers
    switch (decl.decl_type) {
        .array => {
            const child = try allocator.create(TypeInfo);
            child.* = decl.decl_type;

            decl.decl_type = .{
                .pointer = .{
                    .is_const = decl.is_const,
                    .is_optional = false,
                    .size = .one,
                    .child = child,
                },
            };
        },
        else => {},
    }

    return registry.Declaration{
        .name = decl.name orelse return error.MissingTypeIdentifier,
        .decl_type = .{ .typedef = decl.decl_type },
    };
}

pub const Declaration = struct {
    name: ?[]const u8, // Parameter names may be optional, especially in case of func(void)
    decl_type: TypeInfo,
    is_const: bool,
};

pub const ParseError = error{
    OutOfMemory,
    InvalidSyntax,
    InvalidTag,
    InvalidXml,
    Overflow,
    UnexpectedEof,
    UnexpectedCharacter,
    UnexpectedToken,
    MissingTypeIdentifier,
};

// DECLARATION = kw_const? type_name DECLARATOR
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
//            | POINTERS '(' FNPTRSUFFIX
fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) ParseError!Declaration {
    // Parse declaration constness
    var tok = try xctok.nextNoEof();
    const inner_is_const = tok.kind == .kw_const;
    if (inner_is_const) {
        tok = try xctok.nextNoEof();
    }

    if (tok.kind == .kw_struct) {
        tok = try xctok.nextNoEof();
    }
    // Parse type name
    if (tok.kind != .type_name and tok.kind != .id) return error.InvalidSyntax;
    const type_name = tok.text;

    var type_info = TypeInfo{ .name = type_name };

    // Parse pointers
    type_info = try parsePointers(allocator, xctok, inner_is_const, type_info, ptrs_optional);

    // Parse name / fn ptr

    if (try parseFnPtrSuffix(allocator, xctok, type_info, ptrs_optional)) |decl| {
        return Declaration{
            .name = decl.name,
            .decl_type = decl.decl_type,
            .is_const = inner_is_const,
        };
    }

    const name = blk: {
        const name_tok = (try xctok.peek()) orelse break :blk null;
        if (name_tok.kind == .id or name_tok.kind == .name) {
            _ = try xctok.nextNoEof();
            break :blk name_tok.text;
        } else {
            break :blk null;
        }
    };

    var inner_type = &type_info;
    while (try parseArrayDeclarator(xctok)) |array_size| {
        // Move the current inner type to a new node on the heap
        const child = try allocator.create(TypeInfo);
        child.* = inner_type.*;

        // Re-assign the previous inner type for the array type info node
        inner_type.* = .{
            .array = .{
                .size = array_size,
                .valid_size = .all, // Refined later
                .is_optional = true,
                .child = child,
            },
        };

        // update the inner_type pointer so it points to the proper
        // inner type again
        inner_type = child;
    }

    return Declaration{
        .name = name,
        .decl_type = type_info,
        .is_const = inner_is_const,
    };
}

// FNPTRSUFFIX = kw_vkapi_ptr '*' name ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo, ptrs_optional: bool) !?Declaration {
    const lparen = try xctok.peek();
    if (lparen == null or lparen.?.kind != .lparen) {
        return null;
    }
    _ = try xctok.nextNoEof();
    _ = try xctok.expect(.kw_vkapi_ptr);
    _ = try xctok.expect(.star);
    const name = try xctok.expect(.name);
    _ = try xctok.expect(.rparen);
    _ = try xctok.expect(.lparen);

    const return_type_heap = try allocator.create(TypeInfo);
    return_type_heap.* = return_type;

    var command_ptr = Declaration{
        .name = name.text,
        .decl_type = .{
            .command_ptr = .{
                .params = &[_]registry.Command.Param{},
                .return_type = return_type_heap,
                .success_codes = &[_][]const u8{},
                .error_codes = &[_][]const u8{},
            },
        },
        .is_const = false,
    };

    const first_param = try parseDeclaration(allocator, xctok, ptrs_optional);
    if (first_param.name == null) {
        if (first_param.decl_type != .name or !mem.eql(u8, first_param.decl_type.name, "void")) {
            return error.InvalidSyntax;
        }

        _ = try xctok.expect(.rparen);
        return command_ptr;
    }

    // There is no good way to estimate the number of parameters beforehand.
    // Fortunately, there are usually a relatively low number of parameters to a function pointer,
    // so an ArrayList backed by an arena allocator is good enough.
    var params = std.ArrayList(registry.Command.Param).init(allocator);
    try params.append(.{
        .name = first_param.name.?,
        .param_type = first_param.decl_type,
        .is_buffer_len = false,
        .is_optional = false,
    });

    while (true) {
        switch ((try xctok.peekNoEof()).kind) {
            .rparen => break,
            .comma => _ = try xctok.nextNoEof(),
            else => return error.InvalidSyntax,
        }

        const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
        try params.append(.{
            .name = decl.name orelse return error.MissingTypeIdentifier,
            .param_type = decl.decl_type,
            .is_buffer_len = false,
            .is_optional = false,
        });
    }

    _ = try xctok.nextNoEof();
    command_ptr.decl_type.command_ptr.params = try params.toOwnedSlice();
    return command_ptr;
}

// POINTERS = (kw_const? '*')*
fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo, ptrs_optional: bool) !TypeInfo {
    var type_info = inner;
    var first_const = inner_const;

    while (true) {
        var tok = (try xctok.peek()) orelse return type_info;
        var is_const = first_const;
        first_const = false;

        if (tok.kind == .kw_const) {
            is_const = true;
            _ = try xctok.nextNoEof();
            tok = (try xctok.peek()) orelse return type_info;
        }

        if (tok.kind != .star) {
            // if `is_const` is true at this point, there was a trailing const,
            // and the declaration itself is const.
            return type_info;
        }

        _ = try xctok.nextNoEof();

        const child = try allocator.create(TypeInfo);
        child.* = type_info;

        type_info = .{
            .pointer = .{
                .is_const = is_const or first_const,
                .is_optional = ptrs_optional, // set elsewhere
                .size = .one, // set elsewhere
                .child = child,
            },
        };
    }
}

// ARRAY_DECLARATOR = '[' (int | enum_name) ']'
fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
    const lbracket = try xctok.peek();
    if (lbracket == null or lbracket.?.kind != .lbracket) {
        return null;
    }

    _ = try xctok.nextNoEof();

    const size_tok = try xctok.nextNoEof();
    const size: ArraySize = switch (size_tok.kind) {
        .int => .{
            .int = std.fmt.parseInt(usize, size_tok.text, 10) catch |err| switch (err) {
                error.Overflow => return error.Overflow,
                error.InvalidCharacter => unreachable,
            },
        },
        .enum_name => .{ .alias = size_tok.text },
        else => return error.InvalidSyntax,
    };

    _ = try xctok.expect(.rbracket);
    return size;
}

pub fn parseVersion(xctok: *XmlCTokenizer) ![4][]const u8 {
    _ = try xctok.expect(.hash);
    const define = try xctok.expect(.id);
    if (!mem.eql(u8, define.text, "define")) {
        return error.InvalidVersion;
    }

    _ = try xctok.expect(.name);
    const vk_make_version = try xctok.expect(.type_name);
    if (!mem.eql(u8, vk_make_version.text, "VK_MAKE_API_VERSION")) {
        return error.NotVersion;
    }

    _ = try xctok.expect(.lparen);
    var version: [4][]const u8 = undefined;
    for (&version, 0..) |*part, i| {
        if (i != 0) {
            _ = try xctok.expect(.comma);
        }

        const tok = try xctok.nextNoEof();
        switch (tok.kind) {
            .id, .int => part.* = tok.text,
            else => return error.UnexpectedToken,
        }
    }
    _ = try xctok.expect(.rparen);
    return version;
}

fn testTokenizer(tokenizer: anytype, expected_tokens: []const Token) !void {
    for (expected_tokens) |expected| {
        const tok = (tokenizer.next() catch unreachable).?;
        try testing.expectEqual(expected.kind, tok.kind);
        try testing.expectEqualSlices(u8, expected.text, tok.text);
    }

    if (tokenizer.next() catch unreachable) |_| unreachable;
}

test "CTokenizer" {
    var ctok = CTokenizer{ .source = "typedef ([const)]** VKAPI_PTR 123,;aaaa" };

    try testTokenizer(&ctok, &[_]Token{
        .{ .kind = .kw_typedef, .text = "typedef" },
        .{ .kind = .lparen, .text = "(" },
        .{ .kind = .lbracket, .text = "[" },
        .{ .kind = .kw_const, .text = "const" },
        .{ .kind = .rparen, .text = ")" },
        .{ .kind = .rbracket, .text = "]" },
        .{ .kind = .star, .text = "*" },
        .{ .kind = .star, .text = "*" },
        .{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
        .{ .kind = .int, .text = "123" },
        .{ .kind = .comma, .text = "," },
        .{ .kind = .semicolon, .text = ";" },
        .{ .kind = .id, .text = "aaaa" },
    });
}

test "XmlCTokenizer" {
    const document = try xml.parse(testing.allocator,
        \\<root>// comment <name>commented name</name> <type>commented type</type> trailing
        \\ typedef void (VKAPI_PTR *<name>PFN_vkVoidFunction</name>)(void);
        \\</root>
    );
    defer document.deinit();

    var xctok = XmlCTokenizer.init(document.root);

    try testTokenizer(&xctok, &[_]Token{
        .{ .kind = .kw_typedef, .text = "typedef" },
        .{ .kind = .id, .text = "void" },
        .{ .kind = .lparen, .text = "(" },
        .{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
        .{ .kind = .star, .text = "*" },
        .{ .kind = .name, .text = "PFN_vkVoidFunction" },
        .{ .kind = .rparen, .text = ")" },
        .{ .kind = .lparen, .text = "(" },
        .{ .kind = .id, .text = "void" },
        .{ .kind = .rparen, .text = ")" },
        .{ .kind = .semicolon, .text = ";" },
    });
}

test "parseTypedef" {
    const document = try xml.parse(testing.allocator,
        \\<root> // comment <name>commented name</name> trailing
        \\ typedef const struct <type>Python</type>* pythons[4];
        \\ // more comments
        \\</root>
        \\
    );
    defer document.deinit();

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    var xctok = XmlCTokenizer.init(document.root);
    const decl = try parseTypedef(arena.allocator(), &xctok, false);

    try testing.expectEqualSlices(u8, "pythons", decl.name);
    const array = decl.decl_type.typedef.array;
    try testing.expectEqual(ArraySize{ .int = 4 }, array.size);
    const ptr = array.child.pointer;
    try testing.expectEqual(true, ptr.is_const);
    try testing.expectEqualSlices(u8, "Python", ptr.child.name);
}
src/vulkan/generator.zig (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
const std = @import("std");
|
||||
const reg = @import("registry.zig");
|
||||
const xml = @import("../xml.zig");
|
||||
const renderRegistry = @import("render.zig").render;
|
||||
const parseXml = @import("parse.zig").parseXml;
|
||||
const IdRenderer = @import("../id_render.zig").IdRenderer;
|
||||
const mem = std.mem;
|
||||
const Allocator = mem.Allocator;
|
||||
const FeatureLevel = reg.FeatureLevel;
|
||||
|
||||
const EnumFieldMerger = struct {
|
||||
const EnumExtensionMap = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(reg.Enum.Field));
|
||||
const FieldSet = std.StringArrayHashMapUnmanaged(void);
|
||||
|
||||
arena: Allocator,
|
||||
registry: *reg.Registry,
|
||||
enum_extensions: EnumExtensionMap,
|
||||
field_set: FieldSet,
|
||||
|
||||
fn init(arena: Allocator, registry: *reg.Registry) EnumFieldMerger {
|
||||
return .{
|
||||
.arena = arena,
|
||||
.registry = registry,
|
||||
.enum_extensions = .{},
|
||||
.field_set = .{},
|
||||
};
|
||||
}
|
||||
|
||||
fn putEnumExtension(self: *EnumFieldMerger, enum_name: []const u8, field: reg.Enum.Field) !void {
|
||||
const res = try self.enum_extensions.getOrPut(self.arena, enum_name);
|
||||
if (!res.found_existing) {
|
||||
res.value_ptr.* = std.ArrayListUnmanaged(reg.Enum.Field){};
|
||||
}
|
||||
|
||||
try res.value_ptr.append(self.arena, field);
|
||||
}
|
||||
|
||||
fn addRequires(self: *EnumFieldMerger, reqs: []const reg.Require) !void {
|
||||
for (reqs) |req| {
|
||||
for (req.extends) |enum_ext| {
|
||||
try self.putEnumExtension(enum_ext.extends, enum_ext.field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn mergeEnumFields(self: *EnumFieldMerger, name: []const u8, base_enum: *reg.Enum) !void {
|
||||
// If there are no extensions for this enum, assume its valid.
|
||||
const extensions = self.enum_extensions.get(name) orelse return;
|
||||
|
||||
self.field_set.clearRetainingCapacity();
|
||||
|
||||
const n_fields_upper_bound = base_enum.fields.len + extensions.items.len;
|
||||
const new_fields = try self.arena.alloc(reg.Enum.Field, n_fields_upper_bound);
|
||||
var i: usize = 0;
|
||||
|
||||
for (base_enum.fields) |field| {
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Assume that if a field name clobbers, the value is the same
|
||||
for (extensions.items) |field| {
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Existing base_enum.fields was allocated by `self.arena`, so
|
||||
// it gets cleaned up whenever that is deinited.
|
||||
base_enum.fields = new_fields[0..i];
|
||||
}
|
||||
|
||||
fn merge(self: *EnumFieldMerger) !void {
|
||||
for (self.registry.features) |feature| {
|
||||
try self.addRequires(feature.requires);
|
||||
}
|
||||
|
||||
for (self.registry.extensions) |ext| {
|
||||
try self.addRequires(ext.requires);
|
||||
}
|
||||
|
||||
// Merge all the enum fields.
|
||||
// Assume that all keys of enum_extensions appear in `self.registry.decls`
|
||||
for (self.registry.decls) |*decl| {
|
||||
if (decl.decl_type == .enumeration) {
|
||||
try self.mergeEnumFields(decl.name, &decl.decl_type.enumeration);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Generator = struct {
|
||||
arena: std.heap.ArenaAllocator,
|
||||
registry: reg.Registry,
|
||||
id_renderer: IdRenderer,
|
||||
|
||||
fn init(allocator: Allocator, spec: *xml.Element, api: reg.Api) !Generator {
|
||||
const result = try parseXml(allocator, spec, api);
|
||||
|
||||
const tags = try allocator.alloc([]const u8, result.registry.tags.len);
|
||||
for (tags, result.registry.tags) |*tag, registry_tag| tag.* = registry_tag.name;
|
||||
|
||||
return Generator{
|
||||
.arena = result.arena,
|
||||
.registry = result.registry,
|
||||
.id_renderer = IdRenderer.init(allocator, tags),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: Generator) void {
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
fn stripFlagBits(self: Generator, name: []const u8) []const u8 {
|
||||
const tagless = self.id_renderer.stripAuthorTag(name);
|
||||
return tagless[0 .. tagless.len - "FlagBits".len];
|
||||
}
|
||||
|
||||
fn stripFlags(self: Generator, name: []const u8) []const u8 {
|
||||
const tagless = self.id_renderer.stripAuthorTag(name);
|
||||
return tagless[0 .. tagless.len - "Flags".len];
|
||||
}
|
||||
|
||||
// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
|
||||
fn mergeEnumFields(self: *Generator) !void {
|
||||
var merger = EnumFieldMerger.init(self.arena.allocator(), &self.registry);
|
||||
try merger.merge();
|
||||
}
|
||||
|
||||
// https://github.com/KhronosGroup/Vulkan-Docs/pull/1556
|
||||
fn fixupBitFlags(self: *Generator) !void {
|
||||
var seen_bits = std.StringArrayHashMap(void).init(self.arena.allocator());
|
||||
defer seen_bits.deinit();
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
const bitmask = switch (decl.decl_type) {
|
||||
.bitmask => |bm| bm,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (bitmask.bits_enum) |bits_enum| {
|
||||
try seen_bits.put(bits_enum, {});
|
||||
}
|
||||
}
|
||||
|
||||
var i: usize = 0;
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
switch (decl.decl_type) {
|
||||
.enumeration => |e| {
|
||||
if (e.is_bitmask and seen_bits.get(decl.name) == null)
|
||||
continue;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
self.registry.decls[i] = decl;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
self.registry.decls.len = i;
|
||||
}
|
||||
|
||||
fn render(self: *Generator, writer: anytype) !void {
|
||||
try renderRegistry(writer, self.arena.allocator(), &self.registry, &self.id_renderer);
|
||||
}
|
||||
};
|
||||
|
||||
/// The Vulkan registry contains the specification for multiple APIs: Vulkan and VulkanSC. This enum
/// describes the applicable APIs.
pub const Api = reg.Api;

/// Main function for generating the Vulkan bindings. vk.xml is to be provided via `spec_xml`,
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
/// internal data structures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
/// directly. `api` is the API to generate the bindings for, usually `.vulkan`.
pub fn generate(allocator: Allocator, api: Api, spec_xml: []const u8, writer: anytype) !void {
    const spec = xml.parse(allocator, spec_xml) catch |err| switch (err) {
        error.InvalidDocument,
        error.UnexpectedEof,
        error.UnexpectedCharacter,
        error.IllegalCharacter,
        error.InvalidEntity,
        error.InvalidName,
        error.InvalidStandaloneValue,
        error.NonMatchingClosingTag,
        error.UnclosedComment,
        error.UnclosedValue,
        => return error.InvalidXml,
        error.OutOfMemory => return error.OutOfMemory,
    };
    defer spec.deinit();

    var gen = Generator.init(allocator, spec.root, api) catch |err| switch (err) {
        error.InvalidXml,
        error.InvalidCharacter,
        error.Overflow,
        error.InvalidFeatureLevel,
        error.InvalidSyntax,
        error.InvalidTag,
        error.MissingTypeIdentifier,
        error.UnexpectedCharacter,
        error.UnexpectedEof,
        error.UnexpectedToken,
        error.InvalidRegistry,
        => return error.InvalidRegistry,
        error.OutOfMemory => return error.OutOfMemory,
    };
    defer gen.deinit();

    try gen.mergeEnumFields();
    try gen.fixupBitFlags();
    gen.render(writer) catch |err| switch (err) {
        error.InvalidApiConstant,
        error.InvalidConstantExpr,
        error.InvalidRegistry,
        error.UnexpectedCharacter,
        error.InvalidCharacter,
        error.Overflow,
        => return error.InvalidRegistry,
        else => |others| return others,
    };
}
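
// Editor's note - a usage sketch, not part of the upstream file. It assumes
// the caller has read vk.xml into memory; the file name and the ArrayList
// buffer are illustrative only.
//
//   const vk_xml = try std.fs.cwd().readFileAlloc(allocator, "vk.xml", std.math.maxInt(usize));
//   defer allocator.free(vk_xml);
//   var out = std.ArrayList(u8).init(allocator);
//   defer out.deinit();
//   try generate(allocator, .vulkan, vk_xml, out.writer());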
977
src/vulkan/parse.zig
Normal file
@@ -0,0 +1,977 @@
const std = @import("std");
const registry = @import("registry.zig");
const xml = @import("../xml.zig");
const cparse = @import("c_parse.zig");
const mem = std.mem;
const Allocator = mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;

const api_constants_name = "API Constants";

pub const ParseResult = struct {
    arena: ArenaAllocator,
    registry: registry.Registry,

    pub fn deinit(self: ParseResult) void {
        self.arena.deinit();
    }
};

pub fn parseXml(backing_allocator: Allocator, root: *xml.Element, api: registry.Api) !ParseResult {
    var arena = ArenaAllocator.init(backing_allocator);
    errdefer arena.deinit();

    const allocator = arena.allocator();

    const reg = registry.Registry{
        .decls = try parseDeclarations(allocator, root, api),
        .api_constants = try parseApiConstants(allocator, root, api),
        .tags = try parseTags(allocator, root),
        .features = try parseFeatures(allocator, root, api),
        .extensions = try parseExtensions(allocator, root, api),
    };

    return ParseResult{
        .arena = arena,
        .registry = reg,
    };
}

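// Editor's note - illustrative call pattern, not upstream code; it assumes
// `root` came from xml.parse over the vk.xml contents.
//
//   var result = try parseXml(allocator, root, .vulkan);
//   defer result.deinit();
//   std.log.debug("parsed {} declarations", .{result.registry.decls.len});
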
fn parseDeclarations(allocator: Allocator, root: *xml.Element, api: registry.Api) ![]registry.Declaration {
    const types_elem = root.findChildByTag("types") orelse return error.InvalidRegistry;
    const commands_elem = root.findChildByTag("commands") orelse return error.InvalidRegistry;

    const decl_upper_bound = types_elem.children.len + commands_elem.children.len;
    const decls = try allocator.alloc(registry.Declaration, decl_upper_bound);

    var count: usize = 0;
    count += try parseTypes(allocator, decls, types_elem, api);
    count += try parseEnums(allocator, decls[count..], root, api);
    count += try parseCommands(allocator, decls[count..], commands_elem, api);
    return decls[0..count];
}

fn parseTypes(allocator: Allocator, out: []registry.Declaration, types_elem: *xml.Element, api: registry.Api) !usize {
    var i: usize = 0;
    var it = types_elem.findChildrenByTag("type");
    while (it.next()) |ty| {
        out[i] = blk: {
            if (!requiredByApi(ty, api))
                continue;

            const category = ty.getAttribute("category") orelse {
                break :blk try parseForeigntype(ty);
            };

            if (mem.eql(u8, category, "bitmask")) {
                break :blk try parseBitmaskType(ty);
            } else if (mem.eql(u8, category, "handle")) {
                break :blk try parseHandleType(ty);
            } else if (mem.eql(u8, category, "basetype")) {
                break :blk try parseBaseType(allocator, ty);
            } else if (mem.eql(u8, category, "struct")) {
                break :blk try parseContainer(allocator, ty, false, api);
            } else if (mem.eql(u8, category, "union")) {
                break :blk try parseContainer(allocator, ty, true, api);
            } else if (mem.eql(u8, category, "funcpointer")) {
                break :blk try parseFuncPointer(allocator, ty);
            } else if (mem.eql(u8, category, "enum")) {
                break :blk (try parseEnumAlias(ty)) orelse continue;
            }

            continue;
        };

        i += 1;
    }

    return i;
}

fn parseForeigntype(ty: *xml.Element) !registry.Declaration {
    const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
    const depends = ty.getAttribute("requires") orelse if (mem.eql(u8, name, "int"))
        "vk_platform" // for some reason, int doesn't depend on vk_platform (but the other c types do)
    else
        return error.InvalidRegistry;

    return registry.Declaration{
        .name = name,
        .decl_type = .{ .foreign = .{ .depends = depends } },
    };
}

fn parseBitmaskType(ty: *xml.Element) !registry.Declaration {
    if (ty.getAttribute("name")) |name| {
        const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
        return registry.Declaration{
            .name = name,
            .decl_type = .{ .alias = .{ .name = alias, .target = .other_type } },
        };
    } else {
        const flags_type = ty.getCharData("type") orelse return error.InvalidRegistry;

        const bitwidth: u8 = if (mem.eql(u8, flags_type, "VkFlags"))
            32
        else if (mem.eql(u8, flags_type, "VkFlags64"))
            64
        else
            return error.InvalidRegistry;

        return registry.Declaration{
            .name = ty.getCharData("name") orelse return error.InvalidRegistry,
            .decl_type = .{
                .bitmask = .{
                    // Who knows why these are different fields
                    .bits_enum = ty.getAttribute("requires") orelse ty.getAttribute("bitvalues"),
                    .bitwidth = bitwidth,
                },
            },
        };
    }
}

fn parseHandleType(ty: *xml.Element) !registry.Declaration {
    // Parent is not handled in case of an alias
    if (ty.getAttribute("name")) |name| {
        const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
        return registry.Declaration{
            .name = name,
            .decl_type = .{
                .alias = .{ .name = alias, .target = .other_type },
            },
        };
    } else {
        const name = ty.getCharData("name") orelse return error.InvalidRegistry;
        const handle_type = ty.getCharData("type") orelse return error.InvalidRegistry;
        const dispatchable = mem.eql(u8, handle_type, "VK_DEFINE_HANDLE");
        if (!dispatchable and !mem.eql(u8, handle_type, "VK_DEFINE_NON_DISPATCHABLE_HANDLE")) {
            return error.InvalidRegistry;
        }

        return registry.Declaration{
            .name = name,
            .decl_type = .{
                .handle = .{
                    .parent = ty.getAttribute("parent"),
                    .is_dispatchable = dispatchable,
                },
            },
        };
    }
}

fn parseBaseType(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
    const name = ty.getCharData("name") orelse return error.InvalidRegistry;
    if (ty.getCharData("type")) |_| {
        var tok = cparse.XmlCTokenizer.init(ty);
        return try cparse.parseTypedef(allocator, &tok, false);
    } else {
        // Either ANativeWindow, AHardwareBuffer or CAMetalLayer. The latter has a lot of
        // macros, which is why this part is not built into the xml/c parser.
        return registry.Declaration{
            .name = name,
            .decl_type = .{ .foreign = .{ .depends = &.{} } },
        };
    }
}

fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool, api: registry.Api) !registry.Declaration {
    const name = ty.getAttribute("name") orelse return error.InvalidRegistry;

    if (ty.getAttribute("alias")) |alias| {
        return registry.Declaration{
            .name = name,
            .decl_type = .{
                .alias = .{ .name = alias, .target = .other_type },
            },
        };
    }

    var members = try allocator.alloc(registry.Container.Field, ty.children.len);

    var i: usize = 0;
    var it = ty.findChildrenByTag("member");
    var maybe_stype: ?[]const u8 = null;
    while (it.next()) |member| {
        if (!requiredByApi(member, api))
            continue;

        var xctok = cparse.XmlCTokenizer.init(member);
        members[i] = try cparse.parseMember(allocator, &xctok, false);
        if (mem.eql(u8, members[i].name, "sType")) {
            if (member.getAttribute("values")) |stype| {
                maybe_stype = stype;
            }
        }

        if (member.getAttribute("optional")) |optionals| {
            var optional_it = mem.split(u8, optionals, ",");
            if (optional_it.next()) |first_optional| {
                members[i].is_optional = mem.eql(u8, first_optional, "true");
            } else {
                // Optional is empty, probably incorrect.
                return error.InvalidRegistry;
            }
        }
        i += 1;
    }

    members = members[0..i];

    var maybe_extends: ?[][]const u8 = null;
    if (ty.getAttribute("structextends")) |extends| {
        const n_structs = std.mem.count(u8, extends, ",") + 1;
        maybe_extends = try allocator.alloc([]const u8, n_structs);
        var struct_extends = std.mem.split(u8, extends, ",");
        var j: usize = 0;
        while (struct_extends.next()) |struct_extend| {
            maybe_extends.?[j] = struct_extend;
            j += 1;
        }
    }

    it = ty.findChildrenByTag("member");
    for (members) |*member| {
        const member_elem = while (it.next()) |elem| {
            if (requiredByApi(elem, api)) break elem;
        } else unreachable;

        try parsePointerMeta(.{ .container = members }, &member.field_type, member_elem);

        // pNext isn't always properly marked as optional, so just manually override it.
        if (mem.eql(u8, member.name, "pNext")) {
            member.field_type.pointer.is_optional = true;
        }
    }

    return registry.Declaration{
        .name = name,
        .decl_type = .{
            .container = .{
                .stype = maybe_stype,
                .fields = members,
                .is_union = is_union,
                .extends = maybe_extends,
            },
        },
    };
}

fn parseFuncPointer(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
    var xctok = cparse.XmlCTokenizer.init(ty);
    return try cparse.parseTypedef(allocator, &xctok, true);
}

// For some reason, the DeclarationType cannot be passed to lenToPointer, as
// that causes the Zig compiler to generate invalid code for the function. Using a
// dedicated enum fixes the issue...
const Fields = union(enum) {
    command: []registry.Command.Param,
    container: []registry.Container.Field,
};

// returns .{ size, nullable }
fn lenToPointer(fields: Fields, len: []const u8) std.meta.Tuple(&.{ registry.Pointer.PointerSize, bool }) {
    switch (fields) {
        .command => |params| {
            for (params) |*param| {
                if (mem.eql(u8, param.name, len)) {
                    param.is_buffer_len = true;
                    return .{ .{ .other_field = param.name }, param.is_optional };
                }
            }
        },
        .container => |members| {
            for (members) |*member| {
                if (mem.eql(u8, member.name, len)) {
                    member.is_buffer_len = true;
                    return .{ .{ .other_field = member.name }, member.is_optional };
                }
            }
        },
    }

    if (mem.eql(u8, len, "null-terminated")) {
        return .{ .zero_terminated, false };
    } else {
        return .{ .many, false };
    }
}
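
// Editor's note: an illustrative (non-upstream) test of the lookup above; it
// relies only on declarations in this file.
test "lenToPointer marks the referenced length parameter" {
    var params = [_]registry.Command.Param{.{
        .name = "count",
        .param_type = .{ .name = "uint32_t" },
        .is_buffer_len = false,
        .is_optional = false,
    }};
    const size, const nullable = lenToPointer(.{ .command = &params }, "count");
    try std.testing.expect(size == .other_field);
    try std.testing.expect(!nullable);
    try std.testing.expect(params[0].is_buffer_len);
}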

fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.Element) !void {
    var len_attribute_depth: usize = 0;

    if (elem.getAttribute("len")) |lens| {
        var it = mem.split(u8, lens, ",");
        var current_type_info = type_info;

        while (true) switch (current_type_info.*) {
            .pointer => |*ptr| {
                if (it.next()) |len_str| {
                    ptr.size, ptr.is_optional = lenToPointer(fields, len_str);
                } else {
                    ptr.size = .many;
                }

                current_type_info = ptr.child;
                len_attribute_depth += 1;
            },
            .array => |*arr| {
                if (it.next()) |len_str| {
                    const size, _ = lenToPointer(fields, len_str);
                    arr.valid_size = switch (size) {
                        .one => .all,
                        .many => .many,
                        .other_field => |field| .{ .other_field = field },
                        .zero_terminated => .zero_terminated,
                    };
                } else {
                    arr.valid_size = .all;
                }

                current_type_info = arr.child;
                len_attribute_depth += 1;
            },
            else => break,
        };

        if (it.next()) |_| {
            // There are more elements in the `len` attribute than there are pointers
            // Something probably went wrong
            std.log.err("len: {s}", .{lens});
            return error.InvalidRegistry;
        }
    }

    var current_depth: usize = 0;

    if (elem.getAttribute("optional")) |optionals| {
        var it = mem.split(u8, optionals, ",");
        var current_type_info = type_info;
        while (true) switch (current_type_info.*) {
            inline .pointer, .array => |*info| {
                if (it.next()) |optional_str| {
                    // The pointer may have already been marked as optional due to its `len` attribute.
                    const is_already_optional = current_depth < len_attribute_depth and info.is_optional;
                    info.is_optional = is_already_optional or mem.eql(u8, optional_str, "true");
                } else {
                    // There is no information for this pointer, probably incorrect.
                    // Currently there is one definition where this is the case, VkCudaLaunchInfoNV.
                    // We work around these by assuming that they are optional, so that in the case
                    // that they are, we can assign null to them.
                    // See https://github.com/Snektron/vulkan-zig/issues/109
                    info.is_optional = true;
                }

                current_type_info = info.child;
                current_depth += 1;
            },
            else => break,
        };
    }
}

fn parseEnumAlias(elem: *xml.Element) !?registry.Declaration {
    if (elem.getAttribute("alias")) |alias| {
        const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
        return registry.Declaration{
            .name = name,
            .decl_type = .{
                .alias = .{ .name = alias, .target = .other_type },
            },
        };
    }

    return null;
}

fn parseEnums(allocator: Allocator, out: []registry.Declaration, root: *xml.Element, api: registry.Api) !usize {
    var i: usize = 0;
    var it = root.findChildrenByTag("enums");
    while (it.next()) |enums| {
        const name = enums.getAttribute("name") orelse return error.InvalidRegistry;
        if (mem.eql(u8, name, api_constants_name) or !requiredByApi(enums, api)) {
            continue;
        }

        out[i] = .{
            .name = name,
            .decl_type = .{ .enumeration = try parseEnumFields(allocator, enums, api) },
        };
        i += 1;
    }

    return i;
}

fn parseEnumFields(allocator: Allocator, elem: *xml.Element, api: registry.Api) !registry.Enum {
    // TODO: `type` was added recently, fall back to checking endswith FlagBits for older versions?
    const enum_type = elem.getAttribute("type") orelse return error.InvalidRegistry;
    const is_bitmask = mem.eql(u8, enum_type, "bitmask");
    if (!is_bitmask and !mem.eql(u8, enum_type, "enum")) {
        return error.InvalidRegistry;
    }

    const bitwidth = if (elem.getAttribute("bitwidth")) |bitwidth|
        try std.fmt.parseInt(u8, bitwidth, 10)
    else
        32;

    const fields = try allocator.alloc(registry.Enum.Field, elem.children.len);

    var i: usize = 0;
    var it = elem.findChildrenByTag("enum");
    while (it.next()) |field| {
        if (!requiredByApi(field, api))
            continue;

        fields[i] = try parseEnumField(field);
        i += 1;
    }

    return registry.Enum{
        .fields = fields[0..i],
        .bitwidth = bitwidth,
        .is_bitmask = is_bitmask,
    };
}

fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
    const is_compat_alias = if (field.getAttribute("comment")) |comment|
        mem.eql(u8, comment, "Backwards-compatible alias containing a typo") or
            mem.eql(u8, comment, "Deprecated name for backwards compatibility")
    else
        false;

    const name = field.getAttribute("name") orelse return error.InvalidRegistry;
    const value: registry.Enum.Value = blk: {
        // An enum variant's value could be defined by any of the following attributes:
        // - value: Straight-up value of the enum variant, in either base 10 or 16 (prefixed with 0x).
        // - bitpos: Used for bitmasks, and can also be set in extensions.
        // - alias: The field is an alias of another variant within the same enum.
        // - offset: Used with features and extensions, where a non-bitpos value is added to an enum.
        //   The value is given by `1e9 + (ext_nr - 1) * 1e3 + offset`, where `ext_nr` is either
        //   given by the `extnumber` field (in the case of a feature), or given in the parent <extension>
        //   tag. In the latter case it's passed via the `ext_nr` parameter.
if (field.getAttribute("value")) |value| {
|
||||
if (mem.startsWith(u8, value, "0x")) {
|
||||
break :blk .{ .bit_vector = try std.fmt.parseInt(i32, value[2..], 16) };
|
||||
} else {
|
||||
break :blk .{ .int = try std.fmt.parseInt(i32, value, 10) };
|
||||
}
|
||||
} else if (field.getAttribute("bitpos")) |bitpos| {
|
||||
break :blk .{ .bitpos = try std.fmt.parseInt(u6, bitpos, 10) };
|
||||
} else if (field.getAttribute("alias")) |alias| {
|
||||
break :blk .{ .alias = .{ .name = alias, .is_compat_alias = is_compat_alias } };
|
||||
} else {
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
};
|
||||
|
||||
return registry.Enum.Field{
|
||||
.name = name,
|
||||
.value = value,
|
||||
};
|
||||
}
|
||||

fn parseCommands(allocator: Allocator, out: []registry.Declaration, commands_elem: *xml.Element, api: registry.Api) !usize {
    var i: usize = 0;
    var it = commands_elem.findChildrenByTag("command");
    while (it.next()) |elem| {
        if (!requiredByApi(elem, api))
            continue;

        out[i] = try parseCommand(allocator, elem, api);
        i += 1;
    }

    return i;
}

fn splitCommaAlloc(allocator: Allocator, text: []const u8) ![][]const u8 {
    var n_codes: usize = 1;
    for (text) |c| {
        if (c == ',') n_codes += 1;
    }

    const codes = try allocator.alloc([]const u8, n_codes);
    var it = mem.split(u8, text, ",");
    for (codes) |*code| {
        code.* = it.next().?;
    }

    return codes;
}
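
// Editor's note: a small sanity test for `splitCommaAlloc`, added for
// illustration; it is not part of the upstream file.
test "splitCommaAlloc splits on commas" {
    var arena = ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    const codes = try splitCommaAlloc(arena.allocator(), "VK_SUCCESS,VK_INCOMPLETE");
    try std.testing.expectEqual(@as(usize, 2), codes.len);
    try std.testing.expect(mem.eql(u8, codes[0], "VK_SUCCESS"));
    try std.testing.expect(mem.eql(u8, codes[1], "VK_INCOMPLETE"));
}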

fn parseCommand(allocator: Allocator, elem: *xml.Element, api: registry.Api) !registry.Declaration {
    if (elem.getAttribute("alias")) |alias| {
        const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
        return registry.Declaration{
            .name = name,
            .decl_type = .{
                .alias = .{ .name = alias, .target = .other_command },
            },
        };
    }

    const proto = elem.findChildByTag("proto") orelse return error.InvalidRegistry;
    var proto_xctok = cparse.XmlCTokenizer.init(proto);
    const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok, false);

    var params = try allocator.alloc(registry.Command.Param, elem.children.len);

    var i: usize = 0;
    var it = elem.findChildrenByTag("param");
    while (it.next()) |param| {
        if (!requiredByApi(param, api))
            continue;

        var xctok = cparse.XmlCTokenizer.init(param);
        const decl = try cparse.parseParamOrProto(allocator, &xctok, false);
        params[i] = .{
            .name = decl.name,
            .param_type = decl.decl_type.typedef,
            .is_buffer_len = false,
            .is_optional = false,
        };

        if (param.getAttribute("optional")) |optionals| {
            var optional_it = mem.split(u8, optionals, ",");
            if (optional_it.next()) |first_optional| {
                params[i].is_optional = mem.eql(u8, first_optional, "true");
            } else {
                // Optional is empty, probably incorrect.
                return error.InvalidRegistry;
            }
        }
        i += 1;
    }

    const return_type = try allocator.create(registry.TypeInfo);
    return_type.* = command_decl.decl_type.typedef;

    const success_codes = if (elem.getAttribute("successcodes")) |codes|
        try splitCommaAlloc(allocator, codes)
    else
        &[_][]const u8{};

    const error_codes = if (elem.getAttribute("errorcodes")) |codes|
        try splitCommaAlloc(allocator, codes)
    else
        &[_][]const u8{};

    params = params[0..i];

    it = elem.findChildrenByTag("param");
    for (params) |*param| {
        const param_elem = while (it.next()) |param_elem| {
            if (requiredByApi(param_elem, api)) break param_elem;
        } else unreachable;

        try parsePointerMeta(.{ .command = params }, &param.param_type, param_elem);
    }

    return registry.Declaration{
        .name = command_decl.name,
        .decl_type = .{
            .command = .{
                .params = params,
                .return_type = return_type,
                .success_codes = success_codes,
                .error_codes = error_codes,
            },
        },
    };
}

fn parseApiConstants(allocator: Allocator, root: *xml.Element, api: registry.Api) ![]registry.ApiConstant {
    var enums = blk: {
        var it = root.findChildrenByTag("enums");
        while (it.next()) |child| {
            const name = child.getAttribute("name") orelse continue;
            if (mem.eql(u8, name, api_constants_name)) {
                break :blk child;
            }
        }

        return error.InvalidRegistry;
    };

    var types = root.findChildByTag("types") orelse return error.InvalidRegistry;
    const n_defines = blk: {
        var n_defines: usize = 0;
        var it = types.findChildrenByTag("type");
        while (it.next()) |ty| {
            if (ty.getAttribute("category")) |category| {
                if (mem.eql(u8, category, "define")) {
                    n_defines += 1;
                }
            }
        }
        break :blk n_defines;
    };

    const constants = try allocator.alloc(registry.ApiConstant, enums.children.len + n_defines);

    var i: usize = 0;
    var it = enums.findChildrenByTag("enum");
    while (it.next()) |constant| {
        if (!requiredByApi(constant, api))
            continue;

        const expr = if (constant.getAttribute("value")) |expr|
            expr
        else if (constant.getAttribute("alias")) |alias|
            alias
        else
            return error.InvalidRegistry;

        constants[i] = .{
            .name = constant.getAttribute("name") orelse return error.InvalidRegistry,
            .value = .{ .expr = expr },
        };

        i += 1;
    }

    i += try parseDefines(types, constants[i..], api);
    return constants[0..i];
}

fn parseDefines(types: *xml.Element, out: []registry.ApiConstant, api: registry.Api) !usize {
    var i: usize = 0;
    var it = types.findChildrenByTag("type");
    while (it.next()) |ty| {
        if (!requiredByApi(ty, api))
            continue;

        const category = ty.getAttribute("category") orelse continue;
        if (!mem.eql(u8, category, "define")) {
            continue;
        }

        const name = ty.getCharData("name") orelse continue;
        if (mem.eql(u8, name, "VK_HEADER_VERSION") or mem.eql(u8, name, "VKSC_API_VARIANT")) {
            out[i] = .{
                .name = name,
                .value = .{ .expr = mem.trim(u8, ty.children[2].char_data, " ") },
            };
        } else {
            var xctok = cparse.XmlCTokenizer.init(ty);
            out[i] = .{
                .name = name,
                .value = .{ .version = cparse.parseVersion(&xctok) catch continue },
            };
        }
        i += 1;
    }

    return i;
}

fn parseTags(allocator: Allocator, root: *xml.Element) ![]registry.Tag {
    var tags_elem = root.findChildByTag("tags") orelse return error.InvalidRegistry;
    const tags = try allocator.alloc(registry.Tag, tags_elem.children.len);

    var i: usize = 0;
    var it = tags_elem.findChildrenByTag("tag");
    while (it.next()) |tag| {
        tags[i] = .{
            .name = tag.getAttribute("name") orelse return error.InvalidRegistry,
            .author = tag.getAttribute("author") orelse return error.InvalidRegistry,
        };

        i += 1;
    }

    return tags[0..i];
}

fn parseFeatures(allocator: Allocator, root: *xml.Element, api: registry.Api) ![]registry.Feature {
    var it = root.findChildrenByTag("feature");
    var count: usize = 0;
    while (it.next()) |_| count += 1;

    const features = try allocator.alloc(registry.Feature, count);
    var i: usize = 0;
    it = root.findChildrenByTag("feature");
    while (it.next()) |feature| {
        if (!requiredByApi(feature, api))
            continue;

        features[i] = try parseFeature(allocator, feature, api);
        i += 1;
    }

    return features[0..i];
}

fn parseFeature(allocator: Allocator, feature: *xml.Element, api: registry.Api) !registry.Feature {
    const name = feature.getAttribute("name") orelse return error.InvalidRegistry;
    const feature_level = blk: {
        const number = feature.getAttribute("number") orelse return error.InvalidRegistry;
        break :blk try splitFeatureLevel(number, ".");
    };

    var requires = try allocator.alloc(registry.Require, feature.children.len);
    var i: usize = 0;
    var it = feature.findChildrenByTag("require");
    while (it.next()) |require| {
        if (!requiredByApi(require, api))
            continue;

        requires[i] = try parseRequire(allocator, require, null, api);
        i += 1;
    }

    return registry.Feature{
        .name = name,
        .level = feature_level,
        .requires = requires[0..i],
    };
}

fn parseEnumExtension(elem: *xml.Element, parent_extnumber: ?u31) !?registry.Require.EnumExtension {
    // check for either _SPEC_VERSION or _EXTENSION_NAME
    const extends = elem.getAttribute("extends") orelse return null;

    if (elem.getAttribute("offset")) |offset_str| {
        const offset = try std.fmt.parseInt(u31, offset_str, 10);
        const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
        const extnumber = if (elem.getAttribute("extnumber")) |num|
            try std.fmt.parseInt(u31, num, 10)
        else
            null;

        const actual_extnumber = extnumber orelse parent_extnumber orelse return error.InvalidRegistry;
        const value = blk: {
            const abs_value = enumExtOffsetToValue(actual_extnumber, offset);
            if (elem.getAttribute("dir")) |dir| {
                if (mem.eql(u8, dir, "-")) {
                    break :blk -@as(i32, abs_value);
                } else {
                    return error.InvalidRegistry;
                }
            }

            break :blk @as(i32, abs_value);
        };

        return registry.Require.EnumExtension{
            .extends = extends,
            .extnumber = actual_extnumber,
            .field = .{
                .name = name,
                .value = .{ .int = value },
            },
        };
    }

    return registry.Require.EnumExtension{
        .extends = extends,
        .extnumber = parent_extnumber,
        .field = try parseEnumField(elem),
    };
}

fn enumExtOffsetToValue(extnumber: u31, offset: u31) u31 {
    const extension_value_base = 1000000000;
    const extension_block = 1000;
    return extension_value_base + (extnumber - 1) * extension_block + offset;
}
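
// Editor's note: an added worked example of the encoding above; extension
// number 95 with offset 3 maps to 1000000000 + 94 * 1000 + 3.
test "enumExtOffsetToValue encodes extension enum values" {
    try std.testing.expectEqual(@as(u31, 1000094003), enumExtOffsetToValue(95, 3));
}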

fn parseRequire(allocator: Allocator, require: *xml.Element, extnumber: ?u31, api: registry.Api) !registry.Require {
    var n_extends: usize = 0;
    var n_types: usize = 0;
    var n_commands: usize = 0;

    var it = require.elements();
    while (it.next()) |elem| {
        if (mem.eql(u8, elem.tag, "enum")) {
            n_extends += 1;
        } else if (mem.eql(u8, elem.tag, "type")) {
            n_types += 1;
        } else if (mem.eql(u8, elem.tag, "command")) {
            n_commands += 1;
        }
    }

    const extends = try allocator.alloc(registry.Require.EnumExtension, n_extends);
    const types = try allocator.alloc([]const u8, n_types);
    const commands = try allocator.alloc([]const u8, n_commands);

    var i_extends: usize = 0;
    var i_types: usize = 0;
    var i_commands: usize = 0;

    it = require.elements();
    while (it.next()) |elem| {
        if (!requiredByApi(elem, api))
            continue;

        if (mem.eql(u8, elem.tag, "enum")) {
            if (try parseEnumExtension(elem, extnumber)) |ext| {
                extends[i_extends] = ext;
                i_extends += 1;
            }
        } else if (mem.eql(u8, elem.tag, "type")) {
            types[i_types] = elem.getAttribute("name") orelse return error.InvalidRegistry;
            i_types += 1;
        } else if (mem.eql(u8, elem.tag, "command")) {
            commands[i_commands] = elem.getAttribute("name") orelse return error.InvalidRegistry;
            i_commands += 1;
        }
    }

    const required_feature_level = blk: {
        const feature_level = require.getAttribute("feature") orelse break :blk null;
        if (!mem.startsWith(u8, feature_level, "VK_VERSION_")) {
            return error.InvalidRegistry;
        }

        break :blk try splitFeatureLevel(feature_level["VK_VERSION_".len..], "_");
    };

    return registry.Require{
        .extends = extends[0..i_extends],
        .types = types[0..i_types],
        .commands = commands[0..i_commands],
        .required_feature_level = required_feature_level,
        .required_extension = require.getAttribute("extension"),
    };
}

fn parseExtensions(allocator: Allocator, root: *xml.Element, api: registry.Api) ![]registry.Extension {
    const extensions_elem = root.findChildByTag("extensions") orelse return error.InvalidRegistry;

    const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.len);
    var i: usize = 0;
    var it = extensions_elem.findChildrenByTag("extension");
    while (it.next()) |extension| {
        if (!requiredByApi(extension, api))
            continue;
        // Some extensions (in particular 94) are disabled, so just skip them
        if (extension.getAttribute("supported")) |supported| {
            if (mem.eql(u8, supported, "disabled")) {
                continue;
            }
        }

        extensions[i] = try parseExtension(allocator, extension, api);
        i += 1;
    }

    return extensions[0..i];
}

fn findExtVersion(extension: *xml.Element) !u32 {
    var req_it = extension.findChildrenByTag("require");
    while (req_it.next()) |req| {
        var enum_it = req.findChildrenByTag("enum");
        while (enum_it.next()) |e| {
            const name = e.getAttribute("name") orelse continue;
            const value = e.getAttribute("value") orelse continue;
            if (mem.endsWith(u8, name, "_SPEC_VERSION")) {
                return try std.fmt.parseInt(u32, value, 10);
            }
        }
    }

    return error.InvalidRegistry;
}

fn parseExtension(allocator: Allocator, extension: *xml.Element, api: registry.Api) !registry.Extension {
    const name = extension.getAttribute("name") orelse return error.InvalidRegistry;
    const platform = extension.getAttribute("platform");
    const version = try findExtVersion(extension);

    // For some reason there are two ways for an extension to state its required
    // feature level: either separately in each <require> tag, or using
    // the requiresCore attribute.
    const requires_core = if (extension.getAttribute("requiresCore")) |feature_level|
        try splitFeatureLevel(feature_level, ".")
    else
        null;

    const promoted_to: registry.Extension.Promotion = blk: {
        const promotedto = extension.getAttribute("promotedto") orelse break :blk .none;
        if (mem.startsWith(u8, promotedto, "VK_VERSION_")) {
            const feature_level = try splitFeatureLevel(promotedto["VK_VERSION_".len..], "_");
            break :blk .{ .feature = feature_level };
        }

        break :blk .{ .extension = promotedto };
    };

    const number = blk: {
        const number_str = extension.getAttribute("number") orelse return error.InvalidRegistry;
        break :blk try std.fmt.parseInt(u31, number_str, 10);
    };

    const ext_type: ?registry.Extension.ExtensionType = blk: {
        const ext_type_str = extension.getAttribute("type") orelse break :blk null;
        if (mem.eql(u8, ext_type_str, "instance")) {
            break :blk .instance;
        } else if (mem.eql(u8, ext_type_str, "device")) {
            break :blk .device;
        } else {
            return error.InvalidRegistry;
        }
    };

    const depends = blk: {
        const requires_str = extension.getAttribute("requires") orelse break :blk &[_][]const u8{};
        break :blk try splitCommaAlloc(allocator, requires_str);
    };

    var requires = try allocator.alloc(registry.Require, extension.children.len);
    var i: usize = 0;
    var it = extension.findChildrenByTag("require");
    while (it.next()) |require| {
        if (!requiredByApi(require, api))
            continue;
        requires[i] = try parseRequire(allocator, require, number, api);
        i += 1;
    }

    return registry.Extension{
        .name = name,
        .number = number,
        .version = version,
        .extension_type = ext_type,
        .depends = depends,
        .promoted_to = promoted_to,
        .platform = platform,
        .required_feature_level = requires_core,
        .requires = requires[0..i],
    };
}

fn splitFeatureLevel(ver: []const u8, split: []const u8) !registry.FeatureLevel {
    var it = mem.split(u8, ver, split);

    const major = it.next() orelse return error.InvalidFeatureLevel;
    const minor = it.next() orelse return error.InvalidFeatureLevel;
    if (it.next() != null) {
        return error.InvalidFeatureLevel;
    }

    return registry.FeatureLevel{
        .major = try std.fmt.parseInt(u32, major, 10),
        .minor = try std.fmt.parseInt(u32, minor, 10),
    };
}
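
// Editor's note: a small added test for `splitFeatureLevel`, for illustration;
// not part of the upstream file.
test "splitFeatureLevel parses major.minor" {
    const level = try splitFeatureLevel("1.2", ".");
    try std.testing.expectEqual(@as(u32, 1), level.major);
    try std.testing.expectEqual(@as(u32, 2), level.minor);
}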

fn requiredByApi(elem: *xml.Element, api: registry.Api) bool {
    const apis = elem.getAttribute("api") orelse return true; // If the 'api' attribute is not present, assume the element is required.

    var it = mem.split(u8, apis, ",");
    while (it.next()) |required_by_api| {
        if (std.mem.eql(u8, @tagName(api), required_by_api)) return true;
    }

    return false;
}
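
// Editor's note: an added illustrative test; it builds a minimal element by
// hand and is not part of the upstream file.
test "requiredByApi checks the api attribute" {
    var attrs = [_]xml.Attribute{.{ .name = "api", .value = "vulkan,vulkansc" }};
    var elem = xml.Element{ .tag = "type", .attributes = &attrs };
    try std.testing.expect(requiredByApi(&elem, .vulkan));

    var no_attr = xml.Element{ .tag = "type" };
    try std.testing.expect(requiredByApi(&no_attr, .vulkansc));
}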
218
src/vulkan/registry.zig
Normal file
@@ -0,0 +1,218 @@
pub const Api = enum {
    vulkan,
    vulkansc,
};

pub const Registry = struct {
    decls: []Declaration,
    api_constants: []ApiConstant,
    tags: []Tag,
    features: []Feature,
    extensions: []Extension,
};

pub const Declaration = struct {
    name: []const u8,
    decl_type: DeclarationType,
};

pub const DeclarationType = union(enum) {
    container: Container,
    enumeration: Enum,
    bitmask: Bitmask,
    handle: Handle,
    command: Command,
    alias: Alias,
    foreign: Foreign,
    typedef: TypeInfo,
    external,
};

pub const Alias = struct {
    pub const Target = enum {
        other_command,
        other_type,
    };

    name: []const u8,
    target: Target,
};

pub const ApiConstant = struct {
    pub const Value = union(enum) {
        expr: []const u8,
        version: [4][]const u8,
    };

    name: []const u8,
    value: Value,
};

pub const Tag = struct {
    name: []const u8,
    author: []const u8,
};

pub const TypeInfo = union(enum) {
    name: []const u8,
    command_ptr: Command,
    pointer: Pointer,
    array: Array,
};

pub const Container = struct {
    pub const Field = struct {
        name: []const u8,
        field_type: TypeInfo,
        bits: ?usize,
        is_buffer_len: bool,
        is_optional: bool,
    };

    stype: ?[]const u8,
    extends: ?[]const []const u8,
    fields: []Field,
    is_union: bool,
};

pub const Enum = struct {
    pub const Value = union(enum) {
        bitpos: u6, // 1 << bitpos
        bit_vector: i32, // Combined flags & some vendor IDs
        int: i32,
        alias: struct {
            name: []const u8,
            is_compat_alias: bool,
        },
    };

    pub const Field = struct {
        name: []const u8,
        value: Value,
    };

    fields: []Field,
    bitwidth: u8,
    is_bitmask: bool,
};

pub const Bitmask = struct {
    bits_enum: ?[]const u8,
    bitwidth: u8,
};

pub const Handle = struct {
    parent: ?[]const u8, // VkInstance has no parent
    is_dispatchable: bool,
};

pub const Command = struct {
    pub const Param = struct {
        name: []const u8,
        param_type: TypeInfo,
        is_buffer_len: bool,
        is_optional: bool,
    };

    params: []Param,
    return_type: *TypeInfo,
    success_codes: []const []const u8,
    error_codes: []const []const u8,
};

pub const Pointer = struct {
    pub const PointerSize = union(enum) {
        one,
        /// The length is given by some complex expression, possibly involving another field
        many,
        /// The length is given by some other field or parameter
        other_field: []const u8,
        zero_terminated,
    };

    is_const: bool,
    is_optional: bool,
    size: PointerSize,
    child: *TypeInfo,
};

pub const Array = struct {
    pub const ArraySize = union(enum) {
        int: usize,
        alias: []const u8, // Field size is given by an api constant
    };

    pub const ArrayValidSize = union(enum) {
        /// All elements are valid.
        all,
        /// The length is given by some complex expression, possibly involving another field
        many,
        /// The length is given by some other field or parameter
        other_field: []const u8,
        /// The valid elements are terminated by a 0, or by the bounds of the array.
        zero_terminated,
    };

    /// This is the total size of the array
    size: ArraySize,
    /// The number of items that are actually filled with valid values
    valid_size: ArrayValidSize,
    /// Some members may indicate that an array is optional. This happens with
    /// VkPhysicalDeviceHostImageCopyPropertiesEXT::optimalTilingLayoutUUID for example.
    /// The spec is not entirely clear about what this means, but presumably it should
    /// be filled with all zeroes.
    is_optional: bool,
    child: *TypeInfo,
};
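
// Editor's note (added sketch, not upstream code): the `size`/`valid_size`
// distinction above, modeled on a fixed-size, zero-terminated string member
// such as VkPhysicalDeviceProperties.deviceName; `char_type_info` is a
// hypothetical *TypeInfo for `char`.
//
//   const device_name_array = Array{
//       .size = .{ .alias = "VK_MAX_PHYSICAL_DEVICE_NAME_SIZE" },
//       .valid_size = .zero_terminated,
//       .is_optional = false,
//       .child = &char_type_info,
//   };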

pub const Foreign = struct {
    depends: []const u8, // Either a header or vk_platform
};

pub const Feature = struct {
    name: []const u8,
    level: FeatureLevel, // from 'number'
    requires: []Require,
};

pub const Extension = struct {
    pub const ExtensionType = enum {
        instance,
        device,
    };

    pub const Promotion = union(enum) {
        none,
        feature: FeatureLevel,
        extension: []const u8,
    };

    name: []const u8,
    number: u31,
    version: u32,
    extension_type: ?ExtensionType,
    depends: []const []const u8, // Other extensions
    promoted_to: Promotion,
    platform: ?[]const u8,
    required_feature_level: ?FeatureLevel,
    requires: []Require,
};

pub const Require = struct {
    pub const EnumExtension = struct {
        extends: []const u8,
        extnumber: ?u31,
        field: Enum.Field,
    };

    extends: []EnumExtension,
    types: []const []const u8,
    commands: []const []const u8,
    required_feature_level: ?FeatureLevel,
    required_extension: ?[]const u8,
};

pub const FeatureLevel = struct {
    major: u32,
    minor: u32,
};
1672
src/vulkan/render.zig
Normal file
File diff suppressed because it is too large
638
src/xml.zig
Normal file
@@ -0,0 +1,638 @@
const std = @import("std");
const mem = std.mem;
const testing = std.testing;
const Allocator = mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;

pub const Attribute = struct {
    name: []const u8,
    value: []const u8,
};

pub const Content = union(enum) {
    char_data: []const u8,
    comment: []const u8,
    element: *Element,
};

pub const Element = struct {
    tag: []const u8,
    attributes: []Attribute = &.{},
    children: []Content = &.{},

    pub fn getAttribute(self: Element, attrib_name: []const u8) ?[]const u8 {
        for (self.attributes) |child| {
            if (mem.eql(u8, child.name, attrib_name)) {
                return child.value;
            }
        }

        return null;
    }

    pub fn getCharData(self: Element, child_tag: []const u8) ?[]const u8 {
        const child = self.findChildByTag(child_tag) orelse return null;
        if (child.children.len != 1) {
            return null;
        }

        return switch (child.children[0]) {
            .char_data => |char_data| char_data,
            else => null,
        };
    }

    pub fn iterator(self: Element) ChildIterator {
        return .{
            .items = self.children,
            .i = 0,
        };
    }

    pub fn elements(self: Element) ChildElementIterator {
        return .{
            .inner = self.iterator(),
        };
    }

    pub fn findChildByTag(self: Element, tag: []const u8) ?*Element {
        var it = self.findChildrenByTag(tag);
        return it.next();
    }

    pub fn findChildrenByTag(self: Element, tag: []const u8) FindChildrenByTagIterator {
        return .{
            .inner = self.elements(),
            .tag = tag,
        };
    }

    pub const ChildIterator = struct {
        items: []Content,
        i: usize,

        pub fn next(self: *ChildIterator) ?*Content {
            if (self.i < self.items.len) {
                self.i += 1;
                return &self.items[self.i - 1];
            }

            return null;
        }
    };

    pub const ChildElementIterator = struct {
        inner: ChildIterator,

        pub fn next(self: *ChildElementIterator) ?*Element {
            while (self.inner.next()) |child| {
                if (child.* != .element) {
                    continue;
                }

                return child.*.element;
            }

            return null;
        }
    };

    pub const FindChildrenByTagIterator = struct {
        inner: ChildElementIterator,
        tag: []const u8,

        pub fn next(self: *FindChildrenByTagIterator) ?*Element {
            while (self.inner.next()) |child| {
                if (!mem.eql(u8, child.tag, self.tag)) {
                    continue;
                }

                return child;
            }

            return null;
        }
    };
};

pub const Document = struct {
    arena: ArenaAllocator,
    xml_decl: ?*Element,
    root: *Element,

    pub fn deinit(self: Document) void {
        var arena = self.arena; // Copy to stack so self can be taken by value.
        arena.deinit();
    }
};

const Parser = struct {
    source: []const u8,
    offset: usize,
    line: usize,
    column: usize,

    fn init(source: []const u8) Parser {
        return .{
            .source = source,
            .offset = 0,
            .line = 0,
            .column = 0,
        };
    }

    fn peek(self: *Parser) ?u8 {
        return if (self.offset < self.source.len) self.source[self.offset] else null;
    }

    fn consume(self: *Parser) !u8 {
        if (self.offset < self.source.len) {
            return self.consumeNoEof();
        }

        return error.UnexpectedEof;
    }

    fn consumeNoEof(self: *Parser) u8 {
        std.debug.assert(self.offset < self.source.len);
        const c = self.source[self.offset];
        self.offset += 1;

        if (c == '\n') {
            self.line += 1;
            self.column = 0;
        } else {
            self.column += 1;
        }

        return c;
    }

    fn eat(self: *Parser, char: u8) bool {
        self.expect(char) catch return false;
        return true;
    }

    fn expect(self: *Parser, expected: u8) !void {
        if (self.peek()) |actual| {
            if (expected != actual) {
                return error.UnexpectedCharacter;
            }

            _ = self.consumeNoEof();
            return;
        }

        return error.UnexpectedEof;
    }

    fn eatStr(self: *Parser, text: []const u8) bool {
        self.expectStr(text) catch return false;
        return true;
    }

    fn expectStr(self: *Parser, text: []const u8) !void {
        if (self.source.len < self.offset + text.len) {
            return error.UnexpectedEof;
        } else if (mem.startsWith(u8, self.source[self.offset..], text)) {
            var i: usize = 0;
            while (i < text.len) : (i += 1) {
                _ = self.consumeNoEof();
            }

            return;
        }

        return error.UnexpectedCharacter;
    }

    fn eatWs(self: *Parser) bool {
        var ws = false;

        while (self.peek()) |ch| {
            switch (ch) {
                ' ', '\t', '\n', '\r' => {
                    ws = true;
                    _ = self.consumeNoEof();
                },
                else => break,
            }
        }

        return ws;
    }

    fn expectWs(self: *Parser) !void {
        if (!self.eatWs()) return error.UnexpectedCharacter;
    }

    fn currentLine(self: Parser) []const u8 {
        var begin: usize = 0;
        if (mem.lastIndexOfScalar(u8, self.source[0..self.offset], '\n')) |prev_nl| {
            begin = prev_nl + 1;
        }

        const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse self.source.len;
        return self.source[begin..end];
    }
};

test "xml: Parser" {
    {
        var parser = Parser.init("I like pythons");
        try testing.expectEqual(@as(?u8, 'I'), parser.peek());
        try testing.expectEqual(@as(u8, 'I'), parser.consumeNoEof());
        try testing.expectEqual(@as(?u8, ' '), parser.peek());
        try testing.expectEqual(@as(u8, ' '), try parser.consume());

        try testing.expect(parser.eat('l'));
        try testing.expectEqual(@as(?u8, 'i'), parser.peek());
        try testing.expectEqual(false, parser.eat('a'));
        try testing.expectEqual(@as(?u8, 'i'), parser.peek());

        try parser.expect('i');
        try testing.expectEqual(@as(?u8, 'k'), parser.peek());
        try testing.expectError(error.UnexpectedCharacter, parser.expect('a'));
        try testing.expectEqual(@as(?u8, 'k'), parser.peek());

        try testing.expect(parser.eatStr("ke"));
        try testing.expectEqual(@as(?u8, ' '), parser.peek());

        try testing.expect(parser.eatWs());
        try testing.expectEqual(@as(?u8, 'p'), parser.peek());
        try testing.expectEqual(false, parser.eatWs());
        try testing.expectEqual(@as(?u8, 'p'), parser.peek());

        try testing.expectEqual(false, parser.eatStr("aaaaaaaaa"));
        try testing.expectEqual(@as(?u8, 'p'), parser.peek());

        try testing.expectError(error.UnexpectedEof, parser.expectStr("aaaaaaaaa"));
        try testing.expectEqual(@as(?u8, 'p'), parser.peek());
        try testing.expectError(error.UnexpectedCharacter, parser.expectStr("pytn"));
        try testing.expectEqual(@as(?u8, 'p'), parser.peek());
        try parser.expectStr("python");
        try testing.expectEqual(@as(?u8, 's'), parser.peek());
    }

    {
        var parser = Parser.init("");
        try testing.expectEqual(parser.peek(), null);
        try testing.expectError(error.UnexpectedEof, parser.consume());
        try testing.expectEqual(parser.eat('p'), false);
        try testing.expectError(error.UnexpectedEof, parser.expect('p'));
    }
}

pub const ParseError = error{
    IllegalCharacter,
    UnexpectedEof,
    UnexpectedCharacter,
    UnclosedValue,
    UnclosedComment,
    InvalidName,
    InvalidEntity,
    InvalidStandaloneValue,
    NonMatchingClosingTag,
    InvalidDocument,
    OutOfMemory,
};

pub fn parse(backing_allocator: Allocator, source: []const u8) !Document {
    var parser = Parser.init(source);
    return try parseDocument(&parser, backing_allocator);
}
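
// Editor's note: an added end-to-end example for `parse`, in the style of the
// other tests in this file; it is not part of the upstream code.
test "xml: parse minimal document" {
    const doc = try parse(testing.allocator, "<?xml version='1.0'?><registry><tag/></registry>");
    defer doc.deinit();
    try testing.expectEqualSlices(u8, "registry", doc.root.tag);
    try testing.expectEqualSlices(u8, "tag", doc.root.children[0].element.tag);
}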
|
||||
fn parseDocument(parser: *Parser, backing_allocator: Allocator) !Document {
|
||||
var doc = Document{
|
||||
.arena = ArenaAllocator.init(backing_allocator),
|
||||
.xml_decl = null,
|
||||
.root = undefined,
|
||||
};
|
||||
|
||||
errdefer doc.deinit();
|
||||
|
||||
const allocator = doc.arena.allocator();
|
||||
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
doc.xml_decl = try parseElement(parser, allocator, .xml_decl);
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
doc.root = (try parseElement(parser, allocator, .element)) orelse return error.InvalidDocument;
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
if (parser.peek() != null) return error.InvalidDocument;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
fn parseAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
|
||||
const quote = try parser.consume();
|
||||
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;
|
||||
|
||||
const begin = parser.offset;
|
||||
|
||||
while (true) {
|
||||
const c = parser.consume() catch return error.UnclosedValue;
|
||||
if (c == quote) break;
|
||||
}
|
||||
|
||||
const end = parser.offset - 1;
|
||||
|
||||
return try unescape(alloc, parser.source[begin..end]);
|
||||
}
|
||||
|
||||
fn parseEqAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('=');
|
||||
_ = parser.eatWs();
|
||||
|
||||
return try parseAttrValue(parser, alloc);
|
||||
}
|
||||
|
||||
fn parseNameNoDupe(parser: *Parser) ![]const u8 {
|
||||
// XML's spec on names is very long, so to make this easier
|
||||
// we just take any character that is not special and not whitespace
|
||||
const begin = parser.offset;
|
||||
|
||||
while (parser.peek()) |ch| {
|
||||
switch (ch) {
|
||||
' ', '\t', '\n', '\r' => break,
|
||||
'&', '"', '\'', '<', '>', '?', '=', '/' => break,
|
||||
else => _ = parser.consumeNoEof(),
|
||||
}
|
||||
}
|
||||
|
||||
const end = parser.offset;
|
||||
if (begin == end) return error.InvalidName;
|
||||
|
||||
return parser.source[begin..end];
|
||||
}
|
||||
|
||||
fn parseCharData(parser: *Parser, alloc: Allocator) !?[]const u8 {
|
||||
const begin = parser.offset;
|
||||
|
||||
while (parser.peek()) |ch| {
|
||||
switch (ch) {
|
||||
'<' => break,
|
||||
else => _ = parser.consumeNoEof(),
|
||||
}
|
||||
}
|
||||
|
||||
const end = parser.offset;
|
||||
if (begin == end) return null;
|
||||
|
||||
return try unescape(alloc, parser.source[begin..end]);
|
||||
}
|
||||
|
||||
fn parseContent(parser: *Parser, alloc: Allocator) ParseError!Content {
|
||||
if (try parseCharData(parser, alloc)) |cd| {
|
||||
return Content{ .char_data = cd };
|
||||
} else if (try parseComment(parser, alloc)) |comment| {
|
||||
return Content{ .comment = comment };
|
||||
} else if (try parseElement(parser, alloc, .element)) |elem| {
|
||||
return Content{ .element = elem };
|
||||
} else {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
}
|
||||
|
||||
fn parseAttr(parser: *Parser, alloc: Allocator) !?Attribute {
|
||||
const name = parseNameNoDupe(parser) catch return null;
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('=');
|
||||
_ = parser.eatWs();
|
||||
const value = try parseAttrValue(parser, alloc);
|
||||
|
||||
const attr = Attribute{
|
||||
.name = try alloc.dupe(u8, name),
|
||||
.value = value,
|
||||
};
|
||||
return attr;
|
||||
}
|
||||
|
||||
const ElementKind = enum {
|
||||
xml_decl,
|
||||
element,
|
||||
};
|
||||
|
||||
fn parseElement(parser: *Parser, alloc: Allocator, comptime kind: ElementKind) !?*Element {
|
||||
const start = parser.offset;
|
||||
|
||||
const tag = switch (kind) {
|
||||
.xml_decl => blk: {
|
||||
if (!parser.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(parser), "xml")) {
|
||||
parser.offset = start;
|
||||
return null;
|
||||
}
|
||||
break :blk "xml";
|
||||
},
|
||||
.element => blk: {
|
||||
if (!parser.eat('<')) return null;
|
||||
const tag = parseNameNoDupe(parser) catch {
|
||||
parser.offset = start;
|
||||
return null;
|
||||
};
|
||||
break :blk tag;
|
||||
},
|
||||
};
|
||||
|
||||
var attributes = std.ArrayList(Attribute).init(alloc);
|
||||
defer attributes.deinit();
|
||||
|
||||
var children = std.ArrayList(Content).init(alloc);
|
||||
defer children.deinit();
|
||||
|
||||
while (parser.eatWs()) {
|
||||
const attr = (try parseAttr(parser, alloc)) orelse break;
|
||||
try attributes.append(attr);
|
||||
}
|
||||
|
||||
switch (kind) {
|
||||
.xml_decl => try parser.expectStr("?>"),
|
||||
.element => {
|
||||
if (!parser.eatStr("/>")) {
|
||||
try parser.expect('>');
|
||||
|
||||
while (true) {
|
||||
if (parser.peek() == null) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (parser.eatStr("</")) {
|
||||
break;
|
||||
}
|
||||
|
||||
const content = try parseContent(parser, alloc);
|
||||
try children.append(content);
|
||||
}
|
||||
|
||||
const closing_tag = try parseNameNoDupe(parser);
|
||||
if (!mem.eql(u8, tag, closing_tag)) {
|
||||
return error.NonMatchingClosingTag;
|
||||
}
|
||||
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('>');
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const element = try alloc.create(Element);
|
||||
element.* = .{
|
||||
.tag = try alloc.dupe(u8, tag),
|
||||
.attributes = try attributes.toOwnedSlice(),
|
||||
.children = try children.toOwnedSlice(),
|
||||
};
|
||||
return element;
|
||||
}
|
||||
|
||||
test "xml: parseElement" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const alloc = arena.allocator();
|
||||
|
||||
{
|
||||
var parser = Parser.init("<= a='b'/>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, alloc, .element));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<python size='15' color = \"green\"/>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
|
||||
const size_attr = elem.?.attributes[0];
|
||||
try testing.expectEqualSlices(u8, size_attr.name, "size");
|
||||
try testing.expectEqualSlices(u8, size_attr.value, "15");
|
||||
|
||||
const color_attr = elem.?.attributes[1];
|
||||
try testing.expectEqualSlices(u8, color_attr.name, "color");
|
||||
try testing.expectEqualSlices(u8, color_attr.value, "green");
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<python>test</python>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "test");
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<a>b<c/>d<e/>f<!--g--></a>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "a");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "b");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[1].element.tag, "c");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[2].char_data, "d");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[3].element.tag, "e");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[4].char_data, "f");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[5].comment, "g");
|
||||
}
|
||||
}
|
||||
|
||||
test "xml: parse prolog" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xmla version='aa'?>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, a, .xml_decl));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version='aa'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("encoding"));
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("standalone"));
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version=\"ccc\" encoding = 'bbb' standalone \t = 'yes'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "ccc", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqualSlices(u8, "bbb", decl.?.getAttribute("encoding").?);
|
||||
try testing.expectEqualSlices(u8, "yes", decl.?.getAttribute("standalone").?);
|
||||
}
|
||||
}
|
||||
|
||||
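/// Consumes and discards any run of comments, along with the whitespace
/// between them, at the current offset.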
fn skipComments(parser: *Parser, alloc: Allocator) !void {
    while ((try parseComment(parser, alloc)) != null) {
        _ = parser.eatWs();
    }
}

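/// Parses a `<!--...-->` comment and returns its text, or null if the input
/// does not start with `<!--`. An unterminated comment is an error.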
fn parseComment(parser: *Parser, alloc: Allocator) !?[]const u8 {
    if (!parser.eatStr("<!--")) return null;

    const begin = parser.offset;
    while (!parser.eatStr("-->")) {
        _ = parser.consume() catch return error.UnclosedComment;
    }

    const end = parser.offset - "-->".len;
    return try alloc.dupe(u8, parser.source[begin..end]);
}

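/// Maps a single entity reference (including the `&` and `;`) to the
/// character it represents. Only the five predefined XML entities are
/// supported; anything else is `error.InvalidEntity`.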
fn unescapeEntity(text: []const u8) !u8 {
    const EntitySubstitution = struct { text: []const u8, replacement: u8 };

    const entities = [_]EntitySubstitution{
        .{ .text = "&lt;", .replacement = '<' },
        .{ .text = "&gt;", .replacement = '>' },
        .{ .text = "&amp;", .replacement = '&' },
        .{ .text = "&apos;", .replacement = '\'' },
        .{ .text = "&quot;", .replacement = '"' },
    };

    for (entities) |entity| {
        if (mem.eql(u8, text, entity.text)) return entity.replacement;
    }

    return error.InvalidEntity;
}

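/// Replaces every entity reference in `text` with its character and returns
/// the result. Since replacement can only shrink the text, a buffer of
/// `text.len` bytes is allocated up front and the used prefix is returned.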
fn unescape(arena: Allocator, text: []const u8) ![]const u8 {
    const unescaped = try arena.alloc(u8, text.len);

    var j: usize = 0;
    var i: usize = 0;
    while (i < text.len) : (j += 1) {
        if (text[i] == '&') {
            const entity_end = 1 + (mem.indexOfScalarPos(u8, text, i, ';') orelse return error.InvalidEntity);
            unescaped[j] = try unescapeEntity(text[i..entity_end]);
            i = entity_end;
        } else {
            unescaped[j] = text[i];
            i += 1;
        }
    }

    return unescaped[0..j];
}

test "xml: unescape" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
try testing.expectEqualSlices(u8, "test", try unescape(a, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try unescape(a, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&boa"));
|
||||
}
|
||||
|
||||
test "xml: top level comments" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
const doc = try parse(a, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
try testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
|
||||