Restructure to allow for other binding generators in the future

This commit is contained in:
Robin Voetter
2020-07-03 16:40:49 +02:00
parent e049300d02
commit cebecfe197
8 changed files with 16 additions and 15 deletions

View File

@@ -0,0 +1,630 @@
const std = @import("std");
const registry = @import("registry.zig");
const xml = @import("../xml.zig");
const mem = std.mem;
const Allocator = mem.Allocator;
const testing = std.testing;
const ArraySize = registry.Array.ArraySize;
const TypeInfo = registry.TypeInfo;
/// A single C-like token produced by CTokenizer or XmlCTokenizer.
pub const Token = struct {
    kind: Kind,
    /// Slice into the tokenizer's backing source (or XML CharData); not owned.
    text: []const u8,
    const Kind = enum {
        id, // Any id that's not a keyword
        name, // Vulkan <name>...</name>
        type_name, // Vulkan <type>...</type>
        enum_name, // Vulkan <enum>...</enum>
        int, // Run of decimal digits
        star,
        comma,
        semicolon,
        colon,
        minus,
        tilde,
        dot,
        hash,
        lparen,
        rparen,
        lbracket,
        rbracket,
        kw_typedef,
        kw_const,
        kw_vkapi_ptr, // The VKAPI_PTR calling-convention macro
        kw_struct,
    };
};
/// Tokenizer over the plain-C fragments that appear in vk.xml.
/// Token text slices alias `source`; no memory is allocated.
pub const CTokenizer = struct {
    source: []const u8,
    offset: usize = 0,
    /// Set when the source ends inside a `//` comment, so the comment can
    /// continue into the next CharData fragment (XmlCTokenizer re-seeds this).
    in_comment: bool = false,

    /// Return the current character without consuming it, or null at EOF.
    fn peek(self: CTokenizer) ?u8 {
        return if (self.offset < self.source.len) self.source[self.offset] else null;
    }

    /// Consume and return the current character; asserts not at EOF.
    fn consumeNoEof(self: *CTokenizer) u8 {
        const c = self.peek().?;
        self.offset += 1;
        return c;
    }

    /// Consume and return the current character, or error.UnexpectedEof at EOF.
    /// Fixed: the previous version returned `null`, which is not a member of
    /// the error union `!u8` and only compiled because this function was
    /// never referenced (Zig analyzes functions lazily).
    fn consume(self: *CTokenizer) !u8 {
        return if (self.offset < self.source.len)
            self.consumeNoEof()
        else
            error.UnexpectedEof;
    }

    /// Scan an identifier ([A-Za-z_][A-Za-z0-9_]*) and classify it as a
    /// recognized keyword or a plain id.
    fn keyword(self: *CTokenizer) Token {
        const start = self.offset;
        _ = self.consumeNoEof();
        while (true) {
            const c = self.peek() orelse break;
            switch (c) {
                'A'...'Z', 'a'...'z', '_', '0'...'9' => _ = self.consumeNoEof(),
                else => break,
            }
        }
        const token_text = self.source[start .. self.offset];
        const kind = if (mem.eql(u8, token_text, "typedef"))
            Token.Kind.kw_typedef
        else if (mem.eql(u8, token_text, "const"))
            Token.Kind.kw_const
        else if (mem.eql(u8, token_text, "VKAPI_PTR"))
            Token.Kind.kw_vkapi_ptr
        else if (mem.eql(u8, token_text, "struct"))
            Token.Kind.kw_struct
        else
            Token.Kind.id;
        return .{.kind = kind, .text = token_text};
    }

    /// Scan a run of decimal digits into an .int token.
    fn int(self: *CTokenizer) Token {
        const start = self.offset;
        _ = self.consumeNoEof();
        while (true) {
            const c = self.peek() orelse break;
            switch (c) {
                '0'...'9' => _ = self.consumeNoEof(),
                else => break,
            }
        }
        return .{
            .kind = .int,
            .text = self.source[start .. self.offset],
        };
    }

    /// Skip spaces, tabs, carriage returns and newlines.
    fn skipws(self: *CTokenizer) void {
        while (true) {
            switch (self.peek() orelse break) {
                ' ', '\t', '\n', '\r' => _ = self.consumeNoEof(),
                else => break,
            }
        }
    }

    /// Return the next token, or null at end of input. `//` comments are
    /// skipped; a comment running to the end of the fragment sets
    /// `in_comment` so it can be resumed in a following fragment.
    /// Fixed: comments are now skipped in a loop. The previous version
    /// skipped only one comment line per call, so two consecutive `//` lines
    /// in one fragment failed with error.UnexpectedCharacter on the second
    /// comment's leading '/'.
    pub fn next(self: *CTokenizer) !?Token {
        self.skipws();
        while (self.in_comment or mem.startsWith(u8, self.source[self.offset ..], "//")) {
            const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse {
                // Comment runs to the end of this fragment.
                self.offset = self.source.len;
                self.in_comment = true;
                return null;
            };
            self.in_comment = false;
            self.offset = end + 1;
            self.skipws();
        }
        const c = self.peek() orelse return null;
        var kind: Token.Kind = undefined;
        switch (c) {
            'A'...'Z', 'a'...'z', '_' => return self.keyword(),
            '0'...'9' => return self.int(),
            '*' => kind = .star,
            ',' => kind = .comma,
            ';' => kind = .semicolon,
            ':' => kind = .colon,
            '-' => kind = .minus,
            '~' => kind = .tilde,
            '.' => kind = .dot,
            '#' => kind = .hash,
            '[' => kind = .lbracket,
            ']' => kind = .rbracket,
            '(' => kind = .lparen,
            ')' => kind = .rparen,
            else => return error.UnexpectedCharacter,
        }
        const start = self.offset;
        _ = self.consumeNoEof();
        return Token{
            .kind = kind,
            .text = self.source[start .. self.offset],
        };
    }
};
/// Tokenizer which wraps CTokenizer to also handle the XML markup mixed into
/// Vulkan's C snippets: <type>, <enum> and <name> elements become dedicated
/// tokens, CharData sections are tokenized as C, and XML comments are skipped.
pub const XmlCTokenizer = struct {
    /// Iterator over the children of the element being tokenized.
    it: xml.Element.ContentList.Iterator,
    /// C tokenizer for the CharData section currently being consumed, if any.
    ctok: ?CTokenizer = null,
    /// One-token lookahead buffer used by `peek`.
    current: ?Token = null,

    pub fn init(elem: *xml.Element) XmlCTokenizer {
        return .{
            .it = elem.children.iterator(0),
        };
    }

    /// Turn a child element into its token, null for an XML <comment>, or an
    /// error for anything else. The element must contain exactly one CharData.
    fn elemToToken(elem: *xml.Element) !?Token {
        if (elem.children.count() != 1 or elem.children.at(0).* != .CharData) {
            return error.InvalidXml;
        }
        const text = elem.children.at(0).CharData;
        if (mem.eql(u8, elem.tag, "type")) {
            return Token{.kind = .type_name, .text = text};
        } else if (mem.eql(u8, elem.tag, "enum")) {
            return Token{.kind = .enum_name, .text = text};
        } else if (mem.eql(u8, elem.tag, "name")) {
            return Token{.kind = .name, .text = text};
        } else if (mem.eql(u8, elem.tag, "comment")) {
            return null;
        } else {
            return error.InvalidTag;
        }
    }

    /// Return the next token, draining the current CharData tokenizer first
    /// and then moving on to the next child. A C `//` comment that spans
    /// CharData fragments is carried over via `in_comment`; elements inside
    /// such a comment are skipped.
    fn next(self: *XmlCTokenizer) !?Token {
        if (self.current) |current| {
            const token = current;
            self.current = null;
            return token;
        }
        var in_comment: bool = false;
        while (true) {
            if (self.ctok) |*ctok| {
                if (try ctok.next()) |tok| {
                    return tok;
                }
                // Remember whether this fragment ended mid-comment.
                in_comment = ctok.in_comment;
            }
            self.ctok = null;
            if (self.it.next()) |child| {
                switch (child.*) {
                    .CharData => |cdata| self.ctok = CTokenizer{.source = cdata, .in_comment = in_comment},
                    .Comment => {}, // xml comment
                    .Element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
                }
            } else {
                return null;
            }
        }
    }

    /// Like `next`, but EOF is an error.
    fn nextNoEof(self: *XmlCTokenizer) !Token {
        return (try self.next()) orelse return error.UnexpectedEof;
    }

    /// Return the next token without consuming it (single-token lookahead).
    fn peek(self: *XmlCTokenizer) !?Token {
        if (self.current) |current| {
            return current;
        }
        self.current = try self.next();
        return self.current;
    }

    /// Like `peek`, but EOF is an error.
    fn peekNoEof(self: *XmlCTokenizer) !Token {
        return (try self.peek()) orelse return error.UnexpectedEof;
    }

    /// Consume the next token and require it to be of `kind`.
    fn expect(self: *XmlCTokenizer, kind: Token.Kind) !Token {
        const tok = (try self.next()) orelse return error.UnexpectedEof;
        if (tok.kind != kind) {
            return error.UnexpectedToken;
        }
        return tok;
    }
};
// TYPEDEF = kw_typedef DECLARATION ';'
/// Parse a full C typedef from `xctok`. The tokenizer must be exhausted
/// afterwards; any trailing token is a syntax error. The typedef'd type is
/// returned wrapped in a `.typedef` declaration.
pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
    _ = try xctok.expect(.kw_typedef);
    const decl = try parseDeclaration(allocator, xctok);
    _ = try xctok.expect(.semicolon);
    if ((try xctok.peek()) != null) {
        return error.InvalidSyntax;
    }
    const name = decl.name orelse return error.MissingTypeIdentifier;
    return registry.Declaration{
        .name = name,
        .decl_type = .{.typedef = decl.decl_type},
    };
}
// MEMBER = DECLARATION (':' int)?
/// Parse one struct/union <member> element: a declaration optionally
/// followed by a bitfield width. Any other trailing tokens are a syntax
/// error.
pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
    const decl = try parseDeclaration(allocator, xctok);
    var field = registry.Container.Field{
        .name = decl.name orelse return error.MissingTypeIdentifier,
        .field_type = decl.decl_type,
        .bits = null,
        .is_buffer_len = false,
    };
    // No trailing tokens at all is the common case: a plain member.
    const tok = (try xctok.peek()) orelse return field;
    if (tok.kind != .colon) {
        return error.InvalidSyntax;
    }
    _ = try xctok.nextNoEof();
    const bits = try xctok.expect(.int);
    field.bits = try std.fmt.parseInt(usize, bits.text, 10);
    // Assume for now that there won't be any invalid C types like `char char* x : 4`.
    if ((try xctok.peek()) != null) {
        return error.InvalidSyntax;
    }
    return field;
}
/// Parse a <param> or <proto> element: exactly one declaration with no
/// trailing tokens. As with parseTypedef, the declared type is wrapped in a
/// `.typedef`; callers unwrap `decl_type.typedef`.
pub fn parseParamOrProto(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
    const decl = try parseDeclaration(allocator, xctok);
    if ((try xctok.peek()) != null) {
        return error.InvalidSyntax;
    }
    const name = decl.name orelse return error.MissingTypeIdentifier;
    return registry.Declaration{
        .name = name,
        .decl_type = .{.typedef = decl.decl_type},
    };
}
/// Result of parseDeclaration: an optional declarator name plus its type.
pub const Declaration = struct {
    name: ?[]const u8, // Parameter names may be optional, especially in case of func(void)
    decl_type: TypeInfo,
};
/// Explicit error set of the C declaration parser. Spelled out (rather than
/// inferred) because parseDeclaration is mutually recursive with
/// parseFnPtrSuffix, which prevents error-set inference.
pub const ParseError = error{
    OutOfMemory,
    InvalidSyntax,
    InvalidTag,
    InvalidXml,
    Overflow,
    UnexpectedEof,
    UnexpectedCharacter,
    UnexpectedToken,
    MissingTypeIdentifier,
};
// DECLARATION = kw_const? type_name DECLARATOR
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
// | POINTERS '(' FNPTRSUFFIX
/// Parse a C declaration: optional const/struct qualifiers, a type name,
/// pointers, then either a function-pointer suffix or an optional declarator
/// name followed by any number of array declarators. TypeInfo child nodes
/// are allocated from `allocator`; all text slices alias the XML document.
fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
    // Parse declaration constness
    var tok = try xctok.nextNoEof();
    const inner_is_const = tok.kind == .kw_const;
    if (inner_is_const) {
        tok = try xctok.nextNoEof();
    }
    // A `struct` tag before the type name is simply skipped.
    if (tok.kind == .kw_struct) {
        tok = try xctok.nextNoEof();
    }
    // Parse type name
    if (tok.kind != .type_name and tok.kind != .id) return error.InvalidSyntax;
    const type_name = tok.text;
    var type_info = TypeInfo{.name = type_name};
    // Parse pointers
    type_info = try parsePointers(allocator, xctok, inner_is_const, type_info);
    // Parse name / fn ptr
    if (try parseFnPtrSuffix(allocator, xctok, type_info)) |decl| {
        return decl;
    }
    // The declarator name is optional (e.g. unnamed `void` parameters).
    const name = blk: {
        const name_tok = (try xctok.peek()) orelse break :blk null;
        if (name_tok.kind == .id or name_tok.kind == .name) {
            _ = try xctok.nextNoEof();
            break :blk name_tok.text;
        } else {
            break :blk null;
        }
    };
    // Each parsed `[...]` wraps the current innermost type in an array node,
    // so `T x[4][2]` becomes array-of-4 of array-of-2 of T.
    var inner_type = &type_info;
    while (try parseArrayDeclarator(xctok)) |array_size| {
        // Move the current inner type to a new node on the heap
        const child = try allocator.create(TypeInfo);
        child.* = inner_type.*;
        // Re-assign the previous inner type for the array type info node
        inner_type.* = .{
            .array = .{
                .size = array_size,
                .child = child,
            }
        };
        // update the inner_type pointer so it points to the proper
        // inner type again
        inner_type = child;
    }
    return Declaration{
        .name = name,
        .decl_type = type_info,
    };
}
// FNPTRSUFFIX = kw_vkapi_ptr '*' name' ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
/// Try to parse the `(VKAPI_PTR *name)(params)` suffix of a function-pointer
/// typedef. Returns null (consuming nothing) if the next token is not '(',
/// so parseDeclaration can fall through to a plain declarator.
/// `return_type` is moved to the heap and becomes the command's return type.
fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
    const lparen = try xctok.peek();
    if (lparen == null or lparen.?.kind != .lparen) {
        return null;
    }
    _ = try xctok.nextNoEof();
    _ = try xctok.expect(.kw_vkapi_ptr);
    _ = try xctok.expect(.star);
    const name = try xctok.expect(.name);
    _ = try xctok.expect(.rparen);
    _ = try xctok.expect(.lparen);
    const return_type_heap = try allocator.create(TypeInfo);
    return_type_heap.* = return_type;
    var command_ptr = Declaration{
        .name = name.text,
        .decl_type = .{
            .command_ptr = .{
                .params = &[_]registry.Command.Param{},
                .return_type = return_type_heap,
                // Success/error codes only apply to real commands; empty here.
                .success_codes = &[_][]const u8{},
                .error_codes = &[_][]const u8{},
            }
        }
    };
    const first_param = try parseDeclaration(allocator, xctok);
    if (first_param.name == null) {
        // An unnamed first parameter is only valid as the single `void` of
        // an empty parameter list.
        if (first_param.decl_type != .name or !mem.eql(u8, first_param.decl_type.name, "void")) {
            return error.InvalidSyntax;
        }
        _ = try xctok.expect(.rparen);
        return command_ptr;
    }
    // There is no good way to estimate the number of parameters beforehand.
    // Fortunately, there are usually a relatively low number of parameters to a function pointer,
    // so an ArrayList backed by an arena allocator is good enough.
    var params = std.ArrayList(registry.Command.Param).init(allocator);
    try params.append(.{
        .name = first_param.name.?,
        .param_type = first_param.decl_type,
        .is_buffer_len = false,
    });
    while (true) {
        switch ((try xctok.peekNoEof()).kind) {
            .rparen => break,
            .comma => _ = try xctok.nextNoEof(),
            else => return error.InvalidSyntax,
        }
        const decl = try parseDeclaration(allocator, xctok);
        try params.append(.{
            .name = decl.name orelse return error.MissingTypeIdentifier,
            .param_type = decl.decl_type,
            .is_buffer_len = false,
        });
    }
    // Consume the closing ')'.
    _ = try xctok.nextNoEof();
    command_ptr.decl_type.command_ptr.params = params.toOwnedSlice();
    return command_ptr;
}
// POINTERS = (kw_const? '*')*
/// Parse a chain of pointer declarators, wrapping `inner` in one pointer
/// TypeInfo node per '*'. `inner_const` is the constness parsed before the
/// type name and applies to the innermost pointer level only.
/// `is_optional` and `size` are filled in later from XML attributes.
fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
    var type_info = inner;
    var first_const = inner_const;
    while (true) {
        var tok = (try xctok.peek()) orelse return type_info;
        var is_const = first_const;
        first_const = false;
        if (tok.kind == .kw_const) {
            is_const = true;
            _ = try xctok.nextNoEof();
            tok = (try xctok.peek()) orelse return type_info;
        }
        if (tok.kind != .star) {
            // if `is_const` is true at this point, there was a trailing const,
            // and the declaration itself is const.
            return type_info;
        }
        _ = try xctok.nextNoEof();
        const child = try allocator.create(TypeInfo);
        child.* = type_info;
        type_info = .{
            .pointer = .{
                // Note: `first_const` has always been reset to false by this
                // point, so the old `is_const or first_const` was dead code.
                .is_const = is_const,
                .is_optional = false, // set elsewhere
                .size = .one, // set elsewhere
                .child = child,
            },
        };
    }
}
// ARRAY_DECLARATOR = '[' (int | enum_name) ']'
/// Try to parse one `[size]` array declarator. Returns null (consuming
/// nothing) when the next token is not '['; the size is either an integer
/// literal or a reference to an API-constant enum.
fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
    const tok = (try xctok.peek()) orelse return null;
    if (tok.kind != .lbracket) {
        return null;
    }
    _ = try xctok.nextNoEof();
    const size_tok = try xctok.nextNoEof();
    var size: ArraySize = undefined;
    switch (size_tok.kind) {
        .int => {
            const n = std.fmt.parseInt(usize, size_tok.text, 10) catch |err| switch (err) {
                error.Overflow => return error.Overflow,
                // The tokenizer only produces .int for runs of digits.
                error.InvalidCharacter => unreachable,
            };
            size = ArraySize{.int = n};
        },
        .enum_name => size = ArraySize{.alias = size_tok.text},
        else => return error.InvalidSyntax,
    }
    _ = try xctok.expect(.rbracket);
    return size;
}
/// Parse a `#define <name> VK_MAKE_VERSION(major, minor, patch)` snippet and
/// return the three version components as raw token text (id or int).
pub fn parseVersion(xctok: *XmlCTokenizer) ![3][]const u8 {
    _ = try xctok.expect(.hash);
    const define = try xctok.expect(.id);
    if (!mem.eql(u8, define.text, "define")) {
        return error.InvalidVersion;
    }
    // The macro name itself (a <name> element) is not needed.
    _ = try xctok.expect(.name);
    const vk_make_version = try xctok.expect(.type_name);
    if (!mem.eql(u8, vk_make_version.text, "VK_MAKE_VERSION")) {
        return error.NotVersion;
    }
    _ = try xctok.expect(.lparen);
    var version: [3][]const u8 = undefined;
    var i: usize = 0;
    while (i < version.len) : (i += 1) {
        if (i != 0) {
            _ = try xctok.expect(.comma);
        }
        const tok = try xctok.nextNoEof();
        switch (tok.kind) {
            .id, .int => version[i] = tok.text,
            else => return error.UnexpectedToken,
        }
    }
    _ = try xctok.expect(.rparen);
    return version;
}
/// Test helper: drive `tokenizer` (CTokenizer or XmlCTokenizer) and check
/// that it yields exactly `expected_tokens`, followed by end-of-input.
fn testTokenizer(tokenizer: var, expected_tokens: []const Token) void {
    for (expected_tokens) |expected| {
        const tok = (tokenizer.next() catch unreachable).?;
        testing.expectEqual(expected.kind, tok.kind);
        testing.expectEqualSlices(u8, expected.text, tok.text);
    }
    // The tokenizer must be exhausted after the expected tokens.
    if (tokenizer.next() catch unreachable) |_| unreachable;
}
// Exercises every punctuation token plus keyword/int/id classification.
test "CTokenizer" {
    var ctok = CTokenizer {
        .source = \\typedef ([const)]** VKAPI_PTR 123,;aaaa
    };
    testTokenizer(
        &ctok,
        &[_]Token{
            .{.kind = .kw_typedef, .text = "typedef"},
            .{.kind = .lparen, .text = "("},
            .{.kind = .lbracket, .text = "["},
            .{.kind = .kw_const, .text = "const"},
            .{.kind = .rparen, .text = ")"},
            .{.kind = .rbracket, .text = "]"},
            .{.kind = .star, .text = "*"},
            .{.kind = .star, .text = "*"},
            .{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
            .{.kind = .int, .text = "123"},
            .{.kind = .comma, .text = ","},
            .{.kind = .semicolon, .text = ";"},
            .{.kind = .id, .text = "aaaa"},
        }
    );
}
// Checks that elements and text inside a C `//` comment are skipped, and that
// <name> elements outside comments become .name tokens.
test "XmlCTokenizer" {
    const document = try xml.parse(
        testing.allocator,
        \\<root>// comment <name>commented name</name> <type>commented type</type> trailing
        \\    typedef void (VKAPI_PTR *<name>PFN_vkVoidFunction</name>)(void);
        \\</root>
    );
    defer document.deinit();
    var xctok = XmlCTokenizer.init(document.root);
    testTokenizer(
        &xctok,
        &[_]Token{
            .{.kind = .kw_typedef, .text = "typedef"},
            .{.kind = .id, .text = "void"},
            .{.kind = .lparen, .text = "("},
            .{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
            .{.kind = .star, .text = "*"},
            .{.kind = .name, .text = "PFN_vkVoidFunction"},
            .{.kind = .rparen, .text = ")"},
            .{.kind = .lparen, .text = "("},
            .{.kind = .id, .text = "void"},
            .{.kind = .rparen, .text = ")"},
            .{.kind = .semicolon, .text = ";"},
        }
    );
}
// End-to-end check: `typedef const struct Python* pythons[4];` should parse
// as an array of 4 const pointers to the type "Python".
test "parseTypedef" {
    const document = try xml.parse(
        testing.allocator,
        \\<root> // comment <name>commented name</name> trailing
        \\    typedef const struct <type>Python</type>* pythons[4];
        \\    // more comments
        \\</root>
        \\
    );
    defer document.deinit();
    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();
    var xctok = XmlCTokenizer.init(document.root);
    const decl = try parseTypedef(&arena.allocator, &xctok);
    testing.expectEqualSlices(u8, "pythons", decl.name);
    const array = decl.decl_type.typedef.array;
    testing.expectEqual(ArraySize{.int = 4}, array.size);
    const ptr = array.child.pointer;
    testing.expectEqual(true, ptr.is_const);
    testing.expectEqualSlices(u8, "Python", ptr.child.name);
}

View File

@@ -0,0 +1,206 @@
const std = @import("std");
const reg = @import("registry.zig");
const xml = @import("../xml.zig");
const renderRegistry = @import("render.zig").render;
const parseXml = @import("parse.zig").parseXml;
const Allocator = std.mem.Allocator;
const FeatureLevel = reg.FeatureLevel;
/// Order two feature levels lexicographically: compare majors first, and
/// only compare minors when the majors are equal.
fn cmpFeatureLevels(a: FeatureLevel, b: FeatureLevel) std.math.Order {
    // The original wrote `} if (...)` on one line, which reads like a bug;
    // this is the same logic formatted as the else-if chain it actually is.
    if (a.major > b.major) {
        return .gt;
    } else if (a.major < b.major) {
        return .lt;
    }

    if (a.minor > b.minor) {
        return .gt;
    } else if (a.minor < b.minor) {
        return .lt;
    }

    return .eq;
}
/// Resolves a parsed registry in-place: collects which declarations are
/// required by features and extensions, merges extension-added enum fields
/// into their base enums, and removes unrequired declarations.
const DeclarationResolver = struct {
    const DeclarationSet = std.StringHashMap(void);
    const EnumExtensionMap = std.StringHashMap(std.ArrayList(reg.Enum.Field));
    const FieldSet = std.StringHashMap(void);
    /// Allocator for the resolver's own temporary sets and lists.
    allocator: *Allocator,
    /// Arena that owns the registry itself; merged field slices go here.
    reg_arena: *Allocator,
    registry: *reg.Registry,
    /// Names of all required types and commands.
    declarations: DeclarationSet,
    /// Per-enum list of fields added by features/extensions.
    enum_extensions: EnumExtensionMap,
    /// Scratch set, cleared per enum, used to deduplicate field names.
    field_set: FieldSet,
    fn init(allocator: *Allocator, reg_arena: *Allocator, registry: *reg.Registry) DeclarationResolver {
        return .{
            .allocator = allocator,
            .reg_arena = reg_arena,
            .registry = registry,
            .declarations = DeclarationSet.init(allocator),
            .enum_extensions = EnumExtensionMap.init(allocator),
            .field_set = FieldSet.init(allocator),
        };
    }
    fn deinit(self: DeclarationResolver) void {
        // Free each per-enum field list before the map itself.
        var it = self.enum_extensions.iterator();
        while (it.next()) |kv| {
            kv.value.deinit();
        }
        self.field_set.deinit();
        self.enum_extensions.deinit();
        self.declarations.deinit();
    }
    /// Record `field` as an extension of the enum named `enum_name`,
    /// creating the per-enum list on first use.
    fn putEnumExtension(self: *DeclarationResolver, enum_name: []const u8, field: reg.Enum.Field) !void {
        const res = try self.enum_extensions.getOrPut(enum_name);
        if (!res.found_existing) {
            res.kv.value = std.ArrayList(reg.Enum.Field).init(self.allocator);
        }
        try res.kv.value.append(field);
    }
    /// Mark everything a <require> block pulls in: types, commands, and
    /// enum field extensions.
    fn addRequire(self: *DeclarationResolver, req: reg.Require) !void {
        for (req.types) |type_name| {
            _ = try self.declarations.put(type_name, {});
        }
        for (req.commands) |command| {
            _ = try self.declarations.put(command, {});
        }
        for (req.extends) |enum_ext| {
            try self.putEnumExtension(enum_ext.extends, enum_ext.field);
        }
    }
    /// Merge the recorded extension fields for `name` into `base_enum`,
    /// deduplicating by field name. The merged slice is allocated from the
    /// registry arena.
    fn mergeEnumFields(self: *DeclarationResolver, name: []const u8, base_enum: *reg.Enum) !void {
        // If there are no extensions for this enum, assume it's valid.
        const extensions = self.enum_extensions.get(name) orelse return;
        self.field_set.clear();
        const n_fields_upper_bound = base_enum.fields.len + extensions.value.items.len;
        const new_fields = try self.reg_arena.alloc(reg.Enum.Field, n_fields_upper_bound);
        var i: usize = 0;
        // Base fields win over extension fields with the same name.
        for (base_enum.fields) |field| {
            const existing = try self.field_set.put(field.name, {});
            if (existing == null) {
                new_fields[i] = field;
                i += 1;
            }
        }
        // Assume that if a field name clobbers, the value is the same
        for (extensions.value.items) |field| {
            const existing = try self.field_set.put(field.name, {});
            if (existing == null) {
                new_fields[i] = field;
                i += 1;
            }
        }
        // Existing base_enum.fields was allocated by `self.reg_arena`, so
        // it gets cleaned up whenever that is deinited.
        base_enum.fields = self.reg_arena.shrink(new_fields, i);
    }
    /// Run the full resolution pass over the registry.
    fn resolve(self: *DeclarationResolver) !void {
        for (self.registry.features) |feature| {
            for (feature.requires) |req| {
                try self.addRequire(req);
            }
        }
        for (self.registry.extensions) |ext| {
            for (ext.requires) |req| {
                try self.addRequire(req);
            }
        }
        // Merge all the enum fields.
        // Assume that all keys of enum_extensions appear in `self.registry.decls`
        for (self.registry.decls) |*decl| {
            if (decl.decl_type == .enumeration) {
                try self.mergeEnumFields(decl.name, &decl.decl_type.enumeration);
            }
        }
        // Remove all declarations that are not required.
        // Some declarations may exist in `self.declarations` that do not exist in
        // `self.registry.decls`; these are mostly macros and other stuff not parsed
        // into declarations. Foreign (platform) types are always kept.
        var read_index: usize = 0;
        var write_index: usize = 0;
        while (read_index < self.registry.decls.len) {
            const decl = self.registry.decls[read_index];
            const is_required = self.declarations.contains(decl.name);
            if (decl.decl_type == .foreign or is_required) {
                self.registry.decls[write_index] = decl;
                write_index += 1;
            }
            read_index += 1;
        }
        self.registry.decls = self.reg_arena.shrink(self.registry.decls, write_index);
    }
};
/// Ties the parse, resolve and render passes together and owns the arena
/// backing the parsed registry.
pub const Generator = struct {
    gpa: *Allocator,
    /// Arena owning all registry memory; freed in deinit.
    reg_arena: std.heap.ArenaAllocator,
    registry: reg.Registry,
    fn init(allocator: *Allocator, spec: *xml.Element) !Generator {
        const result = try parseXml(allocator, spec);
        return Generator{
            .gpa = allocator,
            .reg_arena = result.arena,
            .registry = result.registry,
        };
    }
    fn deinit(self: Generator) void {
        self.reg_arena.deinit();
    }
    /// Drop extensions whose `promoted_to` is set, compacting the
    /// extensions slice in place.
    fn removePromotedExtensions(self: *Generator) void {
        var write_index: usize = 0;
        for (self.registry.extensions) |ext| {
            if (ext.promoted_to == .none) {
                self.registry.extensions[write_index] = ext;
                write_index += 1;
            }
        }
        self.registry.extensions.len = write_index;
    }
    // Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
    fn resolveDeclarations(self: *Generator) !void {
        var resolver = DeclarationResolver.init(self.gpa, &self.reg_arena.allocator, &self.registry);
        defer resolver.deinit();
        try resolver.resolve();
    }
    /// Render the resolved registry as Zig bindings to `out_stream`.
    fn render(self: *Generator, out_stream: var) !void {
        try renderRegistry(out_stream, &self.reg_arena.allocator, &self.registry);
    }
};
/// Entry point of the binding generator: parse `spec_xml` (the vk.xml
/// registry), resolve its declarations, and render Zig bindings to `writer`.
pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: var) !void {
    const document = try xml.parse(allocator, spec_xml);
    defer document.deinit();

    var generator = try Generator.init(allocator, document.root);
    defer generator.deinit();

    generator.removePromotedExtensions();
    try generator.resolveDeclarations();
    try generator.render(writer);
}

814
generator/vulkan/parse.zig Normal file
View File

@@ -0,0 +1,814 @@
const std = @import("std");
const registry = @import("registry.zig");
const xml = @import("../xml.zig");
const cparse = @import("c-parse.zig");
const mem = std.mem;
const Allocator = mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const api_constants_name = "API Constants";
/// A parsed registry together with the arena that owns all of its memory.
pub const ParseResult = struct {
    arena: ArenaAllocator,
    registry: registry.Registry,
    /// Frees the arena and thereby everything reachable from `registry`.
    pub fn deinit(self: ParseResult) void {
        self.arena.deinit();
    }
};
/// Parse the root <registry> element of vk.xml into a Registry. All parsed
/// data is allocated from the arena in the returned ParseResult; on error
/// the arena (and everything parsed so far) is freed.
pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult {
    var arena = ArenaAllocator.init(backing_allocator);
    errdefer arena.deinit();
    const allocator = &arena.allocator;
    var reg = registry.Registry{
        .decls = try parseDeclarations(allocator, root),
        .api_constants = try parseApiConstants(allocator, root),
        .tags = try parseTags(allocator, root),
        .features = try parseFeatures(allocator, root),
        .extensions = try parseExtensions(allocator, root),
    };
    return ParseResult{
        .arena = arena,
        .registry = reg,
    };
}
/// Gather all top-level declarations: types, enums and commands. The output
/// slice is sized by an upper bound (one per child of <types>/<commands>)
/// and shrunk to the actual count.
fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Declaration {
    const types_elem = root.findChildByTag("types") orelse return error.InvalidRegistry;
    const commands_elem = root.findChildByTag("commands") orelse return error.InvalidRegistry;
    const decl_upper_bound = types_elem.children.count() + commands_elem.children.count();
    const decls = try allocator.alloc(registry.Declaration, decl_upper_bound);

    var count = try parseTypes(allocator, decls, types_elem);
    count += try parseEnums(allocator, decls[count..], root);
    count += try parseCommands(allocator, decls[count..], commands_elem);
    return allocator.shrink(decls, count);
}
/// Parse the <type> children of <types> into `out`, dispatching on the
/// `category` attribute. Unhandled categories (defines, includes, plain
/// enums, ...) are skipped. Returns the number of declarations written.
fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
    var i: usize = 0;
    var it = types_elem.findChildrenByTag("type");
    while (it.next()) |ty| {
        out[i] = blk: {
            // No category attribute means a foreign (platform/C) type.
            const category = ty.getAttribute("category") orelse {
                break :blk try parseForeigntype(ty);
            };
            if (mem.eql(u8, category, "bitmask")) {
                break :blk try parseBitmaskType(ty);
            } else if (mem.eql(u8, category, "handle")) {
                break :blk try parseHandleType(ty);
            } else if (mem.eql(u8, category, "basetype")) {
                break :blk try parseBaseType(allocator, ty);
            } else if (mem.eql(u8, category, "struct")) {
                break :blk try parseContainer(allocator, ty, false);
            } else if (mem.eql(u8, category, "union")) {
                break :blk try parseContainer(allocator, ty, true);
            } else if (mem.eql(u8, category, "funcpointer")) {
                break :blk try parseFuncPointer(allocator, ty);
            } else if (mem.eql(u8, category, "enum")) {
                // Only enum *aliases* yield a declaration here; real enums
                // are handled by parseEnums.
                break :blk (try parseEnumAlias(allocator, ty)) orelse continue;
            }
            // Any other category produces no declaration.
            continue;
        };
        i += 1;
    }
    return i;
}
/// Parse a category-less <type>: a foreign platform type whose dependency is
/// given by the `requires` attribute.
fn parseForeigntype(ty: *xml.Element) !registry.Declaration {
    const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
    var depends: []const u8 = undefined;
    if (ty.getAttribute("requires")) |requires| {
        depends = requires;
    } else if (mem.eql(u8, name, "int")) {
        // for some reason, int doesn't depend on vk_platform (but the other c types do)
        depends = "vk_platform";
    } else {
        return error.InvalidRegistry;
    }
    return registry.Declaration{
        .name = name,
        .decl_type = .{.foreign = .{.depends = depends}},
    };
}
/// Parse a category="bitmask" <type>. An alias carries its name and target
/// as attributes; a real bitmask carries its name in a <name> child and the
/// backing FlagBits enum in `requires`.
fn parseBitmaskType(ty: *xml.Element) !registry.Declaration {
    const maybe_name = ty.getAttribute("name");
    if (maybe_name == null) {
        return registry.Declaration{
            .name = ty.getCharData("name") orelse return error.InvalidRegistry,
            .decl_type = .{.bitmask = .{.bits_enum = ty.getAttribute("requires")}},
        };
    }
    const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
    return registry.Declaration{
        .name = maybe_name.?,
        .decl_type = .{.alias = .{.name = alias, .target = .other_type}},
    };
}
fn parseHandleType(ty: *xml.Element) !registry.Declaration {
// Parent is not handled in case of an alias
if (ty.getAttribute("name")) |name| {
const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
return registry.Declaration{
.name = name,
.decl_type = .{.alias = .{.name = alias, .target = .other_type}},
};
} else {
const name = ty.getCharData("name") orelse return error.InvalidRegistry;
const handle_type = ty.getCharData("type") orelse return error.InvalidRegistry;
const dispatchable = mem.eql(u8, handle_type, "VK_DEFINE_HANDLE");
if (!dispatchable and !mem.eql(u8, handle_type, "VK_DEFINE_NON_DISPATCHABLE_HANDLE")) {
return error.InvalidRegistry;
}
return registry.Declaration{
.name = name,
.decl_type = .{
.handle = .{
.parent = ty.getAttribute("parent"),
.is_dispatchable = dispatchable,
}
},
};
}
}
/// Parse a category="basetype" <type>: a typedef when it has a <type> child,
/// otherwise an opaque platform type.
fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
    const name = ty.getCharData("name") orelse return error.InvalidRegistry;
    if (ty.getCharData("type")) |_| {
        // The typedef parser re-derives the name from the <name> element.
        var tok = cparse.XmlCTokenizer.init(ty);
        return try cparse.parseTypedef(allocator, &tok);
    } else {
        // Either ANativeWindow, AHardwareBuffer or CAMetalLayer. The latter has a lot of
        // macros, which is why this part is not built into the xml/c parser.
        return registry.Declaration{
            .name = name,
            .decl_type = .{.opaque = {}},
        };
    }
}
/// Parse a struct or union <type>: either an alias, or a container whose
/// <member> children are parsed in two passes — first the C declarations,
/// then the len/optional pointer metadata (which may reference sibling
/// member names).
fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
    const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
    if (ty.getAttribute("alias")) |alias| {
        return registry.Declaration{
            .name = name,
            .decl_type = .{.alias = .{.name = alias, .target = .other_type}},
        };
    }
    // `children.count()` over-allocates (comments etc.); shrunk below.
    var members = try allocator.alloc(registry.Container.Field, ty.children.count());
    var i: usize = 0;
    var it = ty.findChildrenByTag("member");
    while (it.next()) |member| {
        var xctok = cparse.XmlCTokenizer.init(member);
        members[i] = try cparse.parseMember(allocator, &xctok);
        i += 1;
    }
    members = allocator.shrink(members, i);
    // Second pass: now that all member names are known, apply the pointer
    // metadata, which may mark sibling members as buffer lengths.
    it = ty.findChildrenByTag("member");
    for (members) |*member| {
        const member_elem = it.next().?;
        try parsePointerMeta(.{.container = members}, &member.field_type, member_elem);
        // pNext isn't properly marked as optional, so just manually override it.
        if (mem.eql(u8, member.name, "pNext")) {
            member.field_type.pointer.is_optional = true;
        }
    }
    return registry.Declaration {
        .name = name,
        .decl_type = .{
            .container = .{
                .fields = members,
                .is_union = is_union,
            }
        }
    };
}
/// Parse a category="funcpointer" <type>, which is a plain C typedef of a
/// VKAPI_PTR function pointer.
fn parseFuncPointer(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
    var tokenizer = cparse.XmlCTokenizer.init(ty);
    return try cparse.parseTypedef(allocator, &tokenizer);
}
// For some reason, the DeclarationType cannot be passed to lenToPointerSize, as
// that causes the Zig compiler to generate invalid code for the function. Using a
// dedicated enum fixes the issue...
/// The sibling field list that a `len` attribute may refer to: either a
/// command's parameters or a container's members.
const Fields = union(enum) {
    command: []registry.Command.Param,
    container: []registry.Container.Field,
};
/// Translate one entry of a `len` attribute into a pointer size. If `len`
/// names a sibling field/param, that sibling is marked as a buffer length
/// (side effect) and the size refers to it; "null-terminated" maps to
/// .zero_terminated; anything else means an unsized `.many` pointer.
fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSize {
    switch (fields) {
        .command => |params| for (params) |*param| {
            if (mem.eql(u8, param.name, len)) {
                param.is_buffer_len = true;
                return .{.other_field = param.name};
            }
        },
        .container => |members| for (members) |*member| {
            if (mem.eql(u8, member.name, len)) {
                member.is_buffer_len = true;
                return .{.other_field = member.name};
            }
        },
    }

    if (mem.eql(u8, len, "null-terminated")) {
        return .zero_terminated;
    }
    return .many;
}
/// Apply the comma-separated `len` and `optional` XML attributes of `elem`
/// to the pointer chain in `type_info`, one entry per pointer level,
/// outermost first. Note the asymmetry: missing `len` entries default to
/// `.one`, but a missing `optional` entry is an error.
fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.Element) !void {
    if (elem.getAttribute("len")) |lens| {
        var it = mem.split(lens, ",");
        var current_type_info = type_info;
        while (current_type_info.* == .pointer) {
            // TODO: Check altlen
            const size = if (it.next()) |len_str| lenToPointerSize(fields, len_str) else .one;
            current_type_info.pointer.size = size;
            current_type_info = current_type_info.pointer.child;
        }
        if (it.next()) |_| {
            // There are more elements in the `len` attribute than there are pointers
            // Something probably went wrong
            return error.InvalidRegistry;
        }
    }
    if (elem.getAttribute("optional")) |optionals| {
        var it = mem.split(optionals, ",");
        var current_type_info = type_info;
        while (current_type_info.* == .pointer) {
            if (it.next()) |current_optional| {
                current_type_info.pointer.is_optional = mem.eql(u8, current_optional, "true");
            } else {
                // There is no information for this pointer, probably incorrect.
                return error.InvalidRegistry;
            }
            current_type_info = current_type_info.pointer.child;
        }
    }
}
/// If `elem` is a category="enum" alias, return the corresponding alias
/// declaration; otherwise return null (real enums are parsed elsewhere).
/// `allocator` is unused but kept for signature parity with the other
/// parse functions.
fn parseEnumAlias(allocator: *Allocator, elem: *xml.Element) !?registry.Declaration {
    const alias = elem.getAttribute("alias") orelse return null;
    const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
    return registry.Declaration{
        .name = name,
        .decl_type = .{.alias = .{.name = alias, .target = .other_type}},
    };
}
/// Parse all <enums> blocks into `out`, skipping the "API Constants" block
/// (handled separately). Returns the number of declarations written.
fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
    var i: usize = 0;
    var it = root.findChildrenByTag("enums");
    while (it.next()) |enums| {
        const name = enums.getAttribute("name") orelse return error.InvalidRegistry;
        // API constants are not an enum declaration.
        if (mem.eql(u8, name, api_constants_name)) {
            continue;
        }
        out[i] = .{
            .name = name,
            .decl_type = .{.enumeration = try parseEnumFields(allocator, enums)},
        };
        i += 1;
    }
    return i;
}
/// Parse the <enum> children of an <enums> element into an Enum; the `type`
/// attribute must be "enum" or "bitmask".
fn parseEnumFields(allocator: *Allocator, elem: *xml.Element) !registry.Enum {
    // TODO: `type` was added recently, fall back to checking endswith FlagBits for older versions?
    const enum_type = elem.getAttribute("type") orelse return error.InvalidRegistry;
    const is_bitmask = mem.eql(u8, enum_type, "bitmask");
    if (!is_bitmask and !mem.eql(u8, enum_type, "enum")) {
        return error.InvalidRegistry;
    }
    // `children.count()` over-allocates (comments etc.); shrunk below.
    const fields = try allocator.alloc(registry.Enum.Field, elem.children.count());
    var i: usize = 0;
    var it = elem.findChildrenByTag("enum");
    while (it.next()) |field| {
        fields[i] = try parseEnumField(field);
        i += 1;
    }
    return registry.Enum{
        .fields = allocator.shrink(fields, i),
        .is_bitmask = is_bitmask,
    };
}
/// Parse a single <enum> variant. The variant's value comes from exactly one
/// of the `value`, `bitpos` or `alias` attributes.
fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
    // The registry marks known typos/deprecated spellings via the comment
    // attribute; these aliases are flagged so they can be filtered later.
    const is_compat_alias = if (field.getAttribute("comment")) |comment|
        mem.eql(u8, comment, "Backwards-compatible alias containing a typo") or
            mem.eql(u8, comment, "Deprecated name for backwards compatibility")
    else
        false;
    const name = field.getAttribute("name") orelse return error.InvalidRegistry;
    const value: registry.Enum.Value = blk: {
        // An enum variant's value could be defined by any of the following attributes:
        // - value: Straight up value of the enum variant, in either base 10 or 16 (prefixed with 0x).
        // - bitpos: Used for bitmasks, and can also be set in extensions.
        // - alias: The field is an alias of another variant within the same enum.
        // - offset: Used with features and extensions, where a non-bitpos value is added to an enum.
        //     The value is given by `1e9 + (ext_nr - 1) * 1e3 + offset`, where `ext_nr` is either
        //     given by the `extnumber` field (in the case of a feature), or given in the parent <extension>
        //     tag. In the latter case its passed via the `ext_nr` parameter.
        if (field.getAttribute("value")) |value| {
            if (mem.startsWith(u8, value, "0x")) {
                break :blk .{.bit_vector = try std.fmt.parseInt(i32, value[2..], 16)};
            } else {
                break :blk .{.int = try std.fmt.parseInt(i32, value, 10)};
            }
        } else if (field.getAttribute("bitpos")) |bitpos| {
            break :blk .{.bitpos = try std.fmt.parseInt(u5, bitpos, 10)};
        } else if (field.getAttribute("alias")) |alias| {
            break :blk .{.alias = .{.name = alias, .is_compat_alias = is_compat_alias}};
        } else {
            return error.InvalidRegistry;
        }
    };
    return registry.Enum.Field{
        .name = name,
        .value = value,
    };
}
/// Parse every <command> child of <commands> into `out`; returns the number
/// of declarations written.
fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
    var count: usize = 0;
    var it = commands_elem.findChildrenByTag("command");
    while (it.next()) |elem| {
        out[count] = try parseCommand(allocator, elem);
        count += 1;
    }
    return count;
}
/// Split `text` on commas into an allocated slice of sub-slices.
/// The sub-slices point into `text`; only the outer slice is allocated.
fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![]const []const u8 {
    // Number of segments is one more than the number of separators.
    var segment_count: usize = 1;
    for (text) |ch| {
        if (ch == ',') segment_count += 1;
    }

    const segments = try allocator.alloc([]const u8, segment_count);
    var splitter = mem.split(text, ",");
    var i: usize = 0;
    while (i < segment_count) : (i += 1) {
        segments[i] = splitter.next().?;
    }

    return segments;
}
/// Parse a single <command> element into a declaration. A command either
/// aliases another command, or defines a full prototype with parameters
/// and success/error result codes.
fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration {
    // Alias commands only carry their own name and the aliased command's name.
    if (elem.getAttribute("alias")) |alias| {
        const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
        return registry.Declaration{
            .name = name,
            .decl_type = .{.alias = .{.name = alias, .target = .other_command}}
        };
    }

    // The <proto> child holds the return type and command name as C code.
    const proto = elem.findChildByTag("proto") orelse return error.InvalidRegistry;
    var proto_xctok = cparse.XmlCTokenizer.init(proto);
    const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok);

    // children.count() over-allocates (it counts non-<param> children too);
    // the slice is shrunk to the actual count further down.
    var params = try allocator.alloc(registry.Command.Param, elem.children.count());

    var i: usize = 0;
    var it = elem.findChildrenByTag("param");
    while (it.next()) |param| {
        var xctok = cparse.XmlCTokenizer.init(param);
        const decl = try cparse.parseParamOrProto(allocator, &xctok);
        params[i] = .{
            .name = decl.name,
            .param_type = decl.decl_type.typedef,
            .is_buffer_len = false, // presumably refined by parsePointerMeta below - TODO confirm
        };
        i += 1;
    }

    const return_type = try allocator.create(registry.TypeInfo);
    return_type.* = command_decl.decl_type.typedef;

    // successcodes/errorcodes are comma-separated lists of result code names.
    const success_codes = if (elem.getAttribute("successcodes")) |codes|
        try splitCommaAlloc(allocator, codes)
    else
        &[_][]const u8{};

    const error_codes = if (elem.getAttribute("errorcodes")) |codes|
        try splitCommaAlloc(allocator, codes)
    else
        &[_][]const u8{};

    params = allocator.shrink(params, i);

    // Second pass over the <param> elements: parsePointerMeta is given the
    // full parameter slice, so pointer metadata may reference sibling params.
    it = elem.findChildrenByTag("param");
    for (params) |*param| {
        const param_elem = it.next().?;
        try parsePointerMeta(.{.command = params}, &param.param_type, param_elem);
    }

    return registry.Declaration {
        .name = command_decl.name,
        .decl_type = .{
            .command = .{
                .params = params,
                .return_type = return_type,
                .success_codes = success_codes,
                .error_codes = error_codes,
            }
        }
    };
}
/// Collect the API constants: the fields of the <enums> block named
/// `api_constants_name` (declared elsewhere in this file), plus the
/// `#define`d constants from the <types> section.
fn parseApiConstants(allocator: *Allocator, root: *xml.Element) ![]registry.ApiConstant {
    var enums = blk: {
        var it = root.findChildrenByTag("enums");
        while (it.next()) |child| {
            const name = child.getAttribute("name") orelse continue;
            if (mem.eql(u8, name, api_constants_name)) {
                break :blk child;
            }
        }

        // No matching <enums> block means the registry is malformed.
        return error.InvalidRegistry;
    };

    var types = root.findChildByTag("types") orelse return error.InvalidRegistry;

    // Count the <type category="define"> entries so the allocation below
    // can hold both the enum constants and the defines.
    const n_defines = blk: {
        var n_defines: usize = 0;
        var it = types.findChildrenByTag("type");
        while (it.next()) |ty| {
            if (ty.getAttribute("category")) |category| {
                if (mem.eql(u8, category, "define")) {
                    n_defines += 1;
                }
            }
        }
        break :blk n_defines;
    };

    const constants = try allocator.alloc(registry.ApiConstant, enums.children.count() + n_defines);

    var i: usize = 0;
    var it = enums.findChildrenByTag("enum");
    while (it.next()) |constant| {
        // A constant is either a direct value or an alias of another constant;
        // both are stored as an unparsed expression string.
        const expr = if (constant.getAttribute("value")) |expr|
            expr
        else if (constant.getAttribute("alias")) |alias|
            alias
        else
            return error.InvalidRegistry;

        constants[i] = .{
            .name = constant.getAttribute("name") orelse return error.InvalidRegistry,
            .value = .{.expr = expr},
        };

        i += 1;
    }

    // Defines are appended after the enum constants; shrink to the real total.
    i += try parseDefines(types, constants[i..]);
    return allocator.shrink(constants, i);
}
/// Parse <type category="define"> entries into `out`, returning the number
/// of constants written. Defines that cannot be parsed are skipped.
fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
    var i: usize = 0;
    var it = types.findChildrenByTag("type");
    while (it.next()) |ty| {
        const category = ty.getAttribute("category") orelse continue;
        if (!mem.eql(u8, category, "define")) {
            continue;
        }

        const name = ty.getCharData("name") orelse continue;
        if (mem.eql(u8, name, "VK_HEADER_VERSION")) {
            // The header version is stored as raw text; this assumes the value
            // is the third child node of the <type> element (after the
            // directive text and the <name> tag) - TODO confirm against vk.xml.
            out[i] = .{
                .name = name,
                .value = .{.expr = mem.trim(u8, ty.children.at(2).CharData, " ")},
            };
        } else {
            // Other defines are expected to be version macros; anything that
            // cparse.parseVersion cannot handle is silently skipped.
            var xctok = cparse.XmlCTokenizer.init(ty);
            out[i] = .{
                .name = name,
                .value = .{
                    .version = cparse.parseVersion(&xctok) catch continue
                },
            };
        }
        i += 1;
    }

    return i;
}
/// Parse all author <tag> entries (KHR, EXT, vendor tags, ...) from the registry.
fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
    const tags_elem = root.findChildByTag("tags") orelse return error.InvalidRegistry;

    // children.count() may over-allocate; shrink to the real count afterwards.
    const tags = try allocator.alloc(registry.Tag, tags_elem.children.count());

    var count: usize = 0;
    var tag_it = tags_elem.findChildrenByTag("tag");
    while (tag_it.next()) |tag| {
        tags[count] = .{
            .name = tag.getAttribute("name") orelse return error.InvalidRegistry,
            .author = tag.getAttribute("author") orelse return error.InvalidRegistry,
        };
        count += 1;
    }

    return allocator.shrink(tags, count);
}
/// Parse all <feature> elements (the Vulkan core versions).
fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature {
    // First pass: count the <feature> elements so we can allocate exactly.
    var feature_count: usize = 0;
    var count_it = root.findChildrenByTag("feature");
    while (count_it.next()) |_| feature_count += 1;

    const features = try allocator.alloc(registry.Feature, feature_count);

    // Second pass: parse each element into the freshly allocated slice.
    var index: usize = 0;
    var feature_it = root.findChildrenByTag("feature");
    while (feature_it.next()) |feature| {
        features[index] = try parseFeature(allocator, feature);
        index += 1;
    }

    return features;
}
/// Parse a single <feature> element (a Vulkan core version)
/// together with its <require> blocks.
fn parseFeature(allocator: *Allocator, feature: *xml.Element) !registry.Feature {
    const name = feature.getAttribute("name") orelse return error.InvalidRegistry;
    // The feature level comes from the "number" attribute, e.g. "1.1".
    const feature_level = blk: {
        const number = feature.getAttribute("number") orelse return error.InvalidRegistry;
        break :blk try splitFeatureLevel(number, ".");
    };

    // children.count() over-allocates (non-<require> children are counted
    // too); the slice is shrunk to the actual count below.
    var requires = try allocator.alloc(registry.Require, feature.children.count());
    var i: usize = 0;
    var it = feature.findChildrenByTag("require");
    while (it.next()) |require| {
        // Features have no extension number, hence the null argument.
        requires[i] = try parseRequire(allocator, require, null);
        i += 1;
    }

    return registry.Feature{
        .name = name,
        .level = feature_level,
        .requires = allocator.shrink(requires, i)
    };
}
/// Parse an <enum> element inside a <require> block that extends an existing
/// enum. Returns null when the element has no "extends" attribute (e.g. an
/// extension's own constants, presumably _SPEC_VERSION / _EXTENSION_NAME).
fn parseEnumExtension(elem: *xml.Element, parent_extnumber: ?u31) !?registry.Require.EnumExtension {
    const extends = elem.getAttribute("extends") orelse return null;

    if (elem.getAttribute("offset")) |offset_str| {
        // Offset-based variant: the value is computed from the extension
        // number and the offset, optionally negated via dir="-".
        const offset = try std.fmt.parseInt(u31, offset_str, 10);
        const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
        const extnumber = if (elem.getAttribute("extnumber")) |num|
            try std.fmt.parseInt(u31, num, 10)
        else
            null;

        // A local "extnumber" attribute overrides the number of the
        // enclosing <extension> element.
        const actual_extnumber = extnumber orelse parent_extnumber orelse return error.InvalidRegistry;
        const value = blk: {
            const abs_value = enumExtOffsetToValue(actual_extnumber, offset);
            if (elem.getAttribute("dir")) |dir| {
                // "-" is the only direction the registry defines here.
                if (mem.eql(u8, dir, "-")) {
                    break :blk -@as(i32, abs_value);
                } else {
                    return error.InvalidRegistry;
                }
            }

            break :blk @as(i32, abs_value);
        };

        return registry.Require.EnumExtension{
            .extends = extends,
            .extnumber = actual_extnumber,
            .field = .{.name = name, .value = .{.int = value}},
        };
    }

    // No offset: the variant uses a regular value/bitpos/alias attribute.
    return registry.Require.EnumExtension{
        .extends = extends,
        .extnumber = parent_extnumber,
        .field = try parseEnumField(elem),
    };
}
/// Compute the value of an offset-based enum extension variant.
/// Extension values start at one billion, with a block of 1000 values
/// reserved per extension; extension numbers are 1-based.
fn enumExtOffsetToValue(extnumber: u31, offset: u31) u31 {
    const base: u31 = 1000000000;
    const block_size: u31 = 1000;
    return base + block_size * (extnumber - 1) + offset;
}
/// Parse a <require> block belonging to a feature or extension.
/// `extnumber` is the number of the enclosing <extension>, or null when the
/// block belongs to a <feature>.
fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
    // First pass: count the children per kind so exact allocations can be made.
    var n_extends: usize = 0;
    var n_types: usize = 0;
    var n_commands: usize = 0;

    var it = require.elements();
    while (it.next()) |elem| {
        if (mem.eql(u8, elem.tag, "enum")) {
            n_extends += 1;
        } else if (mem.eql(u8, elem.tag, "type")) {
            n_types += 1;
        } else if (mem.eql(u8, elem.tag, "command")) {
            n_commands += 1;
        }
    }

    const extends = try allocator.alloc(registry.Require.EnumExtension, n_extends);
    const types = try allocator.alloc([]const u8, n_types);
    const commands = try allocator.alloc([]const u8, n_commands);

    var i_extends: usize = 0;
    var i_types: usize = 0;
    var i_commands: usize = 0;

    // Second pass: fill the arrays. Some <enum> children do not actually
    // extend an enum (parseEnumExtension returns null for those), so the
    // `extends` slice is shrunk before returning.
    it = require.elements();
    while (it.next()) |elem| {
        if (mem.eql(u8, elem.tag, "enum")) {
            if (try parseEnumExtension(elem, extnumber)) |ext| {
                extends[i_extends] = ext;
                i_extends += 1;
            }
        } else if (mem.eql(u8, elem.tag, "type")) {
            types[i_types] = elem.getAttribute("name") orelse return error.InvalidRegistry;
            i_types += 1;
        } else if (mem.eql(u8, elem.tag, "command")) {
            commands[i_commands] = elem.getAttribute("name") orelse return error.InvalidRegistry;
            i_commands += 1;
        }
    }

    // The "feature" attribute, if present, must look like VK_VERSION_x_y.
    const required_feature_level = blk: {
        const feature_level = require.getAttribute("feature") orelse break :blk null;
        if (!mem.startsWith(u8, feature_level, "VK_VERSION_")) {
            return error.InvalidRegistry;
        }

        break :blk try splitFeatureLevel(feature_level["VK_VERSION_".len ..], "_");
    };

    return registry.Require{
        .extends = allocator.shrink(extends, i_extends),
        .types = types,
        .commands = commands,
        .required_feature_level = required_feature_level,
        .required_extension = require.getAttribute("extension"),
    };
}
/// Parse every enabled <extension> element of the registry.
fn parseExtensions(allocator: *Allocator, root: *xml.Element) ![]registry.Extension {
    const extensions_elem = root.findChildByTag("extensions") orelse return error.InvalidRegistry;

    // children.count() over-allocates (and disabled extensions are skipped);
    // the slice is shrunk to the real count at the end.
    const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.count());

    var count: usize = 0;
    var ext_it = extensions_elem.findChildrenByTag("extension");
    while (ext_it.next()) |extension| {
        // Some extensions (in particular 94) are disabled, so just skip them.
        const supported = extension.getAttribute("supported") orelse "";
        if (mem.eql(u8, supported, "disabled")) {
            continue;
        }

        extensions[count] = try parseExtension(allocator, extension);
        count += 1;
    }

    return allocator.shrink(extensions, count);
}
/// Find the extension's version: the value of the enum constant whose name
/// ends in _SPEC_VERSION, stored inside one of the <require> blocks.
fn findExtVersion(extension: *xml.Element) !u32 {
    var require_it = extension.findChildrenByTag("require");
    while (require_it.next()) |require| {
        var enum_it = require.findChildrenByTag("enum");
        while (enum_it.next()) |constant| {
            const name = constant.getAttribute("name") orelse continue;
            if (!mem.endsWith(u8, name, "_SPEC_VERSION")) continue;
            const value = constant.getAttribute("value") orelse continue;
            return try std.fmt.parseInt(u32, value, 10);
        }
    }

    return error.InvalidRegistry;
}
/// Parse a single <extension> element, including all of its <require> blocks.
fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Extension {
    const name = extension.getAttribute("name") orelse return error.InvalidRegistry;
    const platform = extension.getAttribute("platform");
    const version = try findExtVersion(extension);

    // For some reason there are two ways for an extension to state its required
    // feature level: both separately in each <require> tag, or using
    // the requiresCore attribute.
    const requires_core = if (extension.getAttribute("requiresCore")) |feature_level|
        try splitFeatureLevel(feature_level, ".")
    else
        null;

    // An extension may have been promoted to either a core version
    // (VK_VERSION_x_y) or to another extension.
    const promoted_to: registry.Extension.Promotion = blk: {
        const promotedto = extension.getAttribute("promotedto") orelse break :blk .none;
        if (mem.startsWith(u8, promotedto, "VK_VERSION_")) {
            const feature_level = try splitFeatureLevel(promotedto["VK_VERSION_".len ..], "_");

            break :blk .{.feature = feature_level};
        }

        break :blk .{.extension = promotedto};
    };

    const number = blk: {
        const number_str = extension.getAttribute("number") orelse return error.InvalidRegistry;
        break :blk try std.fmt.parseInt(u31, number_str, 10);
    };

    // "instance" and "device" are the only recognized extension types;
    // the attribute itself is optional.
    const ext_type: ?registry.Extension.ExtensionType = blk: {
        const ext_type_str = extension.getAttribute("type") orelse break :blk null;
        if (mem.eql(u8, ext_type_str, "instance")) {
            break :blk .instance;
        } else if (mem.eql(u8, ext_type_str, "device")) {
            break :blk .device;
        } else {
            return error.InvalidRegistry;
        }
    };

    // The "requires" attribute is a comma-separated list of other
    // extensions this one depends on.
    const depends = blk: {
        const requires_str = extension.getAttribute("requires") orelse break :blk &[_][]const u8{};
        break :blk try splitCommaAlloc(allocator, requires_str);
    };

    // children.count() over-allocates; shrunk to the actual count below.
    var requires = try allocator.alloc(registry.Require, extension.children.count());
    var i: usize = 0;
    var it = extension.findChildrenByTag("require");
    while (it.next()) |require| {
        requires[i] = try parseRequire(allocator, require, number);
        i += 1;
    }

    return registry.Extension{
        .name = name,
        .number = number,
        .version = version,
        .extension_type = ext_type,
        .depends = depends,
        .promoted_to = promoted_to,
        .platform = platform,
        .required_feature_level = requires_core,
        .requires = allocator.shrink(requires, i)
    };
}
/// Parse a feature level of the form "<major><split><minor>", e.g. "1.2".
/// Any other number of components is rejected.
fn splitFeatureLevel(ver: []const u8, split: []const u8) !registry.FeatureLevel {
    var part_it = mem.split(ver, split);
    const major_str = part_it.next() orelse return error.InvalidFeatureLevel;
    const minor_str = part_it.next() orelse return error.InvalidFeatureLevel;
    if (part_it.next()) |_| {
        return error.InvalidFeatureLevel;
    }

    return registry.FeatureLevel{
        .major = try std.fmt.parseInt(u32, major_str, 10),
        .minor = try std.fmt.parseInt(u32, minor_str, 10),
    };
}

View File

@@ -0,0 +1,186 @@
/// Root of a parsed Vulkan registry (vk.xml).
pub const Registry = struct {
    decls: []Declaration,
    api_constants: []ApiConstant,
    tags: []Tag,
    features: []Feature,
    extensions: []Extension,
};

/// A single named top-level declaration from the registry.
pub const Declaration = struct {
    name: []const u8,
    decl_type: DeclarationType,
};

/// The different kinds of top-level declarations.
pub const DeclarationType = union(enum) {
    container: Container,
    enumeration: Enum,
    bitmask: Bitmask,
    handle: Handle,
    command: Command,
    alias: Alias,
    foreign: Foreign,
    typedef: TypeInfo,
    opaque,
};

/// A declaration that renames another declaration.
pub const Alias = struct {
    pub const Target = enum {
        other_command,
        other_type,
    };

    name: []const u8, // name of the aliased declaration
    target: Target,
};

/// A constant from the API-constants enum block or from a #define.
pub const ApiConstant = struct {
    pub const Value = union(enum) {
        expr: []const u8, // unparsed C expression text
        version: [3][]const u8, // major/minor/patch components of a version macro
    };

    name: []const u8,
    value: Value,
};

/// An author tag such as KHR or EXT.
pub const Tag = struct {
    name: []const u8,
    author: []const u8,
};
/// The type of a field, parameter, or typedef.
pub const TypeInfo = union(enum) {
    name: []const u8, // a reference to a named type
    command_ptr: Command,
    pointer: Pointer,
    array: Array,
};

/// A struct or union type.
pub const Container = struct {
    pub const Field = struct {
        name: []const u8,
        field_type: TypeInfo,
        bits: ?usize, // presumably the bitfield width, null when not a bitfield - confirm at use site
        is_buffer_len: bool,
    };

    fields: []Field,
    is_union: bool,
};

/// An enumeration, or the backing enum of a bitmask.
pub const Enum = struct {
    pub const Value = union(enum) {
        bitpos: u5, // 1 << bitpos
        bit_vector: i32, // Combined flags & some vendor IDs
        int: i32,
        alias: struct {
            name: []const u8,
            is_compat_alias: bool,
        }
    };

    pub const Field = struct {
        name: []const u8,
        value: Value,
    };

    fields: []Field,
    is_bitmask: bool,
};

/// A flags type; presumably tied to the corresponding FlagBits enum.
pub const Bitmask = struct {
    bits_enum: ?[]const u8,
};

/// A Vulkan object handle.
pub const Handle = struct {
    parent: ?[]const u8, // VkInstance has no parent
    is_dispatchable: bool,
};
/// A Vulkan command (entry point) or function pointer prototype.
pub const Command = struct {
    pub const Param = struct {
        name: []const u8,
        param_type: TypeInfo,
        is_buffer_len: bool,
    };

    params: []Param,
    return_type: *TypeInfo,
    success_codes: []const []const u8, // result code names, e.g. from "successcodes"
    error_codes: []const []const u8,
};

/// A (possibly const/optional) pointer type with length semantics.
pub const Pointer = struct {
    pub const PointerSize = union(enum) {
        one,
        many, // The length is given by some complex expression, possibly involving another field
        other_field: []const u8, // The length is given by some other field or parameter
        zero_terminated
    };

    is_const: bool,
    is_optional: bool,
    size: PointerSize,
    child: *TypeInfo,
};

/// A fixed-size array type.
pub const Array = struct {
    pub const ArraySize = union(enum) {
        int: usize,
        alias: []const u8, // Field size is given by an api constant
    };

    size: ArraySize,
    child: *TypeInfo,
};

/// A type defined outside the Vulkan registry itself.
pub const Foreign = struct {
    depends: []const u8, // Either a header or vk_platform
};
/// A Vulkan core version (<feature> element).
pub const Feature = struct {
    name: []const u8,
    level: FeatureLevel, // from 'number'
    requires: []Require,
};

/// A Vulkan extension (<extension> element).
pub const Extension = struct {
    pub const ExtensionType = enum {
        instance,
        device,
    };

    pub const Promotion = union(enum) {
        none,
        feature: FeatureLevel, // promoted to a core version
        extension: []const u8, // promoted to another extension
    };

    name: []const u8,
    number: u31,
    version: u32,
    extension_type: ?ExtensionType,
    depends: []const []const u8, // Other extensions
    promoted_to: Promotion,
    platform: ?[]const u8,
    required_feature_level: ?FeatureLevel,
    requires: []Require,
};

/// A <require> block of a feature or extension.
pub const Require = struct {
    pub const EnumExtension = struct {
        extends: []const u8, // name of the enum being extended
        extnumber: ?u31,
        field: Enum.Field,
    };

    extends: []EnumExtension,
    types: []const []const u8,
    commands: []const []const u8,
    required_feature_level: ?FeatureLevel,
    required_extension: ?[]const u8,
};

/// A major.minor API version.
pub const FeatureLevel = struct {
    major: u32,
    minor: u32,
};

View File

@@ -0,0 +1,251 @@
const std = @import("std");
const reg = @import("registry.zig");
const mem = std.mem;
const Allocator = mem.Allocator;
// Lifted from src-self-hosted/translate_c.zig
/// Returns true when `name` can be used as a plain Zig identifier:
/// it must be non-empty, consist only of [A-Za-z0-9_], and must not
/// start with a digit.
pub fn isValidZigIdentifier(name: []const u8) bool {
    // Bug fix: the empty string is never a valid identifier. Without this
    // check an empty name would be emitted unescaped by writeIdentifier.
    if (name.len == 0) return false;

    for (name) |c, i| {
        switch (c) {
            '_', 'a'...'z', 'A'...'Z' => {},
            '0' ... '9' => if (i == 0) return false,
            else => return false
        }
    }

    return true;
}
// Lifted from src-self-hosted/translate_c.zig
/// Returns true when `name` is a Zig primitive type or other reserved name
/// that would shadow a builtin if used as an identifier.
pub fn isZigReservedIdentifier(name: []const u8) bool {
    // Arbitrary-width integer types: 'u' or 'i' followed by only digits.
    if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) {
        var digits_only = true;
        for (name[1..]) |c| {
            switch (c) {
                '0'...'9' => {},
                else => {
                    digits_only = false;
                    break;
                },
            }
        }
        // Bug fix: the original returned false here on the first non-digit,
        // which made names like "usize"/"isize" skip the list check below
        // even though they appear in it.
        if (digits_only) return true;
    }

    const reserved_names = [_][]const u8 {
        "void", "comptime_float", "comptime_int", "bool", "isize",
        "usize", "f16", "f32", "f64", "f128", "c_longdouble",
        "noreturn", "type", "anyerror", "c_short", "c_ushort",
        "c_int", "c_uint", "c_long", "c_ulong", "c_longlong", "c_ulonglong"
    };

    for (reserved_names) |reserved| {
        if (mem.eql(u8, reserved, name)) {
            return true;
        }
    }

    return false;
}
/// Returns true when `name` must be written using @"..." syntax: it is not a
/// valid identifier, or it collides with a reserved name or language keyword.
pub fn needZigEscape(name: []const u8) bool {
    if (!isValidZigIdentifier(name)) return true;
    if (isZigReservedIdentifier(name)) return true;
    return std.zig.Token.getKeyword(name) != null;
}
/// Write `id` to `out` as a Zig identifier, wrapping it in @"..." syntax
/// when it would otherwise not parse as a plain identifier.
pub fn writeIdentifier(out: var, id: []const u8) !void {
    if (!needZigEscape(id)) {
        try out.writeAll(id);
    } else {
        try out.print("@\"{}\"", .{id});
    }
}
/// The identifier case styles the binding generators can emit.
pub const CaseStyle = enum {
    snake, // some_identifier
    screaming_snake, // SOME_IDENTIFIER
    title, // SomeIdentifier
    camel, // someIdentifier
};
/// Strip the first matching Vulkan namespace prefix from `id`, if any.
/// Returns `id` unchanged when no prefix matches.
pub fn trimVkNamespace(id: []const u8) []const u8 {
    const vk_prefixes = [_][]const u8{"VK_", "vk", "Vk", "PFN_vk"};

    var i: usize = 0;
    while (i < vk_prefixes.len) : (i += 1) {
        const prefix = vk_prefixes[i];
        if (mem.startsWith(u8, id, prefix)) return id[prefix.len..];
    }

    return id;
}
/// Return the first registered author tag (KHR, EXT, ...) that `id` ends
/// with, or null when none matches.
pub fn getAuthorTag(id: []const u8, tags: []const reg.Tag) ?[]const u8 {
    var i: usize = 0;
    while (i < tags.len) : (i += 1) {
        if (mem.endsWith(u8, id, tags[i].name)) {
            return tags[i].name;
        }
    }

    return null;
}
/// Remove the trailing author tag from `id`, if it has one.
pub fn stripAuthorTag(id: []const u8, tags: []const reg.Tag) []const u8 {
    const tag = getAuthorTag(id, tags) orelse return id;
    return id[0 .. id.len - tag.len];
}
/// Iterates over the word segments of an identifier: runs separated by
/// underscores and by case transitions, so both snake_case and CamelCase
/// (including acronym runs like "KHRFoo") can be traversed word by word.
pub const SegmentIterator = struct {
    text: []const u8,
    offset: usize,

    pub fn init(text: []const u8) SegmentIterator {
        return .{
            .text = text,
            .offset = 0,
        };
    }

    /// Find the exclusive end index of the segment starting at `self.offset`.
    fn nextBoundary(self: SegmentIterator) usize {
        var i = self.offset + 1;

        while (true) {
            // End of text or an underscore always terminates the segment.
            if (i == self.text.len or self.text[i] == '_') {
                return i;
            }

            const prev_lower = std.ascii.isLower(self.text[i - 1]);
            const next_lower = std.ascii.isLower(self.text[i]);

            if (prev_lower and !next_lower) {
                // lowercase -> non-lowercase transition ends a word ("foo|Bar").
                return i;
            } else if (i != self.offset + 1 and !prev_lower and next_lower) {
                // Run of non-lowercase followed by a lowercase letter: the last
                // uppercase letter belongs to the next word ("KHR|Foo").
                return i - 1;
            }

            i += 1;
        }
    }

    /// Return the next word segment, or null when the text is exhausted.
    pub fn next(self: *SegmentIterator) ?[]const u8 {
        // Skip any underscores between segments.
        while (self.offset < self.text.len and self.text[self.offset] == '_') {
            self.offset += 1;
        }

        if (self.offset == self.text.len) {
            return null;
        }

        const end = self.nextBoundary();
        const word = self.text[self.offset .. end];
        self.offset = end;
        return word;
    }

    /// Return the not-yet-consumed tail of the text.
    pub fn rest(self: SegmentIterator) []const u8 {
        if (self.offset >= self.text.len) {
            return &[_]u8{};
        } else {
            return self.text[self.offset..];
        }
    }
};
/// Renders Vulkan identifiers as Zig identifiers in various case styles,
/// keeping any author tag (KHR, EXT, ...) as a suffix.
pub const IdRenderer = struct {
    tags: []const reg.Tag,
    text_cache: std.ArrayList(u8), // scratch buffer, reused between renders

    pub fn init(allocator: *Allocator, tags: []const reg.Tag) IdRenderer {
        return .{
            .tags = tags,
            .text_cache = std.ArrayList(u8).init(allocator),
        };
    }

    pub fn deinit(self: IdRenderer) void {
        self.text_cache.deinit();
    }

    /// Append `id` (plus the optional author tag) to the cache in
    /// snake_case, or SCREAMING_SNAKE_CASE when `screaming` is set.
    fn renderSnake(self: *IdRenderer, screaming: bool, id: []const u8, tag: ?[]const u8) !void {
        var it = SegmentIterator.init(id);
        var first = true;
        // Function selected once, applied to every character below.
        const transform = if (screaming) std.ascii.toUpper else std.ascii.toLower;

        while (it.next()) |segment| {
            if (first) {
                first = false;
            } else {
                try self.text_cache.append('_');
            }

            for (segment) |c| {
                try self.text_cache.append(transform(c));
            }
        }

        // The author tag becomes one final underscore-separated segment.
        if (tag) |name| {
            try self.text_cache.append('_');

            for (name) |c| {
                try self.text_cache.append(transform(c));
            }
        }
    }

    /// Append `id` (plus the optional author tag) to the cache in
    /// TitleCase, or camelCase when `title` is false.
    fn renderCamel(self: *IdRenderer, title: bool, id: []const u8, tag: ?[]const u8) !void {
        var it = SegmentIterator.init(id);
        var lower_first = !title;

        while (it.next()) |segment| {
            var i: usize = 0;

            // Leading digits are copied verbatim; casing applies to letters only.
            while (i < segment.len and std.ascii.isDigit(segment[i])) {
                try self.text_cache.append(segment[i]);
                i += 1;
            }

            // All-digit segments do not count as the first word.
            if (i == segment.len) {
                continue;
            }

            // In camelCase, only the first letter of the first non-digit
            // segment is lowercased; everything else gets TitleCase treatment.
            if (i == 0 and lower_first) {
                try self.text_cache.append(std.ascii.toLower(segment[i]));
            } else {
                try self.text_cache.append(std.ascii.toUpper(segment[i]));
            }

            lower_first = false;

            for (segment[i + 1..]) |c| {
                try self.text_cache.append(std.ascii.toLower(c));
            }
        }

        // The author tag keeps its original (uppercase) spelling.
        if (tag) |name| {
            try self.text_cache.appendSlice(name);
        }
    }

    /// Write `id` as-is, escaping it with @"..." if needed.
    pub fn render(self: IdRenderer, out: var, id: []const u8) !void {
        try writeIdentifier(out, id);
    }

    /// Format into the cache, then write the result as a (possibly escaped)
    /// identifier.
    pub fn renderFmt(self: *IdRenderer, out: var, comptime fmt: []const u8, args: var) !void {
        self.text_cache.items.len = 0;
        try std.fmt.format(self.text_cache.writer(), fmt, args);
        try writeIdentifier(out, self.text_cache.items);
    }

    /// Re-case `id` into `case_style` (stripping the author tag before
    /// re-casing and re-appending it afterwards), then write the result as
    /// a (possibly escaped) identifier.
    pub fn renderWithCase(self: *IdRenderer, out: var, case_style: CaseStyle, id: []const u8) !void {
        const tag = getAuthorTag(id, self.tags);
        const adjusted_id = if (tag) |name| id[0 .. id.len - name.len] else id;

        self.text_cache.items.len = 0;

        switch (case_style) {
            .snake => try self.renderSnake(false, adjusted_id, tag),
            .screaming_snake => try self.renderSnake(true, adjusted_id, tag),
            .title => try self.renderCamel(true, adjusted_id, tag),
            .camel => try self.renderCamel(false, adjusted_id, tag),
        }

        try writeIdentifier(out, self.text_cache.items);
    }
};

1078
generator/vulkan/render.zig Normal file

File diff suppressed because it is too large Load Diff