forked from mirror/vulkan-zig
Rename Token.Id to Token.Kind to remove confusion with Token.Kind.id
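The rename exists because the token tag type `Token.Id` collides visually with its own member `.id` (the kind used for any plain identifier), so spellings like `Token.Id.id` were easy to misread. With the new name the struct reads roughly as below; this is a minimal sketch of the renamed declaration only, the full enum with all keyword and punctuation kinds appears in the diff that follows.

    pub const Token = struct {
        kind: Kind,
        text: []const u8,

        const Kind = enum {
            id,        // any identifier that is not a keyword; previously spelled Token.Id.id
            name,      // Vulkan <name>...</name>
            type_name, // Vulkan <type>...</type>
            // ... keyword and punctuation kinds elided
        };
    };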
@@ -8,10 +8,10 @@ const ArraySize = registry.Array.ArraySize;
 const TypeInfo = registry.TypeInfo;
 
 pub const Token = struct {
-    id: Id,
+    kind: Kind,
     text: []const u8,
 
-    const Id = enum {
+    const Kind = enum {
         id, // Any id thats not a keyword
         name, // Vulkan <name>...</name>
         type_name, // Vulkan <type>...</type>
@@ -72,18 +72,18 @@ pub const CTokenizer = struct {
 
         const token_text = self.source[start .. self.offset];
 
-        const id = if (mem.eql(u8, token_text, "typedef"))
-            Token.Id.kw_typedef
+        const kind = if (mem.eql(u8, token_text, "typedef"))
+            Token.Kind.kw_typedef
         else if (mem.eql(u8, token_text, "const"))
-            Token.Id.kw_const
+            Token.Kind.kw_const
         else if (mem.eql(u8, token_text, "VKAPI_PTR"))
-            Token.Id.kw_vkapi_ptr
+            Token.Kind.kw_vkapi_ptr
         else if (mem.eql(u8, token_text, "struct"))
-            Token.Id.kw_struct
+            Token.Kind.kw_struct
         else
-            Token.Id.id;
+            Token.Kind.id;
 
-        return .{.id = id, .text = token_text};
+        return .{.kind = kind, .text = token_text};
     }
 
     fn int(self: *CTokenizer) Token {
@@ -99,7 +99,7 @@ pub const CTokenizer = struct {
         }
 
         return .{
-            .id = .int,
+            .kind = .int,
             .text = self.source[start .. self.offset],
         };
     }
@@ -127,29 +127,29 @@ pub const CTokenizer = struct {
         self.skipws();
 
         const c = self.peek() orelse return null;
-        var id: Token.Id = undefined;
+        var kind: Token.Kind = undefined;
         switch (c) {
            'A'...'Z', 'a'...'z', '_' => return self.keyword(),
            '0'...'9' => return self.int(),
-           '*' => id = .star,
-           ',' => id = .comma,
-           ';' => id = .semicolon,
-           ':' => id = .colon,
-           '-' => id = .minus,
-           '~' => id = .tilde,
-           '.' => id = .dot,
-           '#' => id = .hash,
-           '[' => id = .lbracket,
-           ']' => id = .rbracket,
-           '(' => id = .lparen,
-           ')' => id = .rparen,
+           '*' => kind = .star,
+           ',' => kind = .comma,
+           ';' => kind = .semicolon,
+           ':' => kind = .colon,
+           '-' => kind = .minus,
+           '~' => kind = .tilde,
+           '.' => kind = .dot,
+           '#' => kind = .hash,
+           '[' => kind = .lbracket,
+           ']' => kind = .rbracket,
+           '(' => kind = .lparen,
+           ')' => kind = .rparen,
            else => return error.UnexpectedCharacter
         }
 
         const start = self.offset;
         _ = self.consumeNoEof();
         return Token{
-            .id = id,
+            .kind = kind,
             .text = self.source[start .. self.offset]
         };
     }
@@ -173,11 +173,11 @@ pub const XmlCTokenizer = struct {
 
         const text = elem.children.at(0).CharData;
         if (mem.eql(u8, elem.tag, "type")) {
-            return Token{.id = .type_name, .text = text};
+            return Token{.kind = .type_name, .text = text};
         } else if (mem.eql(u8, elem.tag, "enum")) {
-            return Token{.id = .enum_name, .text = text};
+            return Token{.kind = .enum_name, .text = text};
         } else if (mem.eql(u8, elem.tag, "name")) {
-            return Token{.id = .name, .text = text};
+            return Token{.kind = .name, .text = text};
         } else if (mem.eql(u8, elem.tag, "comment")) {
             return null;
         } else {
@@ -233,9 +233,9 @@ pub const XmlCTokenizer = struct {
         return (try self.peek()) orelse return error.UnexpectedEof;
     }
 
-    fn expect(self: *XmlCTokenizer, id: Token.Id) !Token {
+    fn expect(self: *XmlCTokenizer, kind: Token.Kind) !Token {
         const tok = (try self.next()) orelse return error.UnexpectedEof;
-        if (tok.id != id) {
+        if (tok.kind != kind) {
            return error.UnexpectedToken;
        }
 
@@ -269,7 +269,7 @@ pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Conta
     };
 
     if (try xctok.peek()) |tok| {
-        if (tok.id != .colon) {
+        if (tok.kind != .colon) {
            return error.InvalidSyntax;
        }
 
@@ -321,16 +321,16 @@ pub const ParseError = error{
 fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
     // Parse declaration constness
     var tok = try xctok.nextNoEof();
-    const inner_is_const = tok.id == .kw_const;
+    const inner_is_const = tok.kind == .kw_const;
     if (inner_is_const) {
         tok = try xctok.nextNoEof();
     }
 
-    if (tok.id == .kw_struct) {
+    if (tok.kind == .kw_struct) {
         tok = try xctok.nextNoEof();
     }
     // Parse type name
-    if (tok.id != .type_name and tok.id != .id) return error.InvalidSyntax;
+    if (tok.kind != .type_name and tok.kind != .id) return error.InvalidSyntax;
     const type_name = tok.text;
 
     var type_info = TypeInfo{.name = type_name};
@@ -346,7 +346,7 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
 
     const name = blk: {
         const name_tok = (try xctok.peek()) orelse break :blk null;
-        if (name_tok.id == .id or name_tok.id == .name) {
+        if (name_tok.kind == .id or name_tok.kind == .name) {
            _ = try xctok.nextNoEof();
            break :blk name_tok.text;
        } else {
@@ -382,7 +382,7 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
 // FNPTRSUFFIX = kw_vkapi_ptr '*' name' ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
 fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
     const lparen = try xctok.peek();
-    if (lparen == null or lparen.?.id != .lparen) {
+    if (lparen == null or lparen.?.kind != .lparen) {
         return null;
     }
     _ = try xctok.nextNoEof();
@@ -428,7 +428,7 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
     });
 
     while (true) {
-        switch ((try xctok.peekNoEof()).id) {
+        switch ((try xctok.peekNoEof()).kind) {
            .rparen => break,
            .comma => _ = try xctok.nextNoEof(),
            else => return error.InvalidSyntax,
@@ -457,13 +457,13 @@ fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool
         var is_const = first_const;
         first_const = false;
 
-        if (tok.id == .kw_const) {
+        if (tok.kind == .kw_const) {
            is_const = true;
            _ = try xctok.nextNoEof();
            tok = (try xctok.peek()) orelse return type_info;
        }
 
-        if (tok.id != .star) {
+        if (tok.kind != .star) {
            // if `is_const` is true at this point, there was a trailing const,
            // and the declaration itself is const.
            return type_info;
@@ -488,14 +488,14 @@ fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool
 // ARRAY_DECLARATOR = '[' (int | enum_name) ']'
 fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
     const lbracket = try xctok.peek();
-    if (lbracket == null or lbracket.?.id != .lbracket) {
+    if (lbracket == null or lbracket.?.kind != .lbracket) {
         return null;
     }
 
     _ = try xctok.nextNoEof();
 
     const size_tok = try xctok.nextNoEof();
-    const size: ArraySize = switch (size_tok.id) {
+    const size: ArraySize = switch (size_tok.kind) {
         .int => .{
             .int = std.fmt.parseInt(usize, size_tok.text, 10) catch |err| switch (err) {
                 error.Overflow => return error.Overflow,
@@ -531,7 +531,7 @@ pub fn parseVersion(xctok: *XmlCTokenizer) ![3][]const u8 {
         }
 
         const tok = try xctok.nextNoEof();
-        switch (tok.id) {
+        switch (tok.kind) {
            .id, .int => part.* = tok.text,
            else => return error.UnexpectedToken,
        }
@@ -543,7 +543,7 @@ pub fn parseVersion(xctok: *XmlCTokenizer) ![3][]const u8 {
 fn testTokenizer(tokenizer: var, expected_tokens: []const Token) void {
     for (expected_tokens) |expected| {
         const tok = (tokenizer.next() catch unreachable).?;
-        testing.expectEqual(expected.id, tok.id);
+        testing.expectEqual(expected.kind, tok.kind);
         testing.expectEqualSlices(u8, expected.text, tok.text);
     }
 
@@ -558,19 +558,19 @@ test "CTokenizer" {
     testTokenizer(
         &ctok,
         &[_]Token{
-            .{.id = .kw_typedef, .text = "typedef"},
-            .{.id = .lparen, .text = "("},
-            .{.id = .lbracket, .text = "["},
-            .{.id = .kw_const, .text = "const"},
-            .{.id = .rparen, .text = ")"},
-            .{.id = .rbracket, .text = "]"},
-            .{.id = .star, .text = "*"},
-            .{.id = .star, .text = "*"},
-            .{.id = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
-            .{.id = .int, .text = "123"},
-            .{.id = .comma, .text = ","},
-            .{.id = .semicolon, .text = ";"},
-            .{.id = .id, .text = "aaaa"},
+            .{.kind = .kw_typedef, .text = "typedef"},
+            .{.kind = .lparen, .text = "("},
+            .{.kind = .lbracket, .text = "["},
+            .{.kind = .kw_const, .text = "const"},
+            .{.kind = .rparen, .text = ")"},
+            .{.kind = .rbracket, .text = "]"},
+            .{.kind = .star, .text = "*"},
+            .{.kind = .star, .text = "*"},
+            .{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
+            .{.kind = .int, .text = "123"},
+            .{.kind = .comma, .text = ","},
+            .{.kind = .semicolon, .text = ";"},
+            .{.kind = .id, .text = "aaaa"},
         }
     );
 }
@@ -589,17 +589,17 @@ test "XmlCTokenizer" {
     testTokenizer(
         &xctok,
         &[_]Token{
-            .{.id = .kw_typedef, .text = "typedef"},
-            .{.id = .id, .text = "void"},
-            .{.id = .lparen, .text = "("},
-            .{.id = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
-            .{.id = .star, .text = "*"},
-            .{.id = .name, .text = "PFN_vkVoidFunction"},
-            .{.id = .rparen, .text = ")"},
-            .{.id = .lparen, .text = "("},
-            .{.id = .id, .text = "void"},
-            .{.id = .rparen, .text = ")"},
-            .{.id = .semicolon, .text = ";"},
+            .{.kind = .kw_typedef, .text = "typedef"},
+            .{.kind = .id, .text = "void"},
+            .{.kind = .lparen, .text = "("},
+            .{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
+            .{.kind = .star, .text = "*"},
+            .{.kind = .name, .text = "PFN_vkVoidFunction"},
+            .{.kind = .rparen, .text = ")"},
+            .{.kind = .lparen, .text = "("},
+            .{.kind = .id, .text = "void"},
+            .{.kind = .rparen, .text = ")"},
+            .{.kind = .semicolon, .text = ";"},
         }
     );
 }
@@ -298,7 +298,7 @@ fn Renderer(comptime WriterType: type) type {
            const tok = peeked orelse (try tokenizer.next()) orelse break;
            peeked = null;
 
-           switch (tok.id) {
+           switch (tok.kind) {
                .lparen, .rparen, .tilde, .minus => {
                    try self.writer.writeAll(tok.text);
                    continue;
@@ -316,7 +316,7 @@ fn Renderer(comptime WriterType: type) type {
                break;
            };
 
-           switch (suffix.id) {
+           switch (suffix.kind) {
                .id => {
                    if (mem.eql(u8, suffix.text, "ULL")) {
                        try self.writer.print("@as(u64, {})", .{tok.text});
@@ -331,7 +331,7 @@ fn Renderer(comptime WriterType: type) type {
                    try self.writer.print("@as(f32, {}.{})", .{tok.text, decimal.text});
 
                    const f = (try tokenizer.next()) orelse return error.InvalidConstantExpr;
-                   if (f.id != .id or !mem.eql(u8, f.text, "f")) {
+                   if (f.kind != .id or !mem.eql(u8, f.text, "f")) {
                        return error.InvalidApiConstant;
                    }
                },
@@ -371,8 +371,9 @@ fn Renderer(comptime WriterType: type) type {
            try self.writeIdentifier(name[2..]);
            return;
        } else if (mem.startsWith(u8, name, "PFN_vk")) {
-           // Function pointer type, render using same name for now
-           try self.writeIdentifier(name);
+           // Function pointer type, strip off the PFN_vk part. Note that this function
+           // is only called to render the typedeffed function pointers like vkVoidFunction
+           try self.writeIdentifier(name[6..]);
            return;
        } else if (mem.startsWith(u8, name, "VK_")) {
            // Constants
@@ -666,6 +667,10 @@ fn Renderer(comptime WriterType: type) type {
            try self.writer.writeAll(";\n");
        }
 
+       fn renderCommandPtrName(self: *Self, name: []const u8) !void {
+           try self.writeIdentifierFmt("{}Fn", .{util.trimVkNamespace(name)});
+       }
+
        fn renderCommandPtrs(self: *Self) !void {
            for (self.registry.decls) |decl| {
                if (decl.decl_type != .command) {
@@ -673,7 +678,7 @@ fn Renderer(comptime WriterType: type) type {
                }
 
                try self.writer.writeAll("pub const ");
-               try self.writeIdentifierFmt("PFN_{}", .{decl.name});
+               try self.renderCommandPtrName(decl.name);
                try self.writer.writeAll(" = ");
                try self.renderCommandPtr(decl.decl_type.command, false);
                try self.writer.writeAll(";\n");
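A note on the renderer changes above: `renderCommandPtrs` now names the generated command pointer aliases `<Name>Fn` via the new `renderCommandPtrName` helper instead of `PFN_<name>`, and typedeffed function pointer names lose their `PFN_vk` prefix when rendered (`writeIdentifier(name[6..])`). Assuming `util.trimVkNamespace` strips the leading `vk` (a guess from its name, it is not shown in this diff), a command such as `vkCreateInstance` would presumably be emitted as:

    pub const CreateInstanceFn = ...; // previously: pub const PFN_vkCreateInstance = ...;

and a reference to `PFN_vkVoidFunction` would render as `VoidFunction`.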