forked from mirror/vulkan-zig
typedef/declaration parsing
@@ -92,6 +92,7 @@ pub const Pointer = struct {
     };

     is_const: bool,
+    size: PointerSize,
     child: *TypeInfo,
 };

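
The new `size` field records a pointer's length class next to its constness. As an illustration (not part of this commit), a C type such as `const char *` would map onto this layout roughly as follows, assuming `PointerSize` has a `.one` variant as used by `parsePointers` further down:

    // Illustrative sketch only; field names follow the diff above.
    const std = @import("std");
    const registry = @import("registry-new.zig");

    test "Pointer layout sketch" {
        var char_alias = registry.TypeInfo{ .Alias = "char" };
        const ptr = registry.TypeInfo{
            .Pointer = .{
                .is_const = true, // constness of the pointed-to type
                .size = .one,     // the new field; "set elsewhere" according to parsePointers
                .child = &char_alias,
            },
        };
        std.testing.expect(ptr.Pointer.size == .one);
    }
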
@@ -2,7 +2,9 @@ const std = @import("std");
 const registry = @import("registry-new.zig");
 const xml = @import("xml.zig");
 const mem = std.mem;
+const Allocator = mem.Allocator;
 const testing = std.testing;
+const ArraySize = registry.Array.ArraySize;

 const Token = struct {
     id: Id,
@@ -24,7 +26,6 @@ const Token = struct {
         kw_typedef,
         kw_const,
         kw_vkapi_ptr,
-        whitespace,
     };
 };

@@ -75,24 +76,6 @@ const CTokenizer = struct {
         return .{.id = id, .text = token_text};
     }

-    fn whitespace(self: *CTokenizer) Token {
-        const start = self.offset;
-        _ = self.consumeNoEof();
-
-        while (true) {
-            const c = self.peek() orelse break;
-            switch (c) {
-                ' ', '\t', '\n', '\r' => _ = self.consumeNoEof(),
-                else => break,
-            }
-        }
-
-        return .{
-            .id = .whitespace,
-            .text = self.source[start .. self.offset],
-        };
-    }
-
     fn int(self: *CTokenizer) Token {
         const start = self.offset;
         _ = self.consumeNoEof();
@@ -113,12 +96,17 @@ const CTokenizer = struct {
     }

     fn next(self: *CTokenizer) !?Token {
-        const c = self.peek() orelse return null;
-        var id: Token.Id = undefined;
+        while (true) {
+            switch (self.peek() orelse return null) {
+                ' ', '\t', '\n', '\r' => _ = self.consumeNoEof(),
+                else => break,
+            }
+        }

+        const c = self.peek().?;
+        var id: Token.Id = undefined;
         switch (c) {
             'A'...'Z', 'a'...'z', '_' => return self.keyword(),
-            ' ', '\t', '\n', '\r' => return self.whitespace(),
             '0'...'9' => return self.int(),
             '*' => id = .star,
             ',' => id = .comma,
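
With this change the tokenizer swallows whitespace inside `next()` instead of emitting `.whitespace` tokens, which is why the `whitespace` variant and helper above are removed and the whitespace expectations disappear from the tests below. A rough sketch of the resulting stream (illustrative only; it assumes `CTokenizer` can be initialized from just a `source` slice, a field name inferred from the removed `whitespace` helper):

    // As if written inside the same file, since CTokenizer is file-private.
    test "whitespace is skipped sketch" {
        var ctok = CTokenizer{ .source = "typedef uint32_t Flags;" };
        // Expected ids, with no .whitespace entries in between:
        //   .kw_typedef, .id ("uint32_t"), .id ("Flags"), .semicolon
        while (try ctok.next()) |tok| {
            std.debug.warn("{} '{}'\n", .{ tok.id, tok.text });
        }
    }
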
@@ -139,9 +127,16 @@ const CTokenizer = struct {
     }
 };

-const XmlCTokenizer = struct {
+pub const XmlCTokenizer = struct {
     it: xml.Element.ContentList.Iterator,
     ctok: ?CTokenizer = null,
+    current: ?Token = null,
+
+    pub fn init(elem: *xml.Element) XmlCTokenizer {
+        return .{
+            .it = elem.children.iterator(0),
+        };
+    }

     fn elemToToken(elem: *xml.Element) !?Token {
         if (elem.children.count() != 1 or elem.children.at(0).* != .CharData) {
@@ -161,6 +156,12 @@ const XmlCTokenizer = struct {
     }

     fn next(self: *XmlCTokenizer) !?Token {
+        if (self.current) |current| {
+            const token = current;
+            self.current = null;
+            return token;
+        }
+
         while (true) {
             if (self.ctok) |*ctok| {
                 if (try ctok.next()) |tok| {
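
The new `current` slot buffers exactly one token so the parser added below can look ahead without consuming: `peek()` (introduced in the next hunk) fills the slot, and `next()` now drains it first. A minimal sketch of that contract, as a hypothetical helper that is not part of this commit:

    fn demoLookahead(elem: *xml.Element) !void {
        var xctok = XmlCTokenizer.init(elem);
        const first = try xctok.peek();  // fills .current
        const again = try xctok.peek();  // returns the buffered token, consumes nothing
        const taken = try xctok.next();  // drains the buffer
        if (first) |tok| {
            std.debug.assert(tok.id == again.?.id);
            std.debug.assert(tok.id == taken.?.id);
        }
    }
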
@@ -185,17 +186,153 @@ const XmlCTokenizer = struct {
         }
     }

-    fn nextIgnoreWs(self: *XmlCTokenizer) !?Token {
-        while (try self.next()) |tok| {
-            if (tok.id != .whitespace) {
-                return tok;
-            }
-        }
-
-        return null;
+    fn nextNoEof(self: *XmlCTokenizer) !Token {
+        return (try self.next()) orelse return error.InvalidSyntax;
+    }
+
+    fn peek(self: *XmlCTokenizer) !?Token {
+        if (self.current) |current| {
+            return current;
+        }
+
+        self.current = try self.next();
+        return self.current;
+    }
+
+    fn peekNoEof(self: *XmlCTokenizer) !Token {
+        return (try self.peek()) orelse return error.InvalidSyntax;
+    }
+
+    fn expect(self: *XmlCTokenizer, id: Token.Id) !Token {
+        const tok = (try self.next()) orelse return error.UnexpectedEof;
+        if (tok.id != id) {
+            return error.UnexpectedToken;
+        }
+
+        return tok;
     }
 };

+const PointerInfo = struct {
+    is_const: bool,
+};
+
+// TYPEDEF = kw_typedef DECLARATION ';'
+pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
+    _ = try xctok.expect(.kw_typedef);
+    const decl = try parseDeclaration(allocator, xctok);
+    _ = try xctok.expect(.semicolon);
+    return decl;
+}
+
+// DECLARATION = kw_const? type_name DECLARATOR
+// DECLARATOR = POINTERS? (id | name) ('[' ARRAY_EXPR ']')
+//            | POINTERS? '(' kw_vkapi_ptr '*' name' ')' // TODO
+// POINTERS = (kw_const? '*')+
+pub fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
+    // Parse declaration constness
+    var tok = try xctok.nextNoEof();
+    const inner_is_const = tok.id == .kw_const;
+    if (inner_is_const) {
+        tok = try xctok.nextNoEof();
+    }
+
+    // Parse type name
+    if (tok.id != .type_name) return error.InvalidSyntax;
+    const type_name = tok.text;
+
+    var type_info = registry.TypeInfo{.Alias = type_name};
+
+    // Parse pointers
+    type_info = try parsePointers(allocator, xctok, inner_is_const, type_info);
+
+    // Parse name / fn ptr
+    tok = try xctok.nextNoEof();
+    if (tok.id == .lparen) { // Assume this declaration is a function pointer
+        unreachable; // WIP
+    } else if (tok.id != .id and tok.id != .name) {
+        return error.InvalidSyntax;
+    }
+
+    const name = tok.text;
+
+    if (try parseArrayDeclarator(xctok)) |array_size| {
+        const child = try allocator.create(registry.TypeInfo);
+        child.* = type_info;
+        type_info = .{
+            .Array = .{
+                .size = array_size,
+                .child = child,
+            }
+        };
+    }
+
+    return registry.Declaration {
+        .name = name,
+        .decl_type = type_info,
+    };
+}
+
+fn parsePointers(
+    allocator: *Allocator,
+    xctok: *XmlCTokenizer,
+    inner_const: bool,
+    inner: registry.TypeInfo,
+) !registry.TypeInfo {
+    var type_info = inner;
+    var first_const = inner_const;
+
+    while (true) {
+        var tok = (try xctok.peek()) orelse return type_info;
+        var is_const = first_const;
+        first_const = false;
+
+        if (tok.id == .kw_const) {
+            is_const = true;
+            _ = try xctok.nextNoEof();
+            tok = (try xctok.peek()) orelse return type_info;
+        }
+
+        if (tok.id != .star) {
+            // if `is_const` is true at this point, there was a trailing const,
+            // and the declaration itself is const.
+            return type_info;
+        }
+
+        _ = try xctok.nextNoEof();
+
+        const child = try allocator.create(registry.TypeInfo);
+        child.* = type_info;
+
+        type_info = .{
+            .Pointer = .{
+                .size = .one, // set elsewhere
+                .is_const = is_const or (first_const),
+                .child = child,
+            }
+        };
+    }
+}
+
+fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
+    const lbracket = try xctok.peekNoEof();
+    if (lbracket.id != .lbracket) {
+        return null;
+    }
+
+    _ = try xctok.nextNoEof();
+
+    const size_tok = try xctok.nextNoEof();
+    const size: ArraySize = switch (size_tok.id) {
+        .int => .{.int = try std.fmt.parseInt(usize, size_tok.text, 10)},
+        .enum_name => .{.alias = size_tok.text},
+        else => return error.InvalidSyntax
+    };
+
+    _ = try xctok.expect(.rbracket);
+    return size;
+}
+
 fn testTokenizer(tokenizer: var, expected_tokens: []const Token) void {
     for (expected_tokens) |expected| {
         const tok = (tokenizer.next() catch unreachable).?;
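
As a worked example of the grammar comments above: the declaration exercised by the new "parseTypedef" test further down, `typedef const char* pMessage[4];`, tokenizes roughly to kw_typedef, kw_const, type_name "char" (from the `<type>` element), star, id "pMessage", lbracket, int "4", rbracket, semicolon. `parseDeclaration` then builds the type inside-out: Alias("char"), wrapped in a Pointer whose `is_const` carries the leading const, wrapped in an Array of size 4. A sketch of a stricter version of that test asserting this shape (the assertions are illustrative, not part of this commit):

    test "parseTypedef shape sketch" {
        const document = try xml.parse(
            testing.allocator,
            "<root>typedef const <type>char</type>* pMessage[4];</root>"
        );
        defer document.deinit();

        var arena = std.heap.ArenaAllocator.init(testing.allocator);
        defer arena.deinit();

        var xctok = XmlCTokenizer.init(document.root);
        const decl = try parseTypedef(&arena.allocator, &xctok);

        // Declarators apply outside-in: [4] wraps '*', which wraps the alias "char".
        testing.expect(mem.eql(u8, decl.name, "pMessage"));
        const arr = decl.decl_type.Array;
        testing.expect(arr.size.int == 4);
        const ptr = arr.child.Pointer;
        testing.expect(ptr.is_const);
        testing.expect(mem.eql(u8, ptr.child.Alias, "char"));
    }
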
@@ -215,7 +352,6 @@ test "CTokenizer" {
         &ctok,
         &[_]Token{
             .{.id = .kw_typedef, .text = "typedef"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .lparen, .text = "("},
             .{.id = .lbracket, .text = "["},
             .{.id = .kw_const, .text = "const"},
@@ -223,9 +359,7 @@ test "CTokenizer" {
             .{.id = .rbracket, .text = "]"},
             .{.id = .star, .text = "*"},
             .{.id = .star, .text = "*"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .int, .text = "123"},
             .{.id = .comma, .text = ","},
             .{.id = .semicolon, .text = ";"},
@@ -241,20 +375,15 @@ test "XmlCTokenizer" {
     );
     defer document.deinit();

-    var xctok = XmlCTokenizer{
-        .it = document.root.children.iterator(0)
-    };
+    var xctok = XmlCTokenizer.init(document.root);

     testTokenizer(
         &xctok,
         &[_]Token{
             .{.id = .kw_typedef, .text = "typedef"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .id, .text = "void"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .lparen, .text = "("},
             .{.id = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
-            .{.id = .whitespace, .text = " "},
             .{.id = .star, .text = "*"},
             .{.id = .name, .text = "PFN_vkVoidFunction"},
             .{.id = .rparen, .text = ")"},
@@ -265,3 +394,18 @@ test "XmlCTokenizer" {
         }
     );
 }
+
+test "parseTypedef" {
+    const document = try xml.parse(
+        testing.allocator,
+        "<root>typedef const <type>char</type>* pMessage[4];</root>"
+    );
+    defer document.deinit();
+
+    var arena = std.heap.ArenaAllocator.init(testing.allocator);
+    defer arena.deinit();
+
+    var xctok = XmlCTokenizer.init(document.root);
+    const decl = try parseTypedef(&arena.allocator, &xctok);
+    std.debug.warn("{}\n", .{decl.decl_type.Array.child.Pointer});
+}
@@ -1,6 +1,7 @@
 const std = @import("std");
 const registry = @import("registry-new.zig");
 const xml = @import("xml.zig");
+const xmlc = @import("spec-c-parse.zig");
 const mem = std.mem;
 const Allocator = mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
@@ -65,7 +66,7 @@ fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *x
             } else if (mem.eql(u8, category, "handle")) {
                 break :blk try parseHandleType(ty);
             } else if (mem.eql(u8, category, "basetype")) {
-                break :blk try parseBaseType(ty);
+                break :blk try parseBaseType(allocator, ty);
             }

             continue;
@@ -133,14 +134,14 @@ fn parseHandleType(ty: *xml.Element) !registry.Declaration {
     }
 }

-fn parseBaseType(ty: *xml.Element) !registry.Declaration {
+fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
     const name = ty.getCharData("name") orelse return error.InvalidRegistry;
-    if (ty.getCharData("type")) |alias| { // TODO: Parse as full type?
-        return registry.Declaration{
-            .name = name,
-            .decl_type = .{.Alias = alias},
-        };
+    if (ty.getCharData("type")) |_| { // TODO: Parse as full type?
+        var tok = xmlc.XmlCTokenizer.init(ty);
+        return try xmlc.parseTypedef(allocator, &tok);
     } else {
+        // Either ANativeWindow, AHardwareBuffer or CAMetalLayer. The latter has a lot of
+        // macros, which is why this part is not built into the xml/c parser.
         return registry.Declaration{
             .name = name,
             .decl_type = .{.Opaque = {}},
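
With the last hunk, base types are no longer emitted as plain aliases; the element's C text is handed to the new parser instead. For a typical registry entry such as VkBool32 (the XML literal below is illustrative and not taken from this commit), the flow is roughly:

    // vk.xml declares base types along the lines of:
    //   <type category="basetype">typedef <type>uint32_t</type> <name>VkBool32</name>;</type>
    // parseBaseType now forwards the element to the C parser:
    var tok = xmlc.XmlCTokenizer.init(ty);
    return try xmlc.parseTypedef(allocator, &tok);
    // which should yield a Declaration with .name == "VkBool32"
    // and .decl_type == .{ .Alias = "uint32_t" }.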