Adapt to latest Zig master

See ziglang/zig/pull/10055
Robin Voetter
2021-12-01 05:05:34 +01:00
parent e634a4b434
commit 797ae8af88
10 changed files with 78 additions and 76 deletions
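For context: ziglang/zig#10055 turns `std.mem.Allocator` into a value type (a type-erased pointer plus a vtable). APIs now take `Allocator` by value instead of `*Allocator`, and implementations such as `std.heap.ArenaAllocator` expose an `allocator()` method instead of an `allocator` field. A minimal sketch of the pattern this commit applies throughout; `newStyle` and the test are illustrative only, not code from this repository:

```zig
const std = @import("std");
const Allocator = std.mem.Allocator;

// Post-#10055 style: the allocator interface is passed by value. Before the
// change this function would have taken `*Allocator` and been called as
// `newStyle(&arena.allocator)`.
fn newStyle(allocator: Allocator) ![]u8 {
    return allocator.dupe(u8, "vulkan-zig");
}

test "allocator is passed by value" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    // Implementations now hand out the interface via an `allocator()` method.
    const copy = try newStyle(arena.allocator());
    try std.testing.expectEqualSlices(u8, "vulkan-zig", copy);
}
```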

View File

@@ -128,7 +128,7 @@ pub const IdRenderer = struct {
tags: []const []const u8,
text_cache: std.ArrayList(u8),
-pub fn init(allocator: *Allocator, tags: []const []const u8) IdRenderer {
+pub fn init(allocator: Allocator, tags: []const []const u8) IdRenderer {
return .{
.tags = tags,
.text_cache = std.ArrayList(u8).init(allocator),

View File

@@ -8,7 +8,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
-const allocator = &arena.allocator;
+const allocator = arena.allocator();
var args = std.process.args();
const prog_name = try args.next(allocator) orelse return error.ExecutableNameMissing;

View File

@@ -241,7 +241,7 @@ pub const XmlCTokenizer = struct {
};
// TYPEDEF = kw_typedef DECLARATION ';'
-pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
+pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
_ = try xctok.expect(.kw_typedef);
const decl = try parseDeclaration(allocator, xctok);
_ = try xctok.expect(.semicolon);
@@ -256,7 +256,7 @@ pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Decl
}
// MEMBER = DECLARATION (':' int)?
-pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
+pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
const decl = try parseDeclaration(allocator, xctok);
var field = registry.Container.Field{
.name = decl.name orelse return error.MissingTypeIdentifier,
@@ -284,7 +284,7 @@ pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Conta
return field;
}
-pub fn parseParamOrProto(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
+pub fn parseParamOrProto(allocator: Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
const decl = try parseDeclaration(allocator, xctok);
if (try xctok.peek()) |_| {
return error.InvalidSyntax;
@@ -315,7 +315,7 @@ pub const ParseError = error{
// DECLARATION = kw_const? type_name DECLARATOR
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
// | POINTERS '(' FNPTRSUFFIX
-fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
+fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
// Parse declaration constness
var tok = try xctok.nextNoEof();
const inner_is_const = tok.kind == .kw_const;
@@ -377,7 +377,7 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
}
// FNPTRSUFFIX = kw_vkapi_ptr '*' name' ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
-fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
+fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
const lparen = try xctok.peek();
if (lparen == null or lparen.?.kind != .lparen) {
return null;
@@ -445,7 +445,7 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
}
// POINTERS = (kw_const? '*')*
-fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
+fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
var type_info = inner;
var first_const = inner_const;
@@ -606,7 +606,7 @@ test "parseTypedef" {
defer arena.deinit();
var xctok = XmlCTokenizer.init(document.root);
-const decl = try parseTypedef(&arena.allocator, &xctok);
+const decl = try parseTypedef(arena.allocator(), &xctok);
try testing.expectEqualSlices(u8, "pythons", decl.name);
const array = decl.decl_type.typedef.array;

View File

@@ -12,12 +12,12 @@ const EnumFieldMerger = struct {
const EnumExtensionMap = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(reg.Enum.Field));
const FieldSet = std.StringArrayHashMapUnmanaged(void);
-arena: *Allocator,
+arena: Allocator,
registry: *reg.Registry,
enum_extensions: EnumExtensionMap,
field_set: FieldSet,
-fn init(arena: *Allocator, registry: *reg.Registry) EnumFieldMerger {
+fn init(arena: Allocator, registry: *reg.Registry) EnumFieldMerger {
return .{
.arena = arena,
.registry = registry,
@@ -99,7 +99,7 @@ pub const Generator = struct {
registry: reg.Registry,
id_renderer: IdRenderer,
-fn init(allocator: *Allocator, spec: *xml.Element) !Generator {
+fn init(allocator: Allocator, spec: *xml.Element) !Generator {
const result = try parseXml(allocator, spec);
const tags = try allocator.alloc([]const u8, result.registry.tags.len);
@@ -128,13 +128,13 @@ pub const Generator = struct {
// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
fn mergeEnumFields(self: *Generator) !void {
-var merger = EnumFieldMerger.init(&self.arena.allocator, &self.registry);
+var merger = EnumFieldMerger.init(self.arena.allocator(), &self.registry);
try merger.merge();
}
// https://github.com/KhronosGroup/Vulkan-Docs/pull/1556
fn fixupBitFlags(self: *Generator) !void {
-var seen_bits = std.StringArrayHashMap(void).init(&self.arena.allocator);
+var seen_bits = std.StringArrayHashMap(void).init(self.arena.allocator());
defer seen_bits.deinit();
for (self.registry.decls) |decl| {
@@ -166,7 +166,7 @@ pub const Generator = struct {
}
fn render(self: *Generator, writer: anytype) !void {
-try renderRegistry(writer, &self.arena.allocator, &self.registry, &self.id_renderer);
+try renderRegistry(writer, self.arena.allocator(), &self.registry, &self.id_renderer);
}
};
@@ -174,7 +174,7 @@ pub const Generator = struct {
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
/// internal datastructures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
/// directly.
-pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: anytype) !void {
+pub fn generate(allocator: Allocator, spec_xml: []const u8, writer: anytype) !void {
const spec = try xml.parse(allocator, spec_xml);
defer spec.deinit();
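The doc comment above covers the public entry point: `generate` parses the registry XML and writes the binding to `writer`, allocating temporaries through `allocator`. A rough usage sketch under the new by-value interface; the import path, file names, and I/O wiring below are assumptions for illustration and do not come from this diff:

```zig
const std = @import("std");
// Hypothetical import path; adjust to however the generator module is exposed.
const vkgen = @import("generator/index.zig");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // "vk.xml" is a placeholder path to the Vulkan registry.
    const spec_xml = try std.fs.cwd().readFileAlloc(allocator, "vk.xml", std.math.maxInt(usize));
    defer allocator.free(spec_xml);

    var out = std.ArrayList(u8).init(allocator);
    defer out.deinit();

    // After this commit, `generate` takes an `Allocator` by value rather than `*Allocator`.
    try vkgen.generate(allocator, spec_xml, out.writer());
    try std.fs.cwd().writeFile("vk.zig", out.items);
}
```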

View File

@@ -17,11 +17,11 @@ pub const ParseResult = struct {
}
};
-pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult {
+pub fn parseXml(backing_allocator: Allocator, root: *xml.Element) !ParseResult {
var arena = ArenaAllocator.init(backing_allocator);
errdefer arena.deinit();
-const allocator = &arena.allocator;
+const allocator = arena.allocator();
var reg = registry.Registry{
.copyright = root.getCharData("comment") orelse return error.InvalidRegistry,
@@ -38,7 +38,7 @@ pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult
};
}
-fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Declaration {
+fn parseDeclarations(allocator: Allocator, root: *xml.Element) ![]registry.Declaration {
var types_elem = root.findChildByTag("types") orelse return error.InvalidRegistry;
var commands_elem = root.findChildByTag("commands") orelse return error.InvalidRegistry;
@@ -52,7 +52,7 @@ fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Decl
return allocator.shrink(decls, count);
}
-fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
+fn parseTypes(allocator: Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
var i: usize = 0;
var it = types_elem.findChildrenByTag("type");
while (it.next()) |ty| {
@@ -159,7 +159,7 @@ fn parseHandleType(ty: *xml.Element) !registry.Declaration {
}
}
-fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
+fn parseBaseType(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
const name = ty.getCharData("name") orelse return error.InvalidRegistry;
if (ty.getCharData("type")) |_| {
var tok = cparse.XmlCTokenizer.init(ty);
@@ -174,7 +174,7 @@ fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration
}
}
-fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
+fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
if (ty.getAttribute("alias")) |alias| {
@@ -236,7 +236,7 @@ fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !regi
};
}
-fn parseFuncPointer(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
+fn parseFuncPointer(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
var xctok = cparse.XmlCTokenizer.init(ty);
return try cparse.parseTypedef(allocator, &xctok);
}
@@ -324,7 +324,7 @@ fn parseEnumAlias(elem: *xml.Element) !?registry.Declaration {
return null;
}
-fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
+fn parseEnums(allocator: Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
var i: usize = 0;
var it = root.findChildrenByTag("enums");
while (it.next()) |enums| {
@@ -343,7 +343,7 @@ fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Ele
return i;
}
-fn parseEnumFields(allocator: *Allocator, elem: *xml.Element) !registry.Enum {
+fn parseEnumFields(allocator: Allocator, elem: *xml.Element) !registry.Enum {
// TODO: `type` was added recently, fall back to checking endswith FlagBits for older versions?
const enum_type = elem.getAttribute("type") orelse return error.InvalidRegistry;
const is_bitmask = mem.eql(u8, enum_type, "bitmask");
@@ -410,7 +410,7 @@ fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
};
}
-fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
+fn parseCommands(allocator: Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
var i: usize = 0;
var it = commands_elem.findChildrenByTag("command");
while (it.next()) |elem| {
@@ -421,7 +421,7 @@ fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_el
return i;
}
-fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![][]const u8 {
+fn splitCommaAlloc(allocator: Allocator, text: []const u8) ![][]const u8 {
var n_codes: usize = 1;
for (text) |c| {
if (c == ',') n_codes += 1;
@@ -436,7 +436,7 @@ fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![][]const u8 {
return codes;
}
-fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration {
+fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration {
if (elem.getAttribute("alias")) |alias| {
const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
return registry.Declaration{
@@ -500,7 +500,7 @@ fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration
};
}
-fn parseApiConstants(allocator: *Allocator, root: *xml.Element) ![]registry.ApiConstant {
+fn parseApiConstants(allocator: Allocator, root: *xml.Element) ![]registry.ApiConstant {
var enums = blk: {
var it = root.findChildrenByTag("enums");
while (it.next()) |child| {
@@ -579,7 +579,7 @@ fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
return i;
}
-fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
+fn parseTags(allocator: Allocator, root: *xml.Element) ![]registry.Tag {
var tags_elem = root.findChildByTag("tags") orelse return error.InvalidRegistry;
const tags = try allocator.alloc(registry.Tag, tags_elem.children.items.len);
@@ -597,7 +597,7 @@ fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
return allocator.shrink(tags, i);
}
-fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature {
+fn parseFeatures(allocator: Allocator, root: *xml.Element) ![]registry.Feature {
var it = root.findChildrenByTag("feature");
var count: usize = 0;
while (it.next()) |_| count += 1;
@@ -613,7 +613,7 @@ fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature
return features;
}
-fn parseFeature(allocator: *Allocator, feature: *xml.Element) !registry.Feature {
+fn parseFeature(allocator: Allocator, feature: *xml.Element) !registry.Feature {
const name = feature.getAttribute("name") orelse return error.InvalidRegistry;
const feature_level = blk: {
const number = feature.getAttribute("number") orelse return error.InvalidRegistry;
@@ -684,7 +684,7 @@ fn enumExtOffsetToValue(extnumber: u31, offset: u31) u31 {
return extension_value_base + (extnumber - 1) * extension_block + offset;
}
-fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
+fn parseRequire(allocator: Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
var n_extends: usize = 0;
var n_types: usize = 0;
var n_commands: usize = 0;
@@ -742,7 +742,7 @@ fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !
};
}
-fn parseExtensions(allocator: *Allocator, root: *xml.Element) ![]registry.Extension {
+fn parseExtensions(allocator: Allocator, root: *xml.Element) ![]registry.Extension {
const extensions_elem = root.findChildByTag("extensions") orelse return error.InvalidRegistry;
const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.items.len);
@@ -779,7 +779,7 @@ fn findExtVersion(extension: *xml.Element) !u32 {
return error.InvalidRegistry;
}
-fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Extension {
+fn parseExtension(allocator: Allocator, extension: *xml.Element) !registry.Extension {
const name = extension.getAttribute("name") orelse return error.InvalidRegistry;
const platform = extension.getAttribute("platform");
const version = try findExtVersion(extension);

View File

@@ -179,12 +179,12 @@ fn Renderer(comptime WriterType: type) type {
};
writer: WriterType,
-allocator: *Allocator,
+allocator: Allocator,
registry: *const reg.Registry,
id_renderer: *IdRenderer,
declarations_by_name: std.StringHashMap(*const reg.DeclarationType),
-fn init(writer: WriterType, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
+fn init(writer: WriterType, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
var declarations_by_name = std.StringHashMap(*const reg.DeclarationType).init(allocator);
errdefer declarations_by_name.deinit();
@@ -1357,7 +1357,7 @@ fn Renderer(comptime WriterType: type) type {
};
}
-pub fn render(writer: anytype, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
+pub fn render(writer: anytype, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
var renderer = try Renderer(@TypeOf(writer)).init(writer, allocator, registry, id_renderer);
defer renderer.deinit();
try renderer.render();

View File

@@ -24,7 +24,7 @@ pub const Element = struct {
attributes: AttributeList,
children: ContentList,
-fn init(tag: []const u8, alloc: *Allocator) Element {
+fn init(tag: []const u8, alloc: Allocator) Element {
return .{
.tag = tag,
.attributes = AttributeList.init(alloc),
@@ -314,12 +314,12 @@ pub const ParseError = error{
OutOfMemory,
};
-pub fn parse(backing_allocator: *Allocator, source: []const u8) !Document {
+pub fn parse(backing_allocator: Allocator, source: []const u8) !Document {
var ctx = ParseContext.init(source);
return try parseDocument(&ctx, backing_allocator);
}
-fn parseDocument(ctx: *ParseContext, backing_allocator: *Allocator) !Document {
+fn parseDocument(ctx: *ParseContext, backing_allocator: Allocator) !Document {
var doc = Document{
.arena = ArenaAllocator.init(backing_allocator),
.xml_decl = null,
@@ -328,22 +328,24 @@ fn parseDocument(ctx: *ParseContext, backing_allocator: *Allocator) !Document {
errdefer doc.deinit();
-try trySkipComments(ctx, &doc.arena.allocator);
+const allocator = doc.arena.allocator();
+try trySkipComments(ctx, allocator);
-doc.xml_decl = try tryParseProlog(ctx, &doc.arena.allocator);
+doc.xml_decl = try tryParseProlog(ctx, allocator);
_ = ctx.eatWs();
-try trySkipComments(ctx, &doc.arena.allocator);
+try trySkipComments(ctx, allocator);
-doc.root = (try tryParseElement(ctx, &doc.arena.allocator)) orelse return error.InvalidDocument;
+doc.root = (try tryParseElement(ctx, allocator)) orelse return error.InvalidDocument;
_ = ctx.eatWs();
-try trySkipComments(ctx, &doc.arena.allocator);
+try trySkipComments(ctx, allocator);
if (ctx.peek() != null) return error.InvalidDocument;
return doc;
}
-fn parseAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
+fn parseAttrValue(ctx: *ParseContext, alloc: Allocator) ![]const u8 {
const quote = try ctx.consume();
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;
@@ -359,7 +361,7 @@ fn parseAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}
-fn parseEqAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
+fn parseEqAttrValue(ctx: *ParseContext, alloc: Allocator) ![]const u8 {
_ = ctx.eatWs();
try ctx.expect('=');
_ = ctx.eatWs();
@@ -386,7 +388,7 @@ fn parseNameNoDupe(ctx: *ParseContext) ![]const u8 {
return ctx.source[begin..end];
}
-fn tryParseCharData(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
+fn tryParseCharData(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
const begin = ctx.offset;
while (ctx.peek()) |ch| {
@@ -402,7 +404,7 @@ fn tryParseCharData(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}
-fn parseContent(ctx: *ParseContext, alloc: *Allocator) ParseError!Content {
+fn parseContent(ctx: *ParseContext, alloc: Allocator) ParseError!Content {
if (try tryParseCharData(ctx, alloc)) |cd| {
return Content{ .CharData = cd };
} else if (try tryParseComment(ctx, alloc)) |comment| {
@@ -414,7 +416,7 @@ fn parseContent(ctx: *ParseContext, alloc: *Allocator) ParseError!Content {
}
}
-fn tryParseAttr(ctx: *ParseContext, alloc: *Allocator) !?*Attribute {
+fn tryParseAttr(ctx: *ParseContext, alloc: Allocator) !?*Attribute {
const name = parseNameNoDupe(ctx) catch return null;
_ = ctx.eatWs();
try ctx.expect('=');
@@ -427,7 +429,7 @@ fn tryParseAttr(ctx: *ParseContext, alloc: *Allocator) !?*Attribute {
return attr;
}
-fn tryParseElement(ctx: *ParseContext, alloc: *Allocator) !?*Element {
+fn tryParseElement(ctx: *ParseContext, alloc: Allocator) !?*Element {
const start = ctx.offset;
if (!ctx.eat('<')) return null;
const tag = parseNameNoDupe(ctx) catch {
@@ -473,7 +475,7 @@ fn tryParseElement(ctx: *ParseContext, alloc: *Allocator) !?*Element {
test "tryParseElement" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
-var alloc = &arena.allocator;
+const alloc = arena.allocator();
{
var ctx = ParseContext.init("<= a='b'/>");
@@ -515,7 +517,7 @@ test "tryParseElement" {
}
}
-fn tryParseProlog(ctx: *ParseContext, alloc: *Allocator) !?*XmlDecl {
+fn tryParseProlog(ctx: *ParseContext, alloc: Allocator) !?*XmlDecl {
const start = ctx.offset;
if (!ctx.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(ctx), "xml")) {
ctx.offset = start;
@@ -561,7 +563,7 @@ fn tryParseProlog(ctx: *ParseContext, alloc: *Allocator) !?*XmlDecl {
test "tryParseProlog" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
-var alloc = &arena.allocator;
+const alloc = arena.allocator();
{
var ctx = ParseContext.init("<?xmla version='aa'?>");
@@ -586,13 +588,13 @@ test "tryParseProlog" {
}
}
-fn trySkipComments(ctx: *ParseContext, alloc: *Allocator) !void {
+fn trySkipComments(ctx: *ParseContext, alloc: Allocator) !void {
while (try tryParseComment(ctx, alloc)) |_| {
_ = ctx.eatWs();
}
}
-fn tryParseComment(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
+fn tryParseComment(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
if (!ctx.eatStr("<!--")) return null;
const begin = ctx.offset;
@@ -622,7 +624,7 @@ fn unescapeEntity(text: []const u8) !u8 {
return error.InvalidEntity;
}
-fn dupeAndUnescape(alloc: *Allocator, text: []const u8) ![]const u8 {
+fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {
const str = try alloc.alloc(u8, text.len);
var j: usize = 0;
@@ -644,7 +646,7 @@ fn dupeAndUnescape(alloc: *Allocator, text: []const u8) ![]const u8 {
test "dupeAndUnescape" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
-var alloc = &arena.allocator;
+const alloc = arena.allocator();
try testing.expectEqualSlices(u8, "test", try dupeAndUnescape(alloc, "test"));
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try dupeAndUnescape(alloc, "a&lt;b&amp;c&gt;d&quot;e&apos;f&lt;"));
@@ -657,7 +659,7 @@ test "dupeAndUnescape" {
test "Top level comments" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
-var alloc = &arena.allocator;
+const alloc = arena.allocator();
const doc = try parse(alloc, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
try testing.expectEqualSlices(u8, "python", doc.root.tag);