forked from mirror/vulkan-zig
Compare commits
129 Commits
zig-0.7.1-...zig-stage1
| SHA1 |
|---|
| 2ccb04a619 |
| c42f540ce3 |
| 33a137a0dd |
| 889d84a5db |
| 84def477bf |
| 8e45eff185 |
| 445f3e6b7a |
| 2d76a88a46 |
| efa1a714a8 |
| fe786c1be7 |
| ea6cfb9073 |
| 2bd9927cfe |
| 5d274abc8c |
| cc87740422 |
| f6f5f66f20 |
| fdf43d846a |
| 0b4b6f8acb |
| 4a59a1ad03 |
| be31203f2e |
| 05deff877b |
| 8bf440cdf0 |
| 7fdaece86c |
| 2f2125046c |
| aac20d107a |
| 470615632d |
| 3781fe779c |
| 5b1385de70 |
| ed13b3ec74 |
| 9607e97fe9 |
| 7b80166108 |
| 0f496af7e9 |
| ec4b4a0d46 |
| e9615a2ce2 |
| a09cca0801 |
| 86ba5f9980 |
| e37b7a0f1b |
| d7f499c949 |
| 4332a44c28 |
| 1e1af94949 |
| ac1eb6fa17 |
| dc2015adc3 |
| 809537f536 |
| e2268a7eb4 |
| b337356ecc |
| e4c0d63aaa |
| 10fe28eddc |
| fd4302f848 |
| 2c9aa3655d |
| eb4373bb7c |
| 9034c90184 |
| bf0d0bc43b |
| 0fffe145ed |
| 7c2d1d466b |
| c169871f96 |
| d9a57859c6 |
| d1e949a434 |
| 2952901939 |
| cc9ebf50bb |
| 797ae8af88 |
| e634a4b434 |
| c036464d21 |
| a3f4e6a400 |
| 156843cb9a |
| 9aab9f4480 |
| c6b46172c8 |
| 8862336b77 |
| caf8e6be99 |
| 087cd02413 |
| d9c1c79223 |
| a6063cc89e |
| 596672d631 |
| 941a940122 |
| 5afeb68873 |
| 58787a9dc3 |
| 5a08cb8f9f |
| 55c2da886b |
| 90d81c1a04 |
| 5f9c17de9a |
| d800f1f9a9 |
| 907adb6d91 |
| 9e40721655 |
| 07e530719b |
| 89e16f69a8 |
| eb417c6aa0 |
| 9166080041 |
| c1493a8218 |
| c788b09e78 |
| 5b759fe124 |
| 701aaeb2cd |
| a054c7a657 |
| fa249d7d73 |
| 871d4e7251 |
| 7df0b39eae |
| 9b289704c7 |
| e3b1249a45 |
| ecf3ba4a39 |
| 27c8d4862c |
| 844c1564d7 |
| a36db3a2f6 |
| 722ddfdb6b |
| e36930ad5d |
| 79d901a3e0 |
| de0a048f45 |
| 511211f038 |
| 6f965fead0 |
| 4b4ef38c93 |
| 2af84b5212 |
| beaa5bb0a5 |
| e0101accda |
| 1ebdd7f063 |
| 0c404b3c32 |
| 5375c873d9 |
| 711bc08211 |
| 80d338e984 |
| fc7c823293 |
| 4c96d30e11 |
| 1e7b5edb10 |
| 938359c6c9 |
| bb21cf6892 |
| 5c5134269b |
| 9f23e2e16d |
| e7d6f9f012 |
| 4ccb530585 |
| d8b223bb3b |
| a1f08ee687 |
| fe85c8975e |
| 1c8e6b5a4b |
| 4a3700dbdd |
| caaf915671 |
16  .github/workflows/build.yml (vendored)

@@ -2,9 +2,9 @@ name: Build
on:
push:
branches: [ master ]
branches: [ zig-stage1-compat ]
pull_request:
branches: [ master ]
branches: [ zig-stage1-compat ]
schedule:
- cron: '0 6 * * *'

@@ -22,12 +22,12 @@ jobs:
- name: Test
run: |
zig build test
zig build -fstage1 test

- name: Fetch Vulkan SDK
run: |
wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-1.2.162-focal.list https://packages.lunarg.com/vulkan/1.2.162/lunarg-vulkan-1.2.162-focal.list
sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-1.3.224-focal.list https://packages.lunarg.com/vulkan/1.3.224/lunarg-vulkan-1.3.224-focal.list
sudo apt update
sudo apt install shaderc libglfw3 libglfw3-dev

@@ -37,4 +37,10 @@ jobs:
- name: Build with latest zig & vk.xml
run: |
zig build -Dvulkan-registry=./vk.xml
zig build -fstage1 -Dvulkan-registry=./vk.xml

- name: Archive vk.xml
uses: actions/upload-artifact@v2
with:
name: vk.zig
path: zig-cache/vk.zig
3  .gitignore (vendored)

@@ -1 +1,2 @@
zig-cache/
zig-cache/
zig-out/
2  LICENSE

@@ -1,4 +1,4 @@
Copyright © 2020 Robin Voetter
Copyright © 2020-2022 Robin Voetter

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
81  README.md

@@ -12,13 +12,15 @@ vulkan-zig is automatically tested daily against the latest vk.xml and zig, and

### Zig versions

vulkan-zig aims to be always compatible with the ever-changing Zig master branch (however, development may lag a few days behind). Sometimes, the Zig master branch breaks a bunch of functionality however, which may make the latest version vulkan-zig incompatible with older releases of Zig. Versions compatible with older versions of zig are marked with the tag `zig-<version>`.
vulkan-zig aims to be always compatible with the ever-changing Zig master branch (however, development may lag a few days behind). Sometimes, the Zig master branch breaks a bunch of functionality however, which may make the latest version vulkan-zig incompatible with older releases of Zig. This repository aims to have a version compatible for both the latest Zig master, and the latest Zig release. The `master` branch is compatible with the `master` branch of Zig, and versions for older versions of Zig are maintained in the `zig-<version>-compat` branch.

This branch (zig-stage1-compat) is compatible with the Zig stage 1 compiler.

## Features
### CLI-interface
A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/main/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
```
$ zig-cache/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
$ zig-out/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
```
This reads the xml file, parses its contents, renders the Vulkan bindings, and formats file, before writing the result to the output path. While the intended usage of vulkan-zig is through direct generation from build.zig (see below), the CLI-interface can be used for one-off generation and vendoring the result.

@@ -33,13 +35,12 @@ pub fn build(b: *Builder) void {

// Create a step that generates vk.zig (stored in zig-cache) from the provided vulkan registry.
const gen = vkgen.VkGenerateStep.init(b, "path/to/vk.xml", "vk.zig");
exe.step.dependOn(&gen.step);

// Add the generated file as package to the final executable
exe.addPackagePath("vulkan", gen.full_out_path);
exe.addPackage(gen.package);
}
```
This reads vk.xml, parses its contents, and renders the Vulkan bindings to "vk.zig", which is then formatted and placed in `zig-cache`. The resulting file can then be added to an executable by using `addPackagePath`.
This reads vk.xml, parses its contents, and renders the Vulkan bindings to "vk.zig", which is then formatted and placed in `zig-cache`. The resulting file can then be added to an executable by using `addPackage`, after which the bindings will be made available to the executable under the name `vulkan`.

### Function & field renaming
Functions and fields are renamed to be more or less in line with [Zig's standard library style](https://ziglang.org/documentation/master/#Style-Guide):

@@ -67,23 +68,22 @@ For each function, a wrapper is generated into one of three structs:
* InstanceWrapper. This contains wrappers for functions which are otherwise loaded by `vkGetInstanceProcAddr`.
* DeviceWrapper. This contains wrappers for functions which are loaded by `vkGetDeviceProcAddr`.

Each wrapper struct is to be used as a mixin on a struct containing **just** function pointers as members:
Each wrapper struct can be called with an array of the appropriate enums:
```zig
const vk = @import("vulkan");
const BaseDispatch = struct {
vkCreateInstance: vk.PfnCreateInstance,
usingnamespace vk.BaseWrapper(@This());
};
const BaseDispatch = vk.BaseWrapper(.{
.createInstance = true,
});
```
The wrapper struct then provides wrapper functions for each function pointer in the dispatch struct:
```zig
pub const BaseWrapper(comptime Self: type) type {
pub const BaseWrapper(comptime cmds: anytype) type {
...
const Dispatch = CreateDispatchStruct(cmds);
return struct {
pub fn createInstance(
self: Self,
create_info: InstanceCreateInfo,
p_allocator: ?*const AllocationCallbacks,
) error{
dispatch: Dispatch,

pub const CreateInstanceError = error{
OutOfHostMemory,
OutOfDeviceMemory,
InitializationFailed,

@@ -91,9 +91,14 @@ pub const BaseWrapper(comptime Self: type) type {
ExtensionNotPresent,
IncompatibleDriver,
Unknown,
}!Instance {
};
pub fn createInstance(
self: Self,
create_info: InstanceCreateInfo,
p_allocator: ?*const AllocationCallbacks,
) CreateInstanceError!Instance {
var instance: Instance = undefined;
const result = self.vkCreateInstance(
const result = self.dispatch.vkCreateInstance(
&create_info,
p_allocator,
&instance,

@@ -125,9 +130,32 @@ Wrappers are generated according to the following rules:
* As of yet, there is no specific handling of enumeration style commands or other commands which accept slices.

Furthermore, each wrapper contains a function to load each function pointer member when passed either `PfnGetInstanceProcAddr` or `PfnGetDeviceProcAddr`, which attempts to load each member as function pointer and casts it to the appropriate type. These functions are loaded literally, and any wrongly named member or member with a wrong function pointer type will result in problems.
* For `BaseWrapper`, this function has signature `fn load(loader: PfnGetInstanceProcAddr) !Self`.
* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: PfnGetInstanceProcAddr) !Self`.
* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: PfnGetDeviceProcAddr) !Self`.
* For `BaseWrapper`, this function has signature `fn load(loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr` (with optionally having a different calling convention).
* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr`.
* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetDeviceProcAddr`.

Note that these functions accepts a loader with the signature of `anytype` instead of `PfnGetInstanceProcAddr`. This is because it is valid for `vkGetInstanceProcAddr` to load itself, in which case the returned function is to be called with the vulkan calling convention. This calling convention is not required for loading vulkan-zig itself, though, and a loader to be called with any calling convention with the target architecture may be passed in. This is particularly useful when interacting with C libraries that provide `vkGetInstanceProcAddr`.

```zig
// vkGetInstanceProcAddr as provided by GLFW.
// Note that vk.Instance and vk.PfnVoidFunction are ABI compatible with VkInstance,
// and that `extern` implies the C calling convention.
pub extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;

// Or provide a custom implementation.
// This function is called with the unspecified Zig-internal calling convention.
fn customGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction {
    ...
}

// Both calls are valid, even
const vkb = try BaseDispatch.load(glfwGetInstanceProcAddress);
const vkb = try BaseDispatch.load(customGetInstanceProcAddress);
```

By default, wrapper `load` functions return `error.CommandLoadFailure` if a call to the loader resulted in `null`. If this behaviour is not desired, one can use `loadNoFail`. This function accepts the same parameters as `load`, but does not return an error any function pointer fails to load and sets its value to `undefined` instead. It is at the programmer's discretion not to invoke invalid functions, which can be tested for by checking whether the required core and extension versions the function requires are supported.

One can access the underlying unwrapped C functions by doing `wrapper.dispatch.vkFuncYouWant(..)`.

### Bitflags
Packed structs of bools are used for bit flags in vulkan-zig, instead of both a `FlagBits` and `Flags` variant. Places where either of these variants are used are both replaced by this packed struct instead. This means that even in places where just one flag would normally be accepted, the packed struct is accepted. The programmer is responsible for only enabling a single bit.

@@ -192,7 +220,7 @@ Defaults are generated for certain fields of structs:
```zig
pub const InstanceCreateInfo = extern struct {
s_type: StructureType = .instance_create_info,
p_next: ?*const c_void = null,
p_next: ?*const anyopaque = null,
flags: InstanceCreateFlags,
...
};

@@ -224,8 +252,7 @@ pub fn build(b: *Builder) void {
const exe = b.addExecutable("my-executable", "src/main.zig");

const gen = vkgen.VkGenerateStep(b, "path/to/vk.xml", "vk.zig");
exe.step.dependOn(&gen.step);
exe.addPackagePath("vulkan", gen.full_out_path);
exe.addPackage(gen.package);

const shader_comp = vkgen.ShaderCompileStep.init(
builder,

@@ -246,5 +273,7 @@ Upon compilation, glslc is then invoked to compile each shader, and the result i
A partial implementation of https://vulkan-tutorial.org is implemented in [examples/triangle.zig](examples/triangle.zig). This example can be ran by executing `zig build run-triangle` in vulkan-zig's root.

## See also
* Implementation of https://vulkan-tutorial.org: https://github.com/andrewrk/zig-vulkan-triangle.
* Implementation of https://vulkan-tutorial.org using `@cImport`'ed bindings: https://github.com/andrewrk/zig-vulkan-triangle.
* Alternative binding generator: https://github.com/SpexGuy/Zig-Vulkan-Headers
* Zig bindings for GLFW: https://github.com/hexops/mach-glfw
* With vulkan-zig integration example: https://github.com/hexops/mach-glfw-vulkan-example
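For orientation, here is a minimal sketch of the dispatch-table style that the updated README describes. It is not taken verbatim from the repository: it assumes the generated bindings are importable as the `vulkan` package, that a GLFW-style loader is declared as in the examples, and the `makeInstance` name and the zero-extension create info are illustrative only.

```zig
const vk = @import("vulkan"); // assumption: generated bindings exposed as the "vulkan" package

// Request only the commands that are actually used; the wrapper generates a
// dispatch table and a wrapper method for each enabled command.
const BaseDispatch = vk.BaseWrapper(.{
    .createInstance = true,
});

// Any loader with the vkGetInstanceProcAddr shape works; GLFW's, declared
// extern as in the examples, is used here.
extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;

pub fn makeInstance() !vk.Instance {
    // load() returns error.CommandLoadFailure if a requested command cannot be resolved.
    const vkb = try BaseDispatch.load(glfwGetInstanceProcAddress);

    // s_type and p_next have generated defaults, so only the remaining
    // InstanceCreateInfo fields (in their snake_case renamings) are spelled out.
    return try vkb.createInstance(&.{
        .flags = .{},
        .p_application_info = null,
        .enabled_layer_count = 0,
        .pp_enabled_layer_names = undefined,
        .enabled_extension_count = 0,
        .pp_enabled_extension_names = undefined,
    }, null);
}
```

An `InstanceWrapper` and `DeviceWrapper` are loaded the same way, as the `GraphicsContext` changes further below show.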
35  build.zig

@@ -8,6 +8,7 @@ pub const ResourceGenStep = struct {
shader_step: *vkgen.ShaderCompileStep,
builder: *Builder,
package: std.build.Pkg,
output_file: std.build.GeneratedFile,
resources: std.ArrayList(u8),

pub fn init(builder: *Builder, out: []const u8) *ResourceGenStep {

@@ -19,14 +20,18 @@ pub const ResourceGenStep = struct {
}) catch unreachable;

self.* = .{
.step = Step.init(.Custom, "resources", builder.allocator, make),
.shader_step = vkgen.ShaderCompileStep.init(builder, &[_][]const u8{"glslc", "--target-env=vulkan1.2"}),
.step = Step.init(.custom, "resources", builder.allocator, make),
.shader_step = vkgen.ShaderCompileStep.init(builder, &[_][]const u8{ "glslc", "--target-env=vulkan1.2" }, "shaders"),
.builder = builder,
.package = .{
.name = "resources",
.path = full_out_path,
.source = .{ .generated = &self.output_file },
.dependencies = null,
},
.output_file = .{
.step = &self.step,
.path = full_out_path,
},
.resources = std.ArrayList(u8).init(builder.allocator),
};

@@ -34,15 +39,15 @@ pub const ResourceGenStep = struct {
return self;
}

fn renderPath(self: *ResourceGenStep, path: []const u8, writer: anytype) void {
const separators = &[_]u8{ std.fs.path.sep_windows, std.fs.path.sep_posix };
fn renderPath(path: []const u8, writer: anytype) void {
const separators = &[_]u8{ std.fs.path.sep_windows, std.fs.path.sep_posix };
var i: usize = 0;
while (std.mem.indexOfAnyPos(u8, path, i, separators)) |j| {
writer.writeAll(path[i .. j]) catch unreachable;
writer.writeAll(path[i..j]) catch unreachable;
switch (std.fs.path.sep) {
std.fs.path.sep_windows => writer.writeAll("\\\\") catch unreachable,
std.fs.path.sep_posix => writer.writeByte(std.fs.path.sep_posix) catch unreachable,
else => unreachable
else => unreachable,
}

i = j + 1;

@@ -51,21 +56,21 @@ pub const ResourceGenStep = struct {
}

pub fn addShader(self: *ResourceGenStep, name: []const u8, source: []const u8) void {
const shader_out_path = self.shader_step.add(source);
const shader_out_path = self.shader_step.add(source, .{});
var writer = self.resources.writer();

writer.print("pub const {s} = @embedFile(\"", .{ name }) catch unreachable;
self.renderPath(shader_out_path, writer);
writer.writeAll("\");\n") catch unreachable;
writer.print("pub const {s} align(@alignOf(u32)) = @embedFile(\"", .{name}) catch unreachable;
renderPath(shader_out_path, writer);
writer.writeAll("\").*;\n") catch unreachable;
}

fn make(step: *Step) !void {
const self = @fieldParentPtr(ResourceGenStep, "step", step);
const cwd = std.fs.cwd();

const dir = std.fs.path.dirname(self.package.path).?;
const dir = std.fs.path.dirname(self.output_file.path.?).?;
try cwd.makePath(dir);
try cwd.writeFile(self.package.path, self.resources.items);
try cwd.writeFile(self.output_file.path.?, self.resources.items);
}
};

@@ -88,16 +93,14 @@ pub fn build(b: *Builder) void {
triangle_exe.linkLibC();
triangle_exe.linkSystemLibrary("glfw");

const vk_xml_path = b.option([]const u8, "vulkan-registry", "Override the to the Vulkan registry") orelse "examples/vk.xml";
const vk_xml_path = b.option([]const u8, "vulkan-registry", "Override the path to the Vulkan registry") orelse "examples/vk.xml";

const gen = vkgen.VkGenerateStep.init(b, vk_xml_path, "vk.zig");
triangle_exe.step.dependOn(&gen.step);
triangle_exe.addPackage(gen.package);

const res = ResourceGenStep.init(b, "resources.zig");
res.addShader("triangle_vert", "examples/shaders/triangle.vert");
res.addShader("triangle_frag", "examples/shaders/triangle.frag");
triangle_exe.step.dependOn(&res.step);
triangle_exe.addPackage(res.package);

const triangle_run_cmd = triangle_exe.run();
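As a companion to the build.zig changes above, a hedged sketch of how a consumer project might wire the generator and shader compilation into its own build.zig under this branch's API follows. The `vulkan-zig/generator/index.zig` import path and the shader file names are placeholders rather than values taken from the diff, and the dependency on `shader_comp.step` is an assumption about the step's layout.

```zig
const std = @import("std");
const Builder = std.build.Builder;
// Placeholder path: point this at vulkan-zig's generator entry point in your tree.
const vkgen = @import("vulkan-zig/generator/index.zig");

pub fn build(b: *Builder) void {
    const exe = b.addExecutable("my-executable", "src/main.zig");

    // Generate vk.zig from the registry and expose it as the "vulkan" package.
    const gen = vkgen.VkGenerateStep.init(b, "path/to/vk.xml", "vk.zig");
    exe.step.dependOn(&gen.step);
    exe.addPackage(gen.package);

    // Compile GLSL shaders with glslc into the "shaders" cache subdirectory;
    // add() now takes an options struct and returns the output path.
    const shader_comp = vkgen.ShaderCompileStep.init(
        b,
        &[_][]const u8{ "glslc", "--target-env=vulkan1.2" },
        "shaders",
    );
    _ = shader_comp.add("shaders/triangle.vert", .{});
    _ = shader_comp.add("shaders/triangle.frag", .{});
    exe.step.dependOn(&shader_comp.step);

    exe.install();
}
```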
@@ -4,10 +4,11 @@ pub usingnamespace @cImport({
});

const vk = @import("vulkan");
const c = @This();

// usually the GLFW vulkan functions are exported if Vulkan is included,
// but since thats not the case here, they are manually imported.

pub extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;
pub extern fn glfwGetPhysicalDevicePresentationSupport(instance: vk.Instance, pdev: vk.PhysicalDevice, queuefamily: u32) c_int;
pub extern fn glfwCreateWindowSurface(instance: vk.Instance, window: *GLFWwindow, allocation_callbacks: ?*const vk.AllocationCallbacks, surface: *vk.SurfaceKHR) vk.Result;
pub extern fn glfwCreateWindowSurface(instance: vk.Instance, window: *c.GLFWwindow, allocation_callbacks: ?*const vk.AllocationCallbacks, surface: *vk.SurfaceKHR) vk.Result;
@@ -3,85 +3,80 @@ const vk = @import("vulkan");
const c = @import("c.zig");
const Allocator = std.mem.Allocator;

const required_device_extensions = [_][]const u8{
vk.extension_info.khr_swapchain.name
};
const required_device_extensions = [_][*:0]const u8{vk.extension_info.khr_swapchain.name};

const BaseDispatch = struct {
vkCreateInstance: vk.PfnCreateInstance,
usingnamespace vk.BaseWrapper(@This());
};
const BaseDispatch = vk.BaseWrapper(.{
.createInstance = true,
});

const InstanceDispatch = struct {
vkDestroyInstance: vk.PfnDestroyInstance,
vkCreateDevice: vk.PfnCreateDevice,
vkDestroySurfaceKHR: vk.PfnDestroySurfaceKHR,
vkEnumeratePhysicalDevices: vk.PfnEnumeratePhysicalDevices,
vkGetPhysicalDeviceProperties: vk.PfnGetPhysicalDeviceProperties,
vkEnumerateDeviceExtensionProperties: vk.PfnEnumerateDeviceExtensionProperties,
vkGetPhysicalDeviceSurfaceFormatsKHR: vk.PfnGetPhysicalDeviceSurfaceFormatsKHR,
vkGetPhysicalDeviceSurfacePresentModesKHR: vk.PfnGetPhysicalDeviceSurfacePresentModesKHR,
vkGetPhysicalDeviceSurfaceCapabilitiesKHR: vk.PfnGetPhysicalDeviceSurfaceCapabilitiesKHR,
vkGetPhysicalDeviceQueueFamilyProperties: vk.PfnGetPhysicalDeviceQueueFamilyProperties,
vkGetPhysicalDeviceSurfaceSupportKHR: vk.PfnGetPhysicalDeviceSurfaceSupportKHR,
vkGetPhysicalDeviceMemoryProperties: vk.PfnGetPhysicalDeviceMemoryProperties,
vkGetDeviceProcAddr: vk.PfnGetDeviceProcAddr,
usingnamespace vk.InstanceWrapper(@This());
};
const InstanceDispatch = vk.InstanceWrapper(.{
.destroyInstance = true,
.createDevice = true,
.destroySurfaceKHR = true,
.enumeratePhysicalDevices = true,
.getPhysicalDeviceProperties = true,
.enumerateDeviceExtensionProperties = true,
.getPhysicalDeviceSurfaceFormatsKHR = true,
.getPhysicalDeviceSurfacePresentModesKHR = true,
.getPhysicalDeviceSurfaceCapabilitiesKHR = true,
.getPhysicalDeviceQueueFamilyProperties = true,
.getPhysicalDeviceSurfaceSupportKHR = true,
.getPhysicalDeviceMemoryProperties = true,
.getDeviceProcAddr = true,
});

const DeviceDispatch = struct {
vkDestroyDevice: vk.PfnDestroyDevice,
vkGetDeviceQueue: vk.PfnGetDeviceQueue,
vkCreateSemaphore: vk.PfnCreateSemaphore,
vkCreateFence: vk.PfnCreateFence,
vkCreateImageView: vk.PfnCreateImageView,
vkDestroyImageView: vk.PfnDestroyImageView,
vkDestroySemaphore: vk.PfnDestroySemaphore,
vkDestroyFence: vk.PfnDestroyFence,
vkGetSwapchainImagesKHR: vk.PfnGetSwapchainImagesKHR,
vkCreateSwapchainKHR: vk.PfnCreateSwapchainKHR,
vkDestroySwapchainKHR: vk.PfnDestroySwapchainKHR,
vkAcquireNextImageKHR: vk.PfnAcquireNextImageKHR,
vkDeviceWaitIdle: vk.PfnDeviceWaitIdle,
vkWaitForFences: vk.PfnWaitForFences,
vkResetFences: vk.PfnResetFences,
vkQueueSubmit: vk.PfnQueueSubmit,
vkQueuePresentKHR: vk.PfnQueuePresentKHR,
vkCreateCommandPool: vk.PfnCreateCommandPool,
vkDestroyCommandPool: vk.PfnDestroyCommandPool,
vkAllocateCommandBuffers: vk.PfnAllocateCommandBuffers,
vkFreeCommandBuffers: vk.PfnFreeCommandBuffers,
vkQueueWaitIdle: vk.PfnQueueWaitIdle,
vkCreateShaderModule: vk.PfnCreateShaderModule,
vkDestroyShaderModule: vk.PfnDestroyShaderModule,
vkCreatePipelineLayout: vk.PfnCreatePipelineLayout,
vkDestroyPipelineLayout: vk.PfnDestroyPipelineLayout,
vkCreateRenderPass: vk.PfnCreateRenderPass,
vkDestroyRenderPass: vk.PfnDestroyRenderPass,
vkCreateGraphicsPipelines: vk.PfnCreateGraphicsPipelines,
vkDestroyPipeline: vk.PfnDestroyPipeline,
vkCreateFramebuffer: vk.PfnCreateFramebuffer,
vkDestroyFramebuffer: vk.PfnDestroyFramebuffer,
vkBeginCommandBuffer: vk.PfnBeginCommandBuffer,
vkEndCommandBuffer: vk.PfnEndCommandBuffer,
vkAllocateMemory: vk.PfnAllocateMemory,
vkFreeMemory: vk.PfnFreeMemory,
vkCreateBuffer: vk.PfnCreateBuffer,
vkDestroyBuffer: vk.PfnDestroyBuffer,
vkGetBufferMemoryRequirements: vk.PfnGetBufferMemoryRequirements,
vkMapMemory: vk.PfnMapMemory,
vkUnmapMemory: vk.PfnUnmapMemory,
vkBindBufferMemory: vk.PfnBindBufferMemory,
vkCmdBeginRenderPass: vk.PfnCmdBeginRenderPass,
vkCmdEndRenderPass: vk.PfnCmdEndRenderPass,
vkCmdBindPipeline: vk.PfnCmdBindPipeline,
vkCmdDraw: vk.PfnCmdDraw,
vkCmdSetViewport: vk.PfnCmdSetViewport,
vkCmdSetScissor: vk.PfnCmdSetScissor,
vkCmdBindVertexBuffers: vk.PfnCmdBindVertexBuffers,
vkCmdCopyBuffer: vk.PfnCmdCopyBuffer,
usingnamespace vk.DeviceWrapper(@This());
};
const DeviceDispatch = vk.DeviceWrapper(.{
.destroyDevice = true,
.getDeviceQueue = true,
.createSemaphore = true,
.createFence = true,
.createImageView = true,
.destroyImageView = true,
.destroySemaphore = true,
.destroyFence = true,
.getSwapchainImagesKHR = true,
.createSwapchainKHR = true,
.destroySwapchainKHR = true,
.acquireNextImageKHR = true,
.deviceWaitIdle = true,
.waitForFences = true,
.resetFences = true,
.queueSubmit = true,
.queuePresentKHR = true,
.createCommandPool = true,
.destroyCommandPool = true,
.allocateCommandBuffers = true,
.freeCommandBuffers = true,
.queueWaitIdle = true,
.createShaderModule = true,
.destroyShaderModule = true,
.createPipelineLayout = true,
.destroyPipelineLayout = true,
.createRenderPass = true,
.destroyRenderPass = true,
.createGraphicsPipelines = true,
.destroyPipeline = true,
.createFramebuffer = true,
.destroyFramebuffer = true,
.beginCommandBuffer = true,
.endCommandBuffer = true,
.allocateMemory = true,
.freeMemory = true,
.createBuffer = true,
.destroyBuffer = true,
.getBufferMemoryRequirements = true,
.mapMemory = true,
.unmapMemory = true,
.bindBufferMemory = true,
.cmdBeginRenderPass = true,
.cmdEndRenderPass = true,
.cmdBindPipeline = true,
.cmdDraw = true,
.cmdSetViewport = true,
.cmdSetScissor = true,
.cmdBindVertexBuffers = true,
.cmdCopyBuffer = true,
});

pub const GraphicsContext = struct {
vkb: BaseDispatch,

@@ -98,7 +93,7 @@ pub const GraphicsContext = struct {
graphics_queue: Queue,
present_queue: Queue,

pub fn init(allocator: *Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
pub fn init(allocator: Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
var self: GraphicsContext = undefined;
self.vkb = try BaseDispatch.load(c.glfwGetInstanceProcAddress);

@@ -113,7 +108,7 @@ pub const GraphicsContext = struct {
.api_version = vk.API_VERSION_1_2,
};

self.instance = try self.vkb.createInstance(.{
self.instance = try self.vkb.createInstance(&.{
.flags = .{},
.p_application_info = &app_info,
.enabled_layer_count = 0,

@@ -125,18 +120,18 @@ pub const GraphicsContext = struct {
self.vki = try InstanceDispatch.load(self.instance, c.glfwGetInstanceProcAddress);
errdefer self.vki.destroyInstance(self.instance, null);

self.surface = try createSurface(self.vki, self.instance, window);
self.surface = try createSurface(self.instance, window);
errdefer self.vki.destroySurfaceKHR(self.instance, self.surface, null);

const candidate = try pickPhysicalDevice(self.vki, self.instance, allocator, self.surface);
self.pdev = candidate.pdev;
self.props = candidate.props;
self.dev = try initializeCandidate(self.vki, candidate);
self.vkd = try DeviceDispatch.load(self.dev, self.vki.vkGetDeviceProcAddr);
self.vkd = try DeviceDispatch.load(self.dev, self.vki.dispatch.vkGetDeviceProcAddr);
errdefer self.vkd.destroyDevice(self.dev, null);

self.graphics_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);
self.present_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);
self.present_queue = Queue.init(self.vkd, self.dev, candidate.queues.present_family);

self.mem_props = self.vki.getPhysicalDeviceMemoryProperties(self.pdev);

@@ -149,13 +144,13 @@ pub const GraphicsContext = struct {
self.vki.destroyInstance(self.instance, null);
}

pub fn deviceName(self: GraphicsContext) []const u8 {
pub fn deviceName(self: *const GraphicsContext) []const u8 {
const len = std.mem.indexOfScalar(u8, &self.props.device_name, 0).?;
return self.props.device_name[0 .. len];
return self.props.device_name[0..len];
}

pub fn findMemoryTypeIndex(self: GraphicsContext, memory_type_bits: u32, flags: vk.MemoryPropertyFlags) !u32 {
for (self.mem_props.memory_types[0 .. self.mem_props.memory_type_count]) |mem_type, i| {
for (self.mem_props.memory_types[0..self.mem_props.memory_type_count]) |mem_type, i| {
if (memory_type_bits & (@as(u32, 1) << @truncate(u5, i)) != 0 and mem_type.property_flags.contains(flags)) {
return @truncate(u32, i);
}

@@ -165,7 +160,7 @@ pub const GraphicsContext = struct {
}

pub fn allocate(self: GraphicsContext, requirements: vk.MemoryRequirements, flags: vk.MemoryPropertyFlags) !vk.DeviceMemory {
return try self.vkd.allocateMemory(self.dev, .{
return try self.vkd.allocateMemory(self.dev, &.{
.allocation_size = requirements.size,
.memory_type_index = try self.findMemoryTypeIndex(requirements.memory_type_bits, flags),
}, null);

@@ -184,7 +179,7 @@ pub const Queue = struct {
}
};

fn createSurface(vki: InstanceDispatch, instance: vk.Instance, window: *c.GLFWwindow) !vk.SurfaceKHR {
fn createSurface(instance: vk.Instance, window: *c.GLFWwindow) !vk.SurfaceKHR {
var surface: vk.SurfaceKHR = undefined;
if (c.glfwCreateWindowSurface(instance, window, null, &surface) != .success) {
return error.SurfaceInitFailed;

@@ -207,15 +202,15 @@ fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.De
.queue_family_index = candidate.queues.present_family,
.queue_count = 1,
.p_queue_priorities = &priority,
}
},
};

const queue_count: u32 = if (candidate.queues.graphics_family == candidate.queues.present_family)
1
else
2;
1
else
2;

return try vki.createDevice(candidate.pdev, .{
return try vki.createDevice(candidate.pdev, &.{
.flags = .{},
.queue_create_info_count = queue_count,
.p_queue_create_infos = &qci,

@@ -241,7 +236,7 @@ const QueueAllocation = struct {
fn pickPhysicalDevice(
vki: InstanceDispatch,
instance: vk.Instance,
allocator: *Allocator,
allocator: Allocator,
surface: vk.SurfaceKHR,
) !DeviceCandidate {
var device_count: u32 = undefined;

@@ -264,7 +259,7 @@ fn pickPhysicalDevice(
fn checkSuitable(
vki: InstanceDispatch,
pdev: vk.PhysicalDevice,
allocator: *Allocator,
allocator: Allocator,
surface: vk.SurfaceKHR,
) !?DeviceCandidate {
const props = vki.getPhysicalDeviceProperties(pdev);

@@ -281,19 +276,14 @@ fn checkSuitable(
return DeviceCandidate{
.pdev = pdev,
.props = props,
.queues = allocation
.queues = allocation,
};
}

return null;
}

fn allocateQueues(
vki: InstanceDispatch,
pdev: vk.PhysicalDevice,
allocator: *Allocator,
surface: vk.SurfaceKHR
) !?QueueAllocation {
fn allocateQueues(vki: InstanceDispatch, pdev: vk.PhysicalDevice, allocator: Allocator, surface: vk.SurfaceKHR) !?QueueAllocation {
var family_count: u32 = undefined;
vki.getPhysicalDeviceQueueFamilyProperties(pdev, &family_count, null);

@@ -307,7 +297,7 @@ fn allocateQueues(
for (families) |properties, i| {
const family = @intCast(u32, i);

if (graphics_family == null and properties.queue_flags.contains(.{.graphics_bit = true})) {
if (graphics_family == null and properties.queue_flags.graphics_bit) {
graphics_family = family;
}

@@ -319,7 +309,7 @@ fn allocateQueues(
if (graphics_family != null and present_family != null) {
return QueueAllocation{
.graphics_family = graphics_family.?,
.present_family = present_family.?
.present_family = present_family.?,
};
}

@@ -339,7 +329,7 @@ fn checkSurfaceSupport(vki: InstanceDispatch, pdev: vk.PhysicalDevice, surface:
fn checkExtensionSupport(
vki: InstanceDispatch,
pdev: vk.PhysicalDevice,
allocator: *Allocator,
allocator: Allocator,
) !bool {
var count: u32 = undefined;
_ = try vki.enumerateDeviceExtensionProperties(pdev, null, &count, null);

@@ -352,8 +342,8 @@ fn checkExtensionSupport(
for (required_device_extensions) |ext| {
for (propsv) |props| {
const len = std.mem.indexOfScalar(u8, &props.extension_name, 0).?;
const prop_ext_name = props.extension_name[0 .. len];
if (std.mem.eql(u8, ext, prop_ext_name)) {
const prop_ext_name = props.extension_name[0..len];
if (std.mem.eql(u8, std.mem.span(ext), prop_ext_name)) {
break;
}
} else {
@@ -10,7 +10,7 @@ pub const Swapchain = struct {
};

gc: *const GraphicsContext,
allocator: *Allocator,
allocator: Allocator,

surface_format: vk.SurfaceFormatKHR,
present_mode: vk.PresentModeKHR,

@@ -21,11 +21,11 @@ pub const Swapchain = struct {
image_index: u32,
next_image_acquired: vk.Semaphore,

pub fn init(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D) !Swapchain {
pub fn init(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D) !Swapchain {
return try initRecycle(gc, allocator, extent, .null_handle);
}

pub fn initRecycle(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
pub fn initRecycle(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
const caps = try gc.vki.getPhysicalDeviceSurfaceCapabilitiesKHR(gc.pdev, gc.surface);
const actual_extent = findActualExtent(caps, extent);
if (actual_extent.width == 0 or actual_extent.height == 0) {

@@ -40,10 +40,13 @@ pub const Swapchain = struct {
image_count = std.math.min(image_count, caps.max_image_count);
}

const concurrent = gc.graphics_queue.family != gc.present_queue.family;
const qfi = [_]u32{gc.graphics_queue.family, gc.present_queue.family};
const qfi = [_]u32{ gc.graphics_queue.family, gc.present_queue.family };
const sharing_mode: vk.SharingMode = if (gc.graphics_queue.family != gc.present_queue.family)
.concurrent
else
.exclusive;

const handle = try gc.vkd.createSwapchainKHR(gc.dev, .{
const handle = try gc.vkd.createSwapchainKHR(gc.dev, &.{
.flags = .{},
.surface = gc.surface,
.min_image_count = image_count,

@@ -51,12 +54,12 @@ pub const Swapchain = struct {
.image_color_space = surface_format.color_space,
.image_extent = actual_extent,
.image_array_layers = 1,
.image_usage = .{.color_attachment_bit = true, .transfer_dst_bit = true},
.image_sharing_mode = if (concurrent) .concurrent else .exclusive,
.image_usage = .{ .color_attachment_bit = true, .transfer_dst_bit = true },
.image_sharing_mode = sharing_mode,
.queue_family_index_count = qfi.len,
.p_queue_family_indices = &qfi,
.pre_transform = caps.current_transform,
.composite_alpha = .{.opaque_bit_khr = true},
.composite_alpha = .{ .opaque_bit_khr = true },
.present_mode = present_mode,
.clipped = vk.TRUE,
.old_swapchain = old_handle,

@@ -69,9 +72,12 @@ pub const Swapchain = struct {
}

const swap_images = try initSwapchainImages(gc, handle, surface_format.format, allocator);
errdefer for (swap_images) |si| si.deinit(gc);
errdefer {
for (swap_images) |si| si.deinit(gc);
allocator.free(swap_images);
}

var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
errdefer gc.vkd.destroySemaphore(gc.dev, next_image_acquired, null);

const result = try gc.vkd.acquireNextImageKHR(gc.dev, handle, std.math.maxInt(u64), next_image_acquired, .null_handle);

@@ -95,6 +101,7 @@ pub const Swapchain = struct {

fn deinitExceptSwapchain(self: Swapchain) void {
for (self.swap_images) |si| si.deinit(self.gc);
self.allocator.free(self.swap_images);
self.gc.vkd.destroySemaphore(self.gc.dev, self.next_image_acquired, null);
}

@@ -147,7 +154,7 @@ pub const Swapchain = struct {
try self.gc.vkd.resetFences(self.gc.dev, 1, @ptrCast([*]const vk.Fence, &current.frame_fence));

// Step 2: Submit the command buffer
const wait_stage = [_]vk.PipelineStageFlags{.{.top_of_pipe_bit = true}};
const wait_stage = [_]vk.PipelineStageFlags{.{ .top_of_pipe_bit = true }};
try self.gc.vkd.queueSubmit(self.gc.graphics_queue.handle, 1, &[_]vk.SubmitInfo{.{
.wait_semaphore_count = 1,
.p_wait_semaphores = @ptrCast([*]const vk.Semaphore, &current.image_acquired),

@@ -159,7 +166,7 @@ pub const Swapchain = struct {
}}, current.frame_fence);

// Step 3: Present the current frame
_ = try self.gc.vkd.queuePresentKHR(self.gc.present_queue.handle, .{
_ = try self.gc.vkd.queuePresentKHR(self.gc.present_queue.handle, &.{
.wait_semaphore_count = 1,
.p_wait_semaphores = @ptrCast([*]const vk.Semaphore, &current.render_finished),
.swapchain_count = 1,

@@ -196,14 +203,14 @@ const SwapImage = struct {
frame_fence: vk.Fence,

fn init(gc: *const GraphicsContext, image: vk.Image, format: vk.Format) !SwapImage {
const view = try gc.vkd.createImageView(gc.dev, .{
const view = try gc.vkd.createImageView(gc.dev, &.{
.flags = .{},
.image = image,
.view_type = .@"2d",
.format = format,
.components = .{.r = .identity, .g = .identity, .b = .identity, .a = .identity},
.components = .{ .r = .identity, .g = .identity, .b = .identity, .a = .identity },
.subresource_range = .{
.aspect_mask = .{.color_bit = true},
.aspect_mask = .{ .color_bit = true },
.base_mip_level = 0,
.level_count = 1,
.base_array_layer = 0,

@@ -212,13 +219,13 @@ const SwapImage = struct {
}, null);
errdefer gc.vkd.destroyImageView(gc.dev, view, null);

const image_acquired = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
const image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);

const render_finished = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);
const render_finished = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
errdefer gc.vkd.destroySemaphore(gc.dev, render_finished, null);

const frame_fence = try gc.vkd.createFence(gc.dev, .{.flags = .{.signaled_bit = true}}, null);
const frame_fence = try gc.vkd.createFence(gc.dev, &.{ .flags = .{ .signaled_bit = true } }, null);
errdefer gc.vkd.destroyFence(gc.dev, frame_fence, null);

return SwapImage{

@@ -243,7 +250,7 @@ const SwapImage = struct {
}
};

fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: *Allocator) ![]SwapImage {
fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: Allocator) ![]SwapImage {
var count: u32 = undefined;
_ = try gc.vkd.getSwapchainImagesKHR(gc.dev, swapchain, &count, null);
const images = try allocator.alloc(vk.Image, count);

@@ -251,10 +258,10 @@ fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, f
_ = try gc.vkd.getSwapchainImagesKHR(gc.dev, swapchain, &count, images.ptr);

const swap_images = try allocator.alloc(SwapImage, count);
errdefer allocator.free(images);
errdefer allocator.free(swap_images);

var i: usize = 0;
errdefer for (swap_images[0 .. i]) |si| si.deinit(gc);
errdefer for (swap_images[0..i]) |si| si.deinit(gc);

for (images) |image| {
swap_images[i] = try SwapImage.init(gc, image, format);

@@ -264,7 +271,7 @@ fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, f
return swap_images;
}

fn findSurfaceFormat(gc: *const GraphicsContext, allocator: *Allocator) !vk.SurfaceFormatKHR {
fn findSurfaceFormat(gc: *const GraphicsContext, allocator: Allocator) !vk.SurfaceFormatKHR {
const preferred = vk.SurfaceFormatKHR{
.format = .b8g8r8a8_srgb,
.color_space = .srgb_nonlinear_khr,

@@ -285,7 +292,7 @@ fn findSurfaceFormat(gc: *const GraphicsContext, allocator: *Allocator) !vk.Surf
return surface_formats[0]; // There must always be at least one supported surface format
}

fn findPresentMode(gc: *const GraphicsContext, allocator: *Allocator) !vk.PresentModeKHR {
fn findPresentMode(gc: *const GraphicsContext, allocator: Allocator) !vk.PresentModeKHR {
var count: u32 = undefined;
_ = try gc.vki.getPhysicalDeviceSurfacePresentModesKHR(gc.pdev, gc.surface, &count, null);
const present_modes = try allocator.alloc(vk.PresentModeKHR, count);
@@ -20,13 +20,13 @@ const Vertex = struct {
.binding = 0,
.location = 0,
.format = .r32g32_sfloat,
.offset = @byteOffsetOf(Vertex, "pos"),
.offset = @offsetOf(Vertex, "pos"),
},
.{
.binding = 0,
.location = 1,
.format = .r32g32b32_sfloat,
.offset = @byteOffsetOf(Vertex, "color"),
.offset = @offsetOf(Vertex, "color"),
},
};

@@ -35,16 +35,16 @@ const Vertex = struct {
};

const vertices = [_]Vertex{
.{.pos = .{0, -0.5}, .color = .{1, 0, 0}},
.{.pos = .{0.5, 0.5}, .color = .{0, 1, 0}},
.{.pos = .{-0.5, 0.5}, .color = .{0, 0, 1}},
.{ .pos = .{ 0, -0.5 }, .color = .{ 1, 0, 0 } },
.{ .pos = .{ 0.5, 0.5 }, .color = .{ 0, 1, 0 } },
.{ .pos = .{ -0.5, 0.5 }, .color = .{ 0, 0, 1 } },
};

pub fn main() !void {
if (c.glfwInit() != c.GLFW_TRUE) return error.GlfwInitFailed;
defer c.glfwTerminate();

var extent = vk.Extent2D{.width = 800, .height = 600};
var extent = vk.Extent2D{ .width = 800, .height = 600 };

c.glfwWindowHint(c.GLFW_CLIENT_API, c.GLFW_NO_API);
const window = c.glfwCreateWindow(

@@ -52,21 +52,23 @@ pub fn main() !void {
@intCast(c_int, extent.height),
app_name,
null,
null
null,
) orelse return error.WindowInitFailed;
defer c.glfwDestroyWindow(window);

const allocator = std.heap.page_allocator;
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
defer _ = gpa.deinit();
const allocator = gpa.allocator();

const gc = try GraphicsContext.init(allocator, app_name, window);
defer gc.deinit();

std.debug.print("Using device: {s}\n", .{ gc.deviceName() });
std.debug.print("Using device: {s}\n", .{gc.deviceName()});

var swapchain = try Swapchain.init(&gc, allocator, extent);
defer swapchain.deinit();

const pipeline_layout = try gc.vkd.createPipelineLayout(gc.dev, .{
const pipeline_layout = try gc.vkd.createPipelineLayout(gc.dev, &.{
.flags = .{},
.set_layout_count = 0,
.p_set_layouts = undefined,

@@ -78,33 +80,33 @@ pub fn main() !void {
const render_pass = try createRenderPass(&gc, swapchain);
defer gc.vkd.destroyRenderPass(gc.dev, render_pass, null);

var pipeline = try createPipeline(&gc, extent, pipeline_layout, render_pass);
var pipeline = try createPipeline(&gc, pipeline_layout, render_pass);
defer gc.vkd.destroyPipeline(gc.dev, pipeline, null);

var framebuffers = try createFramebuffers(&gc, allocator, render_pass, swapchain);
defer destroyFramebuffers(&gc, allocator, framebuffers);

const pool = try gc.vkd.createCommandPool(gc.dev, .{
const pool = try gc.vkd.createCommandPool(gc.dev, &.{
.flags = .{},
.queue_family_index = gc.graphics_queue.family,
}, null);
defer gc.vkd.destroyCommandPool(gc.dev, pool, null);

const buffer = try gc.vkd.createBuffer(gc.dev, .{
const buffer = try gc.vkd.createBuffer(gc.dev, &.{
.flags = .{},
.size = @sizeOf(@TypeOf(vertices)),
.usage = .{.transfer_dst_bit = true, .vertex_buffer_bit = true},
.usage = .{ .transfer_dst_bit = true, .vertex_buffer_bit = true },
.sharing_mode = .exclusive,
.queue_family_index_count = 0,
.p_queue_family_indices = undefined,
}, null);
defer gc.vkd.destroyBuffer(gc.dev, buffer, null);
const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, buffer);
const memory = try gc.allocate(mem_reqs, .{.device_local_bit = true});
const memory = try gc.allocate(mem_reqs, .{ .device_local_bit = true });
defer gc.vkd.freeMemory(gc.dev, memory, null);
try gc.vkd.bindBufferMemory(gc.dev, buffer, memory, 0);

try uploadVertices(&gc, pool, buffer, memory);
try uploadVertices(&gc, pool, buffer);

var cmdbufs = try createCommandBuffers(
&gc,

@@ -114,7 +116,7 @@ pub fn main() !void {
swapchain.extent,
render_pass,
pipeline,
framebuffers
framebuffers,
);
defer destroyCommandBuffers(&gc, pool, allocator, cmdbufs);

@@ -126,10 +128,11 @@ pub fn main() !void {
else => |narrow| return narrow,
};

if (state == .suboptimal) {
var w: c_int = undefined;
var h: c_int = undefined;
c.glfwGetWindowSize(window, &w, &h);
var w: c_int = undefined;
var h: c_int = undefined;
c.glfwGetWindowSize(window, &w, &h);

if (state == .suboptimal or extent.width != @intCast(u32, w) or extent.height != @intCast(u32, h)) {
extent.width = @intCast(u32, w);
extent.height = @intCast(u32, h);
try swapchain.recreate(extent);

@@ -146,30 +149,28 @@ pub fn main() !void {
swapchain.extent,
render_pass,
pipeline,
framebuffers
framebuffers,
);
}

c.glfwSwapBuffers(window);
c.glfwPollEvents();
}

try swapchain.waitForAllFences();
}

fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer, memory: vk.DeviceMemory) !void {
const staging_buffer = try gc.vkd.createBuffer(gc.dev, .{
fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer) !void {
const staging_buffer = try gc.vkd.createBuffer(gc.dev, &.{
.flags = .{},
.size = @sizeOf(@TypeOf(vertices)),
.usage = .{.transfer_src_bit = true},
.usage = .{ .transfer_src_bit = true },
.sharing_mode = .exclusive,
.queue_family_index_count = 0,
.p_queue_family_indices = undefined,
}, null);
defer gc.vkd.destroyBuffer(gc.dev, staging_buffer, null);
const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, staging_buffer);
const staging_memory = try gc.allocate(mem_reqs, .{.host_visible_bit = true, .host_coherent_bit = true});
const staging_memory = try gc.allocate(mem_reqs, .{ .host_visible_bit = true, .host_coherent_bit = true });
defer gc.vkd.freeMemory(gc.dev, staging_memory, null);
try gc.vkd.bindBufferMemory(gc.dev, staging_buffer, staging_memory, 0);

@@ -188,15 +189,15 @@ fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.B

fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer, src: vk.Buffer, size: vk.DeviceSize) !void {
var cmdbuf: vk.CommandBuffer = undefined;
try gc.vkd.allocateCommandBuffers(gc.dev, .{
try gc.vkd.allocateCommandBuffers(gc.dev, &.{
.command_pool = pool,
.level = .primary,
.command_buffer_count = 1,
}, @ptrCast([*]vk.CommandBuffer, &cmdbuf));
defer gc.vkd.freeCommandBuffers(gc.dev, pool, 1, @ptrCast([*]const vk.CommandBuffer, &cmdbuf));

try gc.vkd.beginCommandBuffer(cmdbuf, .{
.flags = .{.one_time_submit_bit = true},
try gc.vkd.beginCommandBuffer(cmdbuf, &.{
.flags = .{ .one_time_submit_bit = true },
.p_inheritance_info = null,
});

@@ -225,7 +226,7 @@ fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer,
fn createCommandBuffers(
gc: *const GraphicsContext,
pool: vk.CommandPool,
allocator: *Allocator,
allocator: Allocator,
buffer: vk.Buffer,
extent: vk.Extent2D,
render_pass: vk.RenderPass,

@@ -235,7 +236,7 @@ fn createCommandBuffers(
const cmdbufs = try allocator.alloc(vk.CommandBuffer, framebuffers.len);
errdefer allocator.free(cmdbufs);

try gc.vkd.allocateCommandBuffers(gc.dev, .{
try gc.vkd.allocateCommandBuffers(gc.dev, &.{
.command_pool = pool,
.level = .primary,
.command_buffer_count = @truncate(u32, cmdbufs.len),

@@ -243,7 +244,7 @@ fn createCommandBuffers(
errdefer gc.vkd.freeCommandBuffers(gc.dev, pool, @truncate(u32, cmdbufs.len), cmdbufs.ptr);

const clear = vk.ClearValue{
.color = .{.float_32 = .{0, 0, 0, 1}},
.color = .{ .float_32 = .{ 0, 0, 0, 1 } },
};

const viewport = vk.Viewport{

@@ -256,12 +257,12 @@ fn createCommandBuffers(
};

const scissor = vk.Rect2D{
.offset = .{.x = 0, .y = 0},
.offset = .{ .x = 0, .y = 0 },
.extent = extent,
};

for (cmdbufs) |cmdbuf, i| {
try gc.vkd.beginCommandBuffer(cmdbuf, .{
try gc.vkd.beginCommandBuffer(cmdbuf, &.{
.flags = .{},
.p_inheritance_info = null,
});

@@ -269,13 +270,16 @@ fn createCommandBuffers(
gc.vkd.cmdSetViewport(cmdbuf, 0, 1, @ptrCast([*]const vk.Viewport, &viewport));
gc.vkd.cmdSetScissor(cmdbuf, 0, 1, @ptrCast([*]const vk.Rect2D, &scissor));

gc.vkd.cmdBeginRenderPass(cmdbuf, .{
// This needs to be a separate definition - see https://github.com/ziglang/zig/issues/7627.
const render_area = vk.Rect2D{
.offset = .{ .x = 0, .y = 0 },
.extent = extent,
};

gc.vkd.cmdBeginRenderPass(cmdbuf, &.{
.render_pass = render_pass,
.framebuffer = framebuffers[i],
.render_area = .{
.offset = .{.x = 0, .y = 0},
.extent = extent,
},
.render_area = render_area,
.clear_value_count = 1,
.p_clear_values = @ptrCast([*]const vk.ClearValue, &clear),
}, .@"inline");

@@ -292,25 +296,20 @@ fn createCommandBuffers(
return cmdbufs;
}

fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: *Allocator, cmdbufs: []vk.CommandBuffer) void {
fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: Allocator, cmdbufs: []vk.CommandBuffer) void {
gc.vkd.freeCommandBuffers(gc.dev, pool, @truncate(u32, cmdbufs.len), cmdbufs.ptr);
allocator.free(cmdbufs);
}

fn createFramebuffers(
gc: *const GraphicsContext,
allocator: *Allocator,
render_pass: vk.RenderPass,
swapchain: Swapchain
) ![]vk.Framebuffer {
fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_pass: vk.RenderPass, swapchain: Swapchain) ![]vk.Framebuffer {
const framebuffers = try allocator.alloc(vk.Framebuffer, swapchain.swap_images.len);
errdefer allocator.free(framebuffers);

var i: usize = 0;
errdefer for (framebuffers[0 .. i]) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);
errdefer for (framebuffers[0..i]) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);

for (framebuffers) |*fb| {
fb.* = try gc.vkd.createFramebuffer(gc.dev, .{
fb.* = try gc.vkd.createFramebuffer(gc.dev, &.{
.flags = .{},
.render_pass = render_pass,
.attachment_count = 1,

@@ -325,7 +324,7 @@ fn createFramebuffers(
return framebuffers;
}

fn destroyFramebuffers(gc: *const GraphicsContext, allocator: *Allocator, framebuffers: []const vk.Framebuffer) void {
fn destroyFramebuffers(gc: *const GraphicsContext, allocator: Allocator, framebuffers: []const vk.Framebuffer) void {
for (framebuffers) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);
allocator.free(framebuffers);
}

@@ -334,7 +333,7 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render
const color_attachment = vk.AttachmentDescription{
.flags = .{},
.format = swapchain.surface_format.format,
.samples = .{.@"1_bit" = true},
.samples = .{ .@"1_bit" = true },
.load_op = .clear,
.store_op = .store,
.stencil_load_op = .dont_care,

@@ -361,7 +360,7 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render
.p_preserve_attachments = undefined,
};

return try gc.vkd.createRenderPass(gc.dev, .{
return try gc.vkd.createRenderPass(gc.dev, &.{
.flags = .{},
.attachment_count = 1,
.p_attachments = @ptrCast([*]const vk.AttachmentDescription, &color_attachment),

@@ -374,35 +373,34 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render

fn createPipeline(
gc: *const GraphicsContext,
extent: vk.Extent2D,
layout: vk.PipelineLayout,
render_pass: vk.RenderPass,
) !vk.Pipeline {
const vert = try gc.vkd.createShaderModule(gc.dev, .{
const vert = try gc.vkd.createShaderModule(gc.dev, &.{
.flags = .{},
.code_size = resources.triangle_vert.len,
.p_code = @ptrCast([*]const u32, resources.triangle_vert),
.p_code = @ptrCast([*]const u32, &resources.triangle_vert),
}, null);
defer gc.vkd.destroyShaderModule(gc.dev, vert, null);

const frag = try gc.vkd.createShaderModule(gc.dev, .{
const frag = try gc.vkd.createShaderModule(gc.dev, &.{
.flags = .{},
.code_size = resources.triangle_frag.len,
.p_code = @ptrCast([*]const u32, resources.triangle_frag),
.p_code = @ptrCast([*]const u32, &resources.triangle_frag),
}, null);
defer gc.vkd.destroyShaderModule(gc.dev, frag, null);

const pssci = [_]vk.PipelineShaderStageCreateInfo{
.{
.flags = .{},
.stage = .{.vertex_bit = true},
.stage = .{ .vertex_bit = true },
.module = vert,
.p_name = "main",
.p_specialization_info = null,
},
.{
.flags = .{},
.stage = .{.fragment_bit = true},
.stage = .{ .fragment_bit = true },
.module = frag,
.p_name = "main",
.p_specialization_info = null,

@@ -436,7 +434,7 @@ fn createPipeline(
.depth_clamp_enable = vk.FALSE,
.rasterizer_discard_enable = vk.FALSE,
.polygon_mode = .fill,
.cull_mode = .{.back_bit = true},
.cull_mode = .{ .back_bit = true },
.front_face = .clockwise,
.depth_bias_enable = vk.FALSE,
.depth_bias_constant_factor = 0,

@@ -447,7 +445,7 @@ fn createPipeline(

const pmsci = vk.PipelineMultisampleStateCreateInfo{
.flags = .{},
.rasterization_samples = .{.@"1_bit" = true},
.rasterization_samples = .{ .@"1_bit" = true },
.sample_shading_enable = vk.FALSE,
.min_sample_shading = 1,
.p_sample_mask = null,

@@ -463,7 +461,7 @@ fn createPipeline(
.src_alpha_blend_factor = .one,
.dst_alpha_blend_factor = .zero,
.alpha_blend_op = .add,
.color_write_mask = .{.r_bit = true, .g_bit = true, .b_bit = true, .a_bit = true},
.color_write_mask = .{ .r_bit = true, .g_bit = true, .b_bit = true, .a_bit = true },
};

const pcbsci = vk.PipelineColorBlendStateCreateInfo{

@@ -472,10 +470,10 @@ fn createPipeline(
.logic_op = .copy,
.attachment_count = 1,
.p_attachments = @ptrCast([*]const vk.PipelineColorBlendAttachmentState, &pcbas),
.blend_constants = [_]f32{0, 0, 0, 0},
.blend_constants = [_]f32{ 0, 0, 0, 0 },
};

const dynstate = [_]vk.DynamicState{.viewport, .scissor};
const dynstate = [_]vk.DynamicState{ .viewport, .scissor };
const pdsci = vk.PipelineDynamicStateCreateInfo{
.flags = .{},
.dynamic_state_count = dynstate.len,

@@ -506,7 +504,8 @@ fn createPipeline(
_ = try gc.vkd.createGraphicsPipelines(
gc.dev,
.null_handle,
1, @ptrCast([*]const vk.GraphicsPipelineCreateInfo, &gpci),
1,
@ptrCast([*]const vk.GraphicsPipelineCreateInfo, &gpci),
null,
@ptrCast([*]vk.Pipeline, &pipeline),
);
examples/vk.xml: 2110 changes (file diff suppressed because it is too large)
@@ -3,10 +3,27 @@ const path = std.fs.path;
const Builder = std.build.Builder;
const Step = std.build.Step;

/// Stage the shader should be built for. This is passed to the -fshader-stage
/// argument when invoking glslc.
pub const ShaderStage = enum {
vertex,
fragment,
tesscontrol,
tesseval,
geometry,
compute,
};

/// Utility functionality to help with compiling shaders from build.zig.
/// Invokes glslc (or another shader compiler passed to `init`) for each shader
/// added via `addShader`.
pub const ShaderCompileStep = struct {
const AddFileParams = struct {
entry_point: ?[]const u8 = null,
stage: ?ShaderStage = null,
output_filename: ?[]const u8 = null,
};
|
||||
|
||||
/// Structure representing a shader to be compiled.
|
||||
const Shader = struct {
|
||||
/// The path to the shader, relative to the current build root.
|
||||
@@ -14,6 +31,13 @@ pub const ShaderCompileStep = struct {
|
||||
|
||||
/// The full output path where the compiled shader binary is placed.
|
||||
full_out_path: []const u8,
|
||||
|
||||
/// The entry point to use when compiling the shader.
|
||||
entry_point: ?[]const u8,
|
||||
|
||||
/// The stage to use when building. If not null, this is passed to
|
||||
/// the -fshader-stage argument.
|
||||
stage: ?ShaderStage,
|
||||
};
|
||||
|
||||
step: Step,
|
||||
@@ -22,17 +46,21 @@ pub const ShaderCompileStep = struct {
|
||||
/// The command and optional arguments used to invoke the shader compiler.
|
||||
glslc_cmd: []const []const u8,
|
||||
|
||||
/// The directory within `zig-cache/` that the compiled shaders are placed in.
|
||||
output_dir: []const u8,
|
||||
|
||||
/// List of shaders that are to be compiled.
|
||||
shaders: std.ArrayList(Shader),
|
||||
|
||||
/// Create a ShaderCompilerStep for `builder`. When this step is invoked by the build
|
||||
/// system, `<glcl_cmd...> <shader_source> -o <dst_addr>` is invoked for each shader.
|
||||
pub fn init(builder: *Builder, glslc_cmd: []const []const u8) *ShaderCompileStep {
|
||||
pub fn init(builder: *Builder, glslc_cmd: []const []const u8, output_dir: []const u8) *ShaderCompileStep {
|
||||
const self = builder.allocator.create(ShaderCompileStep) catch unreachable;
|
||||
self.* = .{
|
||||
.step = Step.init(.Custom, "shader-compile", builder.allocator, make),
|
||||
.step = Step.init(.custom, "shader-compile", builder.allocator, make),
|
||||
.builder = builder,
|
||||
.glslc_cmd = glslc_cmd,
|
||||
.output_dir = output_dir,
|
||||
.glslc_cmd = builder.dupeStrings(glslc_cmd),
|
||||
.shaders = std.ArrayList(Shader).init(builder.allocator),
|
||||
};
|
||||
return self;
|
||||
@@ -41,15 +69,15 @@ pub const ShaderCompileStep = struct {
/// Add a shader to be compiled. `src` is shader source path, relative to the project root.
/// Returns the full path where the compiled binary will be stored upon successful compilation.
/// This path can then be used to include the binary into an executable, for example by passing it
/// to @embedFile via an additional generated file.
pub fn add(self: *ShaderCompileStep, src: []const u8) []const u8 {
/// to @embedFile via an additional generated file. `entry_point` is the entry point to pass to the compiler.
/// `stage` is an optional shader stage to pass to the compiler with the flag `-fshader-stage` when building the shader.
pub fn add(self: *ShaderCompileStep, src: []const u8, params: AddFileParams) []const u8 {
const full_out_path = path.join(self.builder.allocator, &[_][]const u8{
self.builder.build_root,
self.builder.cache_root,
"shaders",
src,
if (params.output_filename) |out| out else std.fmt.allocPrint(self.builder.allocator, "{s}.spv", .{src}) catch unreachable,
}) catch unreachable;
self.shaders.append(.{.source_path = src, .full_out_path = full_out_path}) catch unreachable;
self.shaders.append(.{ .source_path = src, .full_out_path = full_out_path, .entry_point = params.entry_point, .stage = params.stage }) catch unreachable;
return full_out_path;
}
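A hedged sketch of how the extended `add` might be called from a build.zig; the import path, glslc flags, and shader file names are invented for illustration, and only `init`, `add`, and `AddFileParams` come from the code above:

    const std = @import("std");
    const vkgen = @import("generator/index.zig"); // hypothetical path to this package

    pub fn build(b: *std.build.Builder) void {
        const exe = b.addExecutable("triangle", "examples/triangle.zig");

        const shaders = vkgen.ShaderCompileStep.init(
            b,
            &[_][]const u8{ "glslc", "--target-env=vulkan1.2" }, // assumed base compiler invocation
            "shaders", // output directory inside zig-cache
        );
        // Stage can be inferred by glslc from the extension, or forced explicitly:
        _ = shaders.add("shaders/triangle.vert", .{});
        _ = shaders.add("shaders/post.glsl", .{ .stage = .fragment, .entry_point = "main" });
        exe.step.dependOn(&shaders.step);
    }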
|
||||
|
||||
@@ -58,18 +86,26 @@ pub const ShaderCompileStep = struct {
|
||||
const self = @fieldParentPtr(ShaderCompileStep, "step", step);
|
||||
const cwd = std.fs.cwd();
|
||||
|
||||
const cmd = try self.builder.allocator.alloc([]const u8, self.glslc_cmd.len + 3);
|
||||
for (self.glslc_cmd) |part, i| {
|
||||
cmd[i] = part;
|
||||
}
|
||||
cmd[cmd.len - 2] = "-o";
|
||||
var cmd = std.ArrayList([]const u8).init(self.builder.allocator);
|
||||
try cmd.appendSlice(self.glslc_cmd);
|
||||
const base_cmd_len = cmd.items.len;
|
||||
|
||||
for (self.shaders.items) |shader| {
|
||||
cmd.items.len = base_cmd_len;
|
||||
|
||||
if (shader.entry_point) |entry_point| {
|
||||
try cmd.append(try std.fmt.allocPrint(self.builder.allocator, "-fentry-point={s}", .{entry_point}));
|
||||
}
|
||||
|
||||
if (shader.stage) |stage| {
|
||||
try cmd.append(try std.fmt.allocPrint(self.builder.allocator, "-fshader-stage={s}", .{@tagName(stage)}));
|
||||
}
|
||||
|
||||
const dir = path.dirname(shader.full_out_path).?;
|
||||
try cwd.makePath(dir);
|
||||
cmd[cmd.len - 3] = shader.source_path;
|
||||
cmd[cmd.len - 1] = shader.full_out_path;
|
||||
try self.builder.spawnChild(cmd);
|
||||
|
||||
try cmd.appendSlice(&.{shader.source_path, "-o", shader.full_out_path});
|
||||
try self.builder.spawnChild(cmd.items);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,6 +2,65 @@ const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const Allocator = mem.Allocator;
|
||||
|
||||
pub fn isZigPrimitiveType(name: []const u8) bool {
|
||||
if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) {
|
||||
for (name[1..]) |c| {
|
||||
switch (c) {
|
||||
'0'...'9' => {},
|
||||
else => break,
|
||||
}
|
||||
} else return true;
|
||||
}
|
||||
|
||||
const primitives = [_][]const u8{
|
||||
"void",
|
||||
"comptime_float",
|
||||
"comptime_int",
|
||||
"bool",
|
||||
"isize",
|
||||
"usize",
|
||||
"f16",
|
||||
"f32",
|
||||
"f64",
|
||||
"f128",
|
||||
"noreturn",
|
||||
"type",
|
||||
"anyerror",
|
||||
"c_short",
|
||||
"c_ushort",
|
||||
"c_int",
|
||||
"c_uint",
|
||||
"c_long",
|
||||
"c_ulong",
|
||||
"c_longlong",
|
||||
"c_ulonglong",
|
||||
"c_longdouble",
|
||||
// Removed in stage 2 in https://github.com/ziglang/zig/commit/05cf44933d753f7a5a53ab289ea60fd43761de57,
|
||||
// but these are still invalid identifiers in stage 1.
|
||||
"undefined",
|
||||
"true",
|
||||
"false",
|
||||
"null",
|
||||
};
|
||||
|
||||
for (primitives) |reserved| {
|
||||
if (mem.eql(u8, reserved, name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn writeIdentifier(writer: anytype, id: []const u8) !void {
|
||||
// https://github.com/ziglang/zig/issues/2897
|
||||
if (isZigPrimitiveType(id)) {
|
||||
try writer.print("@\"{}\"", .{std.zig.fmtEscapes(id)});
|
||||
} else {
|
||||
try writer.print("{}", .{std.zig.fmtId(id)});
|
||||
}
|
||||
}
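A hedged example of the escaping rule above: identifiers that collide with Zig primitives (or with stage1's still-reserved `undefined`/`true`/`false`/`null`) come out wrapped in `@"..."`, while ordinary identifiers pass through `fmtId` untouched. The test assumes it lives in the same file as `writeIdentifier`:

    test "writeIdentifier escaping (illustrative)" {
        var buf = std.ArrayList(u8).init(std.testing.allocator);
        defer buf.deinit();

        try writeIdentifier(buf.writer(), "u32"); // primitive type name -> escaped
        try buf.append(' ');
        try writeIdentifier(buf.writer(), "instance"); // plain identifier -> unchanged
        try std.testing.expectEqualStrings("@\"u32\" instance", buf.items);
    }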
|
||||
|
||||
pub const CaseStyle = enum {
|
||||
snake,
|
||||
screaming_snake,
|
||||
@@ -51,7 +110,7 @@ pub const SegmentIterator = struct {
|
||||
}
|
||||
|
||||
const end = self.nextBoundary();
|
||||
const word = self.text[self.offset .. end];
|
||||
const word = self.text[self.offset..end];
|
||||
self.offset = end;
|
||||
return word;
|
||||
}
|
||||
@@ -69,7 +128,7 @@ pub const IdRenderer = struct {
|
||||
tags: []const []const u8,
|
||||
text_cache: std.ArrayList(u8),
|
||||
|
||||
pub fn init(allocator: *Allocator, tags: []const []const u8) IdRenderer {
|
||||
pub fn init(allocator: Allocator, tags: []const []const u8) IdRenderer {
|
||||
return .{
|
||||
.tags = tags,
|
||||
.text_cache = std.ArrayList(u8).init(allocator),
|
||||
@@ -80,7 +139,7 @@ pub const IdRenderer = struct {
|
||||
self.text_cache.deinit();
|
||||
}
|
||||
|
||||
fn renderSnake(self: *IdRenderer, screaming: bool, id: []const u8, tag: ?[]const u8) !void {
|
||||
fn renderSnake(self: *IdRenderer, comptime screaming: bool, id: []const u8, tag: ?[]const u8) !void {
|
||||
var it = SegmentIterator.init(id);
|
||||
var first = true;
|
||||
const transform = if (screaming) std.ascii.toUpper else std.ascii.toLower;
|
||||
@@ -128,7 +187,7 @@ pub const IdRenderer = struct {
|
||||
}
|
||||
lower_first = false;
|
||||
|
||||
for (segment[i + 1..]) |c| {
|
||||
for (segment[i + 1 ..]) |c| {
|
||||
try self.text_cache.append(std.ascii.toLower(c));
|
||||
}
|
||||
}
|
||||
@@ -138,14 +197,10 @@ pub const IdRenderer = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(self: IdRenderer, out: anytype, id: []const u8) !void {
|
||||
try out.print("{z}", .{ id });
|
||||
}
|
||||
|
||||
pub fn renderFmt(self: *IdRenderer, out: anytype, comptime fmt: []const u8, args: anytype) !void {
|
||||
self.text_cache.items.len = 0;
|
||||
try std.fmt.format(self.text_cache.writer(), fmt, args);
|
||||
try out.print("{z}", .{ self.text_cache.items });
|
||||
try writeIdentifier(out, self.text_cache.items);
|
||||
}
|
||||
|
||||
pub fn renderWithCase(self: *IdRenderer, out: anytype, case_style: CaseStyle, id: []const u8) !void {
|
||||
@@ -162,7 +217,7 @@ pub const IdRenderer = struct {
|
||||
.camel => try self.renderCamel(false, adjusted_id, tag),
|
||||
}
|
||||
|
||||
try out.print("{z}", .{ self.text_cache.items });
|
||||
try writeIdentifier(out, self.text_cache.items);
|
||||
}
|
||||
|
||||
pub fn getAuthorTag(self: IdRenderer, id: []const u8) ?[]const u8 {
|
||||
|
||||
@@ -1,6 +1,6 @@
pub const generateVk = @import("vulkan/generator.zig").generate;
pub const VkGenerateStep = @import("vulkan/build_integration.zig").GenerateStep;
pub const generateSpirv = @import("spirv/generator.zig").generate;
pub const ShaderStage = @import("build_integration.zig").ShaderStage;
pub const ShaderCompileStep = @import("build_integration.zig").ShaderCompileStep;

test "main" {
|
||||
|
||||
@@ -5,21 +5,18 @@ const usage = "Usage: {s} [-h|--help] <spec xml path> <output zig source>\n";
|
||||
|
||||
pub fn main() !void {
|
||||
const stderr = std.io.getStdErr();
|
||||
const stdout = std.io.getStdOut();
|
||||
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.allocator();
|
||||
|
||||
var args = std.process.args();
|
||||
const prog_name = try args.next(allocator) orelse return error.ExecutableNameMissing;
|
||||
var args = try std.process.argsWithAllocator(allocator);
|
||||
const prog_name = args.next() orelse return error.ExecutableNameMissing;
|
||||
|
||||
var maybe_xml_path: ?[]const u8 = null;
|
||||
var maybe_out_path: ?[]const u8 = null;
|
||||
|
||||
while (args.next(allocator)) |err_or_arg| {
|
||||
const arg = try err_or_arg;
|
||||
|
||||
while (args.next()) |arg| {
|
||||
if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) {
|
||||
@setEvalBranchQuota(2000);
|
||||
try stderr.writer().print(
|
||||
@@ -32,7 +29,7 @@ pub fn main() !void {
|
||||
\\
|
||||
\\
|
||||
++ usage,
|
||||
.{ prog_name },
|
||||
.{prog_name},
|
||||
);
|
||||
return;
|
||||
} else if (maybe_xml_path == null) {
|
||||
@@ -40,17 +37,17 @@ pub fn main() !void {
|
||||
} else if (maybe_out_path == null) {
|
||||
maybe_out_path = arg;
|
||||
} else {
|
||||
try stderr.writer().print("Error: Superficial argument '{s}'\n", .{ arg });
|
||||
try stderr.writer().print("Error: Superficial argument '{s}'\n", .{arg});
|
||||
}
|
||||
}
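The loop above relies on the allocator-free iterator API (`argsWithAllocator` plus `args.next()` returning an optional) that replaced the older error-union `next(allocator)`. A minimal, self-contained sketch of that newer pattern, assuming a zig-0.10-era std:

    const std = @import("std");

    pub fn main() !void {
        var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
        defer arena.deinit();
        const allocator = arena.allocator();

        var args = try std.process.argsWithAllocator(allocator);
        defer args.deinit();

        _ = args.next(); // skip the executable name
        while (args.next()) |arg| {
            std.debug.print("got argument: {s}\n", .{arg});
        }
    }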
|
||||
|
||||
const xml_path = maybe_xml_path orelse {
|
||||
try stderr.writer().print("Error: Missing required argument <spec xml path>\n" ++ usage, .{ prog_name });
|
||||
try stderr.writer().print("Error: Missing required argument <spec xml path>\n" ++ usage, .{prog_name});
|
||||
return;
|
||||
};
|
||||
|
||||
const out_path = maybe_out_path orelse {
|
||||
try stderr.writer().print("Error: Missing required argument <output zig source>\n" ++ usage, .{ prog_name });
|
||||
try stderr.writer().print("Error: Missing required argument <output zig source>\n" ++ usage, .{prog_name});
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -60,15 +57,24 @@ pub fn main() !void {
|
||||
return;
|
||||
};
|
||||
|
||||
const out_file = cwd.createFile(out_path, .{}) catch |err| {
|
||||
try stderr.writer().print("Error: Failed to create output file '{s}' ({s})\n", .{ out_path, @errorName(err) });
|
||||
return;
|
||||
};
|
||||
defer out_file.close();
|
||||
|
||||
var out_buffer = std.ArrayList(u8).init(allocator);
|
||||
try generate(allocator, xml_src, out_buffer.writer());
|
||||
const tree = try std.zig.parse(allocator, out_buffer.items);
|
||||
try out_buffer.append(0);
|
||||
|
||||
_ = try std.zig.render(allocator, out_file.writer(), tree);
|
||||
const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
|
||||
const tree = try std.zig.parse(allocator, src);
|
||||
const formatted = try tree.render(allocator);
|
||||
defer allocator.free(formatted);
|
||||
|
||||
if (std.fs.path.dirname(out_path)) |dir| {
|
||||
cwd.makePath(dir) catch |err| {
|
||||
try stderr.writer().print("Error: Failed to create output directory '{s}' ({s})\n", .{ dir, @errorName(err) });
|
||||
return;
|
||||
};
|
||||
}
|
||||
|
||||
cwd.writeFile(out_path, formatted) catch |err| {
|
||||
try stderr.writer().print("Error: Failed to write to output file '{s}' ({s})\n", .{ out_path, @errorName(err) });
|
||||
return;
|
||||
};
|
||||
}
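The `append(0)` / `items[0 .. len - 1 :0]` dance above exists because `std.zig.parse` wants a null-terminated source slice. A small standalone sketch of obtaining a `[:0]const u8` view from an ArrayList this way:

    const std = @import("std");

    test "sentinel-terminated view of an ArrayList" {
        var buf = std.ArrayList(u8).init(std.testing.allocator);
        defer buf.deinit();

        try buf.appendSlice("const x = 1;");
        try buf.append(0); // terminator byte, not part of the source itself
        const src = buf.items[0 .. buf.items.len - 1 :0]; // slicing checks the final byte is 0
        try std.testing.expectEqual(@as(usize, 12), src.len);
    }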
|
||||
|
||||
@@ -21,6 +21,8 @@ pub const GenerateStep = struct {
|
||||
/// name `vulkan`.
|
||||
package: std.build.Pkg,
|
||||
|
||||
output_file: std.build.GeneratedFile,
|
||||
|
||||
/// Initialize a Vulkan generation step, for `builder`. `spec_path` is the path to
|
||||
/// vk.xml, relative to the project root. The generated bindings will be placed at
|
||||
/// `out_path`, which is relative to the zig-cache directory.
|
||||
@@ -33,14 +35,18 @@ pub const GenerateStep = struct {
|
||||
}) catch unreachable;
|
||||
|
||||
self.* = .{
|
||||
.step = Step.init(.Custom, "vulkan-generate", builder.allocator, make),
|
||||
.step = Step.init(.custom, "vulkan-generate", builder.allocator, make),
|
||||
.builder = builder,
|
||||
.spec_path = spec_path,
|
||||
.package = .{
|
||||
.name = "vulkan",
|
||||
.path = full_out_path,
|
||||
.source = .{ .generated = &self.output_file },
|
||||
.dependencies = null,
|
||||
}
|
||||
},
|
||||
.output_file = .{
|
||||
.step = &self.step,
|
||||
.path = full_out_path,
|
||||
},
|
||||
};
|
||||
return self;
|
||||
}
|
||||
@@ -52,7 +58,7 @@ pub const GenerateStep = struct {
|
||||
pub fn initFromSdk(builder: *Builder, sdk_path: []const u8, out_path: []const u8) *GenerateStep {
|
||||
const spec_path = std.fs.path.join(
|
||||
builder.allocator,
|
||||
&[_][]const u8{sdk_path, "share/vulkan/registry/vk.xml"},
|
||||
&[_][]const u8{ sdk_path, "share/vulkan/registry/vk.xml" },
|
||||
) catch unreachable;
|
||||
|
||||
return init(builder, spec_path, out_path);
|
||||
@@ -65,16 +71,21 @@ pub const GenerateStep = struct {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(GenerateStep, "step", step);
|
||||
const cwd = std.fs.cwd();
|
||||
var out_buffer = std.ArrayList(u8).init(self.builder.allocator);
|
||||
|
||||
const spec = try cwd.readFileAlloc(self.builder.allocator, self.spec_path, std.math.maxInt(usize));
|
||||
|
||||
var out_buffer = std.ArrayList(u8).init(self.builder.allocator);
|
||||
try generate(self.builder.allocator, spec, out_buffer.writer());
|
||||
try out_buffer.append(0);
|
||||
|
||||
const tree = try std.zig.parse(self.builder.allocator, out_buffer.items);
|
||||
const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
|
||||
const tree = try std.zig.parse(self.builder.allocator, src);
|
||||
std.debug.assert(tree.errors.len == 0); // If this triggers, vulkan-zig produced invalid code.
|
||||
|
||||
const dir = path.dirname(self.package.path).?;
|
||||
var formatted = try tree.render(self.builder.allocator);
|
||||
|
||||
const dir = path.dirname(self.output_file.path.?).?;
|
||||
try cwd.makePath(dir);
|
||||
const output_file = cwd.createFile(self.package.path, .{}) catch unreachable;
|
||||
defer output_file.close();
|
||||
_ = try std.zig.render(self.builder.allocator, output_file.writer(), tree);
|
||||
try cwd.writeFile(self.output_file.path.?, formatted);
|
||||
}
|
||||
};
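A hedged sketch of consuming this step from a build.zig, assuming the `VkGenerateStep` re-export from index.zig shown earlier; the import path and file locations are invented:

    const std = @import("std");
    const vkgen = @import("generator/index.zig"); // hypothetical path

    pub fn build(b: *std.build.Builder) void {
        const exe = b.addExecutable("triangle", "examples/triangle.zig");

        // Point the generator at a vk.xml (or derive the path via initFromSdk).
        const gen = vkgen.VkGenerateStep.init(b, "examples/vk.xml", "vk.zig");
        exe.step.dependOn(&gen.step);
        exe.addPackage(gen.package); // package source is the GeneratedFile set up above
    }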
|
||||
|
||||
@@ -53,9 +53,9 @@ pub const CTokenizer = struct {

fn consume(self: *CTokenizer) !u8 {
return if (self.offset < self.source.len)
return self.consumeNoEof()
else
return null;
return self.consumeNoEof()
else
return null;
}
|
||||
|
||||
fn keyword(self: *CTokenizer) Token {
|
||||
@@ -70,20 +70,20 @@ pub const CTokenizer = struct {
|
||||
}
|
||||
}
|
||||
|
||||
const token_text = self.source[start .. self.offset];
|
||||
const token_text = self.source[start..self.offset];
|
||||
|
||||
const kind = if (mem.eql(u8, token_text, "typedef"))
|
||||
Token.Kind.kw_typedef
|
||||
else if (mem.eql(u8, token_text, "const"))
|
||||
Token.Kind.kw_const
|
||||
else if (mem.eql(u8, token_text, "VKAPI_PTR"))
|
||||
Token.Kind.kw_vkapi_ptr
|
||||
else if (mem.eql(u8, token_text, "struct"))
|
||||
Token.Kind.kw_struct
|
||||
else
|
||||
Token.Kind.id;
|
||||
Token.Kind.kw_typedef
|
||||
else if (mem.eql(u8, token_text, "const"))
|
||||
Token.Kind.kw_const
|
||||
else if (mem.eql(u8, token_text, "VKAPI_PTR"))
|
||||
Token.Kind.kw_vkapi_ptr
|
||||
else if (mem.eql(u8, token_text, "struct"))
|
||||
Token.Kind.kw_struct
|
||||
else
|
||||
Token.Kind.id;
|
||||
|
||||
return .{.kind = kind, .text = token_text};
|
||||
return .{ .kind = kind, .text = token_text };
|
||||
}
|
||||
|
||||
fn int(self: *CTokenizer) Token {
|
||||
@@ -100,7 +100,7 @@ pub const CTokenizer = struct {
|
||||
|
||||
return .{
|
||||
.kind = .int,
|
||||
.text = self.source[start .. self.offset],
|
||||
.text = self.source[start..self.offset],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -115,7 +115,7 @@ pub const CTokenizer = struct {
|
||||
|
||||
pub fn next(self: *CTokenizer) !?Token {
|
||||
self.skipws();
|
||||
if (mem.startsWith(u8, self.source[self.offset ..], "//") or self.in_comment) {
|
||||
if (mem.startsWith(u8, self.source[self.offset..], "//") or self.in_comment) {
|
||||
const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse {
|
||||
self.offset = self.source.len;
|
||||
self.in_comment = true;
|
||||
@@ -143,15 +143,12 @@ pub const CTokenizer = struct {
|
||||
']' => kind = .rbracket,
|
||||
'(' => kind = .lparen,
|
||||
')' => kind = .rparen,
|
||||
else => return error.UnexpectedCharacter
|
||||
else => return error.UnexpectedCharacter,
|
||||
}
|
||||
|
||||
const start = self.offset;
|
||||
_ = self.consumeNoEof();
|
||||
return Token{
|
||||
.kind = kind,
|
||||
.text = self.source[start .. self.offset]
|
||||
};
|
||||
return Token{ .kind = kind, .text = self.source[start..self.offset] };
|
||||
}
|
||||
};
|
||||
|
||||
@@ -167,17 +164,17 @@ pub const XmlCTokenizer = struct {
|
||||
}
|
||||
|
||||
fn elemToToken(elem: *xml.Element) !?Token {
|
||||
if (elem.children.items.len != 1 or elem.children.items[0] != .CharData) {
|
||||
if (elem.children.len != 1 or elem.children[0] != .char_data) {
|
||||
return error.InvalidXml;
|
||||
}
|
||||
|
||||
const text = elem.children.items[0].CharData;
|
||||
const text = elem.children[0].char_data;
|
||||
if (mem.eql(u8, elem.tag, "type")) {
|
||||
return Token{.kind = .type_name, .text = text};
|
||||
return Token{ .kind = .type_name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "enum")) {
|
||||
return Token{.kind = .enum_name, .text = text};
|
||||
return Token{ .kind = .enum_name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "name")) {
|
||||
return Token{.kind = .name, .text = text};
|
||||
return Token{ .kind = .name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "comment")) {
|
||||
return null;
|
||||
} else {
|
||||
@@ -206,9 +203,9 @@ pub const XmlCTokenizer = struct {
|
||||
|
||||
if (self.it.next()) |child| {
|
||||
switch (child.*) {
|
||||
.CharData => |cdata| self.ctok = CTokenizer{.source = cdata, .in_comment = in_comment},
|
||||
.Comment => {}, // xml comment
|
||||
.Element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
.char_data => |cdata| self.ctok = CTokenizer{ .source = cdata, .in_comment = in_comment },
|
||||
.comment => {}, // xml comment
|
||||
.element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
@@ -244,9 +241,9 @@ pub const XmlCTokenizer = struct {
|
||||
};
|
||||
|
||||
// TYPEDEF = kw_typedef DECLARATION ';'
|
||||
pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
_ = try xctok.expect(.kw_typedef);
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
_ = try xctok.expect(.semicolon);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
@@ -254,14 +251,14 @@ pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Decl
|
||||
|
||||
return registry.Declaration{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.decl_type = .{.typedef = decl.decl_type},
|
||||
.decl_type = .{ .typedef = decl.decl_type },
|
||||
};
|
||||
}
|
||||
|
||||
// MEMBER = DECLARATION (':' int)?
|
||||
pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
var field = registry.Container.Field {
|
||||
pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
var field = registry.Container.Field{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.field_type = decl.decl_type,
|
||||
.bits = null,
|
||||
@@ -287,14 +284,14 @@ pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Conta
|
||||
return field;
|
||||
}
|
||||
|
||||
pub fn parseParamOrProto(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
pub fn parseParamOrProto(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
}
|
||||
return registry.Declaration{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.decl_type = .{.typedef = decl.decl_type},
|
||||
.decl_type = .{ .typedef = decl.decl_type },
|
||||
};
|
||||
}
|
||||
|
||||
@@ -318,7 +315,7 @@ pub const ParseError = error{
|
||||
// DECLARATION = kw_const? type_name DECLARATOR
|
||||
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
|
||||
// | POINTERS '(' FNPTRSUFFIX
|
||||
fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
|
||||
fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) ParseError!Declaration {
|
||||
// Parse declaration constness
|
||||
var tok = try xctok.nextNoEof();
|
||||
const inner_is_const = tok.kind == .kw_const;
|
||||
@@ -333,14 +330,14 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
if (tok.kind != .type_name and tok.kind != .id) return error.InvalidSyntax;
|
||||
const type_name = tok.text;
|
||||
|
||||
var type_info = TypeInfo{.name = type_name};
|
||||
var type_info = TypeInfo{ .name = type_name };
|
||||
|
||||
// Parse pointers
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info);
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info, ptrs_optional);
|
||||
|
||||
// Parse name / fn ptr
|
||||
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info)) |decl| {
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info, ptrs_optional)) |decl| {
|
||||
return decl;
|
||||
}
|
||||
|
||||
@@ -365,7 +362,7 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
.array = .{
|
||||
.size = array_size,
|
||||
.child = child,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// update the inner_type pointer so it points to the proper
|
||||
@@ -380,7 +377,7 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
}
|
||||
|
||||
// FNPTRSUFFIX = kw_vkapi_ptr '*' name' ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
|
||||
fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
|
||||
fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo, ptrs_optional: bool) !?Declaration {
|
||||
const lparen = try xctok.peek();
|
||||
if (lparen == null or lparen.?.kind != .lparen) {
|
||||
return null;
|
||||
@@ -403,11 +400,11 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
.return_type = return_type_heap,
|
||||
.success_codes = &[_][]const u8{},
|
||||
.error_codes = &[_][]const u8{},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const first_param = try parseDeclaration(allocator, xctok);
|
||||
const first_param = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
if (first_param.name == null) {
|
||||
if (first_param.decl_type != .name or !mem.eql(u8, first_param.decl_type.name, "void")) {
|
||||
return error.InvalidSyntax;
|
||||
@@ -434,7 +431,7 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
else => return error.InvalidSyntax,
|
||||
}
|
||||
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
try params.append(.{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.param_type = decl.decl_type,
|
||||
@@ -448,7 +445,7 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
}
|
||||
|
||||
// POINTERS = (kw_const? '*')*
|
||||
fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
|
||||
fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo, ptrs_optional: bool) !TypeInfo {
|
||||
var type_info = inner;
|
||||
var first_const = inner_const;
|
||||
|
||||
@@ -477,7 +474,7 @@ fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool
|
||||
type_info = .{
|
||||
.pointer = .{
|
||||
.is_const = is_const or first_const,
|
||||
.is_optional = false, // set elsewhere
|
||||
.is_optional = ptrs_optional, // set elsewhere
|
||||
.size = .one, // set elsewhere
|
||||
.child = child,
|
||||
},
|
||||
@@ -500,10 +497,10 @@ fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
|
||||
.int = std.fmt.parseInt(usize, size_tok.text, 10) catch |err| switch (err) {
|
||||
error.Overflow => return error.Overflow,
|
||||
error.InvalidCharacter => unreachable,
|
||||
}
|
||||
},
|
||||
},
|
||||
.enum_name => .{.alias = size_tok.text},
|
||||
else => return error.InvalidSyntax
|
||||
.enum_name => .{ .alias = size_tok.text },
|
||||
else => return error.InvalidSyntax,
|
||||
};
|
||||
|
||||
_ = try xctok.expect(.rbracket);
|
||||
@@ -517,7 +514,7 @@ pub fn parseVersion(xctok: *XmlCTokenizer) ![4][]const u8 {
|
||||
return error.InvalidVersion;
|
||||
}
|
||||
|
||||
const name = try xctok.expect(.name);
|
||||
_ = try xctok.expect(.name);
|
||||
const vk_make_version = try xctok.expect(.type_name);
|
||||
if (!mem.eql(u8, vk_make_version.text, "VK_MAKE_API_VERSION")) {
|
||||
return error.NotVersion;
|
||||
@@ -540,44 +537,38 @@ pub fn parseVersion(xctok: *XmlCTokenizer) ![4][]const u8 {
|
||||
return version;
|
||||
}
|
||||
|
||||
fn testTokenizer(tokenizer: anytype, expected_tokens: []const Token) void {
|
||||
fn testTokenizer(tokenizer: anytype, expected_tokens: []const Token) !void {
|
||||
for (expected_tokens) |expected| {
|
||||
const tok = (tokenizer.next() catch unreachable).?;
|
||||
testing.expectEqual(expected.kind, tok.kind);
|
||||
testing.expectEqualSlices(u8, expected.text, tok.text);
|
||||
try testing.expectEqual(expected.kind, tok.kind);
|
||||
try testing.expectEqualSlices(u8, expected.text, tok.text);
|
||||
}
|
||||
|
||||
if (tokenizer.next() catch unreachable) |_| unreachable;
|
||||
}
|
||||
|
||||
test "CTokenizer" {
|
||||
var ctok = CTokenizer {
|
||||
.source = \\typedef ([const)]** VKAPI_PTR 123,;aaaa
|
||||
};
|
||||
var ctok = CTokenizer{ .source = "typedef ([const)]** VKAPI_PTR 123,;aaaa" };
|
||||
|
||||
testTokenizer(
|
||||
&ctok,
|
||||
&[_]Token{
|
||||
.{.kind = .kw_typedef, .text = "typedef"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .lbracket, .text = "["},
|
||||
.{.kind = .kw_const, .text = "const"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .rbracket, .text = "]"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
|
||||
.{.kind = .int, .text = "123"},
|
||||
.{.kind = .comma, .text = ","},
|
||||
.{.kind = .semicolon, .text = ";"},
|
||||
.{.kind = .id, .text = "aaaa"},
|
||||
}
|
||||
);
|
||||
try testTokenizer(&ctok, &[_]Token{
|
||||
.{ .kind = .kw_typedef, .text = "typedef" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .lbracket, .text = "[" },
|
||||
.{ .kind = .kw_const, .text = "const" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .rbracket, .text = "]" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
|
||||
.{ .kind = .int, .text = "123" },
|
||||
.{ .kind = .comma, .text = "," },
|
||||
.{ .kind = .semicolon, .text = ";" },
|
||||
.{ .kind = .id, .text = "aaaa" },
|
||||
});
|
||||
}
|
||||
|
||||
test "XmlCTokenizer" {
|
||||
const document = try xml.parse(
|
||||
testing.allocator,
|
||||
const document = try xml.parse(testing.allocator,
|
||||
\\<root>// comment <name>commented name</name> <type>commented type</type> trailing
|
||||
\\ typedef void (VKAPI_PTR *<name>PFN_vkVoidFunction</name>)(void);
|
||||
\\</root>
|
||||
@@ -586,27 +577,23 @@ test "XmlCTokenizer" {
|
||||
|
||||
var xctok = XmlCTokenizer.init(document.root);
|
||||
|
||||
testTokenizer(
|
||||
&xctok,
|
||||
&[_]Token{
|
||||
.{.kind = .kw_typedef, .text = "typedef"},
|
||||
.{.kind = .id, .text = "void"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .name, .text = "PFN_vkVoidFunction"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .id, .text = "void"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .semicolon, .text = ";"},
|
||||
}
|
||||
);
|
||||
try testTokenizer(&xctok, &[_]Token{
|
||||
.{ .kind = .kw_typedef, .text = "typedef" },
|
||||
.{ .kind = .id, .text = "void" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .name, .text = "PFN_vkVoidFunction" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .id, .text = "void" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .semicolon, .text = ";" },
|
||||
});
|
||||
}
|
||||
|
||||
test "parseTypedef" {
|
||||
const document = try xml.parse(
|
||||
testing.allocator,
|
||||
const document = try xml.parse(testing.allocator,
|
||||
\\<root> // comment <name>commented name</name> trailing
|
||||
\\ typedef const struct <type>Python</type>* pythons[4];
|
||||
\\ // more comments
|
||||
@@ -619,12 +606,12 @@ test "parseTypedef" {
|
||||
defer arena.deinit();
|
||||
|
||||
var xctok = XmlCTokenizer.init(document.root);
|
||||
const decl = try parseTypedef(&arena.allocator, &xctok);
|
||||
const decl = try parseTypedef(arena.allocator(), &xctok, false);
|
||||
|
||||
testing.expectEqualSlices(u8, "pythons", decl.name);
|
||||
try testing.expectEqualSlices(u8, "pythons", decl.name);
|
||||
const array = decl.decl_type.typedef.array;
|
||||
testing.expectEqual(ArraySize{.int = 4}, array.size);
|
||||
try testing.expectEqual(ArraySize{ .int = 4 }, array.size);
|
||||
const ptr = array.child.pointer;
|
||||
testing.expectEqual(true, ptr.is_const);
|
||||
testing.expectEqualSlices(u8, "Python", ptr.child.name);
|
||||
try testing.expectEqual(true, ptr.is_const);
|
||||
try testing.expectEqualSlices(u8, "Python", ptr.child.name);
|
||||
}
|
||||
|
||||
@@ -9,41 +9,30 @@ const Allocator = mem.Allocator;
const FeatureLevel = reg.FeatureLevel;

const EnumFieldMerger = struct {
const EnumExtensionMap = std.StringArrayHashMap(std.ArrayListUnmanaged(reg.Enum.Field));
const FieldSet = std.StringArrayHashMap(void);
const EnumExtensionMap = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(reg.Enum.Field));
const FieldSet = std.StringArrayHashMapUnmanaged(void);

gpa: *Allocator,
reg_arena: *Allocator,
arena: Allocator,
registry: *reg.Registry,
enum_extensions: EnumExtensionMap,
field_set: FieldSet,

fn init(gpa: *Allocator, reg_arena: *Allocator, registry: *reg.Registry) EnumFieldMerger {
fn init(arena: Allocator, registry: *reg.Registry) EnumFieldMerger {
return .{
.gpa = gpa,
.reg_arena = reg_arena,
.arena = arena,
.registry = registry,
.enum_extensions = EnumExtensionMap.init(gpa),
.field_set = FieldSet.init(gpa),
.enum_extensions = .{},
.field_set = .{},
};
}

fn deinit(self: *EnumFieldMerger) void {
for (self.enum_extensions.items()) |*entry| {
entry.value.deinit(self.gpa);
}

self.field_set.deinit();
self.enum_extensions.deinit();
}

fn putEnumExtension(self: *EnumFieldMerger, enum_name: []const u8, field: reg.Enum.Field) !void {
const res = try self.enum_extensions.getOrPut(enum_name);
const res = try self.enum_extensions.getOrPut(self.arena, enum_name);
if (!res.found_existing) {
res.entry.value = std.ArrayListUnmanaged(reg.Enum.Field){};
res.value_ptr.* = std.ArrayListUnmanaged(reg.Enum.Field){};
}

try res.entry.value.append(self.gpa, field);
try res.value_ptr.append(self.arena, field);
}
|
||||
|
||||
fn addRequires(self: *EnumFieldMerger, reqs: []const reg.Require) !void {
|
||||
@@ -61,11 +50,11 @@ const EnumFieldMerger = struct {
|
||||
self.field_set.clearRetainingCapacity();
|
||||
|
||||
const n_fields_upper_bound = base_enum.fields.len + extensions.items.len;
|
||||
const new_fields = try self.reg_arena.alloc(reg.Enum.Field, n_fields_upper_bound);
|
||||
const new_fields = try self.arena.alloc(reg.Enum.Field, n_fields_upper_bound);
|
||||
var i: usize = 0;
|
||||
|
||||
for (base_enum.fields) |field| {
|
||||
const res = try self.field_set.getOrPut(field.name);
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
@@ -74,16 +63,16 @@ const EnumFieldMerger = struct {
|
||||
|
||||
// Assume that if a field name clobbers, the value is the same
|
||||
for (extensions.items) |field| {
|
||||
const res = try self.field_set.getOrPut(field.name);
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Existing base_enum.fields was allocatued by `self.reg_arena`, so
|
||||
// Existing base_enum.fields was allocated by `self.arena`, so
|
||||
// it gets cleaned up whenever that is deinited.
|
||||
base_enum.fields = self.reg_arena.shrink(new_fields, i);
|
||||
base_enum.fields = self.arena.shrink(new_fields, i);
|
||||
}
|
||||
|
||||
fn merge(self: *EnumFieldMerger) !void {
|
||||
@@ -106,28 +95,25 @@ const EnumFieldMerger = struct {
|
||||
};
|
||||
|
||||
pub const Generator = struct {
|
||||
gpa: *Allocator,
|
||||
reg_arena: std.heap.ArenaAllocator,
|
||||
arena: std.heap.ArenaAllocator,
|
||||
registry: reg.Registry,
|
||||
id_renderer: IdRenderer,
|
||||
|
||||
fn init(allocator: *Allocator, spec: *xml.Element) !Generator {
|
||||
fn init(allocator: Allocator, spec: *xml.Element) !Generator {
|
||||
const result = try parseXml(allocator, spec);
|
||||
|
||||
const tags = try allocator.alloc([]const u8, result.registry.tags.len);
|
||||
for (tags) |*tag, i| tag.* = result.registry.tags[i].name;
|
||||
|
||||
return Generator{
|
||||
.gpa = allocator,
|
||||
.reg_arena = result.arena,
|
||||
.arena = result.arena,
|
||||
.registry = result.registry,
|
||||
.id_renderer = IdRenderer.init(allocator, tags),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: Generator) void {
|
||||
self.gpa.free(self.id_renderer.tags);
|
||||
self.reg_arena.deinit();
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
fn stripFlagBits(self: Generator, name: []const u8) []const u8 {
|
||||
@@ -142,19 +128,45 @@ pub const Generator = struct {
|
||||
|
||||
// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
|
||||
fn mergeEnumFields(self: *Generator) !void {
|
||||
var merger = EnumFieldMerger.init(self.gpa, &self.reg_arena.allocator, &self.registry);
|
||||
defer merger.deinit();
|
||||
var merger = EnumFieldMerger.init(self.arena.allocator(), &self.registry);
|
||||
try merger.merge();
|
||||
}
|
||||
|
||||
fn fixupTags(self: *Generator) !void {
|
||||
var fixer_upper = TagFixerUpper.init(self.gpa, &self.registry, &self.id_renderer);
|
||||
defer fixer_upper.deinit();
|
||||
try fixer_upper.fixup();
|
||||
// https://github.com/KhronosGroup/Vulkan-Docs/pull/1556
|
||||
fn fixupBitFlags(self: *Generator) !void {
|
||||
var seen_bits = std.StringArrayHashMap(void).init(self.arena.allocator());
|
||||
defer seen_bits.deinit();
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
const bitmask = switch (decl.decl_type) {
|
||||
.bitmask => |bm| bm,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (bitmask.bits_enum) |bits_enum| {
|
||||
try seen_bits.put(bits_enum, {});
|
||||
}
|
||||
}
|
||||
|
||||
var i: usize = 0;
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
switch (decl.decl_type) {
|
||||
.enumeration => |e| {
|
||||
if (e.is_bitmask and seen_bits.get(decl.name) == null)
|
||||
continue;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
self.registry.decls[i] = decl;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
self.registry.decls.len = i;
|
||||
}
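`fixupBitFlags` drops FlagBits enums that no bitmask refers to by compacting `decls` in place: copy every survivor down to index `i`, then shrink the slice's length. The same idiom in isolation, with made-up data:

    const std = @import("std");

    test "in-place slice compaction" {
        var storage = [_]u32{ 1, 2, 3, 4, 5, 6 };
        var items: []u32 = storage[0..];

        var i: usize = 0;
        for (items) |v| {
            if (v % 2 != 0) continue; // filtered out, mirroring the seen_bits check above
            items[i] = v;
            i += 1;
        }
        items.len = i; // same trick as `self.registry.decls.len = i`

        try std.testing.expectEqualSlices(u32, &[_]u32{ 2, 4, 6 }, items);
    }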
|
||||
|
||||
fn render(self: *Generator, writer: anytype) !void {
|
||||
try renderRegistry(writer, &self.reg_arena.allocator, &self.registry, &self.id_renderer);
|
||||
try renderRegistry(writer, self.arena.allocator(), &self.registry, &self.id_renderer);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -162,7 +174,7 @@ pub const Generator = struct {
|
||||
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
|
||||
/// internal datastructures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
|
||||
/// directly.
|
||||
pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: anytype) !void {
|
||||
pub fn generate(allocator: Allocator, spec_xml: []const u8, writer: anytype) !void {
|
||||
const spec = try xml.parse(allocator, spec_xml);
|
||||
defer spec.deinit();
|
||||
|
||||
@@ -170,5 +182,6 @@ pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: anytype) !v
|
||||
defer gen.deinit();
|
||||
|
||||
try gen.mergeEnumFields();
|
||||
try gen.fixupBitFlags();
|
||||
try gen.render(writer);
|
||||
}
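Outside the build system, `generate` can also be driven directly; a hedged sketch (the import path is hypothetical, the embedded file is assumed to exist next to the caller, and a real vk.xml is several megabytes):

    const std = @import("std");
    const generator = @import("vulkan/generator.zig"); // hypothetical import path

    test "drive the generator by hand" {
        const spec_xml = @embedFile("vk.xml"); // assumption: registry copied alongside this file
        var out = std.ArrayList(u8).init(std.testing.allocator);
        defer out.deinit();
        try generator.generate(std.testing.allocator, spec_xml, out.writer());
        try std.testing.expect(out.items.len != 0);
    }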
|
||||
|
||||
@@ -17,14 +17,13 @@ pub const ParseResult = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult {
|
||||
pub fn parseXml(backing_allocator: Allocator, root: *xml.Element) !ParseResult {
|
||||
var arena = ArenaAllocator.init(backing_allocator);
|
||||
errdefer arena.deinit();
|
||||
|
||||
const allocator = &arena.allocator;
|
||||
const allocator = arena.allocator();
|
||||
|
||||
var reg = registry.Registry{
|
||||
.copyright = root.getCharData("comment") orelse return error.InvalidRegistry,
|
||||
.decls = try parseDeclarations(allocator, root),
|
||||
.api_constants = try parseApiConstants(allocator, root),
|
||||
.tags = try parseTags(allocator, root),
|
||||
@@ -38,11 +37,11 @@ pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult
|
||||
};
|
||||
}
|
||||
|
||||
fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Declaration {
|
||||
fn parseDeclarations(allocator: Allocator, root: *xml.Element) ![]registry.Declaration {
|
||||
var types_elem = root.findChildByTag("types") orelse return error.InvalidRegistry;
|
||||
var commands_elem = root.findChildByTag("commands") orelse return error.InvalidRegistry;
|
||||
|
||||
const decl_upper_bound = types_elem.children.items.len + commands_elem.children.items.len;
|
||||
const decl_upper_bound = types_elem.children.len + commands_elem.children.len;
|
||||
const decls = try allocator.alloc(registry.Declaration, decl_upper_bound);
|
||||
|
||||
var count: usize = 0;
|
||||
@@ -52,7 +51,7 @@ fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Decl
|
||||
return allocator.shrink(decls, count);
|
||||
}
|
||||
|
||||
fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
|
||||
fn parseTypes(allocator: Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = types_elem.findChildrenByTag("type");
|
||||
while (it.next()) |ty| {
|
||||
@@ -74,7 +73,7 @@ fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *x
|
||||
} else if (mem.eql(u8, category, "funcpointer")) {
|
||||
break :blk try parseFuncPointer(allocator, ty);
|
||||
} else if (mem.eql(u8, category, "enum")) {
|
||||
break :blk (try parseEnumAlias(allocator, ty)) orelse continue;
|
||||
break :blk (try parseEnumAlias(ty)) orelse continue;
|
||||
}
|
||||
|
||||
continue;
|
||||
@@ -89,13 +88,13 @@ fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *x
|
||||
fn parseForeigntype(ty: *xml.Element) !registry.Declaration {
|
||||
const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
const depends = ty.getAttribute("requires") orelse if (mem.eql(u8, name, "int"))
|
||||
"vk_platform" // for some reason, int doesn't depend on vk_platform (but the other c types do)
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
"vk_platform" // for some reason, int doesn't depend on vk_platform (but the other c types do)
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.foreign = .{.depends = depends}},
|
||||
.decl_type = .{ .foreign = .{ .depends = depends } },
|
||||
};
|
||||
}
|
||||
|
||||
@@ -104,24 +103,27 @@ fn parseBitmaskType(ty: *xml.Element) !registry.Declaration {
|
||||
const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.alias = .{.name = alias, .target = .other_type}},
|
||||
.decl_type = .{ .alias = .{ .name = alias, .target = .other_type } },
|
||||
};
|
||||
} else {
|
||||
const flags_type = ty.getCharData("type") orelse return error.InvalidRegistry;
|
||||
|
||||
const bitwidth: u8 = if (mem.eql(u8, flags_type, "VkFlags"))
|
||||
32
|
||||
else if (mem.eql(u8, flags_type, "VkFlags64"))
|
||||
64
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
32
|
||||
else if (mem.eql(u8, flags_type, "VkFlags64"))
|
||||
64
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
|
||||
return registry.Declaration{
|
||||
.name = ty.getCharData("name") orelse return error.InvalidRegistry,
|
||||
.decl_type = .{.bitmask = .{
|
||||
.bits_enum = ty.getAttribute("requires") orelse ty.getAttribute("bitvalues"), // Who knows why these are different fields
|
||||
.bitwidth = bitwidth,
|
||||
}},
|
||||
.decl_type = .{
|
||||
.bitmask = .{
|
||||
// Who knows why these are different fields
|
||||
.bits_enum = ty.getAttribute("requires") orelse ty.getAttribute("bitvalues"),
|
||||
.bitwidth = bitwidth,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -132,7 +134,9 @@ fn parseHandleType(ty: *xml.Element) !registry.Declaration {
|
||||
const alias = ty.getAttribute("alias") orelse return error.InvalidRegistry;
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.alias = .{.name = alias, .target = .other_type}},
|
||||
.decl_type = .{
|
||||
.alias = .{ .name = alias, .target = .other_type },
|
||||
},
|
||||
};
|
||||
} else {
|
||||
const name = ty.getCharData("name") orelse return error.InvalidRegistry;
|
||||
@@ -148,45 +152,47 @@ fn parseHandleType(ty: *xml.Element) !registry.Declaration {
|
||||
.handle = .{
|
||||
.parent = ty.getAttribute("parent"),
|
||||
.is_dispatchable = dispatchable,
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
fn parseBaseType(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
const name = ty.getCharData("name") orelse return error.InvalidRegistry;
|
||||
if (ty.getCharData("type")) |_| {
|
||||
var tok = cparse.XmlCTokenizer.init(ty);
|
||||
return try cparse.parseTypedef(allocator, &tok);
|
||||
return try cparse.parseTypedef(allocator, &tok, false);
|
||||
} else {
|
||||
// Either ANativeWindow, AHardwareBuffer or CAMetalLayer. The latter has a lot of
|
||||
// macros, which is why this part is not built into the xml/c parser.
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.external = {}},
|
||||
.decl_type = .{ .external = {} },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
|
||||
fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
|
||||
const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
|
||||
if (ty.getAttribute("alias")) |alias| {
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.alias = .{.name = alias, .target = .other_type}},
|
||||
.decl_type = .{
|
||||
.alias = .{ .name = alias, .target = .other_type },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
var members = try allocator.alloc(registry.Container.Field, ty.children.items.len);
|
||||
var members = try allocator.alloc(registry.Container.Field, ty.children.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = ty.findChildrenByTag("member");
|
||||
var maybe_stype: ?[]const u8 = null;
|
||||
while (it.next()) |member| {
|
||||
var xctok = cparse.XmlCTokenizer.init(member);
|
||||
members[i] = try cparse.parseMember(allocator, &xctok);
|
||||
members[i] = try cparse.parseMember(allocator, &xctok, false);
|
||||
if (mem.eql(u8, members[i].name, "sType")) {
|
||||
if (member.getAttribute("values")) |stype| {
|
||||
maybe_stype = stype;
|
||||
@@ -198,27 +204,40 @@ fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !regi
|
||||
|
||||
members = allocator.shrink(members, i);
|
||||
|
||||
var maybe_extends: ?[][]const u8 = null;
|
||||
if (ty.getAttribute("structextends")) |extends| {
|
||||
const n_structs = std.mem.count(u8, extends, ",") + 1;
|
||||
maybe_extends = try allocator.alloc([]const u8, n_structs);
|
||||
var struct_extends = std.mem.split(u8, extends, ",");
|
||||
var j: usize = 0;
|
||||
while (struct_extends.next()) |struct_extend| {
|
||||
maybe_extends.?[j] = struct_extend;
|
||||
j += 1;
|
||||
}
|
||||
}
|
||||
|
||||
it = ty.findChildrenByTag("member");
|
||||
for (members) |*member| {
|
||||
const member_elem = it.next().?;
|
||||
try parsePointerMeta(.{.container = members}, &member.field_type, member_elem);
|
||||
try parsePointerMeta(.{ .container = members }, &member.field_type, member_elem);
|
||||
}
|
||||
|
||||
return registry.Declaration {
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{
|
||||
.container = .{
|
||||
.stype = maybe_stype,
|
||||
.fields = members,
|
||||
.is_union = is_union,
|
||||
}
|
||||
}
|
||||
.extends = maybe_extends,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn parseFuncPointer(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
fn parseFuncPointer(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
var xctok = cparse.XmlCTokenizer.init(ty);
|
||||
return try cparse.parseTypedef(allocator, &xctok);
|
||||
return try cparse.parseTypedef(allocator, &xctok, true);
|
||||
}
|
||||
|
||||
// For some reason, the DeclarationType cannot be passed to lenToPointerSize, as
|
||||
@@ -235,7 +254,7 @@ fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSiz
|
||||
for (params) |*param| {
|
||||
if (mem.eql(u8, param.name, len)) {
|
||||
param.is_buffer_len = true;
|
||||
return .{.other_field = param.name};
|
||||
return .{ .other_field = param.name };
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -243,7 +262,7 @@ fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSiz
|
||||
for (members) |*member| {
|
||||
if (mem.eql(u8, member.name, len)) {
|
||||
member.is_buffer_len = true;
|
||||
return .{.other_field = member.name};
|
||||
return .{ .other_field = member.name };
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -258,7 +277,7 @@ fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSiz
|
||||
|
||||
fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.Element) !void {
|
||||
if (elem.getAttribute("len")) |lens| {
|
||||
var it = mem.split(lens, ",");
|
||||
var it = mem.split(u8, lens, ",");
|
||||
var current_type_info = type_info;
|
||||
while (current_type_info.* == .pointer) {
|
||||
// TODO: Check altlen
|
||||
@@ -275,7 +294,7 @@ fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.El
|
||||
}
|
||||
|
||||
if (elem.getAttribute("optional")) |optionals| {
|
||||
var it = mem.split(optionals, ",");
|
||||
var it = mem.split(u8, optionals, ",");
|
||||
var current_type_info = type_info;
|
||||
while (current_type_info.* == .pointer) {
|
||||
if (it.next()) |current_optional| {
|
||||
@@ -290,19 +309,21 @@ fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.El
|
||||
}
|
||||
}
|
||||
|
||||
fn parseEnumAlias(allocator: *Allocator, elem: *xml.Element) !?registry.Declaration {
|
||||
fn parseEnumAlias(elem: *xml.Element) !?registry.Declaration {
|
||||
if (elem.getAttribute("alias")) |alias| {
|
||||
const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.alias = .{.name = alias, .target = .other_type}},
|
||||
.decl_type = .{
|
||||
.alias = .{ .name = alias, .target = .other_type },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
|
||||
fn parseEnums(allocator: Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = root.findChildrenByTag("enums");
|
||||
while (it.next()) |enums| {
|
||||
@@ -313,7 +334,7 @@ fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Ele

out[i] = .{
.name = name,
.decl_type = .{.enumeration = try parseEnumFields(allocator, enums)},
.decl_type = .{ .enumeration = try parseEnumFields(allocator, enums) },
};
i += 1;
}
@@ -321,7 +342,7 @@ fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Ele
return i;
}

fn parseEnumFields(allocator: *Allocator, elem: *xml.Element) !registry.Enum {
fn parseEnumFields(allocator: Allocator, elem: *xml.Element) !registry.Enum {
// TODO: `type` was added recently, fall back to checking endswith FlagBits for older versions?
const enum_type = elem.getAttribute("type") orelse return error.InvalidRegistry;
const is_bitmask = mem.eql(u8, enum_type, "bitmask");
@@ -330,11 +351,11 @@ fn parseEnumFields(allocator: *Allocator, elem: *xml.Element) !registry.Enum {
}

const bitwidth = if (elem.getAttribute("bitwidth")) |bitwidth|
try std.fmt.parseInt(u8, bitwidth, 10)
else
32;
try std.fmt.parseInt(u8, bitwidth, 10)
else
32;

const fields = try allocator.alloc(registry.Enum.Field, elem.children.items.len);
const fields = try allocator.alloc(registry.Enum.Field, elem.children.len);

var i: usize = 0;
var it = elem.findChildrenByTag("enum");
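The `elem.children.items.len` to `elem.children.len` changes here and below line up with the xml module rework further down in this diff: `Element.children` is now a plain `[]Content` slice rather than an `ArrayList(Content)`, so it is indexed and measured directly. A hedged sketch; `countEnumChildren` is an illustrative helper, not part of the source:

const xml = @import("xml.zig");

// Illustrative only: with children as a slice, no .items indirection is needed.
fn countEnumChildren(elem: *xml.Element) usize {
    var count: usize = 0;
    for (elem.children) |child| {
        if (child == .element) count += 1;
    }
    return count;
}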
@@ -369,14 +390,14 @@ fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
|
||||
// tag. In the latter case its passed via the `ext_nr` parameter.
|
||||
if (field.getAttribute("value")) |value| {
|
||||
if (mem.startsWith(u8, value, "0x")) {
|
||||
break :blk .{.bit_vector = try std.fmt.parseInt(i32, value[2..], 16)};
|
||||
break :blk .{ .bit_vector = try std.fmt.parseInt(i32, value[2..], 16) };
|
||||
} else {
|
||||
break :blk .{.int = try std.fmt.parseInt(i32, value, 10)};
|
||||
break :blk .{ .int = try std.fmt.parseInt(i32, value, 10) };
|
||||
}
|
||||
} else if (field.getAttribute("bitpos")) |bitpos| {
|
||||
break :blk .{.bitpos = try std.fmt.parseInt(u6, bitpos, 10)};
|
||||
break :blk .{ .bitpos = try std.fmt.parseInt(u6, bitpos, 10) };
|
||||
} else if (field.getAttribute("alias")) |alias| {
|
||||
break :blk .{.alias = .{.name = alias, .is_compat_alias = is_compat_alias}};
|
||||
break :blk .{ .alias = .{ .name = alias, .is_compat_alias = is_compat_alias } };
|
||||
} else {
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
@@ -388,7 +409,7 @@ fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
|
||||
};
|
||||
}
|
||||
|
||||
fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
|
||||
fn parseCommands(allocator: Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = commands_elem.findChildrenByTag("command");
|
||||
while (it.next()) |elem| {
|
||||
@@ -399,14 +420,14 @@ fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_el
|
||||
return i;
|
||||
}
|
||||
|
||||
fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![][]const u8 {
|
||||
fn splitCommaAlloc(allocator: Allocator, text: []const u8) ![][]const u8 {
|
||||
var n_codes: usize = 1;
|
||||
for (text) |c| {
|
||||
if (c == ',') n_codes += 1;
|
||||
}
|
||||
|
||||
const codes = try allocator.alloc([]const u8, n_codes);
|
||||
var it = mem.split(text, ",");
|
||||
var it = mem.split(u8, text, ",");
|
||||
for (codes) |*code| {
|
||||
code.* = it.next().?;
|
||||
}
|
||||
@@ -414,26 +435,28 @@ fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![][]const u8 {
|
||||
return codes;
|
||||
}
|
||||
|
||||
fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration {
|
||||
fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration {
|
||||
if (elem.getAttribute("alias")) |alias| {
|
||||
const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
return registry.Declaration{
|
||||
.name = name,
|
||||
.decl_type = .{.alias = .{.name = alias, .target = .other_command}}
|
||||
.decl_type = .{
|
||||
.alias = .{ .name = alias, .target = .other_command },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const proto = elem.findChildByTag("proto") orelse return error.InvalidRegistry;
|
||||
var proto_xctok = cparse.XmlCTokenizer.init(proto);
|
||||
const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok);
|
||||
const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok, false);
|
||||
|
||||
var params = try allocator.alloc(registry.Command.Param, elem.children.items.len);
|
||||
var params = try allocator.alloc(registry.Command.Param, elem.children.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = elem.findChildrenByTag("param");
|
||||
while (it.next()) |param| {
|
||||
var xctok = cparse.XmlCTokenizer.init(param);
|
||||
const decl = try cparse.parseParamOrProto(allocator, &xctok);
|
||||
const decl = try cparse.parseParamOrProto(allocator, &xctok, false);
|
||||
params[i] = .{
|
||||
.name = decl.name,
|
||||
.param_type = decl.decl_type.typedef,
|
||||
@@ -445,25 +468,25 @@ fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration
|
||||
const return_type = try allocator.create(registry.TypeInfo);
|
||||
return_type.* = command_decl.decl_type.typedef;
|
||||
|
||||
const success_codes = if (elem.getAttribute("successcodes")) |codes|
|
||||
try splitCommaAlloc(allocator, codes)
|
||||
else
|
||||
&[_][]const u8{};
|
||||
const success_codes: [][]const u8 = if (elem.getAttribute("successcodes")) |codes|
|
||||
try splitCommaAlloc(allocator, codes)
|
||||
else
|
||||
&[_][]const u8{};
|
||||
|
||||
const error_codes = if (elem.getAttribute("errorcodes")) |codes|
|
||||
try splitCommaAlloc(allocator, codes)
|
||||
else
|
||||
&[_][]const u8{};
|
||||
const error_codes: [][]const u8 = if (elem.getAttribute("errorcodes")) |codes|
|
||||
try splitCommaAlloc(allocator, codes)
|
||||
else
|
||||
&[_][]const u8{};
|
||||
|
||||
params = allocator.shrink(params, i);
|
||||
|
||||
it = elem.findChildrenByTag("param");
|
||||
for (params) |*param| {
|
||||
const param_elem = it.next().?;
|
||||
try parsePointerMeta(.{.command = params}, ¶m.param_type, param_elem);
|
||||
try parsePointerMeta(.{ .command = params }, ¶m.param_type, param_elem);
|
||||
}
|
||||
|
||||
return registry.Declaration {
|
||||
return registry.Declaration{
|
||||
.name = command_decl.name,
|
||||
.decl_type = .{
|
||||
.command = .{
|
||||
@@ -471,12 +494,12 @@ fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration
|
||||
.return_type = return_type,
|
||||
.success_codes = success_codes,
|
||||
.error_codes = error_codes,
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn parseApiConstants(allocator: *Allocator, root: *xml.Element) ![]registry.ApiConstant {
|
||||
fn parseApiConstants(allocator: Allocator, root: *xml.Element) ![]registry.ApiConstant {
|
||||
var enums = blk: {
|
||||
var it = root.findChildrenByTag("enums");
|
||||
while (it.next()) |child| {
|
||||
@@ -503,21 +526,21 @@ fn parseApiConstants(allocator: *Allocator, root: *xml.Element) ![]registry.ApiC
|
||||
break :blk n_defines;
|
||||
};
|
||||
|
||||
const constants = try allocator.alloc(registry.ApiConstant, enums.children.items.len + n_defines);
|
||||
const constants = try allocator.alloc(registry.ApiConstant, enums.children.len + n_defines);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = enums.findChildrenByTag("enum");
|
||||
while (it.next()) |constant| {
|
||||
const expr = if (constant.getAttribute("value")) |expr|
|
||||
expr
|
||||
else if (constant.getAttribute("alias")) |alias|
|
||||
alias
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
expr
|
||||
else if (constant.getAttribute("alias")) |alias|
|
||||
alias
|
||||
else
|
||||
return error.InvalidRegistry;
|
||||
|
||||
constants[i] = .{
|
||||
.name = constant.getAttribute("name") orelse return error.InvalidRegistry,
|
||||
.value = .{.expr = expr},
|
||||
.value = .{ .expr = expr },
|
||||
};
|
||||
|
||||
i += 1;
|
||||
@@ -540,15 +563,13 @@ fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
|
||||
if (mem.eql(u8, name, "VK_HEADER_VERSION")) {
|
||||
out[i] = .{
|
||||
.name = name,
|
||||
.value = .{.expr = mem.trim(u8, ty.children.items[2].CharData, " ")},
|
||||
.value = .{ .expr = mem.trim(u8, ty.children[2].char_data, " ") },
|
||||
};
|
||||
} else {
|
||||
var xctok = cparse.XmlCTokenizer.init(ty);
|
||||
out[i] = .{
|
||||
.name = name,
|
||||
.value = .{
|
||||
.version = cparse.parseVersion(&xctok) catch continue
|
||||
},
|
||||
.value = .{ .version = cparse.parseVersion(&xctok) catch continue },
|
||||
};
|
||||
}
|
||||
i += 1;
|
||||
@@ -557,9 +578,9 @@ fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
|
||||
return i;
|
||||
}
|
||||
|
||||
fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
fn parseTags(allocator: Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
var tags_elem = root.findChildByTag("tags") orelse return error.InvalidRegistry;
|
||||
const tags = try allocator.alloc(registry.Tag, tags_elem.children.items.len);
|
||||
const tags = try allocator.alloc(registry.Tag, tags_elem.children.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = tags_elem.findChildrenByTag("tag");
|
||||
@@ -575,7 +596,7 @@ fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
return allocator.shrink(tags, i);
|
||||
}
|
||||
|
||||
fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature {
|
||||
fn parseFeatures(allocator: Allocator, root: *xml.Element) ![]registry.Feature {
|
||||
var it = root.findChildrenByTag("feature");
|
||||
var count: usize = 0;
|
||||
while (it.next()) |_| count += 1;
|
||||
@@ -591,14 +612,14 @@ fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature
|
||||
return features;
|
||||
}
|
||||
|
||||
fn parseFeature(allocator: *Allocator, feature: *xml.Element) !registry.Feature {
|
||||
fn parseFeature(allocator: Allocator, feature: *xml.Element) !registry.Feature {
|
||||
const name = feature.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
const feature_level = blk: {
|
||||
const number = feature.getAttribute("number") orelse return error.InvalidRegistry;
|
||||
break :blk try splitFeatureLevel(number, ".");
|
||||
};
|
||||
|
||||
var requires = try allocator.alloc(registry.Require, feature.children.items.len);
|
||||
var requires = try allocator.alloc(registry.Require, feature.children.len);
|
||||
var i: usize = 0;
|
||||
var it = feature.findChildrenByTag("require");
|
||||
while (it.next()) |require| {
|
||||
@@ -609,7 +630,7 @@ fn parseFeature(allocator: *Allocator, feature: *xml.Element) !registry.Feature
|
||||
return registry.Feature{
|
||||
.name = name,
|
||||
.level = feature_level,
|
||||
.requires = allocator.shrink(requires, i)
|
||||
.requires = allocator.shrink(requires, i),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -642,7 +663,10 @@ fn parseEnumExtension(elem: *xml.Element, parent_extnumber: ?u31) !?registry.Req
|
||||
return registry.Require.EnumExtension{
|
||||
.extends = extends,
|
||||
.extnumber = actual_extnumber,
|
||||
.field = .{.name = name, .value = .{.int = value}},
|
||||
.field = .{
|
||||
.name = name,
|
||||
.value = .{ .int = value },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -659,7 +683,7 @@ fn enumExtOffsetToValue(extnumber: u31, offset: u31) u31 {
|
||||
return extension_value_base + (extnumber - 1) * extension_block + offset;
|
||||
}
|
||||
|
||||
fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
|
||||
fn parseRequire(allocator: Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
|
||||
var n_extends: usize = 0;
|
||||
var n_types: usize = 0;
|
||||
var n_commands: usize = 0;
|
||||
@@ -705,7 +729,7 @@ fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
|
||||
break :blk try splitFeatureLevel(feature_level["VK_VERSION_".len ..], "_");
|
||||
break :blk try splitFeatureLevel(feature_level["VK_VERSION_".len..], "_");
|
||||
};
|
||||
|
||||
return registry.Require{
|
||||
@@ -717,10 +741,10 @@ fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !
|
||||
};
|
||||
}
|
||||
|
||||
fn parseExtensions(allocator: *Allocator, root: *xml.Element) ![]registry.Extension {
|
||||
fn parseExtensions(allocator: Allocator, root: *xml.Element) ![]registry.Extension {
|
||||
const extensions_elem = root.findChildByTag("extensions") orelse return error.InvalidRegistry;
|
||||
|
||||
const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.items.len);
|
||||
const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.len);
|
||||
var i: usize = 0;
|
||||
var it = extensions_elem.findChildrenByTag("extension");
|
||||
while (it.next()) |extension| {
|
||||
@@ -754,7 +778,7 @@ fn findExtVersion(extension: *xml.Element) !u32 {
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
|
||||
fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Extension {
|
||||
fn parseExtension(allocator: Allocator, extension: *xml.Element) !registry.Extension {
|
||||
const name = extension.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
const platform = extension.getAttribute("platform");
|
||||
const version = try findExtVersion(extension);
|
||||
@@ -763,19 +787,18 @@ fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Exte
|
||||
// feature level: both seperately in each <require> tag, or using
|
||||
// the requiresCore attribute.
|
||||
const requires_core = if (extension.getAttribute("requiresCore")) |feature_level|
|
||||
try splitFeatureLevel(feature_level, ".")
|
||||
else
|
||||
null;
|
||||
try splitFeatureLevel(feature_level, ".")
|
||||
else
|
||||
null;
|
||||
|
||||
const promoted_to: registry.Extension.Promotion = blk: {
|
||||
const promotedto = extension.getAttribute("promotedto") orelse break :blk .none;
|
||||
if (mem.startsWith(u8, promotedto, "VK_VERSION_")) {
|
||||
const feature_level = try splitFeatureLevel(promotedto["VK_VERSION_".len ..], "_");
|
||||
|
||||
break :blk .{.feature = feature_level};
|
||||
const feature_level = try splitFeatureLevel(promotedto["VK_VERSION_".len..], "_");
|
||||
break :blk .{ .feature = feature_level };
|
||||
}
|
||||
|
||||
break :blk .{.extension = promotedto};
|
||||
break :blk .{ .extension = promotedto };
|
||||
};
|
||||
|
||||
const number = blk: {
|
||||
@@ -799,7 +822,7 @@ fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Exte
|
||||
break :blk try splitCommaAlloc(allocator, requires_str);
|
||||
};
|
||||
|
||||
var requires = try allocator.alloc(registry.Require, extension.children.items.len);
|
||||
var requires = try allocator.alloc(registry.Require, extension.children.len);
|
||||
var i: usize = 0;
|
||||
var it = extension.findChildrenByTag("require");
|
||||
while (it.next()) |require| {
|
||||
@@ -816,12 +839,12 @@ fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Exte
|
||||
.promoted_to = promoted_to,
|
||||
.platform = platform,
|
||||
.required_feature_level = requires_core,
|
||||
.requires = allocator.shrink(requires, i)
|
||||
.requires = allocator.shrink(requires, i),
|
||||
};
|
||||
}
|
||||
|
||||
fn splitFeatureLevel(ver: []const u8, split: []const u8) !registry.FeatureLevel {
|
||||
var it = mem.split(ver, split);
|
||||
var it = mem.split(u8, ver, split);
|
||||
|
||||
const major = it.next() orelse return error.InvalidFeatureLevel;
|
||||
const minor = it.next() orelse return error.InvalidFeatureLevel;
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
pub const Registry = struct {
|
||||
copyright: []const u8,
|
||||
decls: []Declaration,
|
||||
api_constants: []ApiConstant,
|
||||
tags: []Tag,
|
||||
@@ -65,6 +64,7 @@ pub const Container = struct {
|
||||
};
|
||||
|
||||
stype: ?[]const u8,
|
||||
extends: ?[]const []const u8,
|
||||
fields: []Field,
|
||||
is_union: bool,
|
||||
};
|
||||
@@ -77,7 +77,7 @@ pub const Enum = struct {
|
||||
alias: struct {
|
||||
name: []const u8,
|
||||
is_compat_alias: bool,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
pub const Field = struct {
|
||||
@@ -118,7 +118,7 @@ pub const Pointer = struct {
|
||||
one,
|
||||
many, // The length is given by some complex expression, possibly involving another field
|
||||
other_field: []const u8, // The length is given by some other field or parameter
|
||||
zero_terminated
|
||||
zero_terminated,
|
||||
};
|
||||
|
||||
is_const: bool,
|
||||
|
||||
@@ -8,15 +8,16 @@ const CaseStyle = id_render.CaseStyle;
|
||||
const IdRenderer = id_render.IdRenderer;
|
||||
|
||||
const preamble =
|
||||
\\
|
||||
\\// This file is generated from the Khronos Vulkan XML API registry
|
||||
\\// This file is generated from the Khronos Vulkan XML API registry by vulkan-zig.
|
||||
\\
|
||||
\\const std = @import("std");
|
||||
\\const builtin = @import("builtin");
|
||||
\\const root = @import("root");
|
||||
\\pub const vulkan_call_conv: builtin.CallingConvention = if (builtin.os.tag == .windows and builtin.cpu.arch == .i386)
|
||||
\\const vk = @This();
|
||||
\\
|
||||
\\pub const vulkan_call_conv: std.builtin.CallingConvention = if (builtin.os.tag == .windows and builtin.cpu.arch == .i386)
|
||||
\\ .Stdcall
|
||||
\\ else if (builtin.abi == .android and (builtin.cpu.arch.isARM() or builtin.cpu.arch.isThumb()) and builtin.Target.arm.featureSetHas(builtin.cpu.features, .has_v7) and builtin.cpu.arch.ptrBitWidth() == 32)
|
||||
\\ else if (builtin.abi == .android and (builtin.cpu.arch.isARM() or builtin.cpu.arch.isThumb()) and std.Target.arm.featureSetHas(builtin.cpu.features, .has_v7) and builtin.cpu.arch.ptrBitWidth() == 32)
|
||||
\\ // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
|
||||
\\ // calling convention, i.e. float parameters are passed in registers. This
|
||||
\\ // is true even if the rest of the application passes floats on the stack,
|
||||
@@ -40,7 +41,7 @@ const preamble =
\\ return fromInt(toInt(lhs) & toInt(rhs));
\\ }
\\ pub fn complement(self: FlagsType) FlagsType {
\\ return fromInt(~toInt(lhs));
\\ return fromInt(~toInt(self));
\\ }
\\ pub fn subtract(lhs: FlagsType, rhs: FlagsType) FlagsType {
\\ return fromInt(toInt(lhs) & toInt(rhs.complement()));
@@ -66,45 +67,45 @@ const preamble =
\\ return @truncate(u12, version);
\\}
\\
;
;
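The first hunk above is a bugfix in the generated preamble: FlagsMixin's `complement` referenced `lhs`, which is not in scope there, and now uses `self`. Roughly, the emitted mixin looks like the sketch below; this is an approximation of the text the generator writes into vk.zig, not a verbatim copy:

pub fn FlagsMixin(comptime FlagsType: type, comptime Int: type) type {
    return struct {
        pub fn toInt(self: FlagsType) Int {
            return @bitCast(Int, self);
        }
        pub fn fromInt(flags: Int) FlagsType {
            return @bitCast(FlagsType, flags);
        }
        pub fn complement(self: FlagsType) FlagsType {
            return fromInt(~toInt(self)); // previously ~toInt(lhs), a compile error
        }
    };
}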
|
||||
const builtin_types = std.ComptimeStringMap([]const u8, .{
|
||||
.{"void", @typeName(void)},
|
||||
.{"char", @typeName(u8)},
|
||||
.{"float", @typeName(f32)},
|
||||
.{"double", @typeName(f64)},
|
||||
.{"uint8_t", @typeName(u8)},
|
||||
.{"uint16_t", @typeName(u16)},
|
||||
.{"uint32_t", @typeName(u32)},
|
||||
.{"uint64_t", @typeName(u64)},
|
||||
.{"int8_t", @typeName(i8)},
|
||||
.{"int16_t", @typeName(i16)},
|
||||
.{"int32_t", @typeName(i32)},
|
||||
.{"int64_t", @typeName(i64)},
|
||||
.{"size_t", @typeName(usize)},
|
||||
.{"int", @typeName(c_int)},
|
||||
.{ "void", @typeName(void) },
|
||||
.{ "char", @typeName(u8) },
|
||||
.{ "float", @typeName(f32) },
|
||||
.{ "double", @typeName(f64) },
|
||||
.{ "uint8_t", @typeName(u8) },
|
||||
.{ "uint16_t", @typeName(u16) },
|
||||
.{ "uint32_t", @typeName(u32) },
|
||||
.{ "uint64_t", @typeName(u64) },
|
||||
.{ "int8_t", @typeName(i8) },
|
||||
.{ "int16_t", @typeName(i16) },
|
||||
.{ "int32_t", @typeName(i32) },
|
||||
.{ "int64_t", @typeName(i64) },
|
||||
.{ "size_t", @typeName(usize) },
|
||||
.{ "int", @typeName(c_int) },
|
||||
});
|
||||
|
||||
const foreign_types = std.ComptimeStringMap([]const u8, .{
|
||||
.{"Display", "opaque {}"},
|
||||
.{"VisualID", @typeName(c_uint)},
|
||||
.{"Window", @typeName(c_ulong)},
|
||||
.{"RROutput", @typeName(c_ulong)},
|
||||
.{"wl_display", "opaque {}"},
|
||||
.{"wl_surface", "opaque {}"},
|
||||
.{"HINSTANCE", "std.os.HINSTANCE"},
|
||||
.{"HWND", "*opaque {}"},
|
||||
.{"HMONITOR", "*opaque {}"},
|
||||
.{"HANDLE", "std.os.HANDLE"},
|
||||
.{"SECURITY_ATTRIBUTES", "std.os.SECURITY_ATTRIBUTES"},
|
||||
.{"DWORD", "std.os.DWORD"},
|
||||
.{"LPCWSTR", "std.os.LPCWSTR"},
|
||||
.{"xcb_connection_t", "opaque {}"},
|
||||
.{"xcb_visualid_t", @typeName(u32)},
|
||||
.{"xcb_window_t", @typeName(u32)},
|
||||
.{"zx_handle_t", @typeName(u32)},
|
||||
.{"_screen_context", "opaque {}"},
|
||||
.{"_screen_window", "opaque {}"},
|
||||
.{ "Display", "opaque {}" },
|
||||
.{ "VisualID", @typeName(c_uint) },
|
||||
.{ "Window", @typeName(c_ulong) },
|
||||
.{ "RROutput", @typeName(c_ulong) },
|
||||
.{ "wl_display", "opaque {}" },
|
||||
.{ "wl_surface", "opaque {}" },
|
||||
.{ "HINSTANCE", "std.os.windows.HINSTANCE" },
|
||||
.{ "HWND", "std.os.windows.HWND" },
|
||||
.{ "HMONITOR", "*opaque {}" },
|
||||
.{ "HANDLE", "std.os.windows.HANDLE" },
|
||||
.{ "SECURITY_ATTRIBUTES", "std.os.SECURITY_ATTRIBUTES" },
|
||||
.{ "DWORD", "std.os.windows.DWORD" },
|
||||
.{ "LPCWSTR", "std.os.windows.LPCWSTR" },
|
||||
.{ "xcb_connection_t", "opaque {}" },
|
||||
.{ "xcb_visualid_t", @typeName(u32) },
|
||||
.{ "xcb_window_t", @typeName(u32) },
|
||||
.{ "zx_handle_t", @typeName(u32) },
|
||||
.{ "_screen_context", "opaque {}" },
|
||||
.{ "_screen_window", "opaque {}" },
|
||||
});
|
||||
|
||||
fn eqlIgnoreCase(lhs: []const u8, rhs: []const u8) bool {
|
||||
@@ -122,7 +123,7 @@ fn eqlIgnoreCase(lhs: []const u8, rhs: []const u8) bool {
|
||||
}
|
||||
|
||||
pub fn trimVkNamespace(id: []const u8) []const u8 {
|
||||
const prefixes = [_][]const u8{"VK_", "vk", "Vk", "PFN_vk"};
|
||||
const prefixes = [_][]const u8{ "VK_", "vk", "Vk", "PFN_vk" };
|
||||
for (prefixes) |prefix| {
|
||||
if (mem.startsWith(u8, id, prefix)) {
|
||||
return id[prefix.len..];
|
||||
@@ -136,7 +137,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
return struct {
|
||||
const Self = @This();
|
||||
const WriteError = WriterType.Error;
|
||||
const RenderTypeInfoError = WriteError || std.fmt.ParseIntError || error { OutOfMemory, InvalidRegistry };
|
||||
const RenderTypeInfoError = WriteError || std.fmt.ParseIntError || error{ OutOfMemory, InvalidRegistry };
|
||||
|
||||
const BitflagName = struct {
|
||||
/// Name without FlagBits, so VkSurfaceTransformFlagBitsKHR
|
||||
@@ -176,16 +177,12 @@ fn Renderer(comptime WriterType: type) type {
|
||||
};
|
||||
|
||||
writer: WriterType,
|
||||
allocator: *Allocator,
|
||||
allocator: Allocator,
|
||||
registry: *const reg.Registry,
|
||||
id_renderer: *IdRenderer,
|
||||
declarations_by_name: std.StringHashMap(*const reg.DeclarationType),
|
||||
|
||||
fn init(writer: WriterType, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
|
||||
const tags = try allocator.alloc([]const u8, registry.tags.len);
|
||||
errdefer allocator.free(tags);
|
||||
for (tags) |*tag, i| tag.* = registry.tags[i].name;
|
||||
|
||||
fn init(writer: WriterType, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
|
||||
var declarations_by_name = std.StringHashMap(*const reg.DeclarationType).init(allocator);
|
||||
errdefer declarations_by_name.deinit();
|
||||
|
||||
@@ -195,7 +192,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
|
||||
result.entry.value = &decl.decl_type;
|
||||
result.value_ptr.* = &decl.decl_type;
|
||||
}
|
||||
|
||||
return Self{
|
||||
@@ -209,12 +206,10 @@ fn Renderer(comptime WriterType: type) type {
|
||||
|
||||
fn deinit(self: *Self) void {
|
||||
self.declarations_by_name.deinit();
|
||||
self.allocator.free(self.id_renderer.tags);
|
||||
self.id_renderer.deinit();
|
||||
}
|
||||
|
||||
fn writeIdentifier(self: Self, id: []const u8) !void {
|
||||
try self.id_renderer.render(self.writer, id);
|
||||
try id_render.writeIdentifier(self.writer, id);
|
||||
}
|
||||
|
||||
fn writeIdentifierWithCase(self: *Self, case: CaseStyle, id: []const u8) !void {
|
||||
@@ -242,7 +237,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
}
|
||||
|
||||
fn extractBitflagFieldName(self: Self, bitflag_name: BitflagName, field_name: []const u8) ![]const u8 {
|
||||
fn extractBitflagFieldName(bitflag_name: BitflagName, field_name: []const u8) ![]const u8 {
|
||||
var flag_it = id_render.SegmentIterator.init(bitflag_name.base_name);
|
||||
var field_it = id_render.SegmentIterator.init(field_name);
|
||||
|
||||
@@ -290,19 +285,21 @@ fn Renderer(comptime WriterType: type) type {
|
||||
|
||||
fn isFlags(self: Self, name: []const u8) bool {
|
||||
const tag = self.id_renderer.getAuthorTag(name);
|
||||
const base_name = if (tag) |tag_name| name[0 .. name.len - tag_name.len] else name;
|
||||
|
||||
const tagless_name = if (tag) |tag_name| name[0 .. name.len - tag_name.len] else name;
|
||||
const base_name = std.mem.trimRight(u8, tagless_name, "0123456789");
|
||||
return mem.endsWith(u8, base_name, "Flags");
|
||||
}
|
||||
|
||||
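The isFlags change above trims trailing digits before testing the suffix, so 64-bit flag names such as VkAccessFlags2 (with the author tag already stripped) are also classified as flags. A standalone sketch of the check; `isFlagsName` is an illustrative free function rather than the member function in the source:

const std = @import("std");
const mem = std.mem;

fn isFlagsName(tagless_name: []const u8) bool {
    const base_name = mem.trimRight(u8, tagless_name, "0123456789");
    return mem.endsWith(u8, base_name, "Flags");
}

test "Flags2-style names are recognized" {
    try std.testing.expect(isFlagsName("VkAccessFlags2"));
    try std.testing.expect(!isFlagsName("VkImageCreateInfo"));
}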
fn containerHasField(self: Self, container: *const reg.Container, field_name: []const u8) bool {
|
||||
for (container.fields) |field| {
|
||||
if (mem.eql(u8, field, field_name)) {
|
||||
return true;
|
||||
fn resolveDeclaration(self: Self, start_name: []const u8) ?*const reg.DeclarationType {
|
||||
var name = start_name;
|
||||
return while (true) {
|
||||
const decl = self.declarations_by_name.get(name) orelse return null;
|
||||
if (decl.* != .alias) {
|
||||
break decl;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
name = decl.alias.name;
|
||||
} else unreachable;
|
||||
}
|
||||
|
||||
fn isInOutPointer(self: Self, ptr: reg.Pointer) !bool {
|
||||
@@ -310,17 +307,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
return false;
|
||||
}
|
||||
|
||||
var name = ptr.child.name;
|
||||
|
||||
const decl = while (true) {
|
||||
const decl = self.declarations_by_name.get(name) orelse return error.InvalidRegistry;
|
||||
if (decl.* != .alias) {
|
||||
break decl;
|
||||
}
|
||||
|
||||
name = decl.alias.name;
|
||||
} else unreachable;
|
||||
|
||||
const decl = self.resolveDeclaration(ptr.child.name) orelse return error.InvalidRegistry;
|
||||
if (decl.* != .container) {
|
||||
return false;
|
||||
}
|
||||
@@ -373,7 +360,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
},
|
||||
.name => |name| {
|
||||
if ((try self.extractBitflagName(param.param_type.name)) != null or self.isFlags(param.param_type.name)) {
|
||||
if ((try self.extractBitflagName(name)) != null or self.isFlags(name)) {
|
||||
return .bitflags;
|
||||
}
|
||||
},
|
||||
@@ -387,20 +374,20 @@ fn Renderer(comptime WriterType: type) type {
|
||||
return .other;
|
||||
}
|
||||
|
||||
fn classifyCommandDispatch(self: Self, name: []const u8, command: reg.Command) CommandDispatchType {
|
||||
fn classifyCommandDispatch(name: []const u8, command: reg.Command) CommandDispatchType {
|
||||
const device_handles = std.ComptimeStringMap(void, .{
|
||||
.{"VkDevice", {}},
|
||||
.{"VkCommandBuffer", {}},
|
||||
.{"VkQueue", {}},
|
||||
.{ "VkDevice", {} },
|
||||
.{ "VkCommandBuffer", {} },
|
||||
.{ "VkQueue", {} },
|
||||
});
|
||||
|
||||
const override_functions = std.ComptimeStringMap(CommandDispatchType, .{
|
||||
.{"vkGetInstanceProcAddr", .base},
|
||||
.{"vkCreateInstance", .base},
|
||||
.{"vkEnumerateInstanceLayerProperties", .base},
|
||||
.{"vkEnumerateInstanceExtensionProperties", .base},
|
||||
.{"vkEnumerateInstanceVersion", .base},
|
||||
.{"vkGetDeviceProcAddr", .instance},
|
||||
.{ "vkGetInstanceProcAddr", .base },
|
||||
.{ "vkCreateInstance", .base },
|
||||
.{ "vkEnumerateInstanceLayerProperties", .base },
|
||||
.{ "vkEnumerateInstanceExtensionProperties", .base },
|
||||
.{ "vkEnumerateInstanceVersion", .base },
|
||||
.{ "vkGetDeviceProcAddr", .instance },
|
||||
});
|
||||
|
||||
if (override_functions.get(name)) |dispatch_type| {
|
||||
@@ -420,7 +407,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
fn render(self: *Self) !void {
|
||||
try self.renderCopyright();
|
||||
try self.writer.writeAll(preamble);
|
||||
|
||||
for (self.registry.api_constants) |api_constant| {
|
||||
@@ -428,7 +414,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
try self.renderDecl(decl);
|
||||
try self.renderDecl(decl);
|
||||
}
|
||||
|
||||
try self.renderCommandPtrs();
|
||||
@@ -436,13 +422,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.renderWrappers();
|
||||
}
|
||||
|
||||
fn renderCopyright(self: *Self) !void {
|
||||
var it = mem.split(self.registry.copyright, "\n");
|
||||
while (it.next()) |line| {
|
||||
try self.writer.print("// {s}\n", .{line});
|
||||
}
|
||||
}
|
||||
|
||||
fn renderApiConstant(self: *Self, api_constant: reg.ApiConstant) !void {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderName(api_constant.name);
|
||||
@@ -467,11 +446,11 @@ fn Renderer(comptime WriterType: type) type {
|
||||
|
||||
fn renderApiConstantExpr(self: *Self, expr: []const u8) !void {
|
||||
const adjusted_expr = if (expr.len > 2 and expr[0] == '(' and expr[expr.len - 1] == ')')
|
||||
expr[1 .. expr.len - 1]
|
||||
else
|
||||
expr;
|
||||
expr[1 .. expr.len - 1]
|
||||
else
|
||||
expr;
|
||||
|
||||
var tokenizer = cparse.CTokenizer{.source = adjusted_expr};
|
||||
var tokenizer = cparse.CTokenizer{ .source = adjusted_expr };
|
||||
var peeked: ?cparse.Token = null;
|
||||
while (true) {
|
||||
const tok = peeked orelse (try tokenizer.next()) orelse break;
|
||||
@@ -507,7 +486,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
},
|
||||
.dot => {
|
||||
const decimal = (try tokenizer.next()) orelse return error.InvalidConstantExpr;
|
||||
try self.writer.print("@as(f32, {s}.{s})", .{tok.text, decimal.text});
|
||||
try self.writer.print("@as(f32, {s}.{s})", .{ tok.text, decimal.text });
|
||||
|
||||
const f = (try tokenizer.next()) orelse return error.InvalidConstantExpr;
|
||||
if (f.kind != .id or f.text.len != 1 or (f.text[0] != 'f' and f.text[0] != 'F')) {
|
||||
@@ -536,10 +515,10 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll(zig_name);
|
||||
return;
|
||||
} else if (try self.extractBitflagName(name)) |bitflag_name| {
|
||||
try self.writeIdentifierFmt("{s}Flags{s}{s}", .{
|
||||
try self.writeIdentifierFmt("{s}Flags{s}{s}", .{
|
||||
trimVkNamespace(bitflag_name.base_name),
|
||||
@as([]const u8, if (bitflag_name.revision) |revision| revision else ""),
|
||||
@as([]const u8, if (bitflag_name.tag) |tag| tag else "")
|
||||
@as([]const u8, if (bitflag_name.tag) |tag| tag else ""),
|
||||
});
|
||||
return;
|
||||
} else if (mem.startsWith(u8, name, "vk")) {
|
||||
@@ -579,7 +558,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writeIdentifierFmt("{s}Flags{s}{s}", .{
|
||||
trimVkNamespace(bitflag_name.base_name),
|
||||
@as([]const u8, if (bitflag_name.revision) |revision| revision else ""),
|
||||
@as([]const u8, if (bitflag_name.tag) |tag| tag else "")
|
||||
@as([]const u8, if (bitflag_name.tag) |tag| tag else ""),
|
||||
});
|
||||
try self.writer.writeAll(".IntType");
|
||||
break :blk;
|
||||
@@ -618,7 +597,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
if (child_is_void) {
|
||||
try self.writer.writeAll("c_void");
|
||||
try self.writer.writeAll("anyopaque");
|
||||
} else {
|
||||
try self.renderTypeInfo(pointer.child.*);
|
||||
}
|
||||
@@ -680,7 +659,8 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
} else {
|
||||
try self.renderTypeInfo(field.field_type);
|
||||
try self.renderContainerDefaultField(container, field);
|
||||
try self.renderContainerDefaultField(name, container, field);
|
||||
try self.renderContainerFieldAlignment(field);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
}
|
||||
@@ -688,7 +668,22 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn renderContainerDefaultField(self: *Self, container: reg.Container, field: reg.Container.Field) !void {
|
||||
fn renderContainerFieldAlignment(self: *Self, field: reg.Container.Field) !void {
|
||||
// Flags structures need to explicitly get their proper alignment: alignOf Flags for 32-bit flags, and alignOf Flag64 for 64-bit flags.
|
||||
const field_type_name = switch (field.field_type) {
|
||||
.name => |name| name,
|
||||
else => return,
|
||||
};
|
||||
const decl = self.resolveDeclaration(field_type_name) orelse return;
|
||||
switch (decl.*) {
|
||||
.bitmask => |mask| {
|
||||
try self.writer.print(" align(@alignOf({s}))", .{try bitmaskFlagsType(mask.bitwidth)});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn renderContainerDefaultField(self: *Self, name: []const u8, container: reg.Container, field: reg.Container.Field) !void {
|
||||
if (mem.eql(u8, field.name, "pNext")) {
|
||||
try self.writer.writeAll(" = null");
|
||||
} else if (mem.eql(u8, field.name, "sType")) {
|
||||
@@ -702,45 +697,26 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
try self.writer.writeAll(" = .");
|
||||
try self.writeIdentifierWithCase(.snake, stype["VK_STRUCTURE_TYPE_".len ..]);
|
||||
try self.writeIdentifierWithCase(.snake, stype["VK_STRUCTURE_TYPE_".len..]);
|
||||
} else if (field.field_type == .name and !container.is_union and mem.eql(u8, "VkBool32", field.field_type.name) and isFeatureStruct(name, container.extends)) {
|
||||
try self.writer.writeAll(" = FALSE");
|
||||
}
|
||||
}
|
||||
|
||||
fn isFeatureStruct(name: []const u8, maybe_extends: ?[]const []const u8) bool {
|
||||
if (std.mem.eql(u8, name, "VkPhysicalDeviceFeatures")) return true;
|
||||
if (maybe_extends) |extends| {
|
||||
return for (extends) |extend| {
|
||||
if (mem.eql(u8, extend, "VkDeviceCreateInfo")) break true;
|
||||
} else false;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
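The new isFeatureStruct check above means VkBool32 members of feature structs (VkPhysicalDeviceFeatures and anything that extends VkDeviceCreateInfo) are rendered with `= FALSE` defaults. A hedged usage sketch against the generated module; the import path and the particular struct chosen are assumptions:

const vk = @import("vk.zig");

// Only the features being enabled need to be written out; every other
// boolean member, plus s_type and p_next, falls back to its default.
const enabled_features = vk.PhysicalDeviceVulkan12Features{
    .buffer_device_address = vk.TRUE,
};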
fn renderEnumFieldName(self: *Self, name: []const u8, field_name: []const u8) !void {
|
||||
try self.writeIdentifierWithCase(.snake, try self.extractEnumFieldName(name, field_name));
|
||||
}
|
||||
|
||||
fn renderEnumerationValue(self: *Self, enum_name: []const u8, enumeration: reg.Enum, value: reg.Enum.Value) !void {
|
||||
var current_value = value;
|
||||
var maybe_alias_of: ?[]const u8 = null;
|
||||
|
||||
while (true) {
|
||||
switch (current_value) {
|
||||
.int => |int| try self.writer.print(" = {}, ", .{int}),
|
||||
.bitpos => |pos| try self.writer.print(" = 1 << {}, ", .{pos}),
|
||||
.bit_vector => |bv| try self.writer.print("= 0x{X}, ", .{bv}),
|
||||
.alias => |alias| {
|
||||
// Find the alias
|
||||
current_value = for (enumeration.fields) |field| {
|
||||
if (mem.eql(u8, field.name, alias.name)) {
|
||||
maybe_alias_of = field.name;
|
||||
break field.value;
|
||||
}
|
||||
} else return error.InvalidRegistry; // There is no alias
|
||||
continue;
|
||||
},
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (maybe_alias_of) |alias_of| {
|
||||
try self.writer.writeAll("// alias of ");
|
||||
try self.renderEnumFieldName(enum_name, alias_of);
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
}
|
||||
|
||||
fn renderEnumeration(self: *Self, name: []const u8, enumeration: reg.Enum) !void {
|
||||
if (enumeration.is_bitmask) {
|
||||
try self.renderBitmaskBits(name, enumeration);
|
||||
@@ -749,17 +725,37 @@ fn Renderer(comptime WriterType: type) type {

try self.writer.writeAll("pub const ");
try self.renderName(name);
try self.writer.writeAll(" = extern enum(i32) {");
try self.writer.writeAll(" = enum(i32) {");

for (enumeration.fields) |field| {
if (field.value == .alias and field.value.alias.is_compat_alias)
if (field.value == .alias)
continue;

try self.renderEnumFieldName(name, field.name);
try self.renderEnumerationValue(name, enumeration, field.value);
switch (field.value) {
.int => |int| try self.writer.print(" = {}, ", .{int}),
.bitpos => |pos| try self.writer.print(" = 1 << {}, ", .{pos}),
.bit_vector => |bv| try self.writer.print("= 0x{X}, ", .{bv}),
.alias => unreachable,
}
}

try self.writer.writeAll("_,};\n");
try self.writer.writeAll("_,");

for (enumeration.fields) |field| {
if (field.value != .alias or field.value.alias.is_compat_alias)
continue;

try self.writer.writeAll("pub const ");
try self.renderEnumFieldName(name, field.name);
try self.writer.writeAll(" = ");
try self.renderName(name);
try self.writer.writeByte('.');
try self.renderEnumFieldName(name, field.value.alias.name);
try self.writer.writeAll(";\n");
}

try self.writer.writeAll("};\n");
}
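With the change above, enums are no longer `extern enum`, and alias fields are emitted as public constants inside the enum instead of as duplicate tags. A hedged sketch of what the generated output now looks like; VkImageLayout is an arbitrary example and the field list is abbreviated:

pub const ImageLayout = enum(i32) {
    general = 1,
    depth_stencil_attachment_optimal = 3,
    _,

    // promoted aliases become constants; compat aliases are skipped entirely:
    pub const depth_stencil_attachment_optimal_khr = ImageLayout.depth_stencil_attachment_optimal;
};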
fn bitmaskFlagsType(bitwidth: u8) ![]const u8 {
|
||||
@@ -771,15 +767,10 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
fn renderUsingFlagsMixin(self: *Self, name: []const u8, bitwidth: u8) !void {
|
||||
const flags_type = switch (bitwidth) {
|
||||
32 => "Flags",
|
||||
64 => "Flags64",
|
||||
else => return error.InvalidRegistry,
|
||||
};
|
||||
|
||||
const flags_type = try bitmaskFlagsType(bitwidth);
|
||||
try self.writer.writeAll("pub usingnamespace FlagsMixin(");
|
||||
try self.renderName(name);
|
||||
try self.writer.print(", {s});\n", .{ flags_type });
|
||||
try self.writer.print(", {s});\n", .{flags_type});
|
||||
}
|
||||
|
||||
fn renderBitmaskBits(self: *Self, name: []const u8, bits: reg.Enum) !void {
|
||||
@@ -791,7 +782,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
const flags_type = try bitmaskFlagsType(bits.bitwidth);
|
||||
|
||||
if (bits.fields.len == 0) {
|
||||
try self.writer.print("_reserved_bits: {s} = 0,", .{ flags_type });
|
||||
try self.writer.print("_reserved_bits: {s} = 0,", .{flags_type});
|
||||
} else {
|
||||
var flags_by_bitpos = [_]?[]const u8{null} ** 64;
|
||||
for (bits.fields) |field| {
|
||||
@@ -800,24 +791,20 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
}
|
||||
|
||||
for (flags_by_bitpos[0.. bits.bitwidth]) |maybe_flag_name, bitpos| {
|
||||
for (flags_by_bitpos[0..bits.bitwidth]) |maybe_flag_name, bitpos| {
|
||||
if (maybe_flag_name) |flag_name| {
|
||||
const field_name = try self.extractBitflagFieldName(bitflag_name, flag_name);
|
||||
const field_name = try extractBitflagFieldName(bitflag_name, flag_name);
|
||||
try self.writeIdentifierWithCase(.snake, field_name);
|
||||
} else {
|
||||
try self.writer.print("_reserved_bit_{}", .{bitpos});
|
||||
}
|
||||
|
||||
try self.writer.writeAll(": bool ");
|
||||
if (bitpos == 0) { // Force alignment to integer boundaries
|
||||
try self.writer.writeAll("align(@alignOf(Flags)) ");
|
||||
}
|
||||
try self.writer.writeAll("= false, ");
|
||||
try self.writer.writeAll(": bool = false,");
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll("pub usingnamespace FlagsMixin(");
|
||||
try self.renderName(name);
|
||||
try self.writer.print(", {s});\n}};\n", .{ flags_type });
|
||||
try self.writer.print(", {s});\n}};\n", .{flags_type});
|
||||
}
|
||||
|
||||
fn renderBitmask(self: *Self, name: []const u8, bitmask: reg.Bitmask) !void {
|
||||
@@ -833,15 +820,13 @@ fn Renderer(comptime WriterType: type) type {
|
||||
\\ = packed struct {{
|
||||
\\_reserved_bits: {s} = 0,
|
||||
\\pub usingnamespace FlagsMixin(
|
||||
, .{ flags_type }
|
||||
);
|
||||
, .{flags_type});
|
||||
try self.renderName(name);
|
||||
try self.writer.print(
|
||||
\\, {s});
|
||||
\\}};
|
||||
\\
|
||||
, .{ flags_type }
|
||||
);
|
||||
, .{flags_type});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -850,7 +835,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderName(name);
|
||||
try self.writer.print(" = extern enum({s}) {{null_handle = 0, _}};\n", .{backing_type});
|
||||
try self.writer.print(" = enum({s}) {{null_handle = 0, _}};\n", .{backing_type});
|
||||
}
|
||||
|
||||
fn renderAlias(self: *Self, name: []const u8, alias: reg.Alias) !void {
|
||||
@@ -931,32 +916,185 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.writeIdentifierWithCase(.snake, trimVkNamespace(ext.name));
|
||||
try self.writer.writeAll("= Info {\n");
|
||||
try self.writer.print(".name = \"{s}\", .version = {},", .{ext.name, ext.version});
|
||||
try self.writer.print(".name = \"{s}\", .version = {},", .{ ext.name, ext.version });
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn renderWrappers(self: *Self) !void {
|
||||
try self.renderWrappersOfDispatchType("BaseWrapper", .base);
|
||||
try self.renderWrappersOfDispatchType("InstanceWrapper", .instance);
|
||||
try self.renderWrappersOfDispatchType("DeviceWrapper", .device);
|
||||
try self.writer.writeAll(
|
||||
\\pub fn CommandFlagsMixin(comptime CommandFlags: type) type {
|
||||
\\ return struct {
|
||||
\\ pub fn merge(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) or @field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn intersect(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) and @field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn complement(self: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = !@field(self, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn subtract(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) and !@field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn contains(lhs: CommandFlags, rhs: CommandFlags) bool {
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ if (!@field(lhs, field.name) and @field(rhs, field.name)) {
|
||||
\\ return false;
|
||||
\\ }
|
||||
\\ }
|
||||
\\ return true;
|
||||
\\ }
|
||||
\\ };
|
||||
\\}
|
||||
\\
|
||||
);
|
||||
try self.renderWrappersOfDispatchType(.base);
|
||||
try self.renderWrappersOfDispatchType(.instance);
|
||||
try self.renderWrappersOfDispatchType(.device);
|
||||
}
|
||||
|
||||
fn renderWrappersOfDispatchType(self: *Self, name: []const u8, dispatch_type: CommandDispatchType) !void {
|
||||
fn renderWrappersOfDispatchType(self: *Self, dispatch_type: CommandDispatchType) !void {
|
||||
const name = switch (dispatch_type) {
|
||||
.base => "Base",
|
||||
.instance => "Instance",
|
||||
.device => "Device",
|
||||
};
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn {s}(comptime Self: type) type {{
|
||||
\\ return struct {{
|
||||
\\pub const {0s}CommandFlags = packed struct {{
|
||||
\\
|
||||
, .{name}
|
||||
);
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll(" ");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(": bool = false,\n");
|
||||
}
|
||||
}
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn CmdType(comptime tag: std.meta.FieldEnum({0s}CommandFlags)) type {{
|
||||
\\ return switch (tag) {{
|
||||
\\
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll((" " ** 8) ++ ".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(" => ");
|
||||
try self.renderCommandPtrName(decl.name);
|
||||
try self.writer.writeAll(",\n");
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll(" };\n}");
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn cmdName(tag: std.meta.FieldEnum({0s}CommandFlags)) [:0]const u8 {{
|
||||
\\ return switch(tag) {{
|
||||
\\
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll((" " ** 8) ++ ".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.print(
|
||||
\\ => "{s}",
|
||||
\\
|
||||
, .{decl.name});
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll(" };\n}");
|
||||
|
||||
try self.writer.print(
|
||||
\\ pub usingnamespace CommandFlagsMixin({s}CommandFlags);
|
||||
\\}};
|
||||
\\
|
||||
, .{name});
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn {0s}Wrapper(comptime cmds: {0s}CommandFlags) type {{
|
||||
\\ return struct {{
|
||||
\\ dispatch: Dispatch,
|
||||
\\
|
||||
\\ const Self = @This();
|
||||
\\ pub const commands = cmds;
|
||||
\\ pub const Dispatch = blk: {{
|
||||
\\ @setEvalBranchQuota(10_000);
|
||||
\\ const Type = std.builtin.Type;
|
||||
\\ const fields_len = fields_len: {{
|
||||
\\ var fields_len = 0;
|
||||
\\ for (@typeInfo({0s}CommandFlags).Struct.fields) |field| {{
|
||||
\\ fields_len += @boolToInt(@field(cmds, field.name));
|
||||
\\ }}
|
||||
\\ break :fields_len fields_len;
|
||||
\\ }};
|
||||
\\ var fields: [fields_len]Type.StructField = undefined;
|
||||
\\ var i: usize = 0;
|
||||
\\ for (@typeInfo({0s}CommandFlags).Struct.fields) |field| {{
|
||||
\\ if (@field(cmds, field.name)) {{
|
||||
\\ const field_tag = std.enums.nameCast(std.meta.FieldEnum({0s}CommandFlags), field.name);
|
||||
\\ const PfnType = {0s}CommandFlags.CmdType(field_tag);
|
||||
\\ fields[i] = .{{
|
||||
\\ .name = {0s}CommandFlags.cmdName(field_tag),
|
||||
\\ .field_type = PfnType,
|
||||
\\ .default_value = null,
|
||||
\\ .is_comptime = false,
|
||||
\\ .alignment = @alignOf(PfnType),
|
||||
\\ }};
|
||||
\\ i += 1;
|
||||
\\ }}
|
||||
\\ }}
|
||||
\\ break :blk @Type(.{{
|
||||
\\ .Struct = .{{
|
||||
\\ .layout = .Auto,
|
||||
\\ .fields = &fields,
|
||||
\\ .decls = &[_]std.builtin.Type.Declaration{{}},
|
||||
\\ .is_tuple = false,
|
||||
\\ }},
|
||||
\\ }});
|
||||
\\ }};
|
||||
\\
|
||||
, .{name});
|
||||
|
||||
try self.renderWrapperLoader(dispatch_type);
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
if (decl.decl_type == .command) {
|
||||
const command = decl.decl_type.command;
|
||||
if (self.classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.renderWrapper(decl.name, decl.decl_type.command);
|
||||
}
|
||||
}
|
||||
@@ -972,35 +1110,42 @@ fn Renderer(comptime WriterType: type) type {
|
||||
.device => "device: Device, loader: anytype",
|
||||
};
|
||||
|
||||
const loader_first_param = switch (dispatch_type) {
|
||||
.base => ".null_handle, ",
|
||||
.instance => "instance, ",
|
||||
.device => "device, ",
|
||||
const loader_first_arg = switch (dispatch_type) {
|
||||
.base => "Instance.null_handle",
|
||||
.instance => "instance",
|
||||
.device => "device",
|
||||
};
|
||||
|
||||
@setEvalBranchQuota(2000);
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn load({s}) !Self {{
|
||||
\\pub fn load({[params]s}) error{{CommandLoadFailure}}!Self {{
|
||||
\\ var self: Self = undefined;
|
||||
\\ inline for (std.meta.fields(Self)) |field| {{
|
||||
\\ inline for (std.meta.fields(Dispatch)) |field| {{
|
||||
\\ const name = @ptrCast([*:0]const u8, field.name ++ "\x00");
|
||||
\\ const cmd_ptr = loader({s}name) orelse return error.InvalidCommand;
|
||||
\\ @field(self, field.name) = @ptrCast(field.field_type, cmd_ptr);
|
||||
\\ const cmd_ptr = loader({[first_arg]s}, name) orelse return error.CommandLoadFailure;
|
||||
\\ @field(self.dispatch, field.name) = @ptrCast(field.field_type, cmd_ptr);
|
||||
\\ }}
|
||||
\\ return self;
|
||||
\\}}
|
||||
\\
|
||||
, .{params, loader_first_param}
|
||||
);
|
||||
\\pub fn loadNoFail({[params]s}) Self {{
|
||||
\\ var self: Self = undefined;
|
||||
\\ inline for (std.meta.fields(Dispatch)) |field| {{
|
||||
\\ const name = @ptrCast([*:0]const u8, field.name ++ "\x00");
|
||||
\\ const cmd_ptr = loader({[first_arg]s}, name) orelse undefined;
|
||||
\\ @field(self.dispatch, field.name) = @ptrCast(field.field_type, cmd_ptr);
|
||||
\\ }}
|
||||
\\ return self;
|
||||
\\}}
|
||||
, .{ .params = params, .first_arg = loader_first_arg });
|
||||
}
|
||||
|
||||
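The loader hunk above collapses load failures into a single `error.CommandLoadFailure` and adds `loadNoFail`; the requested command set is a comptime flags struct and the resolved function pointers live in the generated `dispatch` field. A hedged usage sketch: vk.zig is the generated module, and the flag names shown depend on which commands you actually request:

const vk = @import("vk.zig");

const BaseDispatch = vk.BaseWrapper(.{
    .createInstance = true,
    .enumerateInstanceVersion = true,
});

pub fn initBase(loader: anytype) error{CommandLoadFailure}!BaseDispatch {
    // load() resolves each requested command through `loader` and fails with
    // error.CommandLoadFailure if any of them is missing; loadNoFail() leaves
    // missing entries undefined instead.
    return BaseDispatch.load(loader);
}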
fn derefName(name: []const u8) []const u8 {
|
||||
var it = id_render.SegmentIterator.init(name);
|
||||
return if (mem.eql(u8, it.next().?, "p"))
|
||||
name[1..]
|
||||
else
|
||||
name;
|
||||
name[1..]
|
||||
else
|
||||
name;
|
||||
}
|
||||
|
||||
fn renderWrapperPrototype(self: *Self, name: []const u8, command: reg.Command, returns: []const ReturnValue) !void {
|
||||
@@ -1009,32 +1154,22 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll("(self: Self, ");
|
||||
|
||||
for (command.params) |param| {
|
||||
switch (try self.classifyParam(param)) {
|
||||
.in_pointer => {
|
||||
// Remove one pointer level
|
||||
try self.writeIdentifierWithCase(.snake, derefName(param.name));
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type.pointer.child.*);
|
||||
},
|
||||
.out_pointer => continue, // Return value
|
||||
.in_out_pointer,
|
||||
.bitflags, // Special stuff handled in renderWrapperCall
|
||||
.buffer_len,
|
||||
.mut_buffer_len,
|
||||
.other => {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type);
|
||||
},
|
||||
// This parameter is returned instead.
|
||||
if ((try self.classifyParam(param)) == .out_pointer) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
|
||||
try self.writer.writeAll(") ");
|
||||
|
||||
if (command.return_type.* == .name and mem.eql(u8, command.return_type.name, "VkResult")) {
|
||||
try self.renderErrorSet(command.error_codes);
|
||||
const returns_vk_result = command.return_type.* == .name and mem.eql(u8, command.return_type.name, "VkResult");
|
||||
if (returns_vk_result) {
|
||||
try self.renderErrorSetName(name);
|
||||
try self.writer.writeByte('!');
|
||||
}
|
||||
|
||||
@@ -1048,16 +1183,12 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
fn renderWrapperCall(self: *Self, name: []const u8, command: reg.Command, returns: []const ReturnValue) !void {
|
||||
try self.writer.writeAll("self.");
|
||||
try self.writer.writeAll("self.dispatch.");
|
||||
try self.writeIdentifier(name);
|
||||
try self.writer.writeAll("(");
|
||||
|
||||
for (command.params) |param| {
|
||||
switch (try self.classifyParam(param)) {
|
||||
.in_pointer => {
|
||||
try self.writer.writeByte('&');
|
||||
try self.writeIdentifierWithCase(.snake, derefName(param.name));
|
||||
},
|
||||
.out_pointer => {
|
||||
try self.writer.writeByte('&');
|
||||
if (returns.len > 1) {
|
||||
@@ -1069,10 +1200,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(".toInt()");
|
||||
},
|
||||
.in_out_pointer,
|
||||
.buffer_len,
|
||||
.mut_buffer_len,
|
||||
.other => {
|
||||
.in_pointer, .in_out_pointer, .buffer_len, .mut_buffer_len, .other => {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
},
|
||||
}
|
||||
@@ -1127,6 +1255,11 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writeIdentifierFmt("{s}Result", .{trimVkNamespace(command_name)});
|
||||
}
|
||||
|
||||
fn renderErrorSetName(self: *Self, name: []const u8) !void {
|
||||
try self.writeIdentifierWithCase(.title, trimVkNamespace(name));
|
||||
try self.writer.writeAll("Error");
|
||||
}
|
||||
|
||||
fn renderReturnStruct(self: *Self, command_name: []const u8, returns: []const ReturnValue) !void {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderReturnStructName(command_name);
|
||||
@@ -1136,16 +1269,13 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(ret.return_value_type);
|
||||
try self.writer.writeAll(", ");
|
||||
|
||||
}
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn renderWrapper(self: *Self, name: []const u8, command: reg.Command) !void {
|
||||
const returns_vk_result = command.return_type.* == .name
|
||||
and mem.eql(u8, command.return_type.name, "VkResult");
|
||||
const returns_void = command.return_type.* == .name
|
||||
and mem.eql(u8, command.return_type.name, "void");
|
||||
const returns_vk_result = command.return_type.* == .name and mem.eql(u8, command.return_type.name, "VkResult");
|
||||
const returns_void = command.return_type.* == .name and mem.eql(u8, command.return_type.name, "void");
|
||||
|
||||
const returns = try self.extractReturns(command);
|
||||
|
||||
@@ -1153,6 +1283,14 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.renderReturnStruct(name, returns);
|
||||
}
|
||||
|
||||
if (returns_vk_result) {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderErrorSetName(name);
|
||||
try self.writer.writeAll(" = ");
|
||||
try self.renderErrorSet(command.error_codes);
|
||||
try self.writer.writeAll(";\n");
|
||||
}
|
||||
|
||||
try self.renderWrapperPrototype(name, command, returns);
|
||||
|
||||
if (returns.len == 1 and returns[0].origin == .inner_return_value) {
|
||||
@@ -1222,13 +1360,13 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll(") {\n");
|
||||
|
||||
for (command.success_codes) |success| {
|
||||
try self.writer.writeByte('.');
|
||||
try self.writer.writeAll("Result.");
|
||||
try self.renderEnumFieldName("VkResult", success);
|
||||
try self.writer.writeAll(" => {},");
|
||||
}
|
||||
|
||||
for (command.error_codes) |err| {
|
||||
try self.writer.writeByte('.');
|
||||
try self.writer.writeAll("Result.");
|
||||
try self.renderEnumFieldName("VkResult", err);
|
||||
try self.writer.writeAll(" => return error.");
|
||||
try self.renderResultAsErrorName(err);
|
||||
@@ -1241,6 +1379,9 @@ fn Renderer(comptime WriterType: type) type {
|
||||
fn renderErrorSet(self: *Self, errors: []const []const u8) !void {
|
||||
try self.writer.writeAll("error{");
|
||||
for (errors) |name| {
|
||||
if (std.mem.eql(u8, name, "VK_ERROR_UNKNOWN")) {
|
||||
continue;
|
||||
}
|
||||
try self.renderResultAsErrorName(name);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
@@ -1250,7 +1391,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
fn renderResultAsErrorName(self: *Self, name: []const u8) !void {
|
||||
const error_prefix = "VK_ERROR_";
|
||||
if (mem.startsWith(u8, name, error_prefix)) {
|
||||
try self.writeIdentifierWithCase(.title, name[error_prefix.len ..]);
|
||||
try self.writeIdentifierWithCase(.title, name[error_prefix.len..]);
|
||||
} else {
|
||||
// Apparently some commands (VkAcquireProfilingLockInfoKHR) return
|
||||
// success codes as error...
|
||||
@@ -1260,7 +1401,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn render(writer: anytype, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
|
||||
pub fn render(writer: anytype, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
|
||||
var renderer = try Renderer(@TypeOf(writer)).init(writer, allocator, registry, id_renderer);
|
||||
defer renderer.deinit();
|
||||
try renderer.render();
|
||||
|
||||
@@ -3,37 +3,25 @@ const mem = std.mem;
const testing = std.testing;
const Allocator = mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const ArrayList = std.ArrayList;

pub const Attribute = struct {
name: []const u8,
value: []const u8
value: []const u8,
};

pub const Content = union(enum) {
CharData: []const u8,
Comment: []const u8,
Element: *Element
char_data: []const u8,
comment: []const u8,
element: *Element,
};

pub const Element = struct {
pub const AttributeList = ArrayList(*Attribute);
pub const ContentList = ArrayList(Content);

tag: []const u8,
attributes: AttributeList,
children: ContentList,
attributes: []Attribute = &.{},
children: []Content = &.{},

fn init(tag: []const u8, alloc: *Allocator) Element {
return .{
.tag = tag,
.attributes = AttributeList.init(alloc),
.children = ContentList.init(alloc),
};
}

pub fn getAttribute(self: *Element, attrib_name: []const u8) ?[]const u8 {
for (self.attributes.items) |child| {
pub fn getAttribute(self: Element, attrib_name: []const u8) ?[]const u8 {
for (self.attributes) |child| {
if (mem.eql(u8, child.name, attrib_name)) {
return child.value;
}
@@ -42,39 +30,40 @@ pub const Element = struct {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getCharData(self: *Element, child_tag: []const u8) ?[]const u8 {
|
||||
pub fn getCharData(self: Element, child_tag: []const u8) ?[]const u8 {
|
||||
const child = self.findChildByTag(child_tag) orelse return null;
|
||||
if (child.children.items.len != 1) {
|
||||
if (child.children.len != 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return switch (child.children.items[0]) {
|
||||
.CharData => |char_data| char_data,
|
||||
else => null
|
||||
return switch (child.children[0]) {
|
||||
.char_data => |char_data| char_data,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: *Element) ChildIterator {
|
||||
pub fn iterator(self: Element) ChildIterator {
|
||||
return .{
|
||||
.items = self.children.items,
|
||||
.items = self.children,
|
||||
.i = 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn elements(self: *Element) ChildElementIterator {
|
||||
pub fn elements(self: Element) ChildElementIterator {
|
||||
return .{
|
||||
.inner = self.iterator(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn findChildByTag(self: *Element, tag: []const u8) ?*Element {
|
||||
return self.findChildrenByTag(tag).next();
|
||||
pub fn findChildByTag(self: Element, tag: []const u8) ?*Element {
|
||||
var children = self.findChildrenByTag(tag);
|
||||
return children.next();
|
||||
}
|
||||
|
||||
pub fn findChildrenByTag(self: *Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
pub fn findChildrenByTag(self: Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
return .{
|
||||
.inner = self.elements(),
|
||||
.tag = tag
|
||||
.tag = tag,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -97,11 +86,11 @@ pub const Element = struct {
|
||||
|
||||
pub fn next(self: *ChildElementIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (child.* != .Element) {
|
||||
if (child.* != .element) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child.*.Element;
|
||||
return child.*.element;
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -126,15 +115,9 @@ pub const Element = struct {
};
};

pub const XmlDecl = struct {
version: []const u8,
encoding: ?[]const u8,
standalone: ?bool
};

pub const Document = struct {
arena: ArenaAllocator,
xml_decl: ?*XmlDecl,
xml_decl: ?*Element,
root: *Element,

pub fn deinit(self: Document) void {
@@ -143,26 +126,26 @@ pub const Document = struct {
}
};

const ParseContext = struct {
const Parser = struct {
source: []const u8,
offset: usize,
line: usize,
column: usize,

fn init(source: []const u8) ParseContext {
fn init(source: []const u8) Parser {
return .{
.source = source,
.offset = 0,
.line = 0,
.column = 0
.column = 0,
};
}

fn peek(self: *ParseContext) ?u8 {
fn peek(self: *Parser) ?u8 {
return if (self.offset < self.source.len) self.source[self.offset] else null;
}

fn consume(self: *ParseContext) !u8 {
fn consume(self: *Parser) !u8 {
if (self.offset < self.source.len) {
return self.consumeNoEof();
}
@@ -170,7 +153,7 @@ const ParseContext = struct {
return error.UnexpectedEof;
}

fn consumeNoEof(self: *ParseContext) u8 {
fn consumeNoEof(self: *Parser) u8 {
std.debug.assert(self.offset < self.source.len);
const c = self.source[self.offset];
self.offset += 1;
@@ -185,12 +168,12 @@ const ParseContext = struct {
return c;
}

fn eat(self: *ParseContext, char: u8) bool {
fn eat(self: *Parser, char: u8) bool {
self.expect(char) catch return false;
return true;
}

fn expect(self: *ParseContext, expected: u8) !void {
fn expect(self: *Parser, expected: u8) !void {
if (self.peek()) |actual| {
if (expected != actual) {
return error.UnexpectedCharacter;
@@ -203,15 +186,15 @@ const ParseContext = struct {
return error.UnexpectedEof;
}

fn eatStr(self: *ParseContext, text: []const u8) bool {
fn eatStr(self: *Parser, text: []const u8) bool {
self.expectStr(text) catch return false;
return true;
}

fn expectStr(self: *ParseContext, text: []const u8) !void {
fn expectStr(self: *Parser, text: []const u8) !void {
if (self.source.len < self.offset + text.len) {
return error.UnexpectedEof;
} else if (std.mem.startsWith(u8, self.source[self.offset ..], text)) {
} else if (mem.startsWith(u8, self.source[self.offset..], text)) {
var i: usize = 0;
while (i < text.len) : (i += 1) {
_ = self.consumeNoEof();
@@ -223,7 +206,7 @@ const ParseContext = struct {
return error.UnexpectedCharacter;
}

fn eatWs(self: *ParseContext) bool {
fn eatWs(self: *Parser) bool {
var ws = false;

while (self.peek()) |ch| {
@@ -232,75 +215,75 @@ const ParseContext = struct {
ws = true;
_ = self.consumeNoEof();
},
else => break
else => break,
}
}

return ws;
}

fn expectWs(self: *ParseContext) !void {
fn expectWs(self: *Parser) !void {
if (!self.eatWs()) return error.UnexpectedCharacter;
}

fn currentLine(self: ParseContext) []const u8 {
fn currentLine(self: Parser) []const u8 {
var begin: usize = 0;
if (mem.lastIndexOfScalar(u8, self.source[0 .. self.offset], '\n')) |prev_nl| {
if (mem.lastIndexOfScalar(u8, self.source[0..self.offset], '\n')) |prev_nl| {
begin = prev_nl + 1;
}

var end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse self.source.len;
return self.source[begin .. end];
return self.source[begin..end];
}
};
test "ParseContext" {
|
||||
test "xml: Parser" {
|
||||
{
|
||||
var ctx = ParseContext.init("I like pythons");
|
||||
testing.expectEqual(@as(?u8, 'I'), ctx.peek());
|
||||
testing.expectEqual(@as(u8, 'I'), ctx.consumeNoEof());
|
||||
testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
testing.expectEqual(@as(u8, ' '), try ctx.consume());
|
||||
var parser = Parser.init("I like pythons");
|
||||
try testing.expectEqual(@as(?u8, 'I'), parser.peek());
|
||||
try testing.expectEqual(@as(u8, 'I'), parser.consumeNoEof());
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
try testing.expectEqual(@as(u8, ' '), try parser.consume());
|
||||
|
||||
testing.expect(ctx.eat('l'));
|
||||
testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
testing.expectEqual(false, ctx.eat('a'));
|
||||
testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
try testing.expect(parser.eat('l'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eat('a'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
|
||||
try ctx.expect('i');
|
||||
testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
testing.expectError(error.UnexpectedCharacter, ctx.expect('a'));
|
||||
testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
try parser.expect('i');
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expect('a'));
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
|
||||
testing.expect(ctx.eatStr("ke"));
|
||||
testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
try testing.expect(parser.eatStr("ke"));
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
|
||||
testing.expect(ctx.eatWs());
|
||||
testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
testing.expectEqual(false, ctx.eatWs());
|
||||
testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expect(parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
|
||||
testing.expectEqual(false, ctx.eatStr("aaaaaaaaa"));
|
||||
testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expectEqual(false, parser.eatStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
|
||||
testing.expectError(error.UnexpectedEof, ctx.expectStr("aaaaaaaaa"));
|
||||
testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
testing.expectError(error.UnexpectedCharacter, ctx.expectStr("pytn"));
|
||||
testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try ctx.expectStr("python");
|
||||
testing.expectEqual(@as(?u8, 's'), ctx.peek());
|
||||
try testing.expectError(error.UnexpectedEof, parser.expectStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expectStr("pytn"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try parser.expectStr("python");
|
||||
try testing.expectEqual(@as(?u8, 's'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("");
|
||||
testing.expectEqual(ctx.peek(), null);
|
||||
testing.expectError(error.UnexpectedEof, ctx.consume());
|
||||
testing.expectEqual(ctx.eat('p'), false);
|
||||
testing.expectError(error.UnexpectedEof, ctx.expect('p'));
|
||||
var parser = Parser.init("");
|
||||
try testing.expectEqual(parser.peek(), null);
|
||||
try testing.expectError(error.UnexpectedEof, parser.consume());
|
||||
try testing.expectEqual(parser.eat('p'), false);
|
||||
try testing.expectError(error.UnexpectedEof, parser.expect('p'));
|
||||
}
|
||||
}
|
||||
|
||||

pub const ParseError = error {
pub const ParseError = error{
IllegalCharacter,
UnexpectedEof,
UnexpectedCharacter,
@@ -311,357 +294,345 @@ pub const ParseError = error {
InvalidStandaloneValue,
NonMatchingClosingTag,
InvalidDocument,
OutOfMemory
OutOfMemory,
};

pub fn parse(backing_allocator: *Allocator, source: []const u8) !Document {
var ctx = ParseContext.init(source);
return try parseDocument(&ctx, backing_allocator);
pub fn parse(backing_allocator: Allocator, source: []const u8) !Document {
var parser = Parser.init(source);
return try parseDocument(&parser, backing_allocator);
}

fn parseDocument(ctx: *ParseContext, backing_allocator: *Allocator) !Document {
fn parseDocument(parser: *Parser, backing_allocator: Allocator) !Document {
var doc = Document{
.arena = ArenaAllocator.init(backing_allocator),
.xml_decl = null,
.root = undefined
.root = undefined,
};

errdefer doc.deinit();

try trySkipComments(ctx, &doc.arena.allocator);
const allocator = doc.arena.allocator();

doc.xml_decl = try tryParseProlog(ctx, &doc.arena.allocator);
_ = ctx.eatWs();
try trySkipComments(ctx, &doc.arena.allocator);
try skipComments(parser, allocator);

doc.root = (try tryParseElement(ctx, &doc.arena.allocator)) orelse return error.InvalidDocument;
_ = ctx.eatWs();
try trySkipComments(ctx, &doc.arena.allocator);
doc.xml_decl = try parseElement(parser, allocator, .xml_decl);
_ = parser.eatWs();
try skipComments(parser, allocator);

if (ctx.peek() != null) return error.InvalidDocument;
doc.root = (try parseElement(parser, allocator, .element)) orelse return error.InvalidDocument;
_ = parser.eatWs();
try skipComments(parser, allocator);

if (parser.peek() != null) return error.InvalidDocument;

return doc;
}

fn parseAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
const quote = try ctx.consume();
fn parseAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
const quote = try parser.consume();
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;

const begin = ctx.offset;
const begin = parser.offset;

while (true) {
const c = ctx.consume() catch return error.UnclosedValue;
const c = parser.consume() catch return error.UnclosedValue;
if (c == quote) break;
}

const end = ctx.offset - 1;
const end = parser.offset - 1;

return try dupeAndUnescape(alloc, ctx.source[begin .. end]);
return try unescape(alloc, parser.source[begin..end]);
}

fn parseEqAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
_ = ctx.eatWs();
try ctx.expect('=');
_ = ctx.eatWs();
fn parseEqAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
_ = parser.eatWs();
try parser.expect('=');
_ = parser.eatWs();

return try parseAttrValue(ctx, alloc);
return try parseAttrValue(parser, alloc);
}

fn parseNameNoDupe(ctx: *ParseContext) ![]const u8 {
fn parseNameNoDupe(parser: *Parser) ![]const u8 {
// XML's spec on names is very long, so to make this easier
// we just take any character that is not special and not whitespace
const begin = ctx.offset;
const begin = parser.offset;

while (ctx.peek()) |ch| {
while (parser.peek()) |ch| {
switch (ch) {
' ', '\t', '\n', '\r' => break,
'&', '"', '\'', '<', '>', '?', '=', '/' => break,
else => _ = ctx.consumeNoEof()
else => _ = parser.consumeNoEof(),
}
}

const end = ctx.offset;
const end = parser.offset;
if (begin == end) return error.InvalidName;

return ctx.source[begin .. end];
return parser.source[begin..end];
}

fn tryParseCharData(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
const begin = ctx.offset;
fn parseCharData(parser: *Parser, alloc: Allocator) !?[]const u8 {
const begin = parser.offset;

while (ctx.peek()) |ch| {
while (parser.peek()) |ch| {
switch (ch) {
'<' => break,
else => _ = ctx.consumeNoEof()
else => _ = parser.consumeNoEof(),
}
}

const end = ctx.offset;
const end = parser.offset;
if (begin == end) return null;

return try dupeAndUnescape(alloc, ctx.source[begin .. end]);
return try unescape(alloc, parser.source[begin..end]);
}

fn parseContent(ctx: *ParseContext, alloc: *Allocator) ParseError!Content {
if (try tryParseCharData(ctx, alloc)) |cd| {
return Content{.CharData = cd};
} else if (try tryParseComment(ctx, alloc)) |comment| {
return Content{.Comment = comment};
} else if (try tryParseElement(ctx, alloc)) |elem| {
return Content{.Element = elem};
fn parseContent(parser: *Parser, alloc: Allocator) ParseError!Content {
if (try parseCharData(parser, alloc)) |cd| {
return Content{ .char_data = cd };
} else if (try parseComment(parser, alloc)) |comment| {
return Content{ .comment = comment };
} else if (try parseElement(parser, alloc, .element)) |elem| {
return Content{ .element = elem };
} else {
return error.UnexpectedCharacter;
}
}

fn tryParseAttr(ctx: *ParseContext, alloc: *Allocator) !?*Attribute {
const name = parseNameNoDupe(ctx) catch return null;
_ = ctx.eatWs();
try ctx.expect('=');
_ = ctx.eatWs();
const value = try parseAttrValue(ctx, alloc);
fn parseAttr(parser: *Parser, alloc: Allocator) !?Attribute {
const name = parseNameNoDupe(parser) catch return null;
_ = parser.eatWs();
try parser.expect('=');
_ = parser.eatWs();
const value = try parseAttrValue(parser, alloc);

const attr = try alloc.create(Attribute);
attr.name = try mem.dupe(alloc, u8, name);
attr.value = value;
const attr = Attribute{
.name = try alloc.dupe(u8, name),
.value = value,
};
return attr;
}

fn tryParseElement(ctx: *ParseContext, alloc: *Allocator) !?*Element {
const start = ctx.offset;
if (!ctx.eat('<')) return null;
const tag = parseNameNoDupe(ctx) catch {
ctx.offset = start;
return null;
const ElementKind = enum {
xml_decl,
element,
};

fn parseElement(parser: *Parser, alloc: Allocator, comptime kind: ElementKind) !?*Element {
const start = parser.offset;

const tag = switch (kind) {
.xml_decl => blk: {
if (!parser.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(parser), "xml")) {
parser.offset = start;
return null;
}
break :blk "xml";
},
.element => blk: {
if (!parser.eat('<')) return null;
const tag = parseNameNoDupe(parser) catch {
parser.offset = start;
return null;
};
break :blk tag;
},
};

var attributes = std.ArrayList(Attribute).init(alloc);
defer attributes.deinit();

var children = std.ArrayList(Content).init(alloc);
defer children.deinit();

while (parser.eatWs()) {
const attr = (try parseAttr(parser, alloc)) orelse break;
try attributes.append(attr);
}

switch (kind) {
.xml_decl => try parser.expectStr("?>"),
.element => {
if (!parser.eatStr("/>")) {
try parser.expect('>');

while (true) {
if (parser.peek() == null) {
return error.UnexpectedEof;
} else if (parser.eatStr("</")) {
break;
}

const content = try parseContent(parser, alloc);
try children.append(content);
}

const closing_tag = try parseNameNoDupe(parser);
if (!mem.eql(u8, tag, closing_tag)) {
return error.NonMatchingClosingTag;
}

_ = parser.eatWs();
try parser.expect('>');
}
},
}

const element = try alloc.create(Element);
element.* = Element.init(try std.mem.dupe(alloc, u8, tag), alloc);

while (ctx.eatWs()) {
const attr = (try tryParseAttr(ctx, alloc)) orelse break;
try element.attributes.append(attr);
}

if (ctx.eatStr("/>")) {
return element;
}

try ctx.expect('>');

while (true) {
if (ctx.peek() == null) {
return error.UnexpectedEof;
} else if (ctx.eatStr("</")) {
break;
}

const content = try parseContent(ctx, alloc);
try element.children.append(content);
}

const closing_tag = try parseNameNoDupe(ctx);
if (!std.mem.eql(u8, tag, closing_tag)) {
return error.NonMatchingClosingTag;
}

_ = ctx.eatWs();
try ctx.expect('>');
element.* = .{
.tag = try alloc.dupe(u8, tag),
.attributes = attributes.toOwnedSlice(),
.children = children.toOwnedSlice(),
};
return element;
}
test "tryParseElement" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
test "xml: parseElement" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
const alloc = arena.allocator();
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<= a='b'/>");
|
||||
testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
|
||||
testing.expectEqual(@as(?u8, '<'), ctx.peek());
|
||||
var parser = Parser.init("<= a='b'/>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, alloc, .element));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
var parser = Parser.init("<python size='15' color = \"green\"/>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
|
||||
const size_attr = elem.?.attributes.items[0];
|
||||
testing.expectEqualSlices(u8, size_attr.name, "size");
|
||||
testing.expectEqualSlices(u8, size_attr.value, "15");
|
||||
const size_attr = elem.?.attributes[0];
|
||||
try testing.expectEqualSlices(u8, size_attr.name, "size");
|
||||
try testing.expectEqualSlices(u8, size_attr.value, "15");
|
||||
|
||||
const color_attr = elem.?.attributes.items[1];
|
||||
testing.expectEqualSlices(u8, color_attr.name, "color");
|
||||
testing.expectEqualSlices(u8, color_attr.value, "green");
|
||||
const color_attr = elem.?.attributes[1];
|
||||
try testing.expectEqualSlices(u8, color_attr.name, "color");
|
||||
try testing.expectEqualSlices(u8, color_attr.value, "green");
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<python>test</python>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
|
||||
var parser = Parser.init("<python>test</python>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "test");
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
testing.expectEqualSlices(u8, elem.?.tag, "a");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[2].CharData, "d");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[3].Element.tag, "e");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[4].CharData, "f");
|
||||
testing.expectEqualSlices(u8, elem.?.children.items[5].Comment, "g");
|
||||
var parser = Parser.init("<a>b<c/>d<e/>f<!--g--></a>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "a");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "b");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[1].element.tag, "c");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[2].char_data, "d");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[3].element.tag, "e");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[4].char_data, "f");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[5].comment, "g");
|
||||
}
|
||||
}
|
||||
|
||||

fn tryParseProlog(ctx: *ParseContext, alloc: *Allocator) !?*XmlDecl {
const start = ctx.offset;
if (!ctx.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(ctx), "xml")) {
ctx.offset = start;
return null;
}

const decl = try alloc.create(XmlDecl);
decl.encoding = null;
decl.standalone = null;

// Version info is mandatory
try ctx.expectWs();
try ctx.expectStr("version");
decl.version = try parseEqAttrValue(ctx, alloc);

if (ctx.eatWs()) {
// Optional encoding and standalone info
var require_ws = false;

if (ctx.eatStr("encoding")) {
decl.encoding = try parseEqAttrValue(ctx, alloc);
require_ws = true;
}

if (require_ws == ctx.eatWs() and ctx.eatStr("standalone")) {
const standalone = try parseEqAttrValue(ctx, alloc);
if (std.mem.eql(u8, standalone, "yes")) {
decl.standalone = true;
} else if (std.mem.eql(u8, standalone, "no")) {
decl.standalone = false;
} else {
return error.InvalidStandaloneValue;
}
}

_ = ctx.eatWs();
}

try ctx.expectStr("?>");
return decl;
}

test "tryParseProlog" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
test "xml: parse prolog" {
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
var alloc = &arena.allocator;
const a = arena.allocator();

{
var ctx = ParseContext.init("<?xmla version='aa'?>");
testing.expectEqual(@as(?*XmlDecl, null), try tryParseProlog(&ctx, alloc));
testing.expectEqual(@as(?u8, '<'), ctx.peek());
var parser = Parser.init("<?xmla version='aa'?>");
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, a, .xml_decl));
try testing.expectEqual(@as(?u8, '<'), parser.peek());
}

{
var ctx = ParseContext.init("<?xml version='aa'?>");
const decl = try tryParseProlog(&ctx, alloc);
testing.expectEqualSlices(u8, "aa", decl.?.version);
testing.expectEqual(@as(?[]const u8, null), decl.?.encoding);
testing.expectEqual(@as(?bool, null), decl.?.standalone);
var parser = Parser.init("<?xml version='aa'?>");
const decl = try parseElement(&parser, a, .xml_decl);
try testing.expectEqualSlices(u8, "aa", decl.?.getAttribute("version").?);
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("encoding"));
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("standalone"));
}

{
var ctx = ParseContext.init("<?xml version=\"aa\" encoding = 'bbb' standalone \t = 'yes'?>");
const decl = try tryParseProlog(&ctx, alloc);
testing.expectEqualSlices(u8, "aa", decl.?.version);
testing.expectEqualSlices(u8, "bbb", decl.?.encoding.?);
testing.expectEqual(@as(?bool, true), decl.?.standalone.?);
var parser = Parser.init("<?xml version=\"ccc\" encoding = 'bbb' standalone \t = 'yes'?>");
const decl = try parseElement(&parser, a, .xml_decl);
try testing.expectEqualSlices(u8, "ccc", decl.?.getAttribute("version").?);
try testing.expectEqualSlices(u8, "bbb", decl.?.getAttribute("encoding").?);
try testing.expectEqualSlices(u8, "yes", decl.?.getAttribute("standalone").?);
}
}

fn trySkipComments(ctx: *ParseContext, alloc: *Allocator) !void {
while (try tryParseComment(ctx, alloc)) |_| {
_ = ctx.eatWs();
fn skipComments(parser: *Parser, alloc: Allocator) !void {
while ((try parseComment(parser, alloc)) != null) {
_ = parser.eatWs();
}
}

fn tryParseComment(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
if (!ctx.eatStr("<!--")) return null;
fn parseComment(parser: *Parser, alloc: Allocator) !?[]const u8 {
if (!parser.eatStr("<!--")) return null;

const begin = ctx.offset;
while (!ctx.eatStr("-->")) {
_ = ctx.consume() catch return error.UnclosedComment;
const begin = parser.offset;
while (!parser.eatStr("-->")) {
_ = parser.consume() catch return error.UnclosedComment;
}

const end = ctx.offset - "-->".len;
return try mem.dupe(alloc, u8, ctx.source[begin .. end]);
const end = parser.offset - "-->".len;
return try alloc.dupe(u8, parser.source[begin..end]);
}

fn unescapeEntity(text: []const u8) !u8 {
const EntitySubstition = struct {
text: []const u8,
replacement: u8
};
const EntitySubstition = struct { text: []const u8, replacement: u8 };

const entities = [_]EntitySubstition{
.{.text = "&lt;", .replacement = '<'},
.{.text = "&gt;", .replacement = '>'},
.{.text = "&amp;", .replacement = '&'},
.{.text = "&apos;", .replacement = '\''},
.{.text = "&quot;", .replacement = '"'}
.{ .text = "&lt;", .replacement = '<' },
.{ .text = "&gt;", .replacement = '>' },
.{ .text = "&amp;", .replacement = '&' },
.{ .text = "&apos;", .replacement = '\'' },
.{ .text = "&quot;", .replacement = '"' },
};

for (entities) |entity| {
if (std.mem.eql(u8, text, entity.text)) return entity.replacement;
if (mem.eql(u8, text, entity.text)) return entity.replacement;
}

return error.InvalidEntity;
}

fn dupeAndUnescape(alloc: *Allocator, text: []const u8) ![]const u8 {
const str = try alloc.alloc(u8, text.len);
fn unescape(arena: Allocator, text: []const u8) ![]const u8 {
const unescaped = try arena.alloc(u8, text.len);

var j: usize = 0;
var i: usize = 0;
while (i < text.len) : (j += 1) {
if (text[i] == '&') {
const entity_end = 1 + (mem.indexOfScalarPos(u8, text, i, ';') orelse return error.InvalidEntity);
str[j] = try unescapeEntity(text[i .. entity_end]);
unescaped[j] = try unescapeEntity(text[i..entity_end]);
i = entity_end;
} else {
str[j] = text[i];
unescaped[j] = text[i];
i += 1;
}
}

return alloc.shrink(str, j);
return unescaped[0..j];
}
test "dupeAndUnescape" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
test "xml: unescape" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
const a = arena.allocator();
|
||||
|
||||
testing.expectEqualSlices(u8, "test", try dupeAndUnescape(alloc, "test"));
|
||||
testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try dupeAndUnescape(alloc, "a<b&c>d"e'f<"));
|
||||
testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&"));
|
||||
testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&&"));
|
||||
testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&test;"));
|
||||
testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&boa"));
|
||||
try testing.expectEqualSlices(u8, "test", try unescape(a, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try unescape(a, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&boa"));
|
||||
}
|
||||
|
||||
test "Top level comments" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
test "xml: top level comments" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
const a = arena.allocator();
|
||||
|
||||
const doc = try parse(alloc, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
|
||||
const doc = try parse(a, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
try testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
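
As a usage sketch only (not taken from the diff above): with the reworked xml module, a caller passes an Allocator by value, releases everything through Document.deinit, and reads the slice-based attributes and children directly. The import path and the document string are assumptions.

const std = @import("std");
const testing = std.testing;
const xml = @import("xml.zig"); // assumed module path

test "sketch: parse and inspect a document" {
    const doc = try xml.parse(testing.allocator, "<registry><type name='VkBool32'/></registry>");
    defer doc.deinit();

    // findChildByTag and getAttribute take Element by value in this branch.
    const type_elem = doc.root.findChildByTag("type") orelse return error.TestUnexpectedResult;
    try testing.expectEqualSlices(u8, "VkBool32", type_elem.getAttribute("name").?);
}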