forked from mirror/vulkan-zig
Compare commits
24 Commits
Comparing branches: zig-stage1-compat ... zig-0.8.1-compat
| SHA1 |
|---|
| d87813312e |
| e17c3593d1 |
| 9513d33bf8 |
| 59c5b88d17 |
| 4588c0fcad |
| f55409f98a |
| cbf06a8d42 |
| c5bb254766 |
| 5980bac303 |
| 3bfacc7e16 |
| 1e594c0f09 |
| 397e663296 |
| 933010cfff |
| 0eccd593ce |
| 6a2c379146 |
| 4429151d9c |
| 77651872ab |
| 5a51d18bda |
| 6feeeac109 |
| 8f10cec149 |
| 0e65efd9d6 |
| b3c71d69ea |
| b63533d95b |
| 419e541a16 |
.github/workflows/build.yml (vendored): 12 changes

@@ -2,9 +2,9 @@ name: Build
 on:
 push:
-branches: [ zig-stage1-compat ]
+branches: [ zig-0.8.1-compat ]
 pull_request:
-branches: [ zig-stage1-compat ]
+branches: [ zig-0.8.1-compat ]
 schedule:
 - cron: '0 6 * * *'

@@ -18,16 +18,16 @@ jobs:
 - name: Setup Zig
 uses: goto-bus-stop/setup-zig@v1.3.0
 with:
-version: master
+version: 0.8.0

 - name: Test
 run: |
-zig build -fstage1 test
+zig build test

 - name: Fetch Vulkan SDK
 run: |
 wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
-sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-1.3.224-focal.list https://packages.lunarg.com/vulkan/1.3.224/lunarg-vulkan-1.3.224-focal.list
+sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-1.2.189-focal.list https://packages.lunarg.com/vulkan/1.2.189/lunarg-vulkan-1.2.189-focal.list
 sudo apt update
 sudo apt install shaderc libglfw3 libglfw3-dev

@@ -37,7 +37,7 @@ jobs:

 - name: Build with latest zig & vk.xml
 run: |
-zig build -fstage1 -Dvulkan-registry=./vk.xml
+zig build -Dvulkan-registry=./vk.xml

 - name: Archive vk.xml
 uses: actions/upload-artifact@v2
LICENSE: 2 changes

@@ -1,4 +1,4 @@
-Copyright © 2020-2022 Robin Voetter
+Copyright © 2020 Robin Voetter

 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
README.md: 39 changes

@@ -14,13 +14,11 @@ vulkan-zig is automatically tested daily against the latest vk.xml and zig, and

 vulkan-zig aims to be always compatible with the ever-changing Zig master branch (however, development may lag a few days behind). Sometimes, the Zig master branch breaks a bunch of functionality however, which may make the latest version vulkan-zig incompatible with older releases of Zig. This repository aims to have a version compatible for both the latest Zig master, and the latest Zig release. The `master` branch is compatible with the `master` branch of Zig, and versions for older versions of Zig are maintained in the `zig-<version>-compat` branch.

-This branch (zig-stage1-compat) is compatible with the Zig stage 1 compiler.
-
 ## Features
 ### CLI-interface
-A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/main/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
+A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
 ```
-$ zig-out/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
+$ zig-cache/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
 ```
 This reads the xml file, parses its contents, renders the Vulkan bindings, and formats file, before writing the result to the output path. While the intended usage of vulkan-zig is through direct generation from build.zig (see below), the CLI-interface can be used for one-off generation and vendoring the result.

@@ -35,6 +33,7 @@ pub fn build(b: *Builder) void {

 // Create a step that generates vk.zig (stored in zig-cache) from the provided vulkan registry.
 const gen = vkgen.VkGenerateStep.init(b, "path/to/vk.xml", "vk.zig");
 exe.step.dependOn(&gen.step);

 // Add the generated file as package to the final executable
 exe.addPackage(gen.package);

@@ -72,7 +71,7 @@ Each wrapper struct can be called with an array of the appropriate enums:
 ```zig
 const vk = @import("vulkan");
 const BaseDispatch = vk.BaseWrapper(.{
-.createInstance = true,
+.CreateInstance,
 });
 ```
 The wrapper struct then provides wrapper functions for each function pointer in the dispatch struct:

@@ -130,30 +129,9 @@ Wrappers are generated according to the following rules:
 * As of yet, there is no specific handling of enumeration style commands or other commands which accept slices.

 Furthermore, each wrapper contains a function to load each function pointer member when passed either `PfnGetInstanceProcAddr` or `PfnGetDeviceProcAddr`, which attempts to load each member as function pointer and casts it to the appropriate type. These functions are loaded literally, and any wrongly named member or member with a wrong function pointer type will result in problems.
-* For `BaseWrapper`, this function has signature `fn load(loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr` (with optionally having a different calling convention).
-* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr`.
-* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: anytype) error{CommandFailure}!Self`, where the type of `loader` must resemble `PfnGetDeviceProcAddr`.
-
-Note that these functions accepts a loader with the signature of `anytype` instead of `PfnGetInstanceProcAddr`. This is because it is valid for `vkGetInstanceProcAddr` to load itself, in which case the returned function is to be called with the vulkan calling convention. This calling convention is not required for loading vulkan-zig itself, though, and a loader to be called with any calling convention with the target architecture may be passed in. This is particularly useful when interacting with C libraries that provide `vkGetInstanceProcAddr`.
-
-```zig
-// vkGetInstanceProcAddr as provided by GLFW.
-// Note that vk.Instance and vk.PfnVoidFunction are ABI compatible with VkInstance,
-// and that `extern` implies the C calling convention.
-pub extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;
-
-// Or provide a custom implementation.
-// This function is called with the unspecified Zig-internal calling convention.
-fn customGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction {
-...
-}
-
-// Both calls are valid, even
-const vkb = try BaseDispatch.load(glfwGetInstanceProcAddress);
-const vkb = try BaseDispatch.load(customGetInstanceProcAddress);
-```
-
-By default, wrapper `load` functions return `error.CommandLoadFailure` if a call to the loader resulted in `null`. If this behaviour is not desired, one can use `loadNoFail`. This function accepts the same parameters as `load`, but does not return an error any function pointer fails to load and sets its value to `undefined` instead. It is at the programmer's discretion not to invoke invalid functions, which can be tested for by checking whether the required core and extension versions the function requires are supported.
+* For `BaseWrapper`, this function has signature `fn load(loader: anytype) !Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr` (with optionally having a different calling convention).
+* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: anytype) !Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr`.
+* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: anytype) !Self`, where the type of `loader` must resemble `PfnGetDeviceProcAddr`.

 One can access the underlying unwrapped C functions by doing `wrapper.dispatch.vkFuncYouWant(..)`.

@@ -220,7 +198,7 @@ Defaults are generated for certain fields of structs:
 ```zig
 pub const InstanceCreateInfo = extern struct {
 s_type: StructureType = .instance_create_info,
-p_next: ?*const anyopaque = null,
+p_next: ?*const c_void = null,
 flags: InstanceCreateFlags,
 ...
 };

@@ -252,6 +230,7 @@ pub fn build(b: *Builder) void {
 const exe = b.addExecutable("my-executable", "src/main.zig");

 const gen = vkgen.VkGenerateStep(b, "path/to/vk.xml", "vk.zig");
 exe.step.dependOn(&gen.step);
 exe.addPackage(gen.package);

 const shader_comp = vkgen.ShaderCompileStep.init(
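The README hunks above show that the dispatch-wrapper API differs between the two branches. As an illustration only (assuming a generated `vulkan` package from this repository; the declarations are adapted from the diff, and only one form compiles on a given branch):

```zig
const vk = @import("vulkan");

// zig-stage1-compat branch: commands are selected with a struct of booleans,
// passed by value.
const BaseDispatchStage1 = vk.BaseWrapper(.{
    .createInstance = true,
});

// zig-0.8.1-compat branch: commands are selected with a pointer to a list of
// enum literals naming the wrapped functions.
const BaseDispatch081 = vk.BaseWrapper(&.{
    .createInstance,
});
```

The `InstanceWrapper` and `DeviceWrapper` changes further down in the diff follow the same pattern.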
build.zig: 25 changes

@@ -8,7 +8,6 @@ pub const ResourceGenStep = struct {
 shader_step: *vkgen.ShaderCompileStep,
 builder: *Builder,
 package: std.build.Pkg,
-output_file: std.build.GeneratedFile,
 resources: std.ArrayList(u8),

 pub fn init(builder: *Builder, out: []const u8) *ResourceGenStep {

@@ -20,17 +19,13 @@
 }) catch unreachable;

 self.* = .{
-.step = Step.init(.custom, "resources", builder.allocator, make),
-.shader_step = vkgen.ShaderCompileStep.init(builder, &[_][]const u8{ "glslc", "--target-env=vulkan1.2" }, "shaders"),
+.step = Step.init(.Custom, "resources", builder.allocator, make),
+.shader_step = vkgen.ShaderCompileStep.init(builder, &[_][]const u8{"glslc", "--target-env=vulkan1.2"}),
 .builder = builder,
 .package = .{
 .name = "resources",
-.source = .{ .generated = &self.output_file },
-.dependencies = null,
-},
-.output_file = .{
-.step = &self.step,
 .path = full_out_path,
+.dependencies = null,
 },
 .resources = std.ArrayList(u8).init(builder.allocator),
 };

@@ -56,21 +51,21 @@ pub const ResourceGenStep = struct {
 }

 pub fn addShader(self: *ResourceGenStep, name: []const u8, source: []const u8) void {
-const shader_out_path = self.shader_step.add(source, .{});
+const shader_out_path = self.shader_step.add(source);
 var writer = self.resources.writer();

-writer.print("pub const {s} align(@alignOf(u32)) = @embedFile(\"", .{name}) catch unreachable;
+writer.print("pub const {s} = @embedFile(\"", .{name}) catch unreachable;
 renderPath(shader_out_path, writer);
-writer.writeAll("\").*;\n") catch unreachable;
+writer.writeAll("\");\n") catch unreachable;
 }

 fn make(step: *Step) !void {
 const self = @fieldParentPtr(ResourceGenStep, "step", step);
 const cwd = std.fs.cwd();

-const dir = std.fs.path.dirname(self.output_file.path.?).?;
+const dir = std.fs.path.dirname(self.package.path).?;
 try cwd.makePath(dir);
-try cwd.writeFile(self.output_file.path.?, self.resources.items);
+try cwd.writeFile(self.package.path, self.resources.items);
 }
 };

@@ -93,14 +88,16 @@ pub fn build(b: *Builder) void {
 triangle_exe.linkLibC();
 triangle_exe.linkSystemLibrary("glfw");

-const vk_xml_path = b.option([]const u8, "vulkan-registry", "Override the path to the Vulkan registry") orelse "examples/vk.xml";
+const vk_xml_path = b.option([]const u8, "vulkan-registry", "Override the to the Vulkan registry") orelse "examples/vk.xml";

 const gen = vkgen.VkGenerateStep.init(b, vk_xml_path, "vk.zig");
 triangle_exe.step.dependOn(&gen.step);
 triangle_exe.addPackage(gen.package);

 const res = ResourceGenStep.init(b, "resources.zig");
 res.addShader("triangle_vert", "examples/shaders/triangle.vert");
 res.addShader("triangle_frag", "examples/shaders/triangle.frag");
 triangle_exe.step.dependOn(&res.step);
 triangle_exe.addPackage(res.package);

 const triangle_run_cmd = triangle_exe.run();
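Taken together, the build.zig and README hunks above imply the following consumer-side wiring on the zig-0.8.1-compat branch. This is a hedged sketch assembled from lines in the diff; the import path for `vkgen` and the file paths are placeholders, not taken from the repository:

```zig
const std = @import("std");
const vkgen = @import("vulkan-zig/generator/index.zig"); // placeholder path
const Builder = std.build.Builder;

pub fn build(b: *Builder) void {
    const exe = b.addExecutable("my-executable", "src/main.zig");

    // Generate vk.zig from the registry; the result is exposed as the "vulkan"
    // package whose .path points into zig-cache (no GeneratedFile on this branch).
    const gen = vkgen.VkGenerateStep.init(b, "path/to/vk.xml", "vk.zig");
    exe.step.dependOn(&gen.step);
    exe.addPackage(gen.package);

    // Compile shaders with glslc; on this branch init takes no output directory
    // and add() takes only the source path.
    const shaders = vkgen.ShaderCompileStep.init(b, &[_][]const u8{ "glslc", "--target-env=vulkan1.2" });
    exe.step.dependOn(&shaders.step);
    _ = shaders.add("shaders/triangle.vert");

    exe.install();
}
```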
@@ -4,11 +4,10 @@ pub usingnamespace @cImport({
 });

 const vk = @import("vulkan");
-const c = @This();

 // usually the GLFW vulkan functions are exported if Vulkan is included,
 // but since thats not the case here, they are manually imported.

 pub extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;
 pub extern fn glfwGetPhysicalDevicePresentationSupport(instance: vk.Instance, pdev: vk.PhysicalDevice, queuefamily: u32) c_int;
-pub extern fn glfwCreateWindowSurface(instance: vk.Instance, window: *c.GLFWwindow, allocation_callbacks: ?*const vk.AllocationCallbacks, surface: *vk.SurfaceKHR) vk.Result;
+pub extern fn glfwCreateWindowSurface(instance: vk.Instance, window: *GLFWwindow, allocation_callbacks: ?*const vk.AllocationCallbacks, surface: *vk.SurfaceKHR) vk.Result;
@@ -3,79 +3,79 @@ const vk = @import("vulkan");
 const c = @import("c.zig");
 const Allocator = std.mem.Allocator;

-const required_device_extensions = [_][*:0]const u8{vk.extension_info.khr_swapchain.name};
+const required_device_extensions = [_][]const u8{vk.extension_info.khr_swapchain.name};

-const BaseDispatch = vk.BaseWrapper(.{
-.createInstance = true,
+const BaseDispatch = vk.BaseWrapper(&.{
+.createInstance,
 });

-const InstanceDispatch = vk.InstanceWrapper(.{
-.destroyInstance = true,
-.createDevice = true,
-.destroySurfaceKHR = true,
-.enumeratePhysicalDevices = true,
-.getPhysicalDeviceProperties = true,
-.enumerateDeviceExtensionProperties = true,
-.getPhysicalDeviceSurfaceFormatsKHR = true,
-.getPhysicalDeviceSurfacePresentModesKHR = true,
-.getPhysicalDeviceSurfaceCapabilitiesKHR = true,
-.getPhysicalDeviceQueueFamilyProperties = true,
-.getPhysicalDeviceSurfaceSupportKHR = true,
-.getPhysicalDeviceMemoryProperties = true,
-.getDeviceProcAddr = true,
+const InstanceDispatch = vk.InstanceWrapper(&.{
+.destroyInstance,
+.createDevice,
+.destroySurfaceKHR,
+.enumeratePhysicalDevices,
+.getPhysicalDeviceProperties,
+.enumerateDeviceExtensionProperties,
+.getPhysicalDeviceSurfaceFormatsKHR,
+.getPhysicalDeviceSurfacePresentModesKHR,
+.getPhysicalDeviceSurfaceCapabilitiesKHR,
+.getPhysicalDeviceQueueFamilyProperties,
+.getPhysicalDeviceSurfaceSupportKHR,
+.getPhysicalDeviceMemoryProperties,
+.getDeviceProcAddr,
 });

-const DeviceDispatch = vk.DeviceWrapper(.{
-.destroyDevice = true,
-.getDeviceQueue = true,
-.createSemaphore = true,
-.createFence = true,
-.createImageView = true,
-.destroyImageView = true,
-.destroySemaphore = true,
-.destroyFence = true,
-.getSwapchainImagesKHR = true,
-.createSwapchainKHR = true,
-.destroySwapchainKHR = true,
-.acquireNextImageKHR = true,
-.deviceWaitIdle = true,
-.waitForFences = true,
-.resetFences = true,
-.queueSubmit = true,
-.queuePresentKHR = true,
-.createCommandPool = true,
-.destroyCommandPool = true,
-.allocateCommandBuffers = true,
-.freeCommandBuffers = true,
-.queueWaitIdle = true,
-.createShaderModule = true,
-.destroyShaderModule = true,
-.createPipelineLayout = true,
-.destroyPipelineLayout = true,
-.createRenderPass = true,
-.destroyRenderPass = true,
-.createGraphicsPipelines = true,
-.destroyPipeline = true,
-.createFramebuffer = true,
-.destroyFramebuffer = true,
-.beginCommandBuffer = true,
-.endCommandBuffer = true,
-.allocateMemory = true,
-.freeMemory = true,
-.createBuffer = true,
-.destroyBuffer = true,
-.getBufferMemoryRequirements = true,
-.mapMemory = true,
-.unmapMemory = true,
-.bindBufferMemory = true,
-.cmdBeginRenderPass = true,
-.cmdEndRenderPass = true,
-.cmdBindPipeline = true,
-.cmdDraw = true,
-.cmdSetViewport = true,
-.cmdSetScissor = true,
-.cmdBindVertexBuffers = true,
-.cmdCopyBuffer = true,
+const DeviceDispatch = vk.DeviceWrapper(&.{
+.destroyDevice,
+.getDeviceQueue,
+.createSemaphore,
+.createFence,
+.createImageView,
+.destroyImageView,
+.destroySemaphore,
+.destroyFence,
+.getSwapchainImagesKHR,
+.createSwapchainKHR,
+.destroySwapchainKHR,
+.acquireNextImageKHR,
+.deviceWaitIdle,
+.waitForFences,
+.resetFences,
+.queueSubmit,
+.queuePresentKHR,
+.createCommandPool,
+.destroyCommandPool,
+.allocateCommandBuffers,
+.freeCommandBuffers,
+.queueWaitIdle,
+.createShaderModule,
+.destroyShaderModule,
+.createPipelineLayout,
+.destroyPipelineLayout,
+.createRenderPass,
+.destroyRenderPass,
+.createGraphicsPipelines,
+.destroyPipeline,
+.createFramebuffer,
+.destroyFramebuffer,
+.beginCommandBuffer,
+.endCommandBuffer,
+.allocateMemory,
+.freeMemory,
+.createBuffer,
+.destroyBuffer,
+.getBufferMemoryRequirements,
+.mapMemory,
+.unmapMemory,
+.bindBufferMemory,
+.cmdBeginRenderPass,
+.cmdEndRenderPass,
+.cmdBindPipeline,
+.cmdDraw,
+.cmdSetViewport,
+.cmdSetScissor,
+.cmdBindVertexBuffers,
+.cmdCopyBuffer,
 });

 pub const GraphicsContext = struct {

@@ -93,7 +93,7 @@ pub const GraphicsContext = struct {
 graphics_queue: Queue,
 present_queue: Queue,

-pub fn init(allocator: Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
+pub fn init(allocator: *Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
 var self: GraphicsContext = undefined;
 self.vkb = try BaseDispatch.load(c.glfwGetInstanceProcAddress);

@@ -108,7 +108,7 @@ pub const GraphicsContext = struct {
 .api_version = vk.API_VERSION_1_2,
 };

-self.instance = try self.vkb.createInstance(&.{
+self.instance = try self.vkb.createInstance(.{
 .flags = .{},
 .p_application_info = &app_info,
 .enabled_layer_count = 0,

@@ -131,7 +131,7 @@ pub const GraphicsContext = struct {
 errdefer self.vkd.destroyDevice(self.dev, null);

 self.graphics_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);
-self.present_queue = Queue.init(self.vkd, self.dev, candidate.queues.present_family);
+self.present_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);

 self.mem_props = self.vki.getPhysicalDeviceMemoryProperties(self.pdev);

@@ -144,7 +144,7 @@ pub const GraphicsContext = struct {
 self.vki.destroyInstance(self.instance, null);
 }

-pub fn deviceName(self: *const GraphicsContext) []const u8 {
+pub fn deviceName(self: GraphicsContext) []const u8 {
 const len = std.mem.indexOfScalar(u8, &self.props.device_name, 0).?;
 return self.props.device_name[0..len];
 }

@@ -160,7 +160,7 @@ pub const GraphicsContext = struct {
 }

 pub fn allocate(self: GraphicsContext, requirements: vk.MemoryRequirements, flags: vk.MemoryPropertyFlags) !vk.DeviceMemory {
-return try self.vkd.allocateMemory(self.dev, &.{
+return try self.vkd.allocateMemory(self.dev, .{
 .allocation_size = requirements.size,
 .memory_type_index = try self.findMemoryTypeIndex(requirements.memory_type_bits, flags),
 }, null);

@@ -210,7 +210,7 @@ fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.De
 else
 2;

-return try vki.createDevice(candidate.pdev, &.{
+return try vki.createDevice(candidate.pdev, .{
 .flags = .{},
 .queue_create_info_count = queue_count,
 .p_queue_create_infos = &qci,

@@ -236,7 +236,7 @@ const QueueAllocation = struct {
 fn pickPhysicalDevice(
 vki: InstanceDispatch,
 instance: vk.Instance,
-allocator: Allocator,
+allocator: *Allocator,
 surface: vk.SurfaceKHR,
 ) !DeviceCandidate {
 var device_count: u32 = undefined;

@@ -259,7 +259,7 @@ fn pickPhysicalDevice(
 fn checkSuitable(
 vki: InstanceDispatch,
 pdev: vk.PhysicalDevice,
-allocator: Allocator,
+allocator: *Allocator,
 surface: vk.SurfaceKHR,
 ) !?DeviceCandidate {
 const props = vki.getPhysicalDeviceProperties(pdev);

@@ -283,7 +283,7 @@ fn checkSuitable(
 return null;
 }

-fn allocateQueues(vki: InstanceDispatch, pdev: vk.PhysicalDevice, allocator: Allocator, surface: vk.SurfaceKHR) !?QueueAllocation {
+fn allocateQueues(vki: InstanceDispatch, pdev: vk.PhysicalDevice, allocator: *Allocator, surface: vk.SurfaceKHR) !?QueueAllocation {
 var family_count: u32 = undefined;
 vki.getPhysicalDeviceQueueFamilyProperties(pdev, &family_count, null);

@@ -329,7 +329,7 @@ fn checkSurfaceSupport(vki: InstanceDispatch, pdev: vk.PhysicalDevice, surface:
 fn checkExtensionSupport(
 vki: InstanceDispatch,
 pdev: vk.PhysicalDevice,
-allocator: Allocator,
+allocator: *Allocator,
 ) !bool {
 var count: u32 = undefined;
 _ = try vki.enumerateDeviceExtensionProperties(pdev, null, &count, null);

@@ -343,7 +343,7 @@ fn checkExtensionSupport(
 for (propsv) |props| {
 const len = std.mem.indexOfScalar(u8, &props.extension_name, 0).?;
 const prop_ext_name = props.extension_name[0..len];
-if (std.mem.eql(u8, std.mem.span(ext), prop_ext_name)) {
+if (std.mem.eql(u8, ext, prop_ext_name)) {
 break;
 }
 } else {
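A large share of the changes in this file and the ones below replace `Allocator` parameters with `*Allocator`. This reflects the std library difference between the two Zig versions: in 0.8.1 the allocator interface is passed around as a pointer, while later Zig passes it by value. A minimal sketch of the 0.8.1-era convention (illustration only, not repository code):

```zig
const std = @import("std");
const Allocator = std.mem.Allocator;

// Zig 0.8.1 era (as on the zig-0.8.1-compat branch): the allocator interface is
// passed as a pointer, typically obtained as &arena.allocator or &gpa.allocator.
fn makeBuffer(allocator: *Allocator, n: usize) ![]u8 {
    return allocator.alloc(u8, n);
}

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();

    const buf = try makeBuffer(&arena.allocator, 16);
    _ = buf;
    // On the zig-stage1-compat side the same function would instead take
    // `allocator: Allocator` by value and be called with `arena.allocator()`.
}
```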
@@ -10,7 +10,7 @@ pub const Swapchain = struct {
 };

 gc: *const GraphicsContext,
-allocator: Allocator,
+allocator: *Allocator,

 surface_format: vk.SurfaceFormatKHR,
 present_mode: vk.PresentModeKHR,

@@ -21,11 +21,11 @@ pub const Swapchain = struct {
 image_index: u32,
 next_image_acquired: vk.Semaphore,

-pub fn init(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D) !Swapchain {
+pub fn init(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D) !Swapchain {
 return try initRecycle(gc, allocator, extent, .null_handle);
 }

-pub fn initRecycle(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
+pub fn initRecycle(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
 const caps = try gc.vki.getPhysicalDeviceSurfaceCapabilitiesKHR(gc.pdev, gc.surface);
 const actual_extent = findActualExtent(caps, extent);
 if (actual_extent.width == 0 or actual_extent.height == 0) {

@@ -40,13 +40,10 @@ pub const Swapchain = struct {
 image_count = std.math.min(image_count, caps.max_image_count);
 }

+const concurrent = gc.graphics_queue.family != gc.present_queue.family;
 const qfi = [_]u32{ gc.graphics_queue.family, gc.present_queue.family };
-const sharing_mode: vk.SharingMode = if (gc.graphics_queue.family != gc.present_queue.family)
-.concurrent
-else
-.exclusive;

-const handle = try gc.vkd.createSwapchainKHR(gc.dev, &.{
+const handle = try gc.vkd.createSwapchainKHR(gc.dev, .{
 .flags = .{},
 .surface = gc.surface,
 .min_image_count = image_count,

@@ -55,7 +52,7 @@ pub const Swapchain = struct {
 .image_extent = actual_extent,
 .image_array_layers = 1,
 .image_usage = .{ .color_attachment_bit = true, .transfer_dst_bit = true },
-.image_sharing_mode = sharing_mode,
+.image_sharing_mode = if (concurrent) .concurrent else .exclusive,
 .queue_family_index_count = qfi.len,
 .p_queue_family_indices = &qfi,
 .pre_transform = caps.current_transform,

@@ -72,12 +69,9 @@ pub const Swapchain = struct {
 }

 const swap_images = try initSwapchainImages(gc, handle, surface_format.format, allocator);
-errdefer {
-for (swap_images) |si| si.deinit(gc);
-allocator.free(swap_images);
-}
+errdefer for (swap_images) |si| si.deinit(gc);

-var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
+var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, .{ .flags = .{} }, null);
 errdefer gc.vkd.destroySemaphore(gc.dev, next_image_acquired, null);

 const result = try gc.vkd.acquireNextImageKHR(gc.dev, handle, std.math.maxInt(u64), next_image_acquired, .null_handle);

@@ -101,7 +95,6 @@ pub const Swapchain = struct {

 fn deinitExceptSwapchain(self: Swapchain) void {
 for (self.swap_images) |si| si.deinit(self.gc);
-self.allocator.free(self.swap_images);
 self.gc.vkd.destroySemaphore(self.gc.dev, self.next_image_acquired, null);
 }

@@ -166,7 +159,7 @@ pub const Swapchain = struct {
 }}, current.frame_fence);

 // Step 3: Present the current frame
-_ = try self.gc.vkd.queuePresentKHR(self.gc.present_queue.handle, &.{
+_ = try self.gc.vkd.queuePresentKHR(self.gc.present_queue.handle, .{
 .wait_semaphore_count = 1,
 .p_wait_semaphores = @ptrCast([*]const vk.Semaphore, &current.render_finished),
 .swapchain_count = 1,

@@ -203,7 +196,7 @@ const SwapImage = struct {
 frame_fence: vk.Fence,

 fn init(gc: *const GraphicsContext, image: vk.Image, format: vk.Format) !SwapImage {
-const view = try gc.vkd.createImageView(gc.dev, &.{
+const view = try gc.vkd.createImageView(gc.dev, .{
 .flags = .{},
 .image = image,
 .view_type = .@"2d",

@@ -219,13 +212,13 @@ const SwapImage = struct {
 }, null);
 errdefer gc.vkd.destroyImageView(gc.dev, view, null);

-const image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
+const image_acquired = try gc.vkd.createSemaphore(gc.dev, .{ .flags = .{} }, null);
 errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);

-const render_finished = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
-errdefer gc.vkd.destroySemaphore(gc.dev, render_finished, null);
+const render_finished = try gc.vkd.createSemaphore(gc.dev, .{ .flags = .{} }, null);
+errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);

-const frame_fence = try gc.vkd.createFence(gc.dev, &.{ .flags = .{ .signaled_bit = true } }, null);
+const frame_fence = try gc.vkd.createFence(gc.dev, .{ .flags = .{ .signaled_bit = true } }, null);
 errdefer gc.vkd.destroyFence(gc.dev, frame_fence, null);

 return SwapImage{

@@ -250,7 +243,7 @@ const SwapImage = struct {
 }
 };

-fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: Allocator) ![]SwapImage {
+fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: *Allocator) ![]SwapImage {
 var count: u32 = undefined;
 _ = try gc.vkd.getSwapchainImagesKHR(gc.dev, swapchain, &count, null);
 const images = try allocator.alloc(vk.Image, count);

@@ -271,7 +264,7 @@ fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, f
 return swap_images;
 }

-fn findSurfaceFormat(gc: *const GraphicsContext, allocator: Allocator) !vk.SurfaceFormatKHR {
+fn findSurfaceFormat(gc: *const GraphicsContext, allocator: *Allocator) !vk.SurfaceFormatKHR {
 const preferred = vk.SurfaceFormatKHR{
 .format = .b8g8r8a8_srgb,
 .color_space = .srgb_nonlinear_khr,

@@ -292,7 +285,7 @@ fn findSurfaceFormat(gc: *const GraphicsContext, allocator: Allocator) !vk.Surfa
 return surface_formats[0]; // There must always be at least one supported surface format
 }

-fn findPresentMode(gc: *const GraphicsContext, allocator: Allocator) !vk.PresentModeKHR {
+fn findPresentMode(gc: *const GraphicsContext, allocator: *Allocator) !vk.PresentModeKHR {
 var count: u32 = undefined;
 _ = try gc.vki.getPhysicalDeviceSurfacePresentModesKHR(gc.pdev, gc.surface, &count, null);
 const present_modes = try allocator.alloc(vk.PresentModeKHR, count);
@@ -20,13 +20,13 @@ const Vertex = struct {
 .binding = 0,
 .location = 0,
 .format = .r32g32_sfloat,
-.offset = @offsetOf(Vertex, "pos"),
+.offset = @byteOffsetOf(Vertex, "pos"),
 },
 .{
 .binding = 0,
 .location = 1,
 .format = .r32g32b32_sfloat,
-.offset = @offsetOf(Vertex, "color"),
+.offset = @byteOffsetOf(Vertex, "color"),
 },
 };

@@ -56,9 +56,7 @@ pub fn main() !void {
 ) orelse return error.WindowInitFailed;
 defer c.glfwDestroyWindow(window);

-var gpa = std.heap.GeneralPurposeAllocator(.{}){};
-defer _ = gpa.deinit();
-const allocator = gpa.allocator();
+const allocator = std.heap.page_allocator;

 const gc = try GraphicsContext.init(allocator, app_name, window);
 defer gc.deinit();

@@ -68,7 +66,7 @@ pub fn main() !void {
 var swapchain = try Swapchain.init(&gc, allocator, extent);
 defer swapchain.deinit();

-const pipeline_layout = try gc.vkd.createPipelineLayout(gc.dev, &.{
+const pipeline_layout = try gc.vkd.createPipelineLayout(gc.dev, .{
 .flags = .{},
 .set_layout_count = 0,
 .p_set_layouts = undefined,

@@ -86,13 +84,13 @@ pub fn main() !void {
 var framebuffers = try createFramebuffers(&gc, allocator, render_pass, swapchain);
 defer destroyFramebuffers(&gc, allocator, framebuffers);

-const pool = try gc.vkd.createCommandPool(gc.dev, &.{
+const pool = try gc.vkd.createCommandPool(gc.dev, .{
 .flags = .{},
 .queue_family_index = gc.graphics_queue.family,
 }, null);
 defer gc.vkd.destroyCommandPool(gc.dev, pool, null);

-const buffer = try gc.vkd.createBuffer(gc.dev, &.{
+const buffer = try gc.vkd.createBuffer(gc.dev, .{
 .flags = .{},
 .size = @sizeOf(@TypeOf(vertices)),
 .usage = .{ .transfer_dst_bit = true, .vertex_buffer_bit = true },

@@ -128,11 +126,10 @@ pub fn main() !void {
 else => |narrow| return narrow,
 };

-var w: c_int = undefined;
-var h: c_int = undefined;
-c.glfwGetWindowSize(window, &w, &h);
-
-if (state == .suboptimal or extent.width != @intCast(u32, w) or extent.height != @intCast(u32, h)) {
+if (state == .suboptimal) {
+var w: c_int = undefined;
+var h: c_int = undefined;
+c.glfwGetWindowSize(window, &w, &h);
 extent.width = @intCast(u32, w);
 extent.height = @intCast(u32, h);
 try swapchain.recreate(extent);

@@ -160,7 +157,7 @@ pub fn main() !void {
 }

 fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer) !void {
-const staging_buffer = try gc.vkd.createBuffer(gc.dev, &.{
+const staging_buffer = try gc.vkd.createBuffer(gc.dev, .{
 .flags = .{},
 .size = @sizeOf(@TypeOf(vertices)),
 .usage = .{ .transfer_src_bit = true },

@@ -189,14 +186,14 @@ fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.B

 fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer, src: vk.Buffer, size: vk.DeviceSize) !void {
 var cmdbuf: vk.CommandBuffer = undefined;
-try gc.vkd.allocateCommandBuffers(gc.dev, &.{
+try gc.vkd.allocateCommandBuffers(gc.dev, .{
 .command_pool = pool,
 .level = .primary,
 .command_buffer_count = 1,
 }, @ptrCast([*]vk.CommandBuffer, &cmdbuf));
 defer gc.vkd.freeCommandBuffers(gc.dev, pool, 1, @ptrCast([*]const vk.CommandBuffer, &cmdbuf));

-try gc.vkd.beginCommandBuffer(cmdbuf, &.{
+try gc.vkd.beginCommandBuffer(cmdbuf, .{
 .flags = .{ .one_time_submit_bit = true },
 .p_inheritance_info = null,
 });

@@ -226,7 +223,7 @@ fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer,
 fn createCommandBuffers(
 gc: *const GraphicsContext,
 pool: vk.CommandPool,
-allocator: Allocator,
+allocator: *Allocator,
 buffer: vk.Buffer,
 extent: vk.Extent2D,
 render_pass: vk.RenderPass,

@@ -236,7 +233,7 @@ fn createCommandBuffers(
 const cmdbufs = try allocator.alloc(vk.CommandBuffer, framebuffers.len);
 errdefer allocator.free(cmdbufs);

-try gc.vkd.allocateCommandBuffers(gc.dev, &.{
+try gc.vkd.allocateCommandBuffers(gc.dev, .{
 .command_pool = pool,
 .level = .primary,
 .command_buffer_count = @truncate(u32, cmdbufs.len),

@@ -262,7 +259,7 @@ fn createCommandBuffers(
 };

 for (cmdbufs) |cmdbuf, i| {
-try gc.vkd.beginCommandBuffer(cmdbuf, &.{
+try gc.vkd.beginCommandBuffer(cmdbuf, .{
 .flags = .{},
 .p_inheritance_info = null,
 });

@@ -270,16 +267,13 @@ fn createCommandBuffers(
 gc.vkd.cmdSetViewport(cmdbuf, 0, 1, @ptrCast([*]const vk.Viewport, &viewport));
 gc.vkd.cmdSetScissor(cmdbuf, 0, 1, @ptrCast([*]const vk.Rect2D, &scissor));

-// This needs to be a separate definition - see https://github.com/ziglang/zig/issues/7627.
-const render_area = vk.Rect2D{
-.offset = .{ .x = 0, .y = 0 },
-.extent = extent,
-};
-
-gc.vkd.cmdBeginRenderPass(cmdbuf, &.{
+gc.vkd.cmdBeginRenderPass(cmdbuf, .{
 .render_pass = render_pass,
 .framebuffer = framebuffers[i],
-.render_area = render_area,
+.render_area = .{
+.offset = .{ .x = 0, .y = 0 },
+.extent = extent,
+},
 .clear_value_count = 1,
 .p_clear_values = @ptrCast([*]const vk.ClearValue, &clear),
 }, .@"inline");

@@ -296,12 +290,12 @@ fn createCommandBuffers(
 return cmdbufs;
 }

-fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: Allocator, cmdbufs: []vk.CommandBuffer) void {
+fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: *Allocator, cmdbufs: []vk.CommandBuffer) void {
 gc.vkd.freeCommandBuffers(gc.dev, pool, @truncate(u32, cmdbufs.len), cmdbufs.ptr);
 allocator.free(cmdbufs);
 }

-fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_pass: vk.RenderPass, swapchain: Swapchain) ![]vk.Framebuffer {
+fn createFramebuffers(gc: *const GraphicsContext, allocator: *Allocator, render_pass: vk.RenderPass, swapchain: Swapchain) ![]vk.Framebuffer {
 const framebuffers = try allocator.alloc(vk.Framebuffer, swapchain.swap_images.len);
 errdefer allocator.free(framebuffers);

@@ -309,7 +303,7 @@ fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_p
 errdefer for (framebuffers[0..i]) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);

 for (framebuffers) |*fb| {
-fb.* = try gc.vkd.createFramebuffer(gc.dev, &.{
+fb.* = try gc.vkd.createFramebuffer(gc.dev, .{
 .flags = .{},
 .render_pass = render_pass,
 .attachment_count = 1,

@@ -324,7 +318,7 @@ fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_p
 return framebuffers;
 }

-fn destroyFramebuffers(gc: *const GraphicsContext, allocator: Allocator, framebuffers: []const vk.Framebuffer) void {
+fn destroyFramebuffers(gc: *const GraphicsContext, allocator: *Allocator, framebuffers: []const vk.Framebuffer) void {
 for (framebuffers) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);
 allocator.free(framebuffers);
 }

@@ -360,7 +354,7 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render
 .p_preserve_attachments = undefined,
 };

-return try gc.vkd.createRenderPass(gc.dev, &.{
+return try gc.vkd.createRenderPass(gc.dev, .{
 .flags = .{},
 .attachment_count = 1,
 .p_attachments = @ptrCast([*]const vk.AttachmentDescription, &color_attachment),

@@ -376,17 +370,17 @@ fn createPipeline(
 layout: vk.PipelineLayout,
 render_pass: vk.RenderPass,
 ) !vk.Pipeline {
-const vert = try gc.vkd.createShaderModule(gc.dev, &.{
+const vert = try gc.vkd.createShaderModule(gc.dev, .{
 .flags = .{},
 .code_size = resources.triangle_vert.len,
-.p_code = @ptrCast([*]const u32, &resources.triangle_vert),
+.p_code = @ptrCast([*]const u32, resources.triangle_vert),
 }, null);
 defer gc.vkd.destroyShaderModule(gc.dev, vert, null);

-const frag = try gc.vkd.createShaderModule(gc.dev, &.{
+const frag = try gc.vkd.createShaderModule(gc.dev, .{
 .flags = .{},
 .code_size = resources.triangle_frag.len,
-.p_code = @ptrCast([*]const u32, &resources.triangle_frag),
+.p_code = @ptrCast([*]const u32, resources.triangle_frag),
 }, null);
 defer gc.vkd.destroyShaderModule(gc.dev, frag, null);
@@ -3,27 +3,10 @@ const path = std.fs.path;
 const Builder = std.build.Builder;
 const Step = std.build.Step;

-/// Stage the shader should be built for. This is passed to the -fshader-stage
-/// argument when invoking glslc.
-pub const ShaderStage = enum {
-vertex,
-fragment,
-tesscontrol,
-tesseval,
-geometry,
-compute,
-};
-
 /// Utility functionality to help with compiling shaders from build.zig.
 /// Invokes glslc (or another shader compiler passed to `init`) for each shader
 /// added via `addShader`.
 pub const ShaderCompileStep = struct {
-const AddFileParams = struct {
-entry_point: ?[]const u8 = null,
-stage: ?ShaderStage = null,
-output_filename: ?[]const u8 = null,
-};
-
 /// Structure representing a shader to be compiled.
 const Shader = struct {
 /// The path to the shader, relative to the current build root.

@@ -31,13 +14,6 @@ pub const ShaderCompileStep = struct {

 /// The full output path where the compiled shader binary is placed.
 full_out_path: []const u8,

-/// The entry point to use when compiling the shader.
-entry_point: ?[]const u8,
-
-/// The stage to use when building. If not null, this is passed to
-/// the -fshader-stage argument.
-stage: ?ShaderStage,
 };

 step: Step,

@@ -46,21 +22,17 @@ pub const ShaderCompileStep = struct {
 /// The command and optional arguments used to invoke the shader compiler.
 glslc_cmd: []const []const u8,

-/// The directory within `zig-cache/` that the compiled shaders are placed in.
-output_dir: []const u8,
-
 /// List of shaders that are to be compiled.
 shaders: std.ArrayList(Shader),

 /// Create a ShaderCompilerStep for `builder`. When this step is invoked by the build
 /// system, `<glcl_cmd...> <shader_source> -o <dst_addr>` is invoked for each shader.
-pub fn init(builder: *Builder, glslc_cmd: []const []const u8, output_dir: []const u8) *ShaderCompileStep {
+pub fn init(builder: *Builder, glslc_cmd: []const []const u8) *ShaderCompileStep {
 const self = builder.allocator.create(ShaderCompileStep) catch unreachable;
 self.* = .{
-.step = Step.init(.custom, "shader-compile", builder.allocator, make),
+.step = Step.init(.Custom, "shader-compile", builder.allocator, make),
 .builder = builder,
-.output_dir = output_dir,
-.glslc_cmd = builder.dupeStrings(glslc_cmd),
+.glslc_cmd = glslc_cmd,
 .shaders = std.ArrayList(Shader).init(builder.allocator),
 };
 return self;

@@ -69,15 +41,15 @@ pub const ShaderCompileStep = struct {
 /// Add a shader to be compiled. `src` is shader source path, relative to the project root.
 /// Returns the full path where the compiled binary will be stored upon successful compilation.
 /// This path can then be used to include the binary into an executable, for example by passing it
-/// to @embedFile via an additional generated file. `entry_point` is the entry point to pass to the compiler.
-/// `stage` is an optional shader stage to pass to the compiler with the flag `-fshader-stage` when building the shader.
-pub fn add(self: *ShaderCompileStep, src: []const u8, params: AddFileParams) []const u8 {
+/// to @embedFile via an additional generated file.
+pub fn add(self: *ShaderCompileStep, src: []const u8) []const u8 {
 const full_out_path = path.join(self.builder.allocator, &[_][]const u8{
 self.builder.build_root,
 self.builder.cache_root,
-if (params.output_filename) |out| out else std.fmt.allocPrint(self.builder.allocator, "{s}.spv", .{src}) catch unreachable,
+"shaders",
+src,
 }) catch unreachable;
-self.shaders.append(.{ .source_path = src, .full_out_path = full_out_path, .entry_point = params.entry_point, .stage = params.stage }) catch unreachable;
+self.shaders.append(.{ .source_path = src, .full_out_path = full_out_path }) catch unreachable;
 return full_out_path;
 }

@@ -86,26 +58,18 @@ pub const ShaderCompileStep = struct {
 const self = @fieldParentPtr(ShaderCompileStep, "step", step);
 const cwd = std.fs.cwd();

-var cmd = std.ArrayList([]const u8).init(self.builder.allocator);
-try cmd.appendSlice(self.glslc_cmd);
-const base_cmd_len = cmd.items.len;
+const cmd = try self.builder.allocator.alloc([]const u8, self.glslc_cmd.len + 3);
+for (self.glslc_cmd) |part, i| {
+cmd[i] = part;
+}
+cmd[cmd.len - 2] = "-o";

 for (self.shaders.items) |shader| {
-cmd.items.len = base_cmd_len;
-
-if (shader.entry_point) |entry_point| {
-try cmd.append(try std.fmt.allocPrint(self.builder.allocator, "-fentry-point={s}", .{entry_point}));
-}
-
-if (shader.stage) |stage| {
-try cmd.append(try std.fmt.allocPrint(self.builder.allocator, "-fshader-stage={s}", .{@tagName(stage)}));
-}
-
 const dir = path.dirname(shader.full_out_path).?;
 try cwd.makePath(dir);

-try cmd.appendSlice(&.{shader.source_path, "-o", shader.full_out_path});
-try self.builder.spawnChild(cmd.items);
+cmd[cmd.len - 3] = shader.source_path;
+cmd[cmd.len - 1] = shader.full_out_path;
+try self.builder.spawnChild(cmd);
 }
 }
 };
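For orientation, a hedged sketch of how a shader is registered on each side of this diff. The call sites are invented for illustration; `shader_step` is assumed to be a `*ShaderCompileStep` created as in build.zig above, and only the uncommented call matches the zig-0.8.1-compat branch:

```zig
const vkgen = @import("vulkan-zig/generator/index.zig"); // placeholder path

fn addShaders(shader_step: *vkgen.ShaderCompileStep) void {
    // zig-stage1-compat branch: a second argument carries per-shader options
    // (entry point, stage, output file name); the stage maps to -fshader-stage.
    // _ = shader_step.add("shaders/generic.glsl", .{ .stage = .fragment });

    // zig-0.8.1-compat branch: only the source path is given; per the path.join
    // call in the hunk above, the binary lands at zig-cache/shaders/<src>.
    _ = shader_step.add("shaders/triangle.vert");
}
```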
@@ -7,9 +7,10 @@ pub fn isZigPrimitiveType(name: []const u8) bool {
 for (name[1..]) |c| {
 switch (c) {
 '0'...'9' => {},
-else => break,
+else => return false,
 }
-} else return true;
+}
+return true;
 }

 const primitives = [_][]const u8{

@@ -23,6 +24,7 @@ pub fn isZigPrimitiveType(name: []const u8) bool {
 "f32",
 "f64",
 "f128",
+"c_longdouble",
 "noreturn",
 "type",
 "anyerror",

@@ -34,13 +36,6 @@ pub fn isZigPrimitiveType(name: []const u8) bool {
 "c_ulong",
 "c_longlong",
 "c_ulonglong",
-"c_longdouble",
-// Removed in stage 2 in https://github.com/ziglang/zig/commit/05cf44933d753f7a5a53ab289ea60fd43761de57,
-// but these are still invalid identifiers in stage 1.
-"undefined",
-"true",
-"false",
-"null",
 };

 for (primitives) |reserved| {

@@ -52,12 +47,16 @@ pub fn isZigPrimitiveType(name: []const u8) bool {
 return false;
 }

-pub fn writeIdentifier(writer: anytype, id: []const u8) !void {
+fn needZigEscape(name: []const u8) bool {
+return !std.zig.fmt.isValidId(name) or isZigPrimitiveType(name);
+}
+
+pub fn writeIdentifier(out: anytype, id: []const u8) !void {
 // https://github.com/ziglang/zig/issues/2897
 if (isZigPrimitiveType(id)) {
-try writer.print("@\"{}\"", .{std.zig.fmtEscapes(id)});
+try out.print("{s}_", .{id});
 } else {
-try writer.print("{}", .{std.zig.fmtId(id)});
+try out.print("{}", .{std.zig.fmtId(id)});
 }
 }

@@ -128,7 +127,7 @@ pub const IdRenderer = struct {
 tags: []const []const u8,
 text_cache: std.ArrayList(u8),

-pub fn init(allocator: Allocator, tags: []const []const u8) IdRenderer {
+pub fn init(allocator: *Allocator, tags: []const []const u8) IdRenderer {
 return .{
 .tags = tags,
 .text_cache = std.ArrayList(u8).init(allocator),

@@ -139,7 +138,7 @@ pub const IdRenderer = struct {
 self.text_cache.deinit();
 }

-fn renderSnake(self: *IdRenderer, comptime screaming: bool, id: []const u8, tag: ?[]const u8) !void {
+fn renderSnake(self: *IdRenderer, screaming: bool, id: []const u8, tag: ?[]const u8) !void {
 var it = SegmentIterator.init(id);
 var first = true;
 const transform = if (screaming) std.ascii.toUpper else std.ascii.toLower;
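The `writeIdentifier` hunk above changes how reserved names are rendered into the generated bindings. A small sketch of the observable difference (illustration only; the rendering logic itself is the one shown in the diff):

```zig
const std = @import("std");

pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    // Given the reserved name "type", the two branches emit different identifiers:
    try stdout.print("zig-0.8.1-compat renders:  {s}\n", .{"type_"});
    try stdout.print("zig-stage1-compat renders: {s}\n", .{"@\"type\""});
}
```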
@@ -1,6 +1,5 @@
 pub const generateVk = @import("vulkan/generator.zig").generate;
 pub const VkGenerateStep = @import("vulkan/build_integration.zig").GenerateStep;
-pub const ShaderStage = @import("build_integration.zig").ShaderStage;
 pub const ShaderCompileStep = @import("build_integration.zig").ShaderCompileStep;

 test "main" {
@@ -8,15 +8,17 @@ pub fn main() !void {

 var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
 defer arena.deinit();
-const allocator = arena.allocator();
+const allocator = &arena.allocator;

-var args = try std.process.argsWithAllocator(allocator);
-const prog_name = args.next() orelse return error.ExecutableNameMissing;
+var args = std.process.args();
+const prog_name = try args.next(allocator) orelse return error.ExecutableNameMissing;

 var maybe_xml_path: ?[]const u8 = null;
 var maybe_out_path: ?[]const u8 = null;

-while (args.next()) |arg| {
+while (args.next(allocator)) |err_or_arg| {
+const arg = try err_or_arg;
+
 if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) {
 @setEvalBranchQuota(2000);
 try stderr.writer().print(

@@ -59,10 +61,8 @@ pub fn main() !void {

 var out_buffer = std.ArrayList(u8).init(allocator);
 try generate(allocator, xml_src, out_buffer.writer());
-try out_buffer.append(0);

-const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
-const tree = try std.zig.parse(allocator, src);
+const tree = try std.zig.parse(allocator, out_buffer.items);
 const formatted = try tree.render(allocator);
 defer allocator.free(formatted);
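The hunks above show the generator's one-off entry point on each branch: render the bindings into a buffer, then run them through the Zig parser and renderer for formatting. A hedged sketch of driving the same pipeline programmatically in 0.8.1-era style, mirroring generator/main.zig (the import path and file names are placeholders, not repository code):

```zig
const std = @import("std");
const generate = @import("vulkan/generator.zig").generate; // placeholder path

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = &arena.allocator; // zig-0.8.1 style: *Allocator

    // Read the registry, render the bindings into an in-memory buffer.
    const xml_src = try std.fs.cwd().readFileAlloc(allocator, "vk.xml", std.math.maxInt(usize));
    var out = std.ArrayList(u8).init(allocator);
    try generate(allocator, xml_src, out.writer());

    // generator/main.zig additionally parses and re-renders this buffer with
    // std.zig.parse/tree.render before writing it out; here it is written as-is.
    try std.fs.cwd().writeFile("vk.zig", out.items);
}
```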
@@ -21,8 +21,6 @@ pub const GenerateStep = struct {
 /// name `vulkan`.
 package: std.build.Pkg,

-output_file: std.build.GeneratedFile,
-
 /// Initialize a Vulkan generation step, for `builder`. `spec_path` is the path to
 /// vk.xml, relative to the project root. The generated bindings will be placed at
 /// `out_path`, which is relative to the zig-cache directory.

@@ -35,18 +33,14 @@ pub const GenerateStep = struct {
 }) catch unreachable;

 self.* = .{
-.step = Step.init(.custom, "vulkan-generate", builder.allocator, make),
+.step = Step.init(.Custom, "vulkan-generate", builder.allocator, make),
 .builder = builder,
 .spec_path = spec_path,
 .package = .{
 .name = "vulkan",
-.source = .{ .generated = &self.output_file },
-.dependencies = null,
-},
-.output_file = .{
-.step = &self.step,
 .path = full_out_path,
-},
+.dependencies = null,
+}
 };
 return self;
 }

@@ -76,16 +70,14 @@ pub const GenerateStep = struct {

 var out_buffer = std.ArrayList(u8).init(self.builder.allocator);
 try generate(self.builder.allocator, spec, out_buffer.writer());
-try out_buffer.append(0);

-const src = out_buffer.items[0 .. out_buffer.items.len - 1 :0];
-const tree = try std.zig.parse(self.builder.allocator, src);
+const tree = try std.zig.parse(self.builder.allocator, out_buffer.items);
 std.debug.assert(tree.errors.len == 0); // If this triggers, vulkan-zig produced invalid code.

 var formatted = try tree.render(self.builder.allocator);

-const dir = path.dirname(self.output_file.path.?).?;
+const dir = path.dirname(self.package.path).?;
 try cwd.makePath(dir);
-try cwd.writeFile(self.output_file.path.?, formatted);
+try cwd.writeFile(self.package.path, formatted);
 }
 };
@@ -164,11 +164,11 @@ pub const XmlCTokenizer = struct {
|
||||
}
|
||||
|
||||
fn elemToToken(elem: *xml.Element) !?Token {
|
||||
if (elem.children.len != 1 or elem.children[0] != .char_data) {
|
||||
if (elem.children.items.len != 1 or elem.children.items[0] != .CharData) {
|
||||
return error.InvalidXml;
|
||||
}
|
||||
|
||||
const text = elem.children[0].char_data;
|
||||
const text = elem.children.items[0].CharData;
|
||||
if (mem.eql(u8, elem.tag, "type")) {
|
||||
return Token{ .kind = .type_name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "enum")) {
|
||||
@@ -203,9 +203,9 @@ pub const XmlCTokenizer = struct {
|
||||
|
||||
if (self.it.next()) |child| {
|
||||
switch (child.*) {
|
||||
.char_data => |cdata| self.ctok = CTokenizer{ .source = cdata, .in_comment = in_comment },
|
||||
.comment => {}, // xml comment
|
||||
.element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
.CharData => |cdata| self.ctok = CTokenizer{ .source = cdata, .in_comment = in_comment },
|
||||
.Comment => {}, // xml comment
|
||||
.Element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
@@ -241,9 +241,9 @@ pub const XmlCTokenizer = struct {
|
||||
};
|
||||
|
||||
// TYPEDEF = kw_typedef DECLARATION ';'
|
||||
pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
_ = try xctok.expect(.kw_typedef);
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
_ = try xctok.expect(.semicolon);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
@@ -256,8 +256,8 @@ pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional:
|
||||
}
|
||||
|
||||
// MEMBER = DECLARATION (':' int)?
|
||||
pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
var field = registry.Container.Field{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.field_type = decl.decl_type,
|
||||
@@ -284,8 +284,8 @@ pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: b
|
||||
return field;
|
||||
}
|
||||
|
||||
pub fn parseParamOrProto(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
pub fn parseParamOrProto(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
}
|
||||
@@ -315,7 +315,7 @@ pub const ParseError = error{
|
||||
// DECLARATION = kw_const? type_name DECLARATOR
|
||||
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
|
||||
// | POINTERS '(' FNPTRSUFFIX
|
||||
fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) ParseError!Declaration {
|
||||
fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
|
||||
// Parse declaration constness
|
||||
var tok = try xctok.nextNoEof();
|
||||
const inner_is_const = tok.kind == .kw_const;
|
||||
@@ -333,11 +333,11 @@ fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional:
|
||||
var type_info = TypeInfo{ .name = type_name };
|
||||
|
||||
// Parse pointers
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info, ptrs_optional);
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info);
|
||||
|
||||
// Parse name / fn ptr
|
||||
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info, ptrs_optional)) |decl| {
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info)) |decl| {
|
||||
return decl;
|
||||
}
|
||||
|
||||
@@ -377,7 +377,7 @@ fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional:
|
||||
}
|
||||
|
||||
// FNPTRSUFFIX = kw_vkapi_ptr '*' name' ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
|
||||
fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo, ptrs_optional: bool) !?Declaration {
|
||||
fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
|
||||
const lparen = try xctok.peek();
|
||||
if (lparen == null or lparen.?.kind != .lparen) {
|
||||
return null;
|
||||
@@ -404,7 +404,7 @@ fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: Ty
|
||||
},
|
||||
};
|
||||
|
||||
const first_param = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
const first_param = try parseDeclaration(allocator, xctok);
|
||||
if (first_param.name == null) {
|
||||
if (first_param.decl_type != .name or !mem.eql(u8, first_param.decl_type.name, "void")) {
|
||||
return error.InvalidSyntax;
|
||||
@@ -431,7 +431,7 @@ fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: Ty
|
||||
else => return error.InvalidSyntax,
|
||||
}
|
||||
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
try params.append(.{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.param_type = decl.decl_type,
|
||||
@@ -445,7 +445,7 @@ fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: Ty
|
||||
}
|
||||
|
||||
// POINTERS = (kw_const? '*')*
|
||||
fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo, ptrs_optional: bool) !TypeInfo {
|
||||
fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
|
||||
var type_info = inner;
|
||||
var first_const = inner_const;
|
||||
|
||||
@@ -474,7 +474,7 @@ fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool,
|
||||
type_info = .{
|
||||
.pointer = .{
|
||||
.is_const = is_const or first_const,
|
||||
.is_optional = ptrs_optional, // set elsewhere
|
||||
.is_optional = false, // set elsewhere
|
||||
.size = .one, // set elsewhere
|
||||
.child = child,
|
||||
},
|
||||
@@ -606,7 +606,7 @@ test "parseTypedef" {
defer arena.deinit();

var xctok = XmlCTokenizer.init(document.root);
const decl = try parseTypedef(arena.allocator(), &xctok, false);
const decl = try parseTypedef(&arena.allocator, &xctok);

try testing.expectEqualSlices(u8, "pythons", decl.name);
const array = decl.decl_type.typedef.array;

@@ -12,12 +12,12 @@ const EnumFieldMerger = struct {
const EnumExtensionMap = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(reg.Enum.Field));
const FieldSet = std.StringArrayHashMapUnmanaged(void);

arena: Allocator,
arena: *Allocator,
registry: *reg.Registry,
enum_extensions: EnumExtensionMap,
field_set: FieldSet,

fn init(arena: Allocator, registry: *reg.Registry) EnumFieldMerger {
fn init(arena: *Allocator, registry: *reg.Registry) EnumFieldMerger {
return .{
.arena = arena,
.registry = registry,
@@ -99,7 +99,7 @@ pub const Generator = struct {
registry: reg.Registry,
id_renderer: IdRenderer,

fn init(allocator: Allocator, spec: *xml.Element) !Generator {
fn init(allocator: *Allocator, spec: *xml.Element) !Generator {
const result = try parseXml(allocator, spec);

const tags = try allocator.alloc([]const u8, result.registry.tags.len);
@@ -128,13 +128,13 @@ pub const Generator = struct {

// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
fn mergeEnumFields(self: *Generator) !void {
var merger = EnumFieldMerger.init(self.arena.allocator(), &self.registry);
var merger = EnumFieldMerger.init(&self.arena.allocator, &self.registry);
try merger.merge();
}

// https://github.com/KhronosGroup/Vulkan-Docs/pull/1556
fn fixupBitFlags(self: *Generator) !void {
var seen_bits = std.StringArrayHashMap(void).init(self.arena.allocator());
var seen_bits = std.StringArrayHashMap(void).init(&self.arena.allocator);
defer seen_bits.deinit();

for (self.registry.decls) |decl| {
@@ -166,7 +166,7 @@ pub const Generator = struct {
}

fn render(self: *Generator, writer: anytype) !void {
try renderRegistry(writer, self.arena.allocator(), &self.registry, &self.id_renderer);
try renderRegistry(writer, &self.arena.allocator, &self.registry, &self.id_renderer);
}
};

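The `self.arena.allocator()` versus `&self.arena.allocator` churn above is the same interface change applied to `ArenaAllocator`: on 0.8.x the arena exposes its allocator as a struct field, while later compilers expose it through a method. A small sketch, with the matching line uncommented for your compiler:

```zig
const std = @import("std");

fn arenaDemo() void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();

    // Zig 0.8.x: take the address of the embedded field.
    // const allocator = &arena.allocator;

    // Later Zig: call the accessor method instead.
    // const allocator = arena.allocator();
}
```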
@@ -174,7 +174,7 @@ pub const Generator = struct {
|
||||
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
|
||||
/// internal datastructures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
|
||||
/// directly.
|
||||
pub fn generate(allocator: Allocator, spec_xml: []const u8, writer: anytype) !void {
|
||||
pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: anytype) !void {
|
||||
const spec = try xml.parse(allocator, spec_xml);
|
||||
defer spec.deinit();
|
||||
|
||||
|
||||
@@ -17,13 +17,14 @@ pub const ParseResult = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn parseXml(backing_allocator: Allocator, root: *xml.Element) !ParseResult {
|
||||
pub fn parseXml(backing_allocator: *Allocator, root: *xml.Element) !ParseResult {
|
||||
var arena = ArenaAllocator.init(backing_allocator);
|
||||
errdefer arena.deinit();
|
||||
|
||||
const allocator = arena.allocator();
|
||||
const allocator = &arena.allocator;
|
||||
|
||||
var reg = registry.Registry{
|
||||
.copyright = root.getCharData("comment") orelse return error.InvalidRegistry,
|
||||
.decls = try parseDeclarations(allocator, root),
|
||||
.api_constants = try parseApiConstants(allocator, root),
|
||||
.tags = try parseTags(allocator, root),
|
||||
@@ -37,11 +38,11 @@ pub fn parseXml(backing_allocator: Allocator, root: *xml.Element) !ParseResult {
|
||||
};
|
||||
}
|
||||
|
||||
fn parseDeclarations(allocator: Allocator, root: *xml.Element) ![]registry.Declaration {
|
||||
fn parseDeclarations(allocator: *Allocator, root: *xml.Element) ![]registry.Declaration {
|
||||
var types_elem = root.findChildByTag("types") orelse return error.InvalidRegistry;
|
||||
var commands_elem = root.findChildByTag("commands") orelse return error.InvalidRegistry;
|
||||
|
||||
const decl_upper_bound = types_elem.children.len + commands_elem.children.len;
|
||||
const decl_upper_bound = types_elem.children.items.len + commands_elem.children.items.len;
|
||||
const decls = try allocator.alloc(registry.Declaration, decl_upper_bound);
|
||||
|
||||
var count: usize = 0;
|
||||
@@ -51,7 +52,7 @@ fn parseDeclarations(allocator: Allocator, root: *xml.Element) ![]registry.Decla
|
||||
return allocator.shrink(decls, count);
|
||||
}
|
||||
|
||||
fn parseTypes(allocator: Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
|
||||
fn parseTypes(allocator: *Allocator, out: []registry.Declaration, types_elem: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = types_elem.findChildrenByTag("type");
|
||||
while (it.next()) |ty| {
|
||||
@@ -158,11 +159,11 @@ fn parseHandleType(ty: *xml.Element) !registry.Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
fn parseBaseType(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
fn parseBaseType(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
const name = ty.getCharData("name") orelse return error.InvalidRegistry;
|
||||
if (ty.getCharData("type")) |_| {
|
||||
var tok = cparse.XmlCTokenizer.init(ty);
|
||||
return try cparse.parseTypedef(allocator, &tok, false);
|
||||
return try cparse.parseTypedef(allocator, &tok);
|
||||
} else {
|
||||
// Either ANativeWindow, AHardwareBuffer or CAMetalLayer. The latter has a lot of
|
||||
// macros, which is why this part is not built into the xml/c parser.
|
||||
@@ -173,7 +174,7 @@ fn parseBaseType(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
|
||||
fn parseContainer(allocator: *Allocator, ty: *xml.Element, is_union: bool) !registry.Declaration {
|
||||
const name = ty.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
|
||||
if (ty.getAttribute("alias")) |alias| {
|
||||
@@ -185,14 +186,14 @@ fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !regis
|
||||
};
|
||||
}
|
||||
|
||||
var members = try allocator.alloc(registry.Container.Field, ty.children.len);
|
||||
var members = try allocator.alloc(registry.Container.Field, ty.children.items.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = ty.findChildrenByTag("member");
|
||||
var maybe_stype: ?[]const u8 = null;
|
||||
while (it.next()) |member| {
|
||||
var xctok = cparse.XmlCTokenizer.init(member);
|
||||
members[i] = try cparse.parseMember(allocator, &xctok, false);
|
||||
members[i] = try cparse.parseMember(allocator, &xctok);
|
||||
if (mem.eql(u8, members[i].name, "sType")) {
|
||||
if (member.getAttribute("values")) |stype| {
|
||||
maybe_stype = stype;
|
||||
@@ -208,7 +209,7 @@ fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !regis
|
||||
if (ty.getAttribute("structextends")) |extends| {
|
||||
const n_structs = std.mem.count(u8, extends, ",") + 1;
|
||||
maybe_extends = try allocator.alloc([]const u8, n_structs);
|
||||
var struct_extends = std.mem.split(u8, extends, ",");
|
||||
var struct_extends = std.mem.split(extends, ",");
|
||||
var j: usize = 0;
|
||||
while (struct_extends.next()) |struct_extend| {
|
||||
maybe_extends.?[j] = struct_extend;
|
||||
@@ -235,9 +236,9 @@ fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !regis
|
||||
};
|
||||
}
|
||||
|
||||
fn parseFuncPointer(allocator: Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
fn parseFuncPointer(allocator: *Allocator, ty: *xml.Element) !registry.Declaration {
|
||||
var xctok = cparse.XmlCTokenizer.init(ty);
|
||||
return try cparse.parseTypedef(allocator, &xctok, true);
|
||||
return try cparse.parseTypedef(allocator, &xctok);
|
||||
}
|
||||
|
||||
// For some reason, the DeclarationType cannot be passed to lenToPointerSize, as
@@ -277,7 +278,7 @@ fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSiz

fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.Element) !void {
if (elem.getAttribute("len")) |lens| {
var it = mem.split(u8, lens, ",");
var it = mem.split(lens, ",");
var current_type_info = type_info;
while (current_type_info.* == .pointer) {
// TODO: Check altlen
@@ -294,7 +295,7 @@ fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.El
}

if (elem.getAttribute("optional")) |optionals| {
var it = mem.split(u8, optionals, ",");
var it = mem.split(optionals, ",");
var current_type_info = type_info;
while (current_type_info.* == .pointer) {
if (it.next()) |current_optional| {
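`std.mem.split` is the other recurring difference in these hunks: after 0.8.x it takes the element type as an explicit first argument. A sketch of iterating a comma-separated registry attribute, using a hypothetical helper name:

```zig
const std = @import("std");
const mem = std.mem;

fn countEntries(list: []const u8) usize {
    // Zig 0.8.x spelling: var it = mem.split(list, ",");
    var it = mem.split(u8, list, ","); // later Zig
    var n: usize = 0;
    while (it.next()) |_| n += 1;
    return n;
}
```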
@@ -323,7 +324,7 @@ fn parseEnumAlias(elem: *xml.Element) !?registry.Declaration {
|
||||
return null;
|
||||
}
|
||||
|
||||
fn parseEnums(allocator: Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
|
||||
fn parseEnums(allocator: *Allocator, out: []registry.Declaration, root: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = root.findChildrenByTag("enums");
|
||||
while (it.next()) |enums| {
|
||||
@@ -342,7 +343,7 @@ fn parseEnums(allocator: Allocator, out: []registry.Declaration, root: *xml.Elem
|
||||
return i;
|
||||
}
|
||||
|
||||
fn parseEnumFields(allocator: Allocator, elem: *xml.Element) !registry.Enum {
|
||||
fn parseEnumFields(allocator: *Allocator, elem: *xml.Element) !registry.Enum {
|
||||
// TODO: `type` was added recently, fall back to checking endswith FlagBits for older versions?
|
||||
const enum_type = elem.getAttribute("type") orelse return error.InvalidRegistry;
|
||||
const is_bitmask = mem.eql(u8, enum_type, "bitmask");
|
||||
@@ -355,7 +356,7 @@ fn parseEnumFields(allocator: Allocator, elem: *xml.Element) !registry.Enum {
|
||||
else
|
||||
32;
|
||||
|
||||
const fields = try allocator.alloc(registry.Enum.Field, elem.children.len);
|
||||
const fields = try allocator.alloc(registry.Enum.Field, elem.children.items.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = elem.findChildrenByTag("enum");
|
||||
@@ -409,7 +410,7 @@ fn parseEnumField(field: *xml.Element) !registry.Enum.Field {
|
||||
};
|
||||
}
|
||||
|
||||
fn parseCommands(allocator: Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
|
||||
fn parseCommands(allocator: *Allocator, out: []registry.Declaration, commands_elem: *xml.Element) !usize {
|
||||
var i: usize = 0;
|
||||
var it = commands_elem.findChildrenByTag("command");
|
||||
while (it.next()) |elem| {
|
||||
@@ -420,14 +421,14 @@ fn parseCommands(allocator: Allocator, out: []registry.Declaration, commands_ele
|
||||
return i;
|
||||
}
|
||||
|
||||
fn splitCommaAlloc(allocator: Allocator, text: []const u8) ![][]const u8 {
|
||||
fn splitCommaAlloc(allocator: *Allocator, text: []const u8) ![][]const u8 {
|
||||
var n_codes: usize = 1;
|
||||
for (text) |c| {
|
||||
if (c == ',') n_codes += 1;
|
||||
}
|
||||
|
||||
const codes = try allocator.alloc([]const u8, n_codes);
|
||||
var it = mem.split(u8, text, ",");
|
||||
var it = mem.split(text, ",");
|
||||
for (codes) |*code| {
|
||||
code.* = it.next().?;
|
||||
}
|
||||
@@ -435,7 +436,7 @@ fn splitCommaAlloc(allocator: Allocator, text: []const u8) ![][]const u8 {
|
||||
return codes;
|
||||
}
|
||||
|
||||
fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration {
|
||||
fn parseCommand(allocator: *Allocator, elem: *xml.Element) !registry.Declaration {
|
||||
if (elem.getAttribute("alias")) |alias| {
|
||||
const name = elem.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
return registry.Declaration{
|
||||
@@ -448,15 +449,15 @@ fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration
|
||||
|
||||
const proto = elem.findChildByTag("proto") orelse return error.InvalidRegistry;
|
||||
var proto_xctok = cparse.XmlCTokenizer.init(proto);
|
||||
const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok, false);
|
||||
const command_decl = try cparse.parseParamOrProto(allocator, &proto_xctok);
|
||||
|
||||
var params = try allocator.alloc(registry.Command.Param, elem.children.len);
|
||||
var params = try allocator.alloc(registry.Command.Param, elem.children.items.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = elem.findChildrenByTag("param");
|
||||
while (it.next()) |param| {
|
||||
var xctok = cparse.XmlCTokenizer.init(param);
|
||||
const decl = try cparse.parseParamOrProto(allocator, &xctok, false);
|
||||
const decl = try cparse.parseParamOrProto(allocator, &xctok);
|
||||
params[i] = .{
|
||||
.name = decl.name,
|
||||
.param_type = decl.decl_type.typedef,
|
||||
@@ -468,12 +469,12 @@ fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration
const return_type = try allocator.create(registry.TypeInfo);
return_type.* = command_decl.decl_type.typedef;

const success_codes: [][]const u8 = if (elem.getAttribute("successcodes")) |codes|
const success_codes = if (elem.getAttribute("successcodes")) |codes|
try splitCommaAlloc(allocator, codes)
else
&[_][]const u8{};

const error_codes: [][]const u8 = if (elem.getAttribute("errorcodes")) |codes|
const error_codes = if (elem.getAttribute("errorcodes")) |codes|
try splitCommaAlloc(allocator, codes)
else
&[_][]const u8{};
@@ -499,7 +500,7 @@ fn parseCommand(allocator: Allocator, elem: *xml.Element) !registry.Declaration
|
||||
};
|
||||
}
|
||||
|
||||
fn parseApiConstants(allocator: Allocator, root: *xml.Element) ![]registry.ApiConstant {
|
||||
fn parseApiConstants(allocator: *Allocator, root: *xml.Element) ![]registry.ApiConstant {
|
||||
var enums = blk: {
|
||||
var it = root.findChildrenByTag("enums");
|
||||
while (it.next()) |child| {
|
||||
@@ -526,7 +527,7 @@ fn parseApiConstants(allocator: Allocator, root: *xml.Element) ![]registry.ApiCo
|
||||
break :blk n_defines;
|
||||
};
|
||||
|
||||
const constants = try allocator.alloc(registry.ApiConstant, enums.children.len + n_defines);
|
||||
const constants = try allocator.alloc(registry.ApiConstant, enums.children.items.len + n_defines);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = enums.findChildrenByTag("enum");
|
||||
@@ -563,7 +564,7 @@ fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
|
||||
if (mem.eql(u8, name, "VK_HEADER_VERSION")) {
|
||||
out[i] = .{
|
||||
.name = name,
|
||||
.value = .{ .expr = mem.trim(u8, ty.children[2].char_data, " ") },
|
||||
.value = .{ .expr = mem.trim(u8, ty.children.items[2].CharData, " ") },
|
||||
};
|
||||
} else {
|
||||
var xctok = cparse.XmlCTokenizer.init(ty);
|
||||
@@ -578,9 +579,9 @@ fn parseDefines(types: *xml.Element, out: []registry.ApiConstant) !usize {
|
||||
return i;
|
||||
}
|
||||
|
||||
fn parseTags(allocator: Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
fn parseTags(allocator: *Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
var tags_elem = root.findChildByTag("tags") orelse return error.InvalidRegistry;
|
||||
const tags = try allocator.alloc(registry.Tag, tags_elem.children.len);
|
||||
const tags = try allocator.alloc(registry.Tag, tags_elem.children.items.len);
|
||||
|
||||
var i: usize = 0;
|
||||
var it = tags_elem.findChildrenByTag("tag");
|
||||
@@ -596,7 +597,7 @@ fn parseTags(allocator: Allocator, root: *xml.Element) ![]registry.Tag {
|
||||
return allocator.shrink(tags, i);
|
||||
}
|
||||
|
||||
fn parseFeatures(allocator: Allocator, root: *xml.Element) ![]registry.Feature {
|
||||
fn parseFeatures(allocator: *Allocator, root: *xml.Element) ![]registry.Feature {
|
||||
var it = root.findChildrenByTag("feature");
|
||||
var count: usize = 0;
|
||||
while (it.next()) |_| count += 1;
|
||||
@@ -612,14 +613,14 @@ fn parseFeatures(allocator: Allocator, root: *xml.Element) ![]registry.Feature {
|
||||
return features;
|
||||
}
|
||||
|
||||
fn parseFeature(allocator: Allocator, feature: *xml.Element) !registry.Feature {
|
||||
fn parseFeature(allocator: *Allocator, feature: *xml.Element) !registry.Feature {
|
||||
const name = feature.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
const feature_level = blk: {
|
||||
const number = feature.getAttribute("number") orelse return error.InvalidRegistry;
|
||||
break :blk try splitFeatureLevel(number, ".");
|
||||
};
|
||||
|
||||
var requires = try allocator.alloc(registry.Require, feature.children.len);
|
||||
var requires = try allocator.alloc(registry.Require, feature.children.items.len);
|
||||
var i: usize = 0;
|
||||
var it = feature.findChildrenByTag("require");
|
||||
while (it.next()) |require| {
|
||||
@@ -683,7 +684,7 @@ fn enumExtOffsetToValue(extnumber: u31, offset: u31) u31 {
|
||||
return extension_value_base + (extnumber - 1) * extension_block + offset;
|
||||
}
|
||||
|
||||
fn parseRequire(allocator: Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
|
||||
fn parseRequire(allocator: *Allocator, require: *xml.Element, extnumber: ?u31) !registry.Require {
|
||||
var n_extends: usize = 0;
|
||||
var n_types: usize = 0;
|
||||
var n_commands: usize = 0;
|
||||
@@ -741,10 +742,10 @@ fn parseRequire(allocator: Allocator, require: *xml.Element, extnumber: ?u31) !r
|
||||
};
|
||||
}
|
||||
|
||||
fn parseExtensions(allocator: Allocator, root: *xml.Element) ![]registry.Extension {
|
||||
fn parseExtensions(allocator: *Allocator, root: *xml.Element) ![]registry.Extension {
|
||||
const extensions_elem = root.findChildByTag("extensions") orelse return error.InvalidRegistry;
|
||||
|
||||
const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.len);
|
||||
const extensions = try allocator.alloc(registry.Extension, extensions_elem.children.items.len);
|
||||
var i: usize = 0;
|
||||
var it = extensions_elem.findChildrenByTag("extension");
|
||||
while (it.next()) |extension| {
|
||||
@@ -778,7 +779,7 @@ fn findExtVersion(extension: *xml.Element) !u32 {
|
||||
return error.InvalidRegistry;
|
||||
}
|
||||
|
||||
fn parseExtension(allocator: Allocator, extension: *xml.Element) !registry.Extension {
|
||||
fn parseExtension(allocator: *Allocator, extension: *xml.Element) !registry.Extension {
|
||||
const name = extension.getAttribute("name") orelse return error.InvalidRegistry;
|
||||
const platform = extension.getAttribute("platform");
|
||||
const version = try findExtVersion(extension);
|
||||
@@ -822,7 +823,7 @@ fn parseExtension(allocator: Allocator, extension: *xml.Element) !registry.Exten
|
||||
break :blk try splitCommaAlloc(allocator, requires_str);
|
||||
};
|
||||
|
||||
var requires = try allocator.alloc(registry.Require, extension.children.len);
|
||||
var requires = try allocator.alloc(registry.Require, extension.children.items.len);
|
||||
var i: usize = 0;
|
||||
var it = extension.findChildrenByTag("require");
|
||||
while (it.next()) |require| {
|
||||
@@ -844,7 +845,7 @@ fn parseExtension(allocator: Allocator, extension: *xml.Element) !registry.Exten
|
||||
}
|
||||
|
||||
fn splitFeatureLevel(ver: []const u8, split: []const u8) !registry.FeatureLevel {
|
||||
var it = mem.split(u8, ver, split);
|
||||
var it = mem.split(ver, split);
|
||||
|
||||
const major = it.next() orelse return error.InvalidFeatureLevel;
|
||||
const minor = it.next() orelse return error.InvalidFeatureLevel;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pub const Registry = struct {
|
||||
copyright: []const u8,
|
||||
decls: []Declaration,
|
||||
api_constants: []ApiConstant,
|
||||
tags: []Tag,
|
||||
|
||||
@@ -8,12 +8,14 @@ const CaseStyle = id_render.CaseStyle;
|
||||
const IdRenderer = id_render.IdRenderer;
|
||||
|
||||
const preamble =
|
||||
\\
|
||||
\\// This file is generated from the Khronos Vulkan XML API registry by vulkan-zig.
|
||||
\\
|
||||
\\const std = @import("std");
|
||||
\\const builtin = @import("builtin");
|
||||
\\const root = @import("root");
|
||||
\\const vk = @This();
|
||||
\\
|
||||
\\const GlobalScope = @This();
|
||||
\\
|
||||
\\pub const vulkan_call_conv: std.builtin.CallingConvention = if (builtin.os.tag == .windows and builtin.cpu.arch == .i386)
|
||||
\\ .Stdcall
|
||||
@@ -93,13 +95,13 @@ const foreign_types = std.ComptimeStringMap([]const u8, .{
.{ "RROutput", @typeName(c_ulong) },
.{ "wl_display", "opaque {}" },
.{ "wl_surface", "opaque {}" },
.{ "HINSTANCE", "std.os.windows.HINSTANCE" },
.{ "HWND", "std.os.windows.HWND" },
.{ "HINSTANCE", "std.os.HINSTANCE" },
.{ "HWND", "*opaque {}" },
.{ "HMONITOR", "*opaque {}" },
.{ "HANDLE", "std.os.windows.HANDLE" },
.{ "HANDLE", "std.os.HANDLE" },
.{ "SECURITY_ATTRIBUTES", "std.os.SECURITY_ATTRIBUTES" },
.{ "DWORD", "std.os.windows.DWORD" },
.{ "LPCWSTR", "std.os.windows.LPCWSTR" },
.{ "DWORD", "std.os.DWORD" },
.{ "LPCWSTR", "std.os.LPCWSTR" },
.{ "xcb_connection_t", "opaque {}" },
.{ "xcb_visualid_t", @typeName(u32) },
.{ "xcb_window_t", @typeName(u32) },
@@ -177,12 +179,12 @@ fn Renderer(comptime WriterType: type) type {
|
||||
};
|
||||
|
||||
writer: WriterType,
|
||||
allocator: Allocator,
|
||||
allocator: *Allocator,
|
||||
registry: *const reg.Registry,
|
||||
id_renderer: *IdRenderer,
|
||||
declarations_by_name: std.StringHashMap(*const reg.DeclarationType),
|
||||
|
||||
fn init(writer: WriterType, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
|
||||
fn init(writer: WriterType, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !Self {
|
||||
var declarations_by_name = std.StringHashMap(*const reg.DeclarationType).init(allocator);
|
||||
errdefer declarations_by_name.deinit();
|
||||
|
||||
@@ -285,29 +287,27 @@ fn Renderer(comptime WriterType: type) type {

fn isFlags(self: Self, name: []const u8) bool {
const tag = self.id_renderer.getAuthorTag(name);
const tagless_name = if (tag) |tag_name| name[0 .. name.len - tag_name.len] else name;
const base_name = std.mem.trimRight(u8, tagless_name, "0123456789");
const base_name = if (tag) |tag_name| name[0 .. name.len - tag_name.len] else name;

return mem.endsWith(u8, base_name, "Flags");
}

fn resolveDeclaration(self: Self, start_name: []const u8) ?*const reg.DeclarationType {
var name = start_name;
return while (true) {
const decl = self.declarations_by_name.get(name) orelse return null;
if (decl.* != .alias) {
break decl;
}

name = decl.alias.name;
} else unreachable;
}

fn isInOutPointer(self: Self, ptr: reg.Pointer) !bool {
if (ptr.child.* != .name) {
return false;
}

const decl = self.resolveDeclaration(ptr.child.name) orelse return error.InvalidRegistry;
var name = ptr.child.name;

const decl = while (true) {
const decl = self.declarations_by_name.get(name) orelse return error.InvalidRegistry;
if (decl.* != .alias) {
break decl;
}

name = decl.alias.name;
} else unreachable;

if (decl.* != .container) {
return false;
}
@@ -407,7 +407,9 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
fn render(self: *Self) !void {
|
||||
try self.renderCopyright();
|
||||
try self.writer.writeAll(preamble);
|
||||
try self.renderCommandEnums();
|
||||
|
||||
for (self.registry.api_constants) |api_constant| {
|
||||
try self.renderApiConstant(api_constant);
|
||||
@@ -422,6 +424,95 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.renderWrappers();
|
||||
}
|
||||
|
||||
fn renderCommandEnums(self: *Self) !void {
|
||||
try self.renderCommandEnumOfDispatchType(.base);
|
||||
try self.renderCommandEnumOfDispatchType(.instance);
|
||||
try self.renderCommandEnumOfDispatchType(.device);
|
||||
try self.writer.writeAll("\n");
|
||||
}
|
||||
|
||||
fn renderCommandEnumOfDispatchType(self: *Self, dispatch_type: CommandDispatchType) !void {
|
||||
const dispatch_type_name = switch (dispatch_type) {
|
||||
.base => "Base",
|
||||
.instance => "Instance",
|
||||
.device => "Device",
|
||||
};
|
||||
|
||||
try self.writer.print("pub const {s}Command = enum {{\n", .{dispatch_type_name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(",\n");
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
try self.writer.print(
|
||||
\\
|
||||
\\pub fn symbol(self: {s}Command) [:0]const u8 {{
|
||||
\\ return switch (self) {{
|
||||
\\
|
||||
, .{dispatch_type_name}
|
||||
);
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll(".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.print(" => \"{s}\",\n", .{ decl.name });
|
||||
}
|
||||
}
|
||||
|
||||
try self.writer.writeAll("};\n}\n");
|
||||
}
|
||||
|
||||
{
|
||||
try self.writer.print(
|
||||
\\
|
||||
\\pub fn PfnType(comptime self: {s}Command) type {{
|
||||
\\ return switch (self) {{
|
||||
\\
|
||||
, .{dispatch_type_name}
|
||||
);
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll(".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(" => ");
|
||||
try self.renderCommandPtrName(decl.name);
|
||||
try self.writer.writeAll(",\n");
|
||||
}
|
||||
}
|
||||
|
||||
try self.writer.writeAll("};\n}\n");
|
||||
}
|
||||
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
|
||||
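The renderCommandEnumOfDispatchType routine above is what emits the per-dispatch command enums on this branch. A hypothetical excerpt of its output for the instance level, trimmed to two commands (the real enum lists every instance-dispatched entry point in vk.xml, and the `Pfn...` types come from the same generated file):

```zig
pub const InstanceCommand = enum {
    destroyInstance,
    enumeratePhysicalDevices,

    pub fn symbol(self: InstanceCommand) [:0]const u8 {
        return switch (self) {
            .destroyInstance => "vkDestroyInstance",
            .enumeratePhysicalDevices => "vkEnumeratePhysicalDevices",
        };
    }

    pub fn PfnType(comptime self: InstanceCommand) type {
        return switch (self) {
            .destroyInstance => PfnDestroyInstance,
            .enumeratePhysicalDevices => PfnEnumeratePhysicalDevices,
        };
    }
};
```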
fn renderCopyright(self: *Self) !void {
|
||||
var it = mem.split(self.registry.copyright, "\n");
|
||||
while (it.next()) |line| {
|
||||
try self.writer.print("// {s}\n", .{line});
|
||||
}
|
||||
}
|
||||
|
||||
fn renderApiConstant(self: *Self, api_constant: reg.ApiConstant) !void {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderName(api_constant.name);
|
||||
@@ -597,7 +688,7 @@ fn Renderer(comptime WriterType: type) type {
}

if (child_is_void) {
try self.writer.writeAll("anyopaque");
try self.writer.writeAll("c_void");
} else {
try self.renderTypeInfo(pointer.child.*);
}
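This hunk is the `c_void` to `anyopaque` rename: later Zig spells the untyped C pointee `anyopaque`, while 0.8.x still uses `c_void`. As a one-line illustration of what the generator ends up writing for a `void *` member (only one spelling exists per compiler):

```zig
const PNextOld = ?*const c_void; // Zig 0.8.x
const PNextNew = ?*const anyopaque; // later Zig
```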
@@ -660,7 +751,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
} else {
|
||||
try self.renderTypeInfo(field.field_type);
|
||||
try self.renderContainerDefaultField(name, container, field);
|
||||
try self.renderContainerFieldAlignment(field);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
}
|
||||
@@ -668,21 +758,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn renderContainerFieldAlignment(self: *Self, field: reg.Container.Field) !void {
|
||||
// Flags structures need to explicitly get their proper alignment: alignOf Flags for 32-bit flags, and alignOf Flag64 for 64-bit flags.
|
||||
const field_type_name = switch (field.field_type) {
|
||||
.name => |name| name,
|
||||
else => return,
|
||||
};
|
||||
const decl = self.resolveDeclaration(field_type_name) orelse return;
|
||||
switch (decl.*) {
|
||||
.bitmask => |mask| {
|
||||
try self.writer.print(" align(@alignOf({s}))", .{try bitmaskFlagsType(mask.bitwidth)});
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn renderContainerDefaultField(self: *Self, name: []const u8, container: reg.Container, field: reg.Container.Field) !void {
|
||||
if (mem.eql(u8, field.name, "pNext")) {
|
||||
try self.writer.writeAll(" = null");
|
||||
@@ -799,7 +874,11 @@ fn Renderer(comptime WriterType: type) type {
try self.writer.print("_reserved_bit_{}", .{bitpos});
}

try self.writer.writeAll(": bool = false,");
try self.writer.writeAll(": bool ");
if (bitpos == 0) { // Force alignment to integer boundaries
try self.writer.print("align(@alignOf({s})) ", .{flags_type});
}
try self.writer.writeAll("= false, ");
}
}
try self.writer.writeAll("pub usingnamespace FlagsMixin(");
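On this branch each flags container is emitted as a packed struct of bools, and the hunk above pins the first bit to the underlying integer's alignment. A rough, self-contained sketch of the resulting shape; the names and the two-bit width are illustrative only (real output has one field per bit of the 32- or 64-bit mask), and `FlagsMixin` here is a stand-in for the generated mixin:

```zig
const std = @import("std");

const Flags = u32; // what VkFlags maps to in the generated preamble

fn FlagsMixin(comptime FlagsType: type) type {
    return struct {
        pub fn isEmpty(self: FlagsType) bool {
            return std.meta.eql(self, FlagsType{});
        }
    };
}

pub const SampleFlags = packed struct {
    // align(@alignOf(Flags)) on bit 0 keeps the packed struct's alignment
    // equal to the plain integer it stands in for.
    sample_bit: bool align(@alignOf(Flags)) = false,
    _reserved_bit_1: bool = false,

    pub usingnamespace FlagsMixin(SampleFlags);
};
```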
@@ -923,49 +1002,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
}
|
||||
|
||||
fn renderWrappers(self: *Self) !void {
|
||||
try self.writer.writeAll(
|
||||
\\pub fn CommandFlagsMixin(comptime CommandFlags: type) type {
|
||||
\\ return struct {
|
||||
\\ pub fn merge(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) or @field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn intersect(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) and @field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn complement(self: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = !@field(self, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn subtract(lhs: CommandFlags, rhs: CommandFlags) CommandFlags {
|
||||
\\ var result: CommandFlags = .{};
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ @field(result, field.name) = @field(lhs, field.name) and !@field(rhs, field.name);
|
||||
\\ }
|
||||
\\ return result;
|
||||
\\ }
|
||||
\\ pub fn contains(lhs: CommandFlags, rhs: CommandFlags) bool {
|
||||
\\ inline for (@typeInfo(CommandFlags).Struct.fields) |field| {
|
||||
\\ if (!@field(lhs, field.name) and @field(rhs, field.name)) {
|
||||
\\ return false;
|
||||
\\ }
|
||||
\\ }
|
||||
\\ return true;
|
||||
\\ }
|
||||
\\ };
|
||||
\\}
|
||||
\\
|
||||
);
|
||||
try self.renderWrappersOfDispatchType(.base);
|
||||
try self.renderWrappersOfDispatchType(.instance);
|
||||
try self.renderWrappersOfDispatchType(.device);
|
||||
@@ -979,115 +1015,31 @@ fn Renderer(comptime WriterType: type) type {
|
||||
};
|
||||
|
||||
try self.writer.print(
|
||||
\\pub const {0s}CommandFlags = packed struct {{
|
||||
\\
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll(" ");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(": bool = false,\n");
|
||||
}
|
||||
}
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn CmdType(comptime tag: std.meta.FieldEnum({0s}CommandFlags)) type {{
|
||||
\\ return switch (tag) {{
|
||||
\\
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll((" " ** 8) ++ ".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.writeAll(" => ");
|
||||
try self.renderCommandPtrName(decl.name);
|
||||
try self.writer.writeAll(",\n");
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll(" };\n}");
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn cmdName(tag: std.meta.FieldEnum({0s}CommandFlags)) [:0]const u8 {{
|
||||
\\ return switch(tag) {{
|
||||
\\
|
||||
, .{name});
|
||||
for (self.registry.decls) |decl| {
|
||||
const command = switch (decl.decl_type) {
|
||||
.command => |cmd| cmd,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (classifyCommandDispatch(decl.name, command) == dispatch_type) {
|
||||
try self.writer.writeAll((" " ** 8) ++ ".");
|
||||
try self.writeIdentifierWithCase(.camel, trimVkNamespace(decl.name));
|
||||
try self.writer.print(
|
||||
\\ => "{s}",
|
||||
\\
|
||||
, .{decl.name});
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll(" };\n}");
|
||||
|
||||
try self.writer.print(
|
||||
\\ pub usingnamespace CommandFlagsMixin({s}CommandFlags);
|
||||
\\}};
|
||||
\\
|
||||
, .{name});
|
||||
|
||||
try self.writer.print(
|
||||
\\pub fn {0s}Wrapper(comptime cmds: {0s}CommandFlags) type {{
|
||||
\\pub fn {s}Wrapper(comptime cmds: []const {s}Command) type {{
|
||||
\\ comptime var fields: [cmds.len]std.builtin.TypeInfo.StructField = undefined;
|
||||
\\ inline for (cmds) |cmd, i| {{
|
||||
\\ const PfnType = cmd.PfnType();
|
||||
\\ fields[i] = .{{
|
||||
\\ .name = cmd.symbol(),
|
||||
\\ .field_type = PfnType,
|
||||
\\ .default_value = null,
|
||||
\\ .is_comptime = false,
|
||||
\\ .alignment = @alignOf(PfnType),
|
||||
\\ }};
|
||||
\\ }}
|
||||
\\ const Dispatch = @Type(.{{
|
||||
\\ .Struct = .{{
|
||||
\\ .layout = .Auto,
|
||||
\\ .fields = &fields,
|
||||
\\ .decls = &[_]std.builtin.TypeInfo.Declaration{{}},
|
||||
\\ .is_tuple = false,
|
||||
\\ }},
|
||||
\\ }});
|
||||
\\ return struct {{
|
||||
\\ dispatch: Dispatch,
|
||||
\\
|
||||
\\
|
||||
\\ const Self = @This();
|
||||
\\ pub const commands = cmds;
|
||||
\\ pub const Dispatch = blk: {{
|
||||
\\ @setEvalBranchQuota(10_000);
|
||||
\\ const Type = std.builtin.Type;
|
||||
\\ const fields_len = fields_len: {{
|
||||
\\ var fields_len = 0;
|
||||
\\ for (@typeInfo({0s}CommandFlags).Struct.fields) |field| {{
|
||||
\\ fields_len += @boolToInt(@field(cmds, field.name));
|
||||
\\ }}
|
||||
\\ break :fields_len fields_len;
|
||||
\\ }};
|
||||
\\ var fields: [fields_len]Type.StructField = undefined;
|
||||
\\ var i: usize = 0;
|
||||
\\ for (@typeInfo({0s}CommandFlags).Struct.fields) |field| {{
|
||||
\\ if (@field(cmds, field.name)) {{
|
||||
\\ const field_tag = std.enums.nameCast(std.meta.FieldEnum({0s}CommandFlags), field.name);
|
||||
\\ const PfnType = {0s}CommandFlags.CmdType(field_tag);
|
||||
\\ fields[i] = .{{
|
||||
\\ .name = {0s}CommandFlags.cmdName(field_tag),
|
||||
\\ .field_type = PfnType,
|
||||
\\ .default_value = null,
|
||||
\\ .is_comptime = false,
|
||||
\\ .alignment = @alignOf(PfnType),
|
||||
\\ }};
|
||||
\\ i += 1;
|
||||
\\ }}
|
||||
\\ }}
|
||||
\\ break :blk @Type(.{{
|
||||
\\ .Struct = .{{
|
||||
\\ .layout = .Auto,
|
||||
\\ .fields = &fields,
|
||||
\\ .decls = &[_]std.builtin.Type.Declaration{{}},
|
||||
\\ .is_tuple = false,
|
||||
\\ }},
|
||||
\\ }});
|
||||
\\ }};
|
||||
\\
|
||||
, .{name});
|
||||
, .{ name, name });
|
||||
|
||||
try self.renderWrapperLoader(dispatch_type);
|
||||
|
||||
@@ -1110,34 +1062,26 @@ fn Renderer(comptime WriterType: type) type {
.device => "device: Device, loader: anytype",
};

const loader_first_arg = switch (dispatch_type) {
.base => "Instance.null_handle",
.instance => "instance",
.device => "device",
const loader_first_param = switch (dispatch_type) {
.base => ".null_handle, ",
.instance => "instance, ",
.device => "device, ",
};

@setEvalBranchQuota(2000);

try self.writer.print(
\\pub fn load({[params]s}) error{{CommandLoadFailure}}!Self {{
\\pub fn load({s}) !Self {{
\\ var self: Self = undefined;
\\ inline for (std.meta.fields(Dispatch)) |field| {{
\\ const name = @ptrCast([*:0]const u8, field.name ++ "\x00");
\\ const cmd_ptr = loader({[first_arg]s}, name) orelse return error.CommandLoadFailure;
\\ const cmd_ptr = loader({s}name) orelse return error.CommandLoadFailure;
\\ @field(self.dispatch, field.name) = @ptrCast(field.field_type, cmd_ptr);
\\ }}
\\ return self;
\\}}
\\pub fn loadNoFail({[params]s}) Self {{
\\ var self: Self = undefined;
\\ inline for (std.meta.fields(Dispatch)) |field| {{
\\ const name = @ptrCast([*:0]const u8, field.name ++ "\x00");
\\ const cmd_ptr = loader({[first_arg]s}, name) orelse undefined;
\\ @field(self.dispatch, field.name) = @ptrCast(field.field_type, cmd_ptr);
\\ }}
\\ return self;
\\}}
, .{ .params = params, .first_arg = loader_first_arg });
\\
, .{ params, loader_first_param });
}

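For orientation, this is roughly how the generated `load` is driven on the 0.8.1 branch: the wrapper type is instantiated from a slice of command enum values, and `load` walks the generated Dispatch struct and resolves each symbol through whatever loader you hand it. The GLFW prototype below is hypothetical wiring, modeled on the project's example code rather than taken from this diff:

```zig
const vk = @import("vk.zig");

// Any PFN_vkGetInstanceProcAddr-compatible loader works; GLFW's is convenient.
extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;

const BaseDispatch = vk.BaseWrapper(&[_]vk.BaseCommand{
    .createInstance,
});

pub fn loadBase() !BaseDispatch {
    // load() fetches every requested command and fails if one is missing.
    return try BaseDispatch.load(glfwGetInstanceProcAddress);
}
```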
fn derefName(name: []const u8) []const u8 {
|
||||
@@ -1154,21 +1098,32 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writer.writeAll("(self: Self, ");
|
||||
|
||||
for (command.params) |param| {
|
||||
// This parameter is returned instead.
|
||||
if ((try self.classifyParam(param)) == .out_pointer) {
|
||||
continue;
|
||||
switch (try self.classifyParam(param)) {
|
||||
.in_pointer => {
|
||||
// Remove one pointer level
|
||||
try self.writeIdentifierWithCase(.snake, derefName(param.name));
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type.pointer.child.*);
|
||||
},
|
||||
.out_pointer => continue, // Return value
|
||||
.in_out_pointer,
|
||||
.bitflags, // Special stuff handled in renderWrapperCall
|
||||
.buffer_len,
|
||||
.mut_buffer_len,
|
||||
.other,
|
||||
=> {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type);
|
||||
},
|
||||
}
|
||||
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(": ");
|
||||
try self.renderTypeInfo(param.param_type);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
|
||||
try self.writer.writeAll(") ");
|
||||
|
||||
const returns_vk_result = command.return_type.* == .name and mem.eql(u8, command.return_type.name, "VkResult");
|
||||
if (returns_vk_result) {
|
||||
if (command.error_codes.len > 0) {
|
||||
try self.renderErrorSetName(name);
|
||||
try self.writer.writeByte('!');
|
||||
}
|
||||
@@ -1189,6 +1144,10 @@ fn Renderer(comptime WriterType: type) type {
|
||||
|
||||
for (command.params) |param| {
|
||||
switch (try self.classifyParam(param)) {
|
||||
.in_pointer => {
|
||||
try self.writer.writeByte('&');
|
||||
try self.writeIdentifierWithCase(.snake, derefName(param.name));
|
||||
},
|
||||
.out_pointer => {
|
||||
try self.writer.writeByte('&');
|
||||
if (returns.len > 1) {
|
||||
@@ -1200,7 +1159,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
try self.writer.writeAll(".toInt()");
|
||||
},
|
||||
.in_pointer, .in_out_pointer, .buffer_len, .mut_buffer_len, .other => {
|
||||
.in_out_pointer, .buffer_len, .mut_buffer_len, .other => {
|
||||
try self.writeIdentifierWithCase(.snake, param.name);
|
||||
},
|
||||
}
|
||||
@@ -1283,7 +1242,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
try self.renderReturnStruct(name, returns);
|
||||
}
|
||||
|
||||
if (returns_vk_result) {
|
||||
if (command.error_codes.len > 0) {
|
||||
try self.writer.writeAll("pub const ");
|
||||
try self.renderErrorSetName(name);
|
||||
try self.writer.writeAll(" = ");
|
||||
@@ -1379,9 +1338,6 @@ fn Renderer(comptime WriterType: type) type {
|
||||
fn renderErrorSet(self: *Self, errors: []const []const u8) !void {
|
||||
try self.writer.writeAll("error{");
|
||||
for (errors) |name| {
|
||||
if (std.mem.eql(u8, name, "VK_ERROR_UNKNOWN")) {
|
||||
continue;
|
||||
}
|
||||
try self.renderResultAsErrorName(name);
|
||||
try self.writer.writeAll(", ");
|
||||
}
|
||||
@@ -1401,7 +1357,7 @@ fn Renderer(comptime WriterType: type) type {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn render(writer: anytype, allocator: Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
|
||||
pub fn render(writer: anytype, allocator: *Allocator, registry: *const reg.Registry, id_renderer: *IdRenderer) !void {
|
||||
var renderer = try Renderer(@TypeOf(writer)).init(writer, allocator, registry, id_renderer);
|
||||
defer renderer.deinit();
|
||||
try renderer.render();
|
||||
|
||||
@@ -3,6 +3,7 @@ const mem = std.mem;
|
||||
const testing = std.testing;
|
||||
const Allocator = mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
|
||||
pub const Attribute = struct {
|
||||
name: []const u8,
|
||||
@@ -10,18 +11,29 @@ pub const Attribute = struct {
|
||||
};
|
||||
|
||||
pub const Content = union(enum) {
|
||||
char_data: []const u8,
|
||||
comment: []const u8,
|
||||
element: *Element,
|
||||
CharData: []const u8,
|
||||
Comment: []const u8,
|
||||
Element: *Element,
|
||||
};
|
||||
|
||||
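The xml `Content` union keeps its three cases, but the tag naming differs per branch: snake_case upstream, TitleCase on the 0.8.1 side. Consumers pattern-match on it either way; a small sketch against the surrounding xml module's `Content` type, using the newer names with the 0.8.1 spellings noted in comments:

```zig
fn charDataOf(content: Content) ?[]const u8 {
    return switch (content) {
        .char_data => |cd| cd, // 0.8.1 branch: .CharData
        .comment => null, // 0.8.1 branch: .Comment
        .element => null, // 0.8.1 branch: .Element
    };
}
```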
pub const Element = struct {
|
||||
tag: []const u8,
|
||||
attributes: []Attribute = &.{},
|
||||
children: []Content = &.{},
|
||||
pub const AttributeList = ArrayList(*Attribute);
|
||||
pub const ContentList = ArrayList(Content);
|
||||
|
||||
pub fn getAttribute(self: Element, attrib_name: []const u8) ?[]const u8 {
|
||||
for (self.attributes) |child| {
|
||||
tag: []const u8,
|
||||
attributes: AttributeList,
|
||||
children: ContentList,
|
||||
|
||||
fn init(tag: []const u8, alloc: *Allocator) Element {
|
||||
return .{
|
||||
.tag = tag,
|
||||
.attributes = AttributeList.init(alloc),
|
||||
.children = ContentList.init(alloc),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getAttribute(self: *Element, attrib_name: []const u8) ?[]const u8 {
|
||||
for (self.attributes.items) |child| {
|
||||
if (mem.eql(u8, child.name, attrib_name)) {
|
||||
return child.value;
|
||||
}
|
||||
@@ -30,37 +42,36 @@ pub const Element = struct {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getCharData(self: Element, child_tag: []const u8) ?[]const u8 {
|
||||
pub fn getCharData(self: *Element, child_tag: []const u8) ?[]const u8 {
|
||||
const child = self.findChildByTag(child_tag) orelse return null;
|
||||
if (child.children.len != 1) {
|
||||
if (child.children.items.len != 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return switch (child.children[0]) {
|
||||
.char_data => |char_data| char_data,
|
||||
return switch (child.children.items[0]) {
|
||||
.CharData => |char_data| char_data,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: Element) ChildIterator {
|
||||
pub fn iterator(self: *Element) ChildIterator {
|
||||
return .{
|
||||
.items = self.children,
|
||||
.items = self.children.items,
|
||||
.i = 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn elements(self: Element) ChildElementIterator {
|
||||
pub fn elements(self: *Element) ChildElementIterator {
|
||||
return .{
|
||||
.inner = self.iterator(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn findChildByTag(self: Element, tag: []const u8) ?*Element {
|
||||
var children = self.findChildrenByTag(tag);
|
||||
return children.next();
|
||||
pub fn findChildByTag(self: *Element, tag: []const u8) ?*Element {
|
||||
return self.findChildrenByTag(tag).next();
|
||||
}
|
||||
|
||||
pub fn findChildrenByTag(self: Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
pub fn findChildrenByTag(self: *Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
return .{
|
||||
.inner = self.elements(),
|
||||
.tag = tag,
|
||||
@@ -86,11 +97,11 @@ pub const Element = struct {
|
||||
|
||||
pub fn next(self: *ChildElementIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (child.* != .element) {
|
||||
if (child.* != .Element) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child.*.element;
|
||||
return child.*.Element;
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -115,9 +126,15 @@ pub const Element = struct {
|
||||
};
|
||||
};
|
||||
|
||||
pub const XmlDecl = struct {
|
||||
version: []const u8,
|
||||
encoding: ?[]const u8,
|
||||
standalone: ?bool,
|
||||
};
|
||||
|
||||
pub const Document = struct {
|
||||
arena: ArenaAllocator,
|
||||
xml_decl: ?*Element,
|
||||
xml_decl: ?*XmlDecl,
|
||||
root: *Element,
|
||||
|
||||
pub fn deinit(self: Document) void {
|
||||
@@ -126,13 +143,13 @@ pub const Document = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const Parser = struct {
|
||||
const ParseContext = struct {
|
||||
source: []const u8,
|
||||
offset: usize,
|
||||
line: usize,
|
||||
column: usize,
|
||||
|
||||
fn init(source: []const u8) Parser {
|
||||
fn init(source: []const u8) ParseContext {
|
||||
return .{
|
||||
.source = source,
|
||||
.offset = 0,
|
||||
@@ -141,11 +158,11 @@ const Parser = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn peek(self: *Parser) ?u8 {
|
||||
fn peek(self: *ParseContext) ?u8 {
|
||||
return if (self.offset < self.source.len) self.source[self.offset] else null;
|
||||
}
|
||||
|
||||
fn consume(self: *Parser) !u8 {
|
||||
fn consume(self: *ParseContext) !u8 {
|
||||
if (self.offset < self.source.len) {
|
||||
return self.consumeNoEof();
|
||||
}
|
||||
@@ -153,7 +170,7 @@ const Parser = struct {
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn consumeNoEof(self: *Parser) u8 {
|
||||
fn consumeNoEof(self: *ParseContext) u8 {
|
||||
std.debug.assert(self.offset < self.source.len);
|
||||
const c = self.source[self.offset];
|
||||
self.offset += 1;
|
||||
@@ -168,12 +185,12 @@ const Parser = struct {
|
||||
return c;
|
||||
}
|
||||
|
||||
fn eat(self: *Parser, char: u8) bool {
|
||||
fn eat(self: *ParseContext, char: u8) bool {
|
||||
self.expect(char) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expect(self: *Parser, expected: u8) !void {
|
||||
fn expect(self: *ParseContext, expected: u8) !void {
|
||||
if (self.peek()) |actual| {
|
||||
if (expected != actual) {
|
||||
return error.UnexpectedCharacter;
|
||||
@@ -186,15 +203,15 @@ const Parser = struct {
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn eatStr(self: *Parser, text: []const u8) bool {
|
||||
fn eatStr(self: *ParseContext, text: []const u8) bool {
|
||||
self.expectStr(text) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expectStr(self: *Parser, text: []const u8) !void {
|
||||
fn expectStr(self: *ParseContext, text: []const u8) !void {
|
||||
if (self.source.len < self.offset + text.len) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (mem.startsWith(u8, self.source[self.offset..], text)) {
|
||||
} else if (std.mem.startsWith(u8, self.source[self.offset..], text)) {
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (i += 1) {
|
||||
_ = self.consumeNoEof();
|
||||
@@ -206,7 +223,7 @@ const Parser = struct {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn eatWs(self: *Parser) bool {
|
||||
fn eatWs(self: *ParseContext) bool {
|
||||
var ws = false;
|
||||
|
||||
while (self.peek()) |ch| {
|
||||
@@ -222,11 +239,11 @@ const Parser = struct {
|
||||
return ws;
|
||||
}
|
||||
|
||||
fn expectWs(self: *Parser) !void {
|
||||
fn expectWs(self: *ParseContext) !void {
|
||||
if (!self.eatWs()) return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn currentLine(self: Parser) []const u8 {
|
||||
fn currentLine(self: ParseContext) []const u8 {
|
||||
var begin: usize = 0;
|
||||
if (mem.lastIndexOfScalar(u8, self.source[0..self.offset], '\n')) |prev_nl| {
|
||||
begin = prev_nl + 1;
|
||||
@@ -237,49 +254,49 @@ const Parser = struct {
|
||||
}
|
||||
};
|
||||
|
||||
test "xml: Parser" {
|
||||
test "ParseContext" {
|
||||
{
|
||||
var parser = Parser.init("I like pythons");
|
||||
try testing.expectEqual(@as(?u8, 'I'), parser.peek());
|
||||
try testing.expectEqual(@as(u8, 'I'), parser.consumeNoEof());
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
try testing.expectEqual(@as(u8, ' '), try parser.consume());
|
||||
var ctx = ParseContext.init("I like pythons");
|
||||
try testing.expectEqual(@as(?u8, 'I'), ctx.peek());
|
||||
try testing.expectEqual(@as(u8, 'I'), ctx.consumeNoEof());
|
||||
try testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
try testing.expectEqual(@as(u8, ' '), try ctx.consume());
|
||||
|
||||
try testing.expect(parser.eat('l'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eat('a'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
try testing.expect(ctx.eat('l'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
try testing.expectEqual(false, ctx.eat('a'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
|
||||
try parser.expect('i');
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expect('a'));
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
try ctx.expect('i');
|
||||
try testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, ctx.expect('a'));
|
||||
try testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
|
||||
try testing.expect(parser.eatStr("ke"));
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
try testing.expect(ctx.eatStr("ke"));
|
||||
try testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
|
||||
try testing.expect(parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expect(ctx.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expectEqual(false, ctx.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
|
||||
try testing.expectEqual(false, parser.eatStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectEqual(false, ctx.eatStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
|
||||
try testing.expectError(error.UnexpectedEof, parser.expectStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expectStr("pytn"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try parser.expectStr("python");
|
||||
try testing.expectEqual(@as(?u8, 's'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedEof, ctx.expectStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, ctx.expectStr("pytn"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try ctx.expectStr("python");
|
||||
try testing.expectEqual(@as(?u8, 's'), ctx.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("");
|
||||
try testing.expectEqual(parser.peek(), null);
|
||||
try testing.expectError(error.UnexpectedEof, parser.consume());
|
||||
try testing.expectEqual(parser.eat('p'), false);
|
||||
try testing.expectError(error.UnexpectedEof, parser.expect('p'));
|
||||
var ctx = ParseContext.init("");
|
||||
try testing.expectEqual(ctx.peek(), null);
|
||||
try testing.expectError(error.UnexpectedEof, ctx.consume());
|
||||
try testing.expectEqual(ctx.eat('p'), false);
|
||||
try testing.expectError(error.UnexpectedEof, ctx.expect('p'));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -297,12 +314,12 @@ pub const ParseError = error{
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
pub fn parse(backing_allocator: Allocator, source: []const u8) !Document {
|
||||
var parser = Parser.init(source);
|
||||
return try parseDocument(&parser, backing_allocator);
|
||||
pub fn parse(backing_allocator: *Allocator, source: []const u8) !Document {
|
||||
var ctx = ParseContext.init(source);
|
||||
return try parseDocument(&ctx, backing_allocator);
|
||||
}
|
||||
|
||||
fn parseDocument(parser: *Parser, backing_allocator: Allocator) !Document {
|
||||
fn parseDocument(ctx: *ParseContext, backing_allocator: *Allocator) !Document {
|
||||
var doc = Document{
|
||||
.arena = ArenaAllocator.init(backing_allocator),
|
||||
.xml_decl = null,
|
||||
@@ -311,271 +328,280 @@ fn parseDocument(parser: *Parser, backing_allocator: Allocator) !Document {
|
||||
|
||||
errdefer doc.deinit();
|
||||
|
||||
const allocator = doc.arena.allocator();
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
try skipComments(parser, allocator);
|
||||
doc.xml_decl = try tryParseProlog(ctx, &doc.arena.allocator);
|
||||
_ = ctx.eatWs();
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
doc.xml_decl = try parseElement(parser, allocator, .xml_decl);
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
doc.root = (try tryParseElement(ctx, &doc.arena.allocator)) orelse return error.InvalidDocument;
|
||||
_ = ctx.eatWs();
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
doc.root = (try parseElement(parser, allocator, .element)) orelse return error.InvalidDocument;
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
if (parser.peek() != null) return error.InvalidDocument;
|
||||
if (ctx.peek() != null) return error.InvalidDocument;
|
||||
|
||||
return doc;
|
||||
}

fn parseAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
const quote = try parser.consume();
fn parseAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
const quote = try ctx.consume();
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;

const begin = parser.offset;
const begin = ctx.offset;

while (true) {
const c = parser.consume() catch return error.UnclosedValue;
const c = ctx.consume() catch return error.UnclosedValue;
if (c == quote) break;
}

const end = parser.offset - 1;
const end = ctx.offset - 1;

return try unescape(alloc, parser.source[begin..end]);
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}

fn parseEqAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
_ = parser.eatWs();
try parser.expect('=');
_ = parser.eatWs();
fn parseEqAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
_ = ctx.eatWs();
try ctx.expect('=');
_ = ctx.eatWs();

return try parseAttrValue(parser, alloc);
return try parseAttrValue(ctx, alloc);
}

fn parseNameNoDupe(parser: *Parser) ![]const u8 {
fn parseNameNoDupe(ctx: *ParseContext) ![]const u8 {
// XML's spec on names is very long, so to make this easier
// we just take any character that is not special and not whitespace
const begin = parser.offset;
const begin = ctx.offset;

while (parser.peek()) |ch| {
while (ctx.peek()) |ch| {
switch (ch) {
' ', '\t', '\n', '\r' => break,
'&', '"', '\'', '<', '>', '?', '=', '/' => break,
else => _ = parser.consumeNoEof(),
else => _ = ctx.consumeNoEof(),
}
}

const end = parser.offset;
const end = ctx.offset;
if (begin == end) return error.InvalidName;

return parser.source[begin..end];
return ctx.source[begin..end];
}

fn parseCharData(parser: *Parser, alloc: Allocator) !?[]const u8 {
const begin = parser.offset;
fn tryParseCharData(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
const begin = ctx.offset;

while (parser.peek()) |ch| {
while (ctx.peek()) |ch| {
switch (ch) {
'<' => break,
else => _ = parser.consumeNoEof(),
else => _ = ctx.consumeNoEof(),
}
}

const end = parser.offset;
const end = ctx.offset;
if (begin == end) return null;

return try unescape(alloc, parser.source[begin..end]);
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}

fn parseContent(parser: *Parser, alloc: Allocator) ParseError!Content {
if (try parseCharData(parser, alloc)) |cd| {
return Content{ .char_data = cd };
} else if (try parseComment(parser, alloc)) |comment| {
return Content{ .comment = comment };
} else if (try parseElement(parser, alloc, .element)) |elem| {
return Content{ .element = elem };
fn parseContent(ctx: *ParseContext, alloc: *Allocator) ParseError!Content {
if (try tryParseCharData(ctx, alloc)) |cd| {
return Content{ .CharData = cd };
} else if (try tryParseComment(ctx, alloc)) |comment| {
return Content{ .Comment = comment };
} else if (try tryParseElement(ctx, alloc)) |elem| {
return Content{ .Element = elem };
} else {
return error.UnexpectedCharacter;
}
}

fn parseAttr(parser: *Parser, alloc: Allocator) !?Attribute {
const name = parseNameNoDupe(parser) catch return null;
_ = parser.eatWs();
try parser.expect('=');
_ = parser.eatWs();
const value = try parseAttrValue(parser, alloc);
fn tryParseAttr(ctx: *ParseContext, alloc: *Allocator) !?*Attribute {
const name = parseNameNoDupe(ctx) catch return null;
_ = ctx.eatWs();
try ctx.expect('=');
_ = ctx.eatWs();
const value = try parseAttrValue(ctx, alloc);

const attr = Attribute{
.name = try alloc.dupe(u8, name),
.value = value,
};
const attr = try alloc.create(Attribute);
attr.name = try mem.dupe(alloc, u8, name);
attr.value = value;
return attr;
}

const ElementKind = enum {
xml_decl,
element,
};

fn parseElement(parser: *Parser, alloc: Allocator, comptime kind: ElementKind) !?*Element {
const start = parser.offset;

const tag = switch (kind) {
.xml_decl => blk: {
if (!parser.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(parser), "xml")) {
parser.offset = start;
return null;
}
break :blk "xml";
},
.element => blk: {
if (!parser.eat('<')) return null;
const tag = parseNameNoDupe(parser) catch {
parser.offset = start;
return null;
};
break :blk tag;
},
fn tryParseElement(ctx: *ParseContext, alloc: *Allocator) !?*Element {
const start = ctx.offset;
if (!ctx.eat('<')) return null;
const tag = parseNameNoDupe(ctx) catch {
ctx.offset = start;
return null;
};

var attributes = std.ArrayList(Attribute).init(alloc);
defer attributes.deinit();

var children = std.ArrayList(Content).init(alloc);
defer children.deinit();

while (parser.eatWs()) {
const attr = (try parseAttr(parser, alloc)) orelse break;
try attributes.append(attr);
}

switch (kind) {
.xml_decl => try parser.expectStr("?>"),
.element => {
if (!parser.eatStr("/>")) {
try parser.expect('>');

while (true) {
if (parser.peek() == null) {
return error.UnexpectedEof;
} else if (parser.eatStr("</")) {
break;
}

const content = try parseContent(parser, alloc);
try children.append(content);
}

const closing_tag = try parseNameNoDupe(parser);
if (!mem.eql(u8, tag, closing_tag)) {
return error.NonMatchingClosingTag;
}

_ = parser.eatWs();
try parser.expect('>');
}
},
}

const element = try alloc.create(Element);
element.* = .{
.tag = try alloc.dupe(u8, tag),
.attributes = attributes.toOwnedSlice(),
.children = children.toOwnedSlice(),
};
element.* = Element.init(try std.mem.dupe(alloc, u8, tag), alloc);

while (ctx.eatWs()) {
const attr = (try tryParseAttr(ctx, alloc)) orelse break;
try element.attributes.append(attr);
}

if (ctx.eatStr("/>")) {
return element;
}

try ctx.expect('>');

while (true) {
if (ctx.peek() == null) {
return error.UnexpectedEof;
} else if (ctx.eatStr("</")) {
break;
}

const content = try parseContent(ctx, alloc);
try element.children.append(content);
}

const closing_tag = try parseNameNoDupe(ctx);
if (!std.mem.eql(u8, tag, closing_tag)) {
return error.NonMatchingClosingTag;
}

_ = ctx.eatWs();
try ctx.expect('>');
return element;
}
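One practical consequence of the block above: on this branch an `Element` keeps its attributes and children in `std.ArrayList`s (populated via `Element.init`), so callers index through `.items`, as the updated tests below do, whereas the stage1 side converts them into owned slices. A small, hypothetical walker over the 0.8.1-side layout, assuming it sits in this file next to `Element` and `Content`:

    // Illustrative only; `Element` and `Content` are this file's types on the 0.8.1 branch.
    fn dumpChildren(elem: *Element) void {
        for (elem.children.items) |child| {
            switch (child) {
                .CharData => |text| std.debug.print("text: {s}\n", .{text}),
                .Comment => |text| std.debug.print("comment: {s}\n", .{text}),
                .Element => |e| std.debug.print("child tag: {s}\n", .{e.tag}),
            }
        }
    }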

test "xml: parseElement" {
var arena = ArenaAllocator.init(testing.allocator);
test "tryParseElement" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
const alloc = arena.allocator();
var alloc = &arena.allocator;

{
var parser = Parser.init("<= a='b'/>");
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, alloc, .element));
try testing.expectEqual(@as(?u8, '<'), parser.peek());
var ctx = ParseContext.init("<= a='b'/>");
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
}

{
var parser = Parser.init("<python size='15' color = \"green\"/>");
const elem = try parseElement(&parser, alloc, .element);
var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");

const size_attr = elem.?.attributes[0];
const size_attr = elem.?.attributes.items[0];
try testing.expectEqualSlices(u8, size_attr.name, "size");
try testing.expectEqualSlices(u8, size_attr.value, "15");

const color_attr = elem.?.attributes[1];
const color_attr = elem.?.attributes.items[1];
try testing.expectEqualSlices(u8, color_attr.name, "color");
try testing.expectEqualSlices(u8, color_attr.value, "green");
}

{
var parser = Parser.init("<python>test</python>");
const elem = try parseElement(&parser, alloc, .element);
var ctx = ParseContext.init("<python>test</python>");
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "test");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
}

{
var parser = Parser.init("<a>b<c/>d<e/>f<!--g--></a>");
const elem = try parseElement(&parser, alloc, .element);
var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "a");
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "b");
try testing.expectEqualSlices(u8, elem.?.children[1].element.tag, "c");
try testing.expectEqualSlices(u8, elem.?.children[2].char_data, "d");
try testing.expectEqualSlices(u8, elem.?.children[3].element.tag, "e");
try testing.expectEqualSlices(u8, elem.?.children[4].char_data, "f");
try testing.expectEqualSlices(u8, elem.?.children[5].comment, "g");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
try testing.expectEqualSlices(u8, elem.?.children.items[2].CharData, "d");
try testing.expectEqualSlices(u8, elem.?.children.items[3].Element.tag, "e");
try testing.expectEqualSlices(u8, elem.?.children.items[4].CharData, "f");
try testing.expectEqualSlices(u8, elem.?.children.items[5].Comment, "g");
}
}

test "xml: parse prolog" {
var arena = ArenaAllocator.init(testing.allocator);
fn tryParseProlog(ctx: *ParseContext, alloc: *Allocator) !?*XmlDecl {
const start = ctx.offset;
if (!ctx.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(ctx), "xml")) {
ctx.offset = start;
return null;
}

const decl = try alloc.create(XmlDecl);
decl.encoding = null;
decl.standalone = null;

// Version info is mandatory
try ctx.expectWs();
try ctx.expectStr("version");
decl.version = try parseEqAttrValue(ctx, alloc);

if (ctx.eatWs()) {
// Optional encoding and standalone info
var require_ws = false;

if (ctx.eatStr("encoding")) {
decl.encoding = try parseEqAttrValue(ctx, alloc);
require_ws = true;
}

if (require_ws == ctx.eatWs() and ctx.eatStr("standalone")) {
const standalone = try parseEqAttrValue(ctx, alloc);
if (std.mem.eql(u8, standalone, "yes")) {
decl.standalone = true;
} else if (std.mem.eql(u8, standalone, "no")) {
decl.standalone = false;
} else {
return error.InvalidStandaloneValue;
}
}

_ = ctx.eatWs();
}

try ctx.expectStr("?>");
return decl;
}

test "tryParseProlog" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xmla version='aa'?>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, a, .xml_decl));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
var ctx = ParseContext.init("<?xmla version='aa'?>");
|
||||
try testing.expectEqual(@as(?*XmlDecl, null), try tryParseProlog(&ctx, alloc));
|
||||
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version='aa'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("encoding"));
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("standalone"));
|
||||
var ctx = ParseContext.init("<?xml version='aa'?>");
|
||||
const decl = try tryParseProlog(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.version);
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.encoding);
|
||||
try testing.expectEqual(@as(?bool, null), decl.?.standalone);
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version=\"ccc\" encoding = 'bbb' standalone \t = 'yes'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "ccc", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqualSlices(u8, "bbb", decl.?.getAttribute("encoding").?);
|
||||
try testing.expectEqualSlices(u8, "yes", decl.?.getAttribute("standalone").?);
|
||||
var ctx = ParseContext.init("<?xml version=\"aa\" encoding = 'bbb' standalone \t = 'yes'?>");
|
||||
const decl = try tryParseProlog(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.version);
|
||||
try testing.expectEqualSlices(u8, "bbb", decl.?.encoding.?);
|
||||
try testing.expectEqual(@as(?bool, true), decl.?.standalone.?);
|
||||
}
|
||||
}
|
||||
|
||||
fn skipComments(parser: *Parser, alloc: Allocator) !void {
|
||||
while ((try parseComment(parser, alloc)) != null) {
|
||||
_ = parser.eatWs();
|
||||
fn trySkipComments(ctx: *ParseContext, alloc: *Allocator) !void {
|
||||
while (try tryParseComment(ctx, alloc)) |_| {
|
||||
_ = ctx.eatWs();
|
||||
}
|
||||
}
|
||||
|
||||
fn parseComment(parser: *Parser, alloc: Allocator) !?[]const u8 {
|
||||
if (!parser.eatStr("<!--")) return null;
|
||||
fn tryParseComment(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
|
||||
if (!ctx.eatStr("<!--")) return null;
|
||||
|
||||
const begin = parser.offset;
|
||||
while (!parser.eatStr("-->")) {
|
||||
_ = parser.consume() catch return error.UnclosedComment;
|
||||
const begin = ctx.offset;
|
||||
while (!ctx.eatStr("-->")) {
|
||||
_ = ctx.consume() catch return error.UnclosedComment;
|
||||
}
|
||||
|
||||
const end = parser.offset - "-->".len;
|
||||
return try alloc.dupe(u8, parser.source[begin..end]);
|
||||
const end = ctx.offset - "-->".len;
|
||||
return try mem.dupe(alloc, u8, ctx.source[begin..end]);
|
||||
}
|
||||
|
||||
fn unescapeEntity(text: []const u8) !u8 {
|
||||
@@ -590,49 +616,49 @@ fn unescapeEntity(text: []const u8) !u8 {
|
||||
};
|
||||
|
||||
for (entities) |entity| {
|
||||
if (mem.eql(u8, text, entity.text)) return entity.replacement;
|
||||
if (std.mem.eql(u8, text, entity.text)) return entity.replacement;
|
||||
}
|
||||
|
||||
return error.InvalidEntity;
|
||||
}
|
||||
|
||||
fn unescape(arena: Allocator, text: []const u8) ![]const u8 {
|
||||
const unescaped = try arena.alloc(u8, text.len);
|
||||
fn dupeAndUnescape(alloc: *Allocator, text: []const u8) ![]const u8 {
|
||||
const str = try alloc.alloc(u8, text.len);
|
||||
|
||||
var j: usize = 0;
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (j += 1) {
|
||||
if (text[i] == '&') {
|
||||
const entity_end = 1 + (mem.indexOfScalarPos(u8, text, i, ';') orelse return error.InvalidEntity);
|
||||
unescaped[j] = try unescapeEntity(text[i..entity_end]);
|
||||
str[j] = try unescapeEntity(text[i..entity_end]);
|
||||
i = entity_end;
|
||||
} else {
|
||||
unescaped[j] = text[i];
|
||||
str[j] = text[i];
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return unescaped[0..j];
|
||||
return alloc.shrink(str, j);
|
||||
}
|
||||
|
||||
test "xml: unescape" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
test "dupeAndUnescape" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
try testing.expectEqualSlices(u8, "test", try unescape(a, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try unescape(a, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&boa"));
|
||||
try testing.expectEqualSlices(u8, "test", try dupeAndUnescape(alloc, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try dupeAndUnescape(alloc, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&boa"));
|
||||
}
|
||||
|
||||
test "xml: top level comments" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
test "Top level comments" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
const doc = try parse(a, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
const doc = try parse(alloc, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
try testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
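To tie the two ends of this diff together, here is a minimal sketch of calling the 0.8.1-style `parse` entry point from application code. The import path and the sample document string are assumptions for illustration, not part of this change:

    const std = @import("std");
    const xml = @import("xml.zig"); // assumed path; point this at wherever the file lives in the repo

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa.deinit();

        // Zig 0.8.1 convention: hand the parser a *Allocator.
        var doc = try xml.parse(&gpa.allocator, "<?xml version='1.0'?><registry></registry>");
        defer doc.deinit();

        std.debug.print("root tag: {s}\n", .{doc.root.tag});
    }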