forked from mirror/vulkan-zig
Merge pull request #73 from ashpil/more-defaults
More defaults for struct fields
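The generated create-info structs now carry default values for fields that are almost always zero or unused (empty flags, unused count/pointer pairs, optional handles and pointers), so call sites can omit them. A minimal before/after sketch, adapted from the example code touched in the diff below (the `gc.vkd` dispatch table is the one used in the bundled triangle example; the snippet is a fragment, not a standalone program):

```zig
// Before: every field of the create info had to be written out.
const pool = try gc.vkd.createCommandPool(gc.dev, &.{
    .flags = .{},
    .queue_family_index = gc.graphics_queue.family,
}, null);

// After: fields with generated defaults can simply be left out.
const pool = try gc.vkd.createCommandPool(gc.dev, &.{
    .queue_family_index = gc.graphics_queue.family,
}, null);
```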
@@ -109,10 +109,7 @@ pub const GraphicsContext = struct {
         };

         self.instance = try self.vkb.createInstance(&.{
-            .flags = .{},
             .p_application_info = &app_info,
-            .enabled_layer_count = 0,
-            .pp_enabled_layer_names = undefined,
             .enabled_extension_count = glfw_exts_count,
             .pp_enabled_extension_names = @ptrCast([*]const [*:0]const u8, glfw_exts),
         }, null);

@@ -192,13 +189,11 @@ fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.De
     const priority = [_]f32{1};
     const qci = [_]vk.DeviceQueueCreateInfo{
         .{
-            .flags = .{},
             .queue_family_index = candidate.queues.graphics_family,
             .queue_count = 1,
             .p_queue_priorities = &priority,
         },
         .{
-            .flags = .{},
             .queue_family_index = candidate.queues.present_family,
             .queue_count = 1,
             .p_queue_priorities = &priority,

@@ -211,14 +206,10 @@ fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.De
         2;

     return try vki.createDevice(candidate.pdev, &.{
-        .flags = .{},
         .queue_create_info_count = queue_count,
         .p_queue_create_infos = &qci,
-        .enabled_layer_count = 0,
-        .pp_enabled_layer_names = undefined,
         .enabled_extension_count = required_device_extensions.len,
         .pp_enabled_extension_names = @ptrCast([*]const [*:0]const u8, &required_device_extensions),
-        .p_enabled_features = null,
     }, null);
 }

@@ -47,7 +47,6 @@ pub const Swapchain = struct {
             .exclusive;

         const handle = try gc.vkd.createSwapchainKHR(gc.dev, &.{
-            .flags = .{},
             .surface = gc.surface,
             .min_image_count = image_count,
             .image_format = surface_format.format,

@@ -77,7 +76,7 @@ pub const Swapchain = struct {
             allocator.free(swap_images);
         }

-        var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
+        var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{}, null);
         errdefer gc.vkd.destroySemaphore(gc.dev, next_image_acquired, null);

         const result = try gc.vkd.acquireNextImageKHR(gc.dev, handle, std.math.maxInt(u64), next_image_acquired, .null_handle);

@@ -172,7 +171,6 @@ pub const Swapchain = struct {
             .swapchain_count = 1,
             .p_swapchains = @ptrCast([*]const vk.SwapchainKHR, &self.handle),
             .p_image_indices = @ptrCast([*]const u32, &self.image_index),
-            .p_results = null,
         });

         // Step 4: Acquire next frame

@@ -204,7 +202,6 @@ const SwapImage = struct {

     fn init(gc: *const GraphicsContext, image: vk.Image, format: vk.Format) !SwapImage {
         const view = try gc.vkd.createImageView(gc.dev, &.{
-            .flags = .{},
             .image = image,
             .view_type = .@"2d",
             .format = format,

@@ -219,10 +216,10 @@ const SwapImage = struct {
         }, null);
         errdefer gc.vkd.destroyImageView(gc.dev, view, null);

-        const image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
+        const image_acquired = try gc.vkd.createSemaphore(gc.dev, &.{}, null);
         errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);

-        const render_finished = try gc.vkd.createSemaphore(gc.dev, &.{ .flags = .{} }, null);
+        const render_finished = try gc.vkd.createSemaphore(gc.dev, &.{}, null);
         errdefer gc.vkd.destroySemaphore(gc.dev, render_finished, null);

         const frame_fence = try gc.vkd.createFence(gc.dev, &.{ .flags = .{ .signaled_bit = true } }, null);

@@ -92,18 +92,14 @@ pub fn main() !void {
     defer destroyFramebuffers(&gc, allocator, framebuffers);

     const pool = try gc.vkd.createCommandPool(gc.dev, &.{
-        .flags = .{},
         .queue_family_index = gc.graphics_queue.family,
     }, null);
     defer gc.vkd.destroyCommandPool(gc.dev, pool, null);

     const buffer = try gc.vkd.createBuffer(gc.dev, &.{
-        .flags = .{},
         .size = @sizeOf(@TypeOf(vertices)),
         .usage = .{ .transfer_dst_bit = true, .vertex_buffer_bit = true },
         .sharing_mode = .exclusive,
-        .queue_family_index_count = 0,
-        .p_queue_family_indices = undefined,
     }, null);
     defer gc.vkd.destroyBuffer(gc.dev, buffer, null);
     const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, buffer);

@@ -166,12 +162,9 @@ pub fn main() !void {

 fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer) !void {
     const staging_buffer = try gc.vkd.createBuffer(gc.dev, &.{
-        .flags = .{},
         .size = @sizeOf(@TypeOf(vertices)),
         .usage = .{ .transfer_src_bit = true },
         .sharing_mode = .exclusive,
-        .queue_family_index_count = 0,
-        .p_queue_family_indices = undefined,
     }, null);
     defer gc.vkd.destroyBuffer(gc.dev, staging_buffer, null);
     const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, staging_buffer);

@@ -203,7 +196,6 @@ fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer,

     try gc.vkd.beginCommandBuffer(cmdbuf, &.{
         .flags = .{ .one_time_submit_bit = true },
-        .p_inheritance_info = null,
     });

     const region = vk.BufferCopy{

@@ -216,13 +208,8 @@ fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer,
     try gc.vkd.endCommandBuffer(cmdbuf);

     const si = vk.SubmitInfo{
-        .wait_semaphore_count = 0,
-        .p_wait_semaphores = undefined,
-        .p_wait_dst_stage_mask = undefined,
         .command_buffer_count = 1,
         .p_command_buffers = @ptrCast([*]const vk.CommandBuffer, &cmdbuf),
-        .signal_semaphore_count = 0,
-        .p_signal_semaphores = undefined,
     };
     try gc.vkd.queueSubmit(gc.graphics_queue.handle, 1, @ptrCast([*]const vk.SubmitInfo, &si), .null_handle);
     try gc.vkd.queueWaitIdle(gc.graphics_queue.handle);

@@ -267,10 +254,7 @@ fn createCommandBuffers(
     };

     for (cmdbufs) |cmdbuf, i| {
-        try gc.vkd.beginCommandBuffer(cmdbuf, &.{
-            .flags = .{},
-            .p_inheritance_info = null,
-        });
+        try gc.vkd.beginCommandBuffer(cmdbuf, &.{});

         gc.vkd.cmdSetViewport(cmdbuf, 0, 1, @ptrCast([*]const vk.Viewport, &viewport));
         gc.vkd.cmdSetScissor(cmdbuf, 0, 1, @ptrCast([*]const vk.Rect2D, &scissor));

@@ -315,7 +299,6 @@ fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_p

     for (framebuffers) |*fb| {
         fb.* = try gc.vkd.createFramebuffer(gc.dev, &.{
-            .flags = .{},
             .render_pass = render_pass,
             .attachment_count = 1,
             .p_attachments = @ptrCast([*]const vk.ImageView, &swapchain.swap_images[i].view),

@@ -336,7 +319,6 @@ fn destroyFramebuffers(gc: *const GraphicsContext, allocator: Allocator, framebu

 fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.RenderPass {
     const color_attachment = vk.AttachmentDescription{
-        .flags = .{},
         .format = swapchain.surface_format.format,
         .samples = .{ .@"1_bit" = true },
         .load_op = .clear,

@@ -353,26 +335,16 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render
     };

     const subpass = vk.SubpassDescription{
-        .flags = .{},
         .pipeline_bind_point = .graphics,
-        .input_attachment_count = 0,
-        .p_input_attachments = undefined,
         .color_attachment_count = 1,
         .p_color_attachments = @ptrCast([*]const vk.AttachmentReference, &color_attachment_ref),
-        .p_resolve_attachments = null,
-        .p_depth_stencil_attachment = null,
-        .preserve_attachment_count = 0,
-        .p_preserve_attachments = undefined,
     };

     return try gc.vkd.createRenderPass(gc.dev, &.{
-        .flags = .{},
         .attachment_count = 1,
         .p_attachments = @ptrCast([*]const vk.AttachmentDescription, &color_attachment),
         .subpass_count = 1,
         .p_subpasses = @ptrCast([*]const vk.SubpassDescription, &subpass),
-        .dependency_count = 0,
-        .p_dependencies = undefined,
     }, null);
 }

@@ -382,14 +354,12 @@ fn createPipeline(
     render_pass: vk.RenderPass,
 ) !vk.Pipeline {
     const vert = try gc.vkd.createShaderModule(gc.dev, &.{
-        .flags = .{},
         .code_size = shaders.triangle_vert.len,
         .p_code = @ptrCast([*]const u32, &shaders.triangle_vert),
     }, null);
     defer gc.vkd.destroyShaderModule(gc.dev, vert, null);

     const frag = try gc.vkd.createShaderModule(gc.dev, &.{
-        .flags = .{},
         .code_size = shaders.triangle_frag.len,
         .p_code = @ptrCast([*]const u32, &shaders.triangle_frag),
     }, null);

@@ -397,23 +367,18 @@ fn createPipeline(

     const pssci = [_]vk.PipelineShaderStageCreateInfo{
         .{
-            .flags = .{},
             .stage = .{ .vertex_bit = true },
             .module = vert,
             .p_name = "main",
-            .p_specialization_info = null,
         },
         .{
-            .flags = .{},
             .stage = .{ .fragment_bit = true },
             .module = frag,
             .p_name = "main",
-            .p_specialization_info = null,
         },
     };

     const pvisci = vk.PipelineVertexInputStateCreateInfo{
-        .flags = .{},
         .vertex_binding_description_count = 1,
         .p_vertex_binding_descriptions = @ptrCast([*]const vk.VertexInputBindingDescription, &Vertex.binding_description),
         .vertex_attribute_description_count = Vertex.attribute_description.len,

@@ -421,13 +386,11 @@ fn createPipeline(
     };

     const piasci = vk.PipelineInputAssemblyStateCreateInfo{
-        .flags = .{},
         .topology = .triangle_list,
         .primitive_restart_enable = vk.FALSE,
     };

     const pvsci = vk.PipelineViewportStateCreateInfo{
-        .flags = .{},
         .viewport_count = 1,
         .p_viewports = undefined, // set in createCommandBuffers with cmdSetViewport
         .scissor_count = 1,

@@ -435,7 +398,6 @@ fn createPipeline(
     };

     const prsci = vk.PipelineRasterizationStateCreateInfo{
-        .flags = .{},
         .depth_clamp_enable = vk.FALSE,
         .rasterizer_discard_enable = vk.FALSE,
         .polygon_mode = .fill,

@@ -449,11 +411,9 @@ fn createPipeline(
     };

     const pmsci = vk.PipelineMultisampleStateCreateInfo{
-        .flags = .{},
         .rasterization_samples = .{ .@"1_bit" = true },
         .sample_shading_enable = vk.FALSE,
         .min_sample_shading = 1,
-        .p_sample_mask = null,
         .alpha_to_coverage_enable = vk.FALSE,
         .alpha_to_one_enable = vk.FALSE,
     };

@@ -470,7 +430,6 @@ fn createPipeline(
     };

     const pcbsci = vk.PipelineColorBlendStateCreateInfo{
-        .flags = .{},
         .logic_op_enable = vk.FALSE,
         .logic_op = .copy,
         .attachment_count = 1,

@@ -263,6 +263,7 @@ pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: b
         .field_type = decl.decl_type,
         .bits = null,
         .is_buffer_len = false,
+        .is_optional = false,
     };

     if (try xctok.peek()) |tok| {

@@ -199,6 +199,15 @@ fn parseContainer(allocator: Allocator, ty: *xml.Element, is_union: bool) !regis
             }
         }

+        if (member.getAttribute("optional")) |optionals| {
+            var optional_it = mem.split(u8, optionals, ",");
+            if (optional_it.next()) |first_optional| {
+                members[i].is_optional = mem.eql(u8, first_optional, "true");
+            } else {
+                // Optional is empty, probably incorrect.
+                return error.InvalidRegistry;
+            }
+        }
         i += 1;
     }

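The registry's `optional` attribute may carry more than one comma-separated value (for example `optional="true,false"` on pointer members), and only the first entry applies to the member itself, which is why only the first result of `optional_it.next()` is consulted. A small standalone sketch of that parsing step (the attribute string is a made-up input, not taken from vk.xml):

```zig
const std = @import("std");
const mem = std.mem;

pub fn main() void {
    // Hypothetical attribute value; real values come from vk.xml <member> tags.
    const optionals = "true,false";
    var optional_it = mem.split(u8, optionals, ",");
    // Only the first comma-separated entry decides whether the member may be omitted.
    const first = optional_it.next().?;
    const is_optional = mem.eql(u8, first, "true");
    std.debug.print("is_optional = {}\n", .{is_optional});
}
```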
@@ -245,7 +254,7 @@ fn parseFuncPointer(allocator: Allocator, ty: *xml.Element) !registry.Declaratio
     return try cparse.parseTypedef(allocator, &xctok, true);
 }

-// For some reason, the DeclarationType cannot be passed to lenToPointerSize, as
+// For some reason, the DeclarationType cannot be passed to lenToPointer, as
 // that causes the Zig compiler to generate invalid code for the function. Using a
 // dedicated enum fixes the issue...
 const Fields = union(enum) {

@@ -253,13 +262,14 @@ const Fields = union(enum) {
     container: []registry.Container.Field,
 };

-fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSize {
+// returns .{ size, nullable }
+fn lenToPointer(fields: Fields, len: []const u8) std.meta.Tuple(&.{ registry.Pointer.PointerSize, bool }) {
     switch (fields) {
         .command => |params| {
             for (params) |*param| {
                 if (mem.eql(u8, param.name, len)) {
                     param.is_buffer_len = true;
-                    return .{ .other_field = param.name };
+                    return .{ .{ .other_field = param.name }, false };
                 }
             }
         },

@@ -267,16 +277,16 @@ fn lenToPointerSize(fields: Fields, len: []const u8) registry.Pointer.PointerSiz
             for (members) |*member| {
                 if (mem.eql(u8, member.name, len)) {
                     member.is_buffer_len = true;
-                    return .{ .other_field = member.name };
+                    return .{ .{ .other_field = member.name }, member.is_optional };
                 }
             }
         },
     }

     if (mem.eql(u8, len, "null-terminated")) {
-        return .zero_terminated;
+        return .{ .zero_terminated, false };
     } else {
-        return .many;
+        return .{ .many, false };
     }
 }

@@ -286,7 +296,11 @@ fn parsePointerMeta(fields: Fields, type_info: *registry.TypeInfo, elem: *xml.El
     var current_type_info = type_info;
     while (current_type_info.* == .pointer) {
         // TODO: Check altlen
-        const size = if (it.next()) |len_str| lenToPointerSize(fields, len_str) else .many;
+        const size = if (it.next()) |len_str| blk: {
+            const size_optional = lenToPointer(fields, len_str);
+            current_type_info.pointer.is_optional = size_optional[1];
+            break :blk size_optional[0];
+        } else .many;
         current_type_info.pointer.size = size;
         current_type_info = current_type_info.pointer.child;
     }

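`lenToPointer` now reports both the pointer size and whether the pointed-to data may be absent, packed into a `std.meta.Tuple`. A minimal standalone sketch of building and indexing such a tuple (the enum below is a simplified stand-in for `registry.Pointer.PointerSize`, not the real type):

```zig
const std = @import("std");

// Stand-in for registry.Pointer.PointerSize (simplified; invented for this sketch).
const PointerSize = enum { one, many, zero_terminated };

fn classify(len: []const u8) std.meta.Tuple(&.{ PointerSize, bool }) {
    if (std.mem.eql(u8, len, "null-terminated")) return .{ .zero_terminated, false };
    return .{ .many, false };
}

pub fn main() void {
    const size_optional = classify("null-terminated");
    // Tuple elements are accessed by index, as in parsePointerMeta above.
    std.debug.print("size = {s}, nullable = {}\n", .{ @tagName(size_optional[0]), size_optional[1] });
}
```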
@@ -61,6 +61,7 @@ pub const Container = struct {
         field_type: TypeInfo,
         bits: ?usize,
         is_buffer_len: bool,
+        is_optional: bool,
     };

     stype: ?[]const u8,

@@ -749,7 +749,9 @@ fn Renderer(comptime WriterType: type) type {
                     }
                 } else {
                     try self.renderTypeInfo(field.field_type);
-                    try self.renderContainerDefaultField(name, container, field);
+                    if (!container.is_union) {
+                        try self.renderContainerDefaultField(name, container, field);
+                    }
                     try self.writer.writeAll(", ");
                 }
             }

@@ -758,9 +760,7 @@ fn Renderer(comptime WriterType: type) type {
         }

         fn renderContainerDefaultField(self: *Self, name: []const u8, container: reg.Container, field: reg.Container.Field) !void {
-            if (mem.eql(u8, field.name, "pNext")) {
-                try self.writer.writeAll(" = null");
-            } else if (mem.eql(u8, field.name, "sType")) {
+            if (mem.eql(u8, field.name, "sType")) {
                 if (container.stype == null) {
                     return;
                 }

@@ -776,8 +776,30 @@ fn Renderer(comptime WriterType: type) type {

                 try self.writer.writeAll(" = .");
                 try self.writeIdentifierWithCase(.snake, stype["VK_STRUCTURE_TYPE_".len..]);
-            } else if (field.field_type == .name and !container.is_union and mem.eql(u8, "VkBool32", field.field_type.name) and isFeatureStruct(name, container.extends)) {
+            } else if (field.field_type == .name and mem.eql(u8, "VkBool32", field.field_type.name) and isFeatureStruct(name, container.extends)) {
                 try self.writer.writeAll(" = FALSE");
+            } else if (field.is_optional) {
+                if (field.field_type == .name) {
+                    const field_type_name = field.field_type.name;
+                    if (self.resolveDeclaration(field_type_name)) |decl_type| {
+                        if (decl_type == .handle) {
+                            try self.writer.writeAll(" = .null_handle");
+                        } else if (decl_type == .bitmask) {
+                            try self.writer.writeAll(" = .{}");
+                        } else if (decl_type == .typedef and decl_type.typedef == .command_ptr) {
+                            try self.writer.writeAll(" = null");
+                        } else if ((decl_type == .typedef and builtin_types.has(decl_type.typedef.name)) or
+                            (decl_type == .foreign and builtin_types.has(field_type_name)))
+                        {
+                            try self.writer.writeAll(" = 0");
+                        }
+                    }
+                } else if (field.field_type == .pointer) {
+                    try self.writer.writeAll(" = null");
+                }
+            } else if (field.field_type == .pointer and field.field_type.pointer.is_optional) {
+                // pointer nullability could be here or above
+                try self.writer.writeAll(" = null");
             }
         }

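Taken together, every field the registry marks optional now picks up a default in the generated struct. A rough, self-contained illustration of the mapping (all type and field names below are invented placeholders, not actual vulkan-zig output):

```zig
const std = @import("std");

// Placeholder types standing in for generated ones (invented for this sketch).
const StructureType = enum { example_create_info };
const ExampleCreateFlags = struct { protected_bit: bool = false };
const CommandPool = enum(u64) { null_handle = 0, _ };

// One field per default rule introduced above.
const ExampleCreateInfo = struct {
    s_type: StructureType = .example_create_info, // sType -> its structure type tag
    p_next: ?*const anyopaque = null, // optional pointer -> null
    flags: ExampleCreateFlags = .{}, // bitmask -> empty flag set
    pool: CommandPool = .null_handle, // optional handle -> .null_handle
    reserved_count: u32 = 0, // optional builtin integer -> 0
    queue_family_index: u32, // non-optional field -> no default, must be set
};

pub fn main() void {
    // Only fields without defaults need to be spelled out at the call site.
    const info = ExampleCreateInfo{ .queue_family_index = 0 };
    std.debug.print("queue_family_index = {d}\n", .{info.queue_family_index});
}
```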