forked from mirror/vulkan-zig

Compare commits: zig-0.8.1-... with instructiv... (361 commits)
.github/workflows/build.yml (vendored, 42 changed lines)

@@ -10,31 +10,51 @@ on:
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Setup Zig
-        uses: goto-bus-stop/setup-zig@v1.3.0
+        uses: mlugg/setup-zig@v2
         with:
          version: master

-      - name: Test
-        run: |
-          zig build test
+      - name: Check formatting
+        run: zig fmt --check .

-      - name: Fetch Vulkan SDK
+      - name: Fetch latest Vulkan SDK
         run: |
           wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | sudo apt-key add -
-          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-1.2.176-focal.list https://packages.lunarg.com/vulkan/1.2.176/lunarg-vulkan-1.2.176-focal.list
+          sudo wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list
           sudo apt update
           sudo apt install shaderc libglfw3 libglfw3-dev

       - name: Fetch latest vk.xml
         run: |
           wget https://raw.githubusercontent.com/KhronosGroup/Vulkan-Docs/main/xml/vk.xml
+          wget https://raw.githubusercontent.com/KhronosGroup/Vulkan-Docs/main/xml/video.xml

-      - name: Build with latest zig & vk.xml
-        run: |
-          zig build -Dvulkan-registry=./vk.xml
+      - name: Test and install with latest zig & latest vk.xml
+        run: zig build test install -Dregistry=$(pwd)/vk.xml
+
+      - name: Test and install with latest zig & latest vk.xml & latest video.xml
+        run: zig build test install -p zig-out-video -Dregistry=$(pwd)/vk.xml -Dvideo=$(pwd)/video.xml
+
+      - name: Build example with latest zig & vk.xml from dependency
+        run: zig build --build-file $(pwd)/examples/build.zig
+
+      - name: Build example with latest zig & latest vk.xml
+        run: zig build --build-file $(pwd)/examples/build.zig -Doverride-registry=$(pwd)/vk.xml
+
+      - name: Build example with latest zig & vk.xml from dependency & use zig shaders
+        run: zig build --build-file $(pwd)/examples/build.zig -Dzig-shader

       - name: Archive vk.zig
         uses: actions/upload-artifact@v4
         with:
           name: vk.zig
           path: |
             zig-out/src/vk.zig
             zig-out-video/src/vk.zig
           if-no-files-found: error
.gitignore (vendored, 3 changed lines)

@@ -1,2 +1,5 @@
 zig-cache/
 zig-out/
+.vscode/.zig-cache/
+.zig-cache/
+examples/.zig-cache
LICENSE (2 changed lines)

@@ -1,4 +1,4 @@
-Copyright © 2020 Robin Voetter
+Copyright © Robin Voetter

 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
README.md (282 changed lines)

@@ -10,38 +10,113 @@ vulkan-zig attempts to provide a better experience to programming Vulkan applica

vulkan-zig is automatically tested daily against the latest vk.xml and zig, and supports vk.xml from version 1.x.163.

## Example

A partial implementation of https://vulkan-tutorial.com is implemented in [examples/triangle.zig](examples/triangle.zig). This example can be run by executing `zig build --build-file $(pwd)/examples/build.zig run-triangle` in vulkan-zig's root. See in particular the [build file](examples/build.zig), which contains a concrete example of how to use vulkan-zig as a dependency.

### Zig versions

vulkan-zig aims to always be compatible with the ever-changing Zig master branch (though development may lag a few days behind). Sometimes the Zig master branch breaks functionality, which may make the latest version of vulkan-zig incompatible with older releases of Zig. This repository aims to have a version compatible with both the latest Zig master and the latest Zig release: the `master` branch is compatible with the `master` branch of Zig, and versions for older versions of Zig are maintained in the `zig-<version>-compat` branches.

`master` is compatible and tested with the Zig self-hosted compiler. The `zig-stage1-compat` branch contains a version which is compatible with the Zig stage 1 compiler.

## Features
### CLI-interface
-A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
+A CLI-interface is provided to generate vk.zig from the [Vulkan XML registry](https://github.com/KhronosGroup/Vulkan-Docs/blob/main/xml), which is built by default when invoking `zig build` in the project root. To generate vk.zig, simply invoke the program as follows:
```
-$ zig-cache/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
+$ zig-out/bin/vulkan-zig-generator path/to/vk.xml output/path/to/vk.zig
```
This reads the xml file, parses its contents, renders the Vulkan bindings, and formats the file before writing the result to the output path. While the intended usage of vulkan-zig is through direct generation from build.zig (see below), the CLI-interface can be used for one-off generation and for vendoring the result.

### Generation from build.zig
-Vulkan bindings can be generated from the Vulkan XML registry at compile time with build.zig, by using the provided Vulkan generation step:
+`path/to/vk.xml` can be obtained from several sources:
+- From the LunarG Vulkan SDK. This can either be obtained from [LunarG](https://www.lunarg.com/vulkan-sdk) or, usually, via the package manager. The registry can then be found at `$VULKAN_SDK/share/vulkan/registry/vk.xml`.
+- Directly from the [Vulkan-Headers GitHub repository](https://github.com/KhronosGroup/Vulkan-Headers/blob/main/registry/vk.xml).

### Generation with the package manager from build.zig

There is also support for adding this project as a dependency through the Zig package manager in its current form. In order to do this, add this repo as a dependency in your build.zig.zon:
```zig
-const vkgen = @import("vulkan-zig/generator/index.zig");
-
-pub fn build(b: *Builder) void {
-    ...
-    const exe = b.addExecutable("my-executable", "src/main.zig");
-
-    // Create a step that generates vk.zig (stored in zig-cache) from the provided vulkan registry.
-    const gen = vkgen.VkGenerateStep.init(b, "path/to/vk.xml", "vk.zig");
-    exe.step.dependOn(&gen.step);
-
-    // Add the generated file as package to the final executable
-    exe.addPackagePath("vulkan", gen.full_out_path);
+.{
+    // -- snip --
+    .dependencies = .{
+        // -- snip --
+        .vulkan_zig = .{
+            .url = "https://github.com/Snektron/vulkan-zig/archive/<commit SHA>.tar.gz",
+            .hash = "<dependency hash>",
+        },
+    },
+}
```
-This reads vk.xml, parses its contents, and renders the Vulkan bindings to "vk.zig", which is then formatted and placed in `zig-cache`. The resulting file can then be added to an executable by using `addPackagePath`.
+And then in your build.zig file, you'll need to add a line like this to your build function:
```zig
const vulkan = b.dependency("vulkan_zig", .{
    .registry = b.path("path/to/vk.xml"),
}).module("vulkan-zig");
exe.root_module.addImport("vulkan", vulkan);
```
That will allow you to `@import("vulkan")` in your executable's source.

#### Generating bindings directly from Vulkan-Headers

Bindings can be generated directly from the Vulkan-Headers repository by adding Vulkan-Headers as a dependency, and then passing the path to `vk.xml` from that dependency:
```zig
.{
    // -- snip --
    .dependencies = .{
        // -- snip --
        .vulkan_headers = .{
            .url = "https://github.com/KhronosGroup/Vulkan-Headers/archive/v1.3.283.tar.gz",
            .hash = "<dependency hash>",
        },
    },
}
```
```zig
const vulkan = b.dependency("vulkan_zig", .{
    .registry = b.dependency("vulkan_headers", .{}).path("registry/vk.xml"),
}).module("vulkan-zig");
exe.root_module.addImport("vulkan", vulkan);
```

### Manual generation with the package manager from build.zig

Bindings can also be generated by invoking the generator directly. This may be useful in some special cases; for example, it integrates particularly well with fetching the registry via the package manager. This can be done by adding the Vulkan-Headers repository to your dependencies, and then passing the `vk.xml` inside it to vulkan-zig-generator:
```zig
.{
    // -- snip --
    .dependencies = .{
        // -- snip --
        .vulkan_headers = .{
            .url = "https://github.com/KhronosGroup/Vulkan-Headers/archive/<commit SHA>.tar.gz",
            .hash = "<dependency hash>",
        },
    },
}
```
And then pass `vk.xml` to vulkan-zig-generator as follows:
```zig
// Get the (lazy) path to vk.xml:
const registry = b.dependency("vulkan_headers", .{}).path("registry/vk.xml");
// Get generator executable reference
const vk_gen = b.dependency("vulkan_zig", .{}).artifact("vulkan-zig-generator");
// Set up a run step to generate the bindings
const vk_generate_cmd = b.addRunArtifact(vk_gen);
// Pass the registry to the generator
vk_generate_cmd.addFileArg(registry);
// Create a module from the generator's output...
const vulkan_zig = b.addModule("vulkan-zig", .{
    .root_source_file = vk_generate_cmd.addOutputFileArg("vk.zig"),
});
// ... and pass it as a module to your executable's build command
exe.root_module.addImport("vulkan", vulkan_zig);
```

See [examples/build.zig](examples/build.zig) and [examples/build.zig.zon](examples/build.zig.zon) for a concrete example.

### Function & field renaming

Functions and fields are renamed to be more or less in line with [Zig's standard library style](https://ziglang.org/documentation/master/#Style-Guide):
* The vk prefix is removed everywhere
* Structs like `VkInstanceCreateInfo` are renamed to `InstanceCreateInfo`.
@@ -52,8 +127,9 @@ Functions and fields are renamed to be more or less in line with [Zig's standard
* Container fields and function parameter names are generated in (lower) snake case in a similar manner: `ppEnabledLayerNames` becomes `pp_enabled_layer_names`.
* Any name which is either an illegal Zig name or a reserved identifier is rendered using `@"name"` syntax. For example, `VK_IMAGE_TYPE_2D` is translated to `@"2d"`. A short sketch of how this looks in practice follows below.
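
To make these rules concrete, here is a minimal, illustrative sketch of what the renamed API looks like in user code. The exact generated fields and defaults may differ between vulkan-zig versions, so treat the names below as examples rather than a reference:
```zig
const vk = @import("vulkan");

fn renamingExample() void {
    // VK_IMAGE_TYPE_2D becomes vk.ImageType.@"2d": names that are not legal
    // Zig identifiers use @"..." syntax.
    const image_type: vk.ImageType = .@"2d";
    _ = image_type;

    // VkInstanceCreateInfo becomes vk.InstanceCreateInfo, and fields such as
    // pNext / ppEnabledLayerNames become p_next / pp_enabled_layer_names.
    const create_info = vk.InstanceCreateInfo{
        .flags = .{},
        .p_application_info = null,
        .enabled_layer_count = 0,
        .pp_enabled_layer_names = undefined,
        .enabled_extension_count = 0,
        .pp_enabled_extension_names = undefined,
    };
    _ = create_info;
}
```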

-### Function pointers & Wrappers
-vulkan-zig provides no integration for statically linking libvulkan, and these symbols are not generated at all. Instead, vulkan functions are to be loaded dynamically. For each Vulkan function, a function pointer type is generated using the exact parameters and return types as defined by the Vulkan specification:
+### Dispatch Tables
+
+Vulkan-zig provides no integration for statically linking libvulkan, and these symbols are not generated at all. Instead, vulkan functions are to be loaded dynamically. For each Vulkan function, a function pointer type is generated using the exact parameters and return types as defined by the Vulkan specification:
```zig
pub const PfnCreateInstance = fn (
    p_create_info: *const InstanceCreateInfo,
@@ -62,28 +138,24 @@ pub const PfnCreateInstance = fn (
) callconv(vulkan_call_conv) Result;
```

-For each function, a wrapper is generated into one of three structs:
-* BaseWrapper. This contains wrappers for functions which are loaded by `vkGetInstanceProcAddr` without an instance, such as `vkCreateInstance`, `vkEnumerateInstanceVersion`, etc.
-* InstanceWrapper. This contains wrappers for functions which are otherwise loaded by `vkGetInstanceProcAddr`.
-* DeviceWrapper. This contains wrappers for functions which are loaded by `vkGetDeviceProcAddr`.
+A set of _dispatch table_ structures is generated. A dispatch table simply contains a set of (optional) function pointers to Vulkan API functions, and not much else. Function pointers are grouped by the nature of the function as follows (a conceptual sketch follows after this list):
+* Vulkan functions which are loaded by `vkGetInstanceProcAddr` without the need for passing an instance are placed in `BaseDispatch`.
+* Vulkan functions which are loaded by `vkGetInstanceProcAddr` but do need an instance are placed in `InstanceDispatch`.
+* Vulkan functions which are loaded by `vkGetDeviceProcAddr` are placed in `DeviceDispatch`.
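
As a rough mental model (not the literal generated code), a dispatch table is just a struct of optional function pointers; the members shown here are a small, hand-picked illustration:
```zig
const vk = @import("vulkan");

// Conceptual sketch only: the real dispatch tables are generated from the registry
// and contain one optional pointer per command loadable at that level.
pub const BaseDispatchSketch = struct {
    vkCreateInstance: ?vk.PfnCreateInstance = null,
    vkEnumerateInstanceVersion: ?vk.PfnEnumerateInstanceVersion = null,
    vkEnumerateInstanceExtensionProperties: ?vk.PfnEnumerateInstanceExtensionProperties = null,
    // ... and so on, one optional pointer per command.
};
```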

+### Wrappers
+
+To provide more interesting functionality, a set of _wrapper_ types is also generated, one for each dispatch table type. These contain the Zig versions of each Vulkan API function, along with corresponding error set definitions, return type definitions, etc., where appropriate.
-Each wrapper struct is to be used as a mixin on a struct containing **just** function pointers as members:
-```zig
-const vk = @import("vulkan");
-const BaseDispatch = struct {
-    vkCreateInstance: vk.PfnCreateInstance,
-    usingnamespace vk.BaseWrapper(@This());
-};
-```
The wrapper struct then provides wrapper functions for each function pointer in the dispatch struct:
```zig
-pub const BaseWrapper(comptime Self: type) type {
-    return struct {
-        pub fn createInstance(
-            self: Self,
-            create_info: InstanceCreateInfo,
-            p_allocator: ?*const AllocationCallbacks,
-        ) error{
+pub const BaseWrapper = struct {
+    const Self = @This();
+    const Dispatch = CreateDispatchStruct(cmds);
+
+    dispatch: Dispatch,
+
+    pub const CreateInstanceError = error{
        OutOfHostMemory,
        OutOfDeviceMemory,
        InitializationFailed,
@@ -91,9 +163,14 @@ pub const BaseWrapper(comptime Self: type) type {
        ExtensionNotPresent,
        IncompatibleDriver,
        Unknown,
-    }!Instance {
+    };
+    pub fn createInstance(
+        self: Self,
+        create_info: InstanceCreateInfo,
+        p_allocator: ?*const AllocationCallbacks,
+    ) CreateInstanceError!Instance {
        var instance: Instance = undefined;
-        const result = self.vkCreateInstance(
+        const result = self.dispatch.vkCreateInstance.?(
            &create_info,
            p_allocator,
            &instance,
@@ -112,8 +189,7 @@ pub const BaseWrapper(comptime Self: type) type {
        }

        ...
    }
-}
+};
```
Wrappers are generated according to the following rules:
* The return type is determined from the original return type and the parameters.
@@ -124,12 +200,56 @@ Wrappers are generated according to the following rules:
* Error codes are translated into Zig errors.
* As of yet, there is no specific handling of enumeration style commands or other commands which accept slices. (A sketch of calling a wrapper and handling its error set follows after this list.)
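
To illustrate the error-translation rule, here is a hedged sketch that assumes the `createInstance` wrapper shape shown above (newer versions take a pointer to the create info); the error names come from that example and vary per command:
```zig
const std = @import("std");
const vk = @import("vulkan");

fn createInstanceChecked(vkb: vk.BaseWrapper, create_info: *const vk.InstanceCreateInfo) !vk.Instance {
    // Vulkan error codes surface as Zig errors rather than vk.Result values.
    return vkb.createInstance(create_info, null) catch |err| switch (err) {
        error.ExtensionNotPresent => {
            std.log.err("a requested instance extension is not present", .{});
            return err;
        },
        error.IncompatibleDriver => {
            std.log.err("no compatible Vulkan driver was found", .{});
            return err;
        },
        else => return err,
    };
}
```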

-Furthermore, each wrapper contains a function to load each function pointer member when passed either `PfnGetInstanceProcAddr` or `PfnGetDeviceProcAddr`, which attempts to load each member as a function pointer and casts it to the appropriate type. These functions are loaded literally, and any wrongly named member or member with a wrong function pointer type will result in problems.
-* For `BaseWrapper`, this function has signature `fn load(loader: PfnGetInstanceProcAddr) !Self`.
-* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: PfnGetInstanceProcAddr) !Self`.
-* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: PfnGetDeviceProcAddr) !Self`.
+#### Initializing Wrappers
+
+Wrapper types are initialized by the `load` function, which must be passed a _loader_: a function which loads a function pointer by name.
+* For `BaseWrapper`, this function has signature `fn load(loader: anytype) Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr` (though it may have a different calling convention).
+* For `InstanceWrapper`, this function has signature `fn load(instance: Instance, loader: anytype) Self`, where the type of `loader` must resemble `PfnGetInstanceProcAddr`.
+* For `DeviceWrapper`, this function has signature `fn load(device: Device, loader: anytype) Self`, where the type of `loader` must resemble `PfnGetDeviceProcAddr`.

Note that these functions accept a loader with a signature of `anytype` instead of `PfnGetInstanceProcAddr`. This is because it is valid for `vkGetInstanceProcAddr` to load itself, in which case the returned function is to be called with the Vulkan calling convention. This calling convention is not required for loading vulkan-zig itself, though, and a loader using any calling convention supported by the target architecture may be passed in. This is particularly useful when interacting with C libraries that provide `vkGetInstanceProcAddr`.

```zig
// vkGetInstanceProcAddr as provided by GLFW.
// Note that vk.Instance and vk.PfnVoidFunction are ABI compatible with VkInstance,
// and that `extern` implies the C calling convention.
pub extern fn glfwGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction;

// Or provide a custom implementation.
// This function is called with the unspecified Zig-internal calling convention.
fn customGetInstanceProcAddress(instance: vk.Instance, procname: [*:0]const u8) vk.PfnVoidFunction {
    ...
}

// Both calls are valid.
const vkb = BaseWrapper.load(glfwGetInstanceProcAddress);
const vkb = BaseWrapper.load(customGetInstanceProcAddress);
```

The `load` function tries to load all function pointers unconditionally, regardless of enabled extensions or platform. If a function pointer could not be loaded, its entry in the dispatch table is set to `null`. When invoking a function on a wrapper table, the function pointer is checked for null, and there will be a crash or undefined behavior if it was not loaded properly. That means that **it is up to the programmer to ensure that a function pointer is valid for the platform before calling it**, either by checking whether the associated extension or Vulkan version is supported, or simply by checking whether the function pointer is non-null.

One can access the underlying unwrapped C functions by doing `wrapper.dispatch.vkFuncYouWant.?(..)`. A short sketch of guarding such a call follows below.
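
For example, here is a minimal sketch of guarding an extension function before use; it assumes the wrapper exposes the loaded pointers via `dispatch` as described above, and the exact wrapper signature may differ per version:
```zig
const vk = @import("vulkan");

// Sketch: `vki` is a loaded vk.InstanceWrapper and `instance` the vk.Instance handle.
fn maybeCreateMessenger(
    vki: vk.InstanceWrapper,
    instance: vk.Instance,
    messenger_info: *const vk.DebugUtilsMessengerCreateInfoEXT,
) !?vk.DebugUtilsMessengerEXT {
    if (vki.dispatch.vkCreateDebugUtilsMessengerEXT == null) {
        // VK_EXT_debug_utils was not enabled or is unavailable; skip it.
        return null;
    }
    return try vki.createDebugUtilsMessengerEXT(instance, messenger_info, null);
}
```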

#### Proxying Wrappers

Proxying wrappers wrap a pointer to a wrapper together with the associated handle in a single struct, and automatically pass this handle to commands as appropriate. Besides the proxying wrappers for instances and devices, there are also proxying wrappers for queues and command buffers. Proxying wrapper types are constructed in the same way as a regular wrapper, by passing an api specification to them. To initialize a proxying wrapper, it must be passed a handle and a pointer to an appropriate wrapper. For queue and command buffer proxying wrappers, a pointer to a device wrapper must be passed.

```zig
const InstanceWrapper = vk.InstanceWrapper;
const Instance = vk.InstanceProxy;

const instance_handle = try vkb.createInstance(...);
const vki = try InstanceWrapper.load(instance_handle, vkb.dispatch.vkGetInstanceProcAddr.?);
const instance = Instance.load(instance_handle, &vki);
defer instance.destroyInstance(null);
```

For queue and command buffer proxying wrappers, the `queue` and `cmd` prefix is removed for functions where appropriate. Note that the device proxying wrappers also have the queue and command buffer functions available for convenience, but there the prefix is not stripped; a short sketch follows below.

Note that the proxy must be passed a _pointer_ to a wrapper. This is because there was a limitation with LLVM in the past, where a struct containing an object pointer together with its associated function pointers wouldn't be optimized properly. By keeping the wrapper behind a separate pointer, LLVM knows that the "vtable" dispatch struct can never be modified, and so it can subject each call to vtable optimizations.
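
As a rough sketch of the prefix stripping (the `init` signatures and parameter order are assumptions based on the description above, not authoritative):
```zig
const vk = @import("vulkan");

fn recordExample(
    dev: vk.DeviceProxy,
    vkd: *const vk.DeviceWrapper,
    cmdbuf_handle: vk.CommandBuffer,
) void {
    const cmdbuf = vk.CommandBufferProxy.init(cmdbuf_handle, vkd);

    // On the command buffer proxy the `cmd` prefix is stripped:
    cmdbuf.draw(3, 1, 0, 0); // wraps vkCmdDraw

    // The device proxy exposes the same command, but keeps the prefix:
    dev.cmdDraw(cmdbuf_handle, 3, 1, 0, 0);
}
```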

### Bitflags

Packed structs of bools are used for bit flags in vulkan-zig, instead of both a `FlagBits` and a `Flags` variant. Places where either of these variants was used are replaced by this packed struct instead. This means that even in places where just one flag would normally be accepted, the packed struct is accepted. The programmer is responsible for only enabling a single bit. (A short usage sketch follows below.)

Each bit is defaulted to `false`, and the first `bool` is aligned to guarantee the overall alignment
@@ -178,6 +298,7 @@ pub fn FlagsMixin(comptime FlagsType: type) type {
```
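
For illustration, a minimal sketch of how such a flags struct is used; the field names follow the renaming rules and are believed to match the generated `ImageUsageFlags`, but treat them as examples:
```zig
const vk = @import("vulkan");

fn flagsExample() void {
    // Where the C API would take VkImageUsageFlags or a single VkImageUsageFlagBits,
    // vulkan-zig takes the same packed struct of bools:
    const usage = vk.ImageUsageFlags{
        .color_attachment_bit = true,
        .transfer_src_bit = true,
    };

    // Individual bits are plain bools:
    if (usage.color_attachment_bit) {
        // ...
    }
}
```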

### Handles

Handles are generated as a non-exhaustive enum, backed by a `u64` for non-dispatchable handles and by a `usize` for dispatchable ones:
```zig
const Instance = extern enum(usize) { null_handle = 0, _ };
```
@@ -185,6 +306,7 @@ const Instance = extern enum(usize) { null_handle = 0, _ };
This means that handles are type-safe even when compiling for a 32-bit target.

### Struct defaults

Defaults are generated for certain fields of structs:
* sType is defaulted to the appropriate value.
* pNext is defaulted to `null`.
@@ -192,13 +314,14 @@ Defaults are generated for certain fields of structs:
```zig
pub const InstanceCreateInfo = extern struct {
    s_type: StructureType = .instance_create_info,
-    p_next: ?*const c_void = null,
+    p_next: ?*const anyopaque = null,
    flags: InstanceCreateFlags,
    ...
};
```

### Pointer types

Pointer types in both commands (wrapped and function pointers) and struct fields are augmented with the following information, where available in the registry:
* Pointer optional-ness.
* Pointer const-ness.
@@ -207,44 +330,63 @@ Pointer types in both commands (wrapped and function pointers) and struct fields
Note that this information is not accurate everywhere in the registry, leading to places where optional-ness is not correct. Most notably, CreateInfo-type structures which take a slice often have the item count marked as optional, but not the pointer itself. As of yet, this is not fixed in vulkan-zig. If drivers properly follow the Vulkan specification, these can be initialized to `undefined`; however, [that is not always the case](https://zeux.io/2019/07/17/serializing-pipeline-cache/). An illustrative sketch of how this pointer information surfaces follows below.
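
As an illustration of how this pointer information typically surfaces in the generated struct fields (the sketch below mirrors a few `VkInstanceCreateInfo` fields and is an approximation, not the generated source):
```zig
const vk = @import("vulkan");

// Roughly how the registry's pointer metadata turns into Zig pointer types:
//  - `?*const vk.ApplicationInfo`: optional pointer to a single constant item.
//  - `[*]const [*:0]const u8`: non-optional many-item pointer to zero-terminated
//    strings, with the length carried separately in `enabled_extension_count`.
pub const InstanceCreateInfoSketch = extern struct {
    p_application_info: ?*const vk.ApplicationInfo = null,
    enabled_extension_count: u32 = 0,
    pp_enabled_extension_names: [*]const [*:0]const u8 = undefined,
};
```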

### Platform types

Defaults with the same ABI layout are generated for most platform-defined types. These can either be bitcast to, or overridden by defining them in the project root:
```zig
-pub const xcb_connection_t = if (@hasDecl(root, "xcb_connection_t")) root.xcb_connection_t else @Type(.Opaque);
+pub const xcb_connection_t = if (@hasDecl(root, "xcb_connection_t")) root.xcb_connection_t else opaque{};
```
-For some types (such as those from Google Games Platform) no default is known. Usage of these without providing a concrete type in the project root generates a compile error.
+For some types (such as those from Google Games Platform) no default is known, but an `opaque{}` will be used by default. Usage of these without providing a concrete type in the project root is likely an error. A minimal sketch of such an override follows below.
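
A minimal sketch of overriding a platform type from the project root; the `xcb` module and its `Connection` type are hypothetical placeholders for whatever real bindings you use:
```zig
// In your root source file, declare the platform type you want vulkan-zig to pick
// up instead of its opaque default:
const xcb = @import("xcb"); // hypothetical XCB bindings
pub const xcb_connection_t = xcb.Connection;
```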

### Shader compilation
-vulkan-zig provides functionality to help compile shaders using glslc. It can be used from build.zig as follows:
+
+Shaders should be compiled by invoking a shader compiler via the build system. For example:
```zig
-const vkgen = @import("vulkan-zig/generator/index.zig");
-
-pub fn build(b: *Builder) void {
-    ...
-    const exe = b.addExecutable("my-executable", "src/main.zig");
-
-    const gen = vkgen.VkGenerateStep(b, "path/to/vk.xml", "vk.zig");
-    exe.step.dependOn(&gen.step);
-    exe.addPackagePath("vulkan", gen.full_out_path);
-
-    const shader_comp = vkgen.ShaderCompileStep.init(
-        builder,
-        &[_][]const u8{"glslc", "--target-env=vulkan1.2"}, // Path to glslc and additional parameters
-    );
-    exe.step.dependOn(&shader_comp.step);
-    const spv_path = shader_comp.addShader("path/to/shader.frag");
+    const vert_cmd = b.addSystemCommand(&.{
+        "glslc",
+        "--target-env=vulkan1.2",
+        "-o",
+    });
+    const vert_spv = vert_cmd.addOutputFileArg("vert.spv");
+    vert_cmd.addFileArg(b.path("shaders/triangle.vert"));
+    exe.root_module.addAnonymousImport("vertex_shader", .{
+        .root_source_file = vert_spv,
+    });
    ...
}
```
-Upon compilation, glslc is then invoked to compile each shader, and the result is placed within `zig-cache`. `addShader` returns the full path to the compiled shader code. This file can then be included in the project, as is done in [build.zig for the example](build.zig) by generating an additional file which uses `@embedFile`.
+
+Note that SPIR-V must be 32-bit aligned when fed to Vulkan. The easiest way to do this is to dereference the shader's bytecode and manually align it as follows:
```zig
const vert_spv align(@alignOf(u32)) = @embedFile("vertex_shader").*;
```
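
The aligned bytes can then be handed to `createShaderModule`. A rough fragment, assuming `dev` is a `vk.DeviceProxy` and `vert_spv` is the aligned array from above (`code_size` is in bytes):
```zig
const module = try dev.createShaderModule(&.{
    .code_size = vert_spv.len,
    .p_code = @ptrCast(&vert_spv),
}, null);
defer dev.destroyShaderModule(module, null);
```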

See [examples/build.zig](examples/build.zig) for a working example.

For more advanced shader compiler usage, one may consider a library such as [shader_compiler](https://github.com/Games-by-Mason/shader_compiler).

### Vulkan Video

Vulkan-zig also supports generating Vulkan Video bindings. To do this, one additionally passes `--video <video.xml>` to the generator, or passes `-Dvideo=<video.xml>` to build.zig. If using vulkan-zig via the Zig package manager, the following also works:
```zig
const vulkan_headers = b.dependency("vulkan_headers", .{});
const vulkan = b.dependency("vulkan_zig", .{
    .registry = vulkan_headers.path("registry/vk.xml"),
    .video = vulkan_headers.path("registry/video.xml"),
}).module("vulkan-zig");
```

The Vulkan Video bindings are not generated by default. In this case, the relevant definitions must be supplied by the user. See [platform types](#platform-types) for how this is done.

## Limitations
* Currently, the self-hosted version of Zig's cache-hash API is not yet ready for usage, which means that the bindings are regenerated every time an executable is built.

* vulkan-zig has as of yet no functionality for selecting feature levels and extensions when generating bindings. This is because when an extension is promoted to Vulkan core, its fields and commands are renamed to lose the extension's author tag (for example, VkSemaphoreWaitFlagsKHR was renamed to VkSemaphoreWaitFlags when it was promoted from an extension to Vulkan 1.2 core). This leads to inconsistencies when only items up to a certain feature level are included, as these promoted items then need to re-gain a tag.

-## Example
-A partial implementation of https://vulkan-tutorial.org is implemented in [examples/triangle.zig](examples/triangle.zig). This example can be run by executing `zig build run-triangle` in vulkan-zig's root.

## See also
-* Implementation of https://vulkan-tutorial.org: https://github.com/andrewrk/zig-vulkan-triangle.
+* Implementation of https://vulkan-tutorial.com using `@cImport`'ed bindings: https://github.com/andrewrk/zig-vulkan-triangle.
* Alternative binding generator: https://github.com/SpexGuy/Zig-Vulkan-Headers
* Zig bindings for GLFW: https://github.com/hexops/mach-glfw
  * With vulkan-zig integration example: https://github.com/hexops/mach-glfw-vulkan-example
* Advanced shader compilation: https://github.com/Games-by-Mason/shader_compiler
build.zig (153 changed lines)

@@ -1,107 +1,66 @@
 const std = @import("std");
-const vkgen = @import("generator/index.zig");
-const Step = std.build.Step;
-const Builder = std.build.Builder;
-
-pub const ResourceGenStep = struct {
-    step: Step,
-    shader_step: *vkgen.ShaderCompileStep,
-    builder: *Builder,
-    package: std.build.Pkg,
-    resources: std.ArrayList(u8),
-
-    pub fn init(builder: *Builder, out: []const u8) *ResourceGenStep {
-        const self = builder.allocator.create(ResourceGenStep) catch unreachable;
-        const full_out_path = std.fs.path.join(builder.allocator, &[_][]const u8{
-            builder.build_root,
-            builder.cache_root,
-            out,
-        }) catch unreachable;
-
-        self.* = .{
-            .step = Step.init(.Custom, "resources", builder.allocator, make),
-            .shader_step = vkgen.ShaderCompileStep.init(builder, &[_][]const u8{"glslc", "--target-env=vulkan1.2"}),
-            .builder = builder,
-            .package = .{
-                .name = "resources",
-                .path = full_out_path,
-                .dependencies = null,
-            },
-            .resources = std.ArrayList(u8).init(builder.allocator),
-        };
-
-        self.step.dependOn(&self.shader_step.step);
-        return self;
-    }
-
-    fn renderPath(self: *ResourceGenStep, path: []const u8, writer: anytype) void {
-        const separators = &[_]u8{ std.fs.path.sep_windows, std.fs.path.sep_posix };
-        var i: usize = 0;
-        while (std.mem.indexOfAnyPos(u8, path, i, separators)) |j| {
-            writer.writeAll(path[i .. j]) catch unreachable;
-            switch (std.fs.path.sep) {
-                std.fs.path.sep_windows => writer.writeAll("\\\\") catch unreachable,
-                std.fs.path.sep_posix => writer.writeByte(std.fs.path.sep_posix) catch unreachable,
-                else => unreachable
-            }
-
-            i = j + 1;
-        }
-        writer.writeAll(path[i..]) catch unreachable;
-    }
-
-    pub fn addShader(self: *ResourceGenStep, name: []const u8, source: []const u8) void {
-        const shader_out_path = self.shader_step.add(source);
-        var writer = self.resources.writer();
-
-        writer.print("pub const {s} = @embedFile(\"", .{ name }) catch unreachable;
-        self.renderPath(shader_out_path, writer);
-        writer.writeAll("\");\n") catch unreachable;
-    }
-
-    fn make(step: *Step) !void {
-        const self = @fieldParentPtr(ResourceGenStep, "step", step);
-        const cwd = std.fs.cwd();
-
-        const dir = std.fs.path.dirname(self.package.path).?;
-        try cwd.makePath(dir);
-        try cwd.writeFile(self.package.path, self.resources.items);
-    }
-};
-
-pub fn build(b: *Builder) void {
-    var test_step = b.step("test", "Run all the tests");
-    test_step.dependOn(&b.addTest("generator/index.zig").step);
-
+pub fn build(b: *std.Build) void {
     const target = b.standardTargetOptions(.{});
-    const mode = b.standardReleaseOptions();
+    const optimize = b.standardOptimizeOption(.{});
+    const maybe_registry = b.option(std.Build.LazyPath, "registry", "Set the path to the Vulkan registry (vk.xml)");
+    const maybe_video = b.option(std.Build.LazyPath, "video", "Set the path to the Vulkan Video registry (video.xml)");
+    const test_step = b.step("test", "Run all the tests");

-    const generator_exe = b.addExecutable("vulkan-zig-generator", "generator/main.zig");
-    generator_exe.setTarget(target);
-    generator_exe.setBuildMode(mode);
-    generator_exe.install();
+    const root_module = b.createModule(.{
+        .root_source_file = b.path("src/main.zig"),
+        .target = target,
+        .optimize = optimize,
+    });

-    const triangle_exe = b.addExecutable("triangle", "examples/triangle.zig");
-    triangle_exe.setTarget(target);
-    triangle_exe.setBuildMode(mode);
-    triangle_exe.install();
-    triangle_exe.linkLibC();
-    triangle_exe.linkSystemLibrary("glfw");
+    // Using the package manager, this artifact can be obtained by the user
+    // through `b.dependency(<name in build.zig.zon>, .{}).artifact("vulkan-zig-generator")`.
+    // with that, the user need only `.addArg("path/to/vk.xml")`, and then obtain
+    // a file source to the generated code with `.addOutputArg("vk.zig")`
+    const generator_exe = b.addExecutable(.{
+        .name = "vulkan-zig-generator",
+        .root_module = root_module,
+    });
+    b.installArtifact(generator_exe);

-    const vk_xml_path = b.option([]const u8, "vulkan-registry", "Override the path to the Vulkan registry") orelse "examples/vk.xml";
-
-    const gen = vkgen.VkGenerateStep.init(b, vk_xml_path, "vk.zig");
-    triangle_exe.step.dependOn(&gen.step);
-    triangle_exe.addPackage(gen.package);
-
-    const res = ResourceGenStep.init(b, "resources.zig");
-    res.addShader("triangle_vert", "examples/shaders/triangle.vert");
-    res.addShader("triangle_frag", "examples/shaders/triangle.frag");
-    triangle_exe.step.dependOn(&res.step);
-    triangle_exe.addPackage(res.package);
-
-    const triangle_run_cmd = triangle_exe.run();
-    triangle_run_cmd.step.dependOn(b.getInstallStep());
-    const triangle_run_step = b.step("run-triangle", "Run the triangle example");
-    triangle_run_step.dependOn(&triangle_run_cmd.step);
+    // Or they can skip all that, and just make sure to pass `.registry = "path/to/vk.xml"` to `b.dependency`,
+    // and then obtain the module directly via `.module("vulkan-zig")`.
+    if (maybe_registry) |registry| {
+        const vk_generate_cmd = b.addRunArtifact(generator_exe);
+
+        if (maybe_video) |video| {
+            vk_generate_cmd.addArg("--video");
+            vk_generate_cmd.addFileArg(video);
+        }
+
+        vk_generate_cmd.addFileArg(registry);
+
+        const vk_zig = vk_generate_cmd.addOutputFileArg("vk.zig");
+        const vk_zig_module = b.addModule("vulkan-zig", .{
+            .root_source_file = vk_zig,
+        });
+
+        // Also install vk.zig, if passed.
+        const vk_zig_install_step = b.addInstallFile(vk_zig, "src/vk.zig");
+        b.getInstallStep().dependOn(&vk_zig_install_step.step);
+
+        // And run tests on this vk.zig too.
+        // This test needs to be an object so that vulkan-zig can import types from the root.
+        // It does not need to run anyway.
+        const ref_all_decls_test = b.addObject(.{
+            .name = "ref-all-decls-test",
+            .root_module = b.createModule(.{
+                .root_source_file = b.path("test/ref_all_decls.zig"),
+                .target = target,
+                .optimize = optimize,
+            }),
+        });
+        ref_all_decls_test.root_module.addImport("vulkan", vk_zig_module);
+        test_step.dependOn(&ref_all_decls_test.step);
+    }

+    const test_target = b.addTest(.{ .root_module = root_module });
+    test_step.dependOn(&b.addRunArtifact(test_target).step);
 }
build.zig.zon (new file, 12 lines)

@@ -0,0 +1,12 @@
.{
    .name = .vulkan,
    .fingerprint = 0xbe155a03c72db6af,
    .version = "0.0.0",
    .minimum_zig_version = "0.15.1",
    .paths = .{
        "build.zig",
        "LICENSE",
        "README.md",
        "src",
    },
}
examples/build.zig (new file, 100 lines)

@@ -0,0 +1,100 @@
const std = @import("std");

const vkgen = @import("vulkan_zig");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});
    const maybe_override_registry = b.option([]const u8, "override-registry", "Override the path to the Vulkan registry used for the examples");
    const use_zig_shaders = b.option(bool, "zig-shader", "Use Zig shaders instead of GLSL") orelse false;

    const registry = b.dependency("vulkan_headers", .{}).path("registry/vk.xml");

    const triangle_exe = b.addExecutable(.{
        .name = "triangle",
        .root_module = b.createModule(.{
            .root_source_file = b.path("triangle.zig"),
            .target = target,
            .link_libc = true,
            .optimize = optimize,
        }),
        // TODO: Remove this once x86_64 is stable
        .use_llvm = true,
    });
    b.installArtifact(triangle_exe);
    triangle_exe.linkSystemLibrary("glfw");

    const registry_path: std.Build.LazyPath = if (maybe_override_registry) |override_registry|
        .{ .cwd_relative = override_registry }
    else
        registry;

    const vulkan = b.dependency("vulkan_zig", .{
        .registry = registry_path,
    }).module("vulkan-zig");

    triangle_exe.root_module.addImport("vulkan", vulkan);

    if (use_zig_shaders) {
        const spirv_target = b.resolveTargetQuery(.{
            .cpu_arch = .spirv32,
            .os_tag = .vulkan,
            .cpu_model = .{ .explicit = &std.Target.spirv.cpu.vulkan_v1_2 },
            .ofmt = .spirv,
        });

        const vert_spv = b.addObject(.{
            .name = "vertex_shader",
            .root_module = b.createModule(.{
                .root_source_file = b.path("shaders/vertex.zig"),
                .target = spirv_target,
            }),
            .use_llvm = false,
        });
        triangle_exe.root_module.addAnonymousImport(
            "vertex_shader",
            .{ .root_source_file = vert_spv.getEmittedBin() },
        );

        const frag_spv = b.addObject(.{
            .name = "fragment_shader",
            .root_module = b.createModule(.{
                .root_source_file = b.path("shaders/fragment.zig"),
                .target = spirv_target,
            }),
            .use_llvm = false,
        });
        triangle_exe.root_module.addAnonymousImport(
            "fragment_shader",
            .{ .root_source_file = frag_spv.getEmittedBin() },
        );
    } else {
        const vert_cmd = b.addSystemCommand(&.{
            "glslc",
            "--target-env=vulkan1.2",
            "-o",
        });
        const vert_spv = vert_cmd.addOutputFileArg("vert.spv");
        vert_cmd.addFileArg(b.path("shaders/triangle.vert"));
        triangle_exe.root_module.addAnonymousImport("vertex_shader", .{
            .root_source_file = vert_spv,
        });

        const frag_cmd = b.addSystemCommand(&.{
            "glslc",
            "--target-env=vulkan1.2",
            "-o",
        });
        const frag_spv = frag_cmd.addOutputFileArg("frag.spv");
        frag_cmd.addFileArg(b.path("shaders/triangle.frag"));
        triangle_exe.root_module.addAnonymousImport("fragment_shader", .{
            .root_source_file = frag_spv,
        });
    }

    const triangle_run_cmd = b.addRunArtifact(triangle_exe);
    triangle_run_cmd.step.dependOn(b.getInstallStep());

    const triangle_run_step = b.step("run-triangle", "Run the triangle example");
    triangle_run_step.dependOn(&triangle_run_cmd.step);
}
examples/build.zig.zon (new file, 15 lines)

@@ -0,0 +1,15 @@
.{
    .name = .vulkan_zig_examples,
    .fingerprint = 0x60508bcca14cfc6d,
    .version = "0.1.0",
    .dependencies = .{
        .vulkan_zig = .{
            .path = "..",
        },
        .vulkan_headers = .{
            .url = "https://github.com/KhronosGroup/Vulkan-Headers/archive/v1.3.283.tar.gz",
            .hash = "N-V-__8AAAkkoQGn5z1yoNVrwqZfnYmZp8AZ5CJgoHRMQI0c",
        },
    },
    .paths = .{""},
}
@@ -1,10 +1,29 @@
-pub usingnamespace @cImport({
+const c = @cImport({
     @cDefine("GLFW_INCLUDE_NONE", {});
     @cInclude("GLFW/glfw3.h");
 });

+const vk = @import("vulkan");
+
+// Re-export the GLFW things that we need
+pub const GLFW_TRUE = c.GLFW_TRUE;
+pub const GLFW_FALSE = c.GLFW_FALSE;
+pub const GLFW_CLIENT_API = c.GLFW_CLIENT_API;
+pub const GLFW_NO_API = c.GLFW_NO_API;
+
+pub const GLFWwindow = c.GLFWwindow;
+
+pub const glfwInit = c.glfwInit;
+pub const glfwTerminate = c.glfwTerminate;
+pub const glfwVulkanSupported = c.glfwVulkanSupported;
+pub const glfwWindowHint = c.glfwWindowHint;
+pub const glfwCreateWindow = c.glfwCreateWindow;
+pub const glfwDestroyWindow = c.glfwDestroyWindow;
+pub const glfwWindowShouldClose = c.glfwWindowShouldClose;
+pub const glfwGetRequiredInstanceExtensions = c.glfwGetRequiredInstanceExtensions;
+pub const glfwGetFramebufferSize = c.glfwGetFramebufferSize;
+pub const glfwPollEvents = c.glfwPollEvents;
+
 // usually the GLFW vulkan functions are exported if Vulkan is included,
 // but since thats not the case here, they are manually imported.
@@ -3,161 +3,144 @@ const vk = @import("vulkan");
|
||||
const c = @import("c.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const required_device_extensions = [_][]const u8{
|
||||
vk.extension_info.khr_swapchain.name
|
||||
};
|
||||
const required_device_extensions = [_][*:0]const u8{vk.extensions.khr_swapchain.name};
|
||||
|
||||
const BaseDispatch = struct {
|
||||
vkCreateInstance: vk.PfnCreateInstance,
|
||||
usingnamespace vk.BaseWrapper(@This());
|
||||
};
|
||||
/// There are 3 levels of bindings in vulkan-zig:
|
||||
/// - The Dispatch types (vk.BaseDispatch, vk.InstanceDispatch, vk.DeviceDispatch)
|
||||
/// are "plain" structs which just contain the function pointers for a particular
|
||||
/// object.
|
||||
/// - The Wrapper types (vk.Basewrapper, vk.InstanceWrapper, vk.DeviceWrapper) contains
|
||||
/// the Dispatch type, as well as Ziggified Vulkan functions - these return Zig errors,
|
||||
/// etc.
|
||||
/// - The Proxy types (vk.InstanceProxy, vk.DeviceProxy, vk.CommandBufferProxy,
|
||||
/// vk.QueueProxy) contain a pointer to a Wrapper and also contain the object's handle.
|
||||
/// Calling Ziggified functions on these types automatically passes the handle as
|
||||
/// the first parameter of each function. Note that this type accepts a pointer to
|
||||
/// a wrapper struct as there is a problem with LLVM where embedding function pointers
|
||||
/// and object pointer in the same struct leads to missed optimizations. If the wrapper
|
||||
/// member is a pointer, LLVM will try to optimize it as any other vtable.
|
||||
/// The wrappers contain
|
||||
const BaseWrapper = vk.BaseWrapper;
|
||||
const InstanceWrapper = vk.InstanceWrapper;
|
||||
const DeviceWrapper = vk.DeviceWrapper;
|
||||
|
||||
const InstanceDispatch = struct {
|
||||
vkDestroyInstance: vk.PfnDestroyInstance,
|
||||
vkCreateDevice: vk.PfnCreateDevice,
|
||||
vkDestroySurfaceKHR: vk.PfnDestroySurfaceKHR,
|
||||
vkEnumeratePhysicalDevices: vk.PfnEnumeratePhysicalDevices,
|
||||
vkGetPhysicalDeviceProperties: vk.PfnGetPhysicalDeviceProperties,
|
||||
vkEnumerateDeviceExtensionProperties: vk.PfnEnumerateDeviceExtensionProperties,
|
||||
vkGetPhysicalDeviceSurfaceFormatsKHR: vk.PfnGetPhysicalDeviceSurfaceFormatsKHR,
|
||||
vkGetPhysicalDeviceSurfacePresentModesKHR: vk.PfnGetPhysicalDeviceSurfacePresentModesKHR,
|
||||
vkGetPhysicalDeviceSurfaceCapabilitiesKHR: vk.PfnGetPhysicalDeviceSurfaceCapabilitiesKHR,
|
||||
vkGetPhysicalDeviceQueueFamilyProperties: vk.PfnGetPhysicalDeviceQueueFamilyProperties,
|
||||
vkGetPhysicalDeviceSurfaceSupportKHR: vk.PfnGetPhysicalDeviceSurfaceSupportKHR,
|
||||
vkGetPhysicalDeviceMemoryProperties: vk.PfnGetPhysicalDeviceMemoryProperties,
|
||||
vkGetDeviceProcAddr: vk.PfnGetDeviceProcAddr,
|
||||
usingnamespace vk.InstanceWrapper(@This());
|
||||
};
|
||||
|
||||
const DeviceDispatch = struct {
|
||||
vkDestroyDevice: vk.PfnDestroyDevice,
|
||||
vkGetDeviceQueue: vk.PfnGetDeviceQueue,
|
||||
vkCreateSemaphore: vk.PfnCreateSemaphore,
|
||||
vkCreateFence: vk.PfnCreateFence,
|
||||
vkCreateImageView: vk.PfnCreateImageView,
|
||||
vkDestroyImageView: vk.PfnDestroyImageView,
|
||||
vkDestroySemaphore: vk.PfnDestroySemaphore,
|
||||
vkDestroyFence: vk.PfnDestroyFence,
|
||||
vkGetSwapchainImagesKHR: vk.PfnGetSwapchainImagesKHR,
|
||||
vkCreateSwapchainKHR: vk.PfnCreateSwapchainKHR,
|
||||
vkDestroySwapchainKHR: vk.PfnDestroySwapchainKHR,
|
||||
vkAcquireNextImageKHR: vk.PfnAcquireNextImageKHR,
|
||||
vkDeviceWaitIdle: vk.PfnDeviceWaitIdle,
|
||||
vkWaitForFences: vk.PfnWaitForFences,
|
||||
vkResetFences: vk.PfnResetFences,
|
||||
vkQueueSubmit: vk.PfnQueueSubmit,
|
||||
vkQueuePresentKHR: vk.PfnQueuePresentKHR,
|
||||
vkCreateCommandPool: vk.PfnCreateCommandPool,
|
||||
vkDestroyCommandPool: vk.PfnDestroyCommandPool,
|
||||
vkAllocateCommandBuffers: vk.PfnAllocateCommandBuffers,
|
||||
vkFreeCommandBuffers: vk.PfnFreeCommandBuffers,
|
||||
vkQueueWaitIdle: vk.PfnQueueWaitIdle,
|
||||
vkCreateShaderModule: vk.PfnCreateShaderModule,
|
||||
vkDestroyShaderModule: vk.PfnDestroyShaderModule,
|
||||
vkCreatePipelineLayout: vk.PfnCreatePipelineLayout,
|
||||
vkDestroyPipelineLayout: vk.PfnDestroyPipelineLayout,
|
||||
vkCreateRenderPass: vk.PfnCreateRenderPass,
|
||||
vkDestroyRenderPass: vk.PfnDestroyRenderPass,
|
||||
vkCreateGraphicsPipelines: vk.PfnCreateGraphicsPipelines,
|
||||
vkDestroyPipeline: vk.PfnDestroyPipeline,
|
||||
vkCreateFramebuffer: vk.PfnCreateFramebuffer,
|
||||
vkDestroyFramebuffer: vk.PfnDestroyFramebuffer,
|
||||
vkBeginCommandBuffer: vk.PfnBeginCommandBuffer,
|
||||
vkEndCommandBuffer: vk.PfnEndCommandBuffer,
|
||||
vkAllocateMemory: vk.PfnAllocateMemory,
|
||||
vkFreeMemory: vk.PfnFreeMemory,
|
||||
vkCreateBuffer: vk.PfnCreateBuffer,
|
||||
vkDestroyBuffer: vk.PfnDestroyBuffer,
|
||||
vkGetBufferMemoryRequirements: vk.PfnGetBufferMemoryRequirements,
|
||||
vkMapMemory: vk.PfnMapMemory,
|
||||
vkUnmapMemory: vk.PfnUnmapMemory,
|
||||
vkBindBufferMemory: vk.PfnBindBufferMemory,
|
||||
vkCmdBeginRenderPass: vk.PfnCmdBeginRenderPass,
|
||||
vkCmdEndRenderPass: vk.PfnCmdEndRenderPass,
|
||||
vkCmdBindPipeline: vk.PfnCmdBindPipeline,
|
||||
vkCmdDraw: vk.PfnCmdDraw,
|
||||
vkCmdSetViewport: vk.PfnCmdSetViewport,
|
||||
vkCmdSetScissor: vk.PfnCmdSetScissor,
|
||||
vkCmdBindVertexBuffers: vk.PfnCmdBindVertexBuffers,
|
||||
vkCmdCopyBuffer: vk.PfnCmdCopyBuffer,
|
||||
usingnamespace vk.DeviceWrapper(@This());
|
||||
};
|
||||
const Instance = vk.InstanceProxy;
|
||||
const Device = vk.DeviceProxy;
|
||||
|
||||
pub const GraphicsContext = struct {
|
||||
vkb: BaseDispatch,
|
||||
vki: InstanceDispatch,
|
||||
vkd: DeviceDispatch,
|
||||
pub const CommandBuffer = vk.CommandBufferProxy;
|
||||
|
||||
instance: vk.Instance,
|
||||
allocator: Allocator,
|
||||
|
||||
vkb: BaseWrapper,
|
||||
|
||||
instance: Instance,
|
||||
debug_messenger: vk.DebugUtilsMessengerEXT,
|
||||
surface: vk.SurfaceKHR,
|
||||
pdev: vk.PhysicalDevice,
|
||||
props: vk.PhysicalDeviceProperties,
|
||||
mem_props: vk.PhysicalDeviceMemoryProperties,
|
||||
|
||||
dev: vk.Device,
|
||||
dev: Device,
|
||||
graphics_queue: Queue,
|
||||
present_queue: Queue,
|
||||
|
||||
pub fn init(allocator: *Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
|
||||
pub fn init(allocator: Allocator, app_name: [*:0]const u8, window: *c.GLFWwindow) !GraphicsContext {
|
||||
var self: GraphicsContext = undefined;
|
||||
self.vkb = try BaseDispatch.load(c.glfwGetInstanceProcAddress);
|
||||
self.allocator = allocator;
|
||||
self.vkb = BaseWrapper.load(c.glfwGetInstanceProcAddress);
|
||||
|
||||
var extension_names: std.ArrayList([*:0]const u8) = .empty;
|
||||
defer extension_names.deinit(allocator);
|
||||
try extension_names.append(allocator, vk.extensions.ext_debug_utils.name);
|
||||
// the following extensions are to support vulkan in mac os
|
||||
// see https://github.com/glfw/glfw/issues/2335
|
||||
try extension_names.append(allocator, vk.extensions.khr_portability_enumeration.name);
|
||||
try extension_names.append(allocator, vk.extensions.khr_get_physical_device_properties_2.name);
|
||||
|
||||
var glfw_exts_count: u32 = 0;
|
||||
const glfw_exts = c.glfwGetRequiredInstanceExtensions(&glfw_exts_count);
|
||||
try extension_names.appendSlice(allocator, @ptrCast(glfw_exts[0..glfw_exts_count]));
|
||||
|
||||
const app_info = vk.ApplicationInfo{
|
||||
const instance = try self.vkb.createInstance(&.{
|
||||
.p_application_info = &.{
|
||||
.p_application_name = app_name,
|
||||
.application_version = vk.makeApiVersion(0, 0, 0, 0),
|
||||
.application_version = @bitCast(vk.makeApiVersion(0, 0, 0, 0)),
|
||||
.p_engine_name = app_name,
|
||||
.engine_version = vk.makeApiVersion(0, 0, 0, 0),
|
||||
.api_version = vk.API_VERSION_1_2,
|
||||
};
|
||||
|
||||
self.instance = try self.vkb.createInstance(.{
|
||||
.flags = .{},
|
||||
.p_application_info = &app_info,
|
||||
.enabled_layer_count = 0,
|
||||
.pp_enabled_layer_names = undefined,
|
||||
.enabled_extension_count = glfw_exts_count,
|
||||
.pp_enabled_extension_names = @ptrCast([*]const [*:0]const u8, glfw_exts),
|
||||
.engine_version = @bitCast(vk.makeApiVersion(0, 0, 0, 0)),
|
||||
.api_version = @bitCast(vk.API_VERSION_1_2),
|
||||
},
|
||||
.enabled_extension_count = @intCast(extension_names.items.len),
|
||||
.pp_enabled_extension_names = extension_names.items.ptr,
|
||||
// enumerate_portability_bit_khr is required to support Vulkan on macOS,
// see https://github.com/glfw/glfw/issues/2335
.flags = .{ .enumerate_portability_bit_khr = true },
|
||||
}, null);
|
||||
|
||||
self.vki = try InstanceDispatch.load(self.instance, c.glfwGetInstanceProcAddress);
|
||||
errdefer self.vki.destroyInstance(self.instance, null);
|
||||
const vki = try allocator.create(InstanceWrapper);
|
||||
errdefer allocator.destroy(vki);
|
||||
vki.* = InstanceWrapper.load(instance, self.vkb.dispatch.vkGetInstanceProcAddr.?);
|
||||
self.instance = Instance.init(instance, vki);
|
||||
errdefer self.instance.destroyInstance(null);
|
||||
|
||||
self.surface = try createSurface(self.vki, self.instance, window);
|
||||
errdefer self.vki.destroySurfaceKHR(self.instance, self.surface, null);
|
||||
self.debug_messenger = try self.instance.createDebugUtilsMessengerEXT(&.{
|
||||
.message_severity = .{
|
||||
//.verbose_bit_ext = true,
|
||||
//.info_bit_ext = true,
|
||||
.warning_bit_ext = true,
|
||||
.error_bit_ext = true,
|
||||
},
|
||||
.message_type = .{
|
||||
.general_bit_ext = true,
|
||||
.validation_bit_ext = true,
|
||||
.performance_bit_ext = true,
|
||||
},
|
||||
.pfn_user_callback = &debugUtilsMessengerCallback,
|
||||
.p_user_data = null,
|
||||
}, null);
|
||||
|
||||
const candidate = try pickPhysicalDevice(self.vki, self.instance, allocator, self.surface);
|
||||
self.surface = try createSurface(self.instance, window);
|
||||
errdefer self.instance.destroySurfaceKHR(self.surface, null);
|
||||
|
||||
const candidate = try pickPhysicalDevice(self.instance, allocator, self.surface);
|
||||
self.pdev = candidate.pdev;
|
||||
self.props = candidate.props;
|
||||
self.dev = try initializeCandidate(self.vki, candidate);
|
||||
self.vkd = try DeviceDispatch.load(self.dev, self.vki.vkGetDeviceProcAddr);
|
||||
errdefer self.vkd.destroyDevice(self.dev, null);
|
||||
|
||||
self.graphics_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);
|
||||
self.present_queue = Queue.init(self.vkd, self.dev, candidate.queues.graphics_family);
|
||||
const dev = try initializeCandidate(self.instance, candidate);
|
||||
|
||||
self.mem_props = self.vki.getPhysicalDeviceMemoryProperties(self.pdev);
|
||||
const vkd = try allocator.create(DeviceWrapper);
|
||||
errdefer allocator.destroy(vkd);
|
||||
vkd.* = DeviceWrapper.load(dev, self.instance.wrapper.dispatch.vkGetDeviceProcAddr.?);
|
||||
self.dev = Device.init(dev, vkd);
|
||||
errdefer self.dev.destroyDevice(null);
|
||||
|
||||
self.graphics_queue = Queue.init(self.dev, candidate.queues.graphics_family);
|
||||
self.present_queue = Queue.init(self.dev, candidate.queues.present_family);
|
||||
|
||||
self.mem_props = self.instance.getPhysicalDeviceMemoryProperties(self.pdev);
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: GraphicsContext) void {
|
||||
self.vkd.destroyDevice(self.dev, null);
|
||||
self.vki.destroySurfaceKHR(self.instance, self.surface, null);
|
||||
self.vki.destroyInstance(self.instance, null);
|
||||
self.dev.destroyDevice(null);
|
||||
self.instance.destroySurfaceKHR(self.surface, null);
|
||||
self.instance.destroyDebugUtilsMessengerEXT(self.debug_messenger, null);
|
||||
self.instance.destroyInstance(null);
|
||||
|
||||
// Don't forget to free the tables to prevent a memory leak.
|
||||
self.allocator.destroy(self.dev.wrapper);
|
||||
self.allocator.destroy(self.instance.wrapper);
|
||||
}
|
||||
|
||||
pub fn deviceName(self: GraphicsContext) []const u8 {
const len = std.mem.indexOfScalar(u8, &self.props.device_name, 0).?;
return self.props.device_name[0 .. len];
pub fn deviceName(self: *const GraphicsContext) []const u8 {
return std.mem.sliceTo(&self.props.device_name, 0);
}

pub fn findMemoryTypeIndex(self: GraphicsContext, memory_type_bits: u32, flags: vk.MemoryPropertyFlags) !u32 {
for (self.mem_props.memory_types[0 .. self.mem_props.memory_type_count]) |mem_type, i| {
if (memory_type_bits & (@as(u32, 1) << @truncate(u5, i)) != 0 and mem_type.property_flags.contains(flags)) {
return @truncate(u32, i);
for (self.mem_props.memory_types[0..self.mem_props.memory_type_count], 0..) |mem_type, i| {
if (memory_type_bits & (@as(u32, 1) << @truncate(i)) != 0 and mem_type.property_flags.contains(flags)) {
return @truncate(i);
}
}
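// Note: memory_type_bits comes from vk.MemoryRequirements; bit i being set means memory type i
// is usable for the resource. The loop above returns the first usable type whose property_flags
// contain all of the requested flags, e.g. .{ .device_local_bit = true }.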
|
||||
|
||||
@@ -165,7 +148,7 @@ pub const GraphicsContext = struct {
|
||||
}
|
||||
|
||||
pub fn allocate(self: GraphicsContext, requirements: vk.MemoryRequirements, flags: vk.MemoryPropertyFlags) !vk.DeviceMemory {
|
||||
return try self.vkd.allocateMemory(self.dev, .{
|
||||
return try self.dev.allocateMemory(&.{
|
||||
.allocation_size = requirements.size,
|
||||
.memory_type_index = try self.findMemoryTypeIndex(requirements.memory_type_bits, flags),
|
||||
}, null);
|
||||
@@ -176,38 +159,36 @@ pub const Queue = struct {
|
||||
handle: vk.Queue,
|
||||
family: u32,
|
||||
|
||||
fn init(vkd: DeviceDispatch, dev: vk.Device, family: u32) Queue {
|
||||
fn init(device: Device, family: u32) Queue {
|
||||
return .{
|
||||
.handle = vkd.getDeviceQueue(dev, family, 0),
|
||||
.handle = device.getDeviceQueue(family, 0),
|
||||
.family = family,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
fn createSurface(vki: InstanceDispatch, instance: vk.Instance, window: *c.GLFWwindow) !vk.SurfaceKHR {
|
||||
fn createSurface(instance: Instance, window: *c.GLFWwindow) !vk.SurfaceKHR {
|
||||
var surface: vk.SurfaceKHR = undefined;
|
||||
if (c.glfwCreateWindowSurface(instance, window, null, &surface) != .success) {
|
||||
if (c.glfwCreateWindowSurface(instance.handle, window, null, &surface) != .success) {
|
||||
return error.SurfaceInitFailed;
|
||||
}
|
||||
|
||||
return surface;
|
||||
}
|
||||
|
||||
fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.Device {
|
||||
fn initializeCandidate(instance: Instance, candidate: DeviceCandidate) !vk.Device {
|
||||
const priority = [_]f32{1};
|
||||
const qci = [_]vk.DeviceQueueCreateInfo{
|
||||
.{
|
||||
.flags = .{},
|
||||
.queue_family_index = candidate.queues.graphics_family,
|
||||
.queue_count = 1,
|
||||
.p_queue_priorities = &priority,
|
||||
},
|
||||
.{
|
||||
.flags = .{},
|
||||
.queue_family_index = candidate.queues.present_family,
|
||||
.queue_count = 1,
|
||||
.p_queue_priorities = &priority,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const queue_count: u32 = if (candidate.queues.graphics_family == candidate.queues.present_family)
|
||||
@@ -215,15 +196,11 @@ fn initializeCandidate(vki: InstanceDispatch, candidate: DeviceCandidate) !vk.De
|
||||
else
|
||||
2;
|
||||
|
||||
return try vki.createDevice(candidate.pdev, .{
|
||||
.flags = .{},
|
||||
return try instance.createDevice(candidate.pdev, &.{
|
||||
.queue_create_info_count = queue_count,
|
||||
.p_queue_create_infos = &qci,
|
||||
.enabled_layer_count = 0,
|
||||
.pp_enabled_layer_names = undefined,
|
||||
.enabled_extension_count = required_device_extensions.len,
|
||||
.pp_enabled_extension_names = @ptrCast([*]const [*:0]const u8, &required_device_extensions),
|
||||
.p_enabled_features = null,
|
||||
.pp_enabled_extension_names = @ptrCast(&required_device_extensions),
|
||||
}, null);
|
||||
}
|
||||
|
||||
@@ -238,22 +215,27 @@ const QueueAllocation = struct {
|
||||
present_family: u32,
|
||||
};
|
||||
|
||||
fn debugUtilsMessengerCallback(severity: vk.DebugUtilsMessageSeverityFlagsEXT, msg_type: vk.DebugUtilsMessageTypeFlagsEXT, callback_data: ?*const vk.DebugUtilsMessengerCallbackDataEXT, _: ?*anyopaque) callconv(.c) vk.Bool32 {
|
||||
const severity_str = if (severity.verbose_bit_ext) "verbose" else if (severity.info_bit_ext) "info" else if (severity.warning_bit_ext) "warning" else if (severity.error_bit_ext) "error" else "unknown";
|
||||
|
||||
const type_str = if (msg_type.general_bit_ext) "general" else if (msg_type.validation_bit_ext) "validation" else if (msg_type.performance_bit_ext) "performance" else if (msg_type.device_address_binding_bit_ext) "device addr" else "unknown";
|
||||
|
||||
const message: [*c]const u8 = if (callback_data) |cb_data| cb_data.p_message else "NO MESSAGE!";
|
||||
std.debug.print("[{s}][{s}]. Message:\n {s}\n", .{ severity_str, type_str, message });
|
||||
|
||||
return .false;
|
||||
}
|
||||
|
||||
fn pickPhysicalDevice(
|
||||
vki: InstanceDispatch,
|
||||
instance: vk.Instance,
|
||||
allocator: *Allocator,
|
||||
instance: Instance,
|
||||
allocator: Allocator,
|
||||
surface: vk.SurfaceKHR,
|
||||
) !DeviceCandidate {
|
||||
var device_count: u32 = undefined;
|
||||
_ = try vki.enumeratePhysicalDevices(instance, &device_count, null);
|
||||
|
||||
const pdevs = try allocator.alloc(vk.PhysicalDevice, device_count);
|
||||
const pdevs = try instance.enumeratePhysicalDevicesAlloc(allocator);
|
||||
defer allocator.free(pdevs);
|
||||
|
||||
_ = try vki.enumeratePhysicalDevices(instance, &device_count, pdevs.ptr);
|
||||
|
||||
for (pdevs) |pdev| {
|
||||
if (try checkSuitable(vki, pdev, allocator, surface)) |candidate| {
|
||||
if (try checkSuitable(instance, pdev, allocator, surface)) |candidate| {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
@@ -262,56 +244,46 @@ fn pickPhysicalDevice(
|
||||
}
|
||||
|
||||
fn checkSuitable(
|
||||
vki: InstanceDispatch,
|
||||
instance: Instance,
|
||||
pdev: vk.PhysicalDevice,
|
||||
allocator: *Allocator,
|
||||
allocator: Allocator,
|
||||
surface: vk.SurfaceKHR,
|
||||
) !?DeviceCandidate {
|
||||
const props = vki.getPhysicalDeviceProperties(pdev);
|
||||
|
||||
if (!try checkExtensionSupport(vki, pdev, allocator)) {
|
||||
if (!try checkExtensionSupport(instance, pdev, allocator)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!try checkSurfaceSupport(vki, pdev, surface)) {
|
||||
if (!try checkSurfaceSupport(instance, pdev, surface)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (try allocateQueues(vki, pdev, allocator, surface)) |allocation| {
|
||||
if (try allocateQueues(instance, pdev, allocator, surface)) |allocation| {
|
||||
const props = instance.getPhysicalDeviceProperties(pdev);
|
||||
return DeviceCandidate{
|
||||
.pdev = pdev,
|
||||
.props = props,
|
||||
.queues = allocation
|
||||
.queues = allocation,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn allocateQueues(
|
||||
vki: InstanceDispatch,
|
||||
pdev: vk.PhysicalDevice,
|
||||
allocator: *Allocator,
|
||||
surface: vk.SurfaceKHR
|
||||
) !?QueueAllocation {
|
||||
var family_count: u32 = undefined;
|
||||
vki.getPhysicalDeviceQueueFamilyProperties(pdev, &family_count, null);
|
||||
|
||||
const families = try allocator.alloc(vk.QueueFamilyProperties, family_count);
|
||||
fn allocateQueues(instance: Instance, pdev: vk.PhysicalDevice, allocator: Allocator, surface: vk.SurfaceKHR) !?QueueAllocation {
|
||||
const families = try instance.getPhysicalDeviceQueueFamilyPropertiesAlloc(pdev, allocator);
|
||||
defer allocator.free(families);
|
||||
vki.getPhysicalDeviceQueueFamilyProperties(pdev, &family_count, families.ptr);
|
||||
|
||||
var graphics_family: ?u32 = null;
|
||||
var present_family: ?u32 = null;
|
||||
|
||||
for (families) |properties, i| {
|
||||
const family = @intCast(u32, i);
|
||||
for (families, 0..) |properties, i| {
|
||||
const family: u32 = @intCast(i);
|
||||
|
||||
if (graphics_family == null and properties.queue_flags.contains(.{.graphics_bit = true})) {
|
||||
if (graphics_family == null and properties.queue_flags.graphics_bit) {
|
||||
graphics_family = family;
|
||||
}
|
||||
|
||||
if (present_family == null and (try vki.getPhysicalDeviceSurfaceSupportKHR(pdev, family, surface)) == vk.TRUE) {
|
||||
if (present_family == null and (try instance.getPhysicalDeviceSurfaceSupportKHR(pdev, family, surface)) == .true) {
|
||||
present_family = family;
|
||||
}
|
||||
}
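// Note: the first family that advertises graphics support and the first family that can present
// to the surface are picked independently; on most hardware they end up being the same family.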
|
||||
@@ -319,41 +291,34 @@ fn allocateQueues(
|
||||
if (graphics_family != null and present_family != null) {
|
||||
return QueueAllocation{
|
||||
.graphics_family = graphics_family.?,
|
||||
.present_family = present_family.?
|
||||
.present_family = present_family.?,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn checkSurfaceSupport(vki: InstanceDispatch, pdev: vk.PhysicalDevice, surface: vk.SurfaceKHR) !bool {
|
||||
fn checkSurfaceSupport(instance: Instance, pdev: vk.PhysicalDevice, surface: vk.SurfaceKHR) !bool {
|
||||
var format_count: u32 = undefined;
|
||||
_ = try vki.getPhysicalDeviceSurfaceFormatsKHR(pdev, surface, &format_count, null);
|
||||
_ = try instance.getPhysicalDeviceSurfaceFormatsKHR(pdev, surface, &format_count, null);
|
||||
|
||||
var present_mode_count: u32 = undefined;
|
||||
_ = try vki.getPhysicalDeviceSurfacePresentModesKHR(pdev, surface, &present_mode_count, null);
|
||||
_ = try instance.getPhysicalDeviceSurfacePresentModesKHR(pdev, surface, &present_mode_count, null);
|
||||
|
||||
return format_count > 0 and present_mode_count > 0;
|
||||
}
|
||||
|
||||
fn checkExtensionSupport(
|
||||
vki: InstanceDispatch,
|
||||
instance: Instance,
|
||||
pdev: vk.PhysicalDevice,
|
||||
allocator: *Allocator,
|
||||
allocator: Allocator,
|
||||
) !bool {
|
||||
var count: u32 = undefined;
|
||||
_ = try vki.enumerateDeviceExtensionProperties(pdev, null, &count, null);
|
||||
|
||||
const propsv = try allocator.alloc(vk.ExtensionProperties, count);
|
||||
const propsv = try instance.enumerateDeviceExtensionPropertiesAlloc(pdev, null, allocator);
|
||||
defer allocator.free(propsv);
|
||||
|
||||
_ = try vki.enumerateDeviceExtensionProperties(pdev, null, &count, propsv.ptr);
|
||||
|
||||
for (required_device_extensions) |ext| {
|
||||
for (propsv) |props| {
|
||||
const len = std.mem.indexOfScalar(u8, &props.extension_name, 0).?;
|
||||
const prop_ext_name = props.extension_name[0 .. len];
|
||||
if (std.mem.eql(u8, ext, prop_ext_name)) {
|
||||
if (std.mem.eql(u8, std.mem.span(ext), std.mem.sliceTo(&props.extension_name, 0))) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
|
||||
examples/shaders/fragment.zig (new file)
@@ -0,0 +1,12 @@
const std = @import("std");
const gpu = std.gpu;

extern const v_color: @Vector(3, f32) addrspace(.input);
extern var f_color: @Vector(4, f32) addrspace(.output);

export fn main() callconv(.spirv_fragment) void {
gpu.location(&v_color, 0);
gpu.location(&f_color, 0);

f_color = .{ v_color[0], v_color[1], v_color[2], 1.0 };
}
examples/shaders/vertex.zig (new file)
@@ -0,0 +1,16 @@
const std = @import("std");
const gpu = std.gpu;

extern const a_pos: @Vector(2, f32) addrspace(.input);
extern const a_color: @Vector(3, f32) addrspace(.input);

extern var v_color: @Vector(3, f32) addrspace(.output);

export fn main() callconv(.spirv_vertex) void {
gpu.location(&a_pos, 0);
gpu.location(&a_color, 1);
gpu.location(&v_color, 0);

gpu.position_out.* = .{ a_pos[0], a_pos[1], 0.0, 1.0 };
v_color = a_color;
}
@@ -10,7 +10,7 @@ pub const Swapchain = struct {
|
||||
};
|
||||
|
||||
gc: *const GraphicsContext,
|
||||
allocator: *Allocator,
|
||||
allocator: Allocator,
|
||||
|
||||
surface_format: vk.SurfaceFormatKHR,
|
||||
present_mode: vk.PresentModeKHR,
|
||||
@@ -21,12 +21,12 @@ pub const Swapchain = struct {
|
||||
image_index: u32,
|
||||
next_image_acquired: vk.Semaphore,
|
||||
|
||||
pub fn init(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D) !Swapchain {
|
||||
pub fn init(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D) !Swapchain {
|
||||
return try initRecycle(gc, allocator, extent, .null_handle);
|
||||
}
|
||||
|
||||
pub fn initRecycle(gc: *const GraphicsContext, allocator: *Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
|
||||
const caps = try gc.vki.getPhysicalDeviceSurfaceCapabilitiesKHR(gc.pdev, gc.surface);
|
||||
pub fn initRecycle(gc: *const GraphicsContext, allocator: Allocator, extent: vk.Extent2D, old_handle: vk.SwapchainKHR) !Swapchain {
|
||||
const caps = try gc.instance.getPhysicalDeviceSurfaceCapabilitiesKHR(gc.pdev, gc.surface);
|
||||
const actual_extent = findActualExtent(caps, extent);
|
||||
if (actual_extent.width == 0 or actual_extent.height == 0) {
|
||||
return error.InvalidSurfaceDimensions;
|
||||
@@ -37,45 +37,56 @@ pub const Swapchain = struct {
|
||||
|
||||
var image_count = caps.min_image_count + 1;
|
||||
if (caps.max_image_count > 0) {
|
||||
image_count = std.math.min(image_count, caps.max_image_count);
|
||||
image_count = @min(image_count, caps.max_image_count);
|
||||
}
|
||||
|
||||
const concurrent = gc.graphics_queue.family != gc.present_queue.family;
|
||||
const qfi = [_]u32{gc.graphics_queue.family, gc.present_queue.family};
|
||||
const qfi = [_]u32{ gc.graphics_queue.family, gc.present_queue.family };
|
||||
const sharing_mode: vk.SharingMode = if (gc.graphics_queue.family != gc.present_queue.family)
|
||||
.concurrent
|
||||
else
|
||||
.exclusive;
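// When the graphics and present queues share a family, exclusive mode is used: concurrent
// sharing is unnecessary (and may be slower), and Vulkan ignores queue_family_index_count and
// p_queue_family_indices for exclusive swapchains.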
|
||||
|
||||
const handle = try gc.vkd.createSwapchainKHR(gc.dev, .{
|
||||
.flags = .{},
|
||||
const handle = gc.dev.createSwapchainKHR(&.{
|
||||
.surface = gc.surface,
|
||||
.min_image_count = image_count,
|
||||
.image_format = surface_format.format,
|
||||
.image_color_space = surface_format.color_space,
|
||||
.image_extent = actual_extent,
|
||||
.image_array_layers = 1,
|
||||
.image_usage = .{.color_attachment_bit = true, .transfer_dst_bit = true},
|
||||
.image_sharing_mode = if (concurrent) .concurrent else .exclusive,
|
||||
.image_usage = .{ .color_attachment_bit = true, .transfer_dst_bit = true },
|
||||
.image_sharing_mode = sharing_mode,
|
||||
.queue_family_index_count = qfi.len,
|
||||
.p_queue_family_indices = &qfi,
|
||||
.pre_transform = caps.current_transform,
|
||||
.composite_alpha = .{.opaque_bit_khr = true},
|
||||
.composite_alpha = .{ .opaque_bit_khr = true },
|
||||
.present_mode = present_mode,
|
||||
.clipped = vk.TRUE,
|
||||
.clipped = .true,
|
||||
.old_swapchain = old_handle,
|
||||
}, null);
|
||||
errdefer gc.vkd.destroySwapchainKHR(gc.dev, handle, null);
|
||||
}, null) catch {
|
||||
return error.SwapchainCreationFailed;
|
||||
};
|
||||
errdefer gc.dev.destroySwapchainKHR(handle, null);
|
||||
|
||||
if (old_handle != .null_handle) {
|
||||
// Apparently, the old swapchain handle still needs to be destroyed after recreating.
|
||||
gc.vkd.destroySwapchainKHR(gc.dev, old_handle, null);
|
||||
gc.dev.destroySwapchainKHR(old_handle, null);
|
||||
}
|
||||
|
||||
const swap_images = try initSwapchainImages(gc, handle, surface_format.format, allocator);
|
||||
errdefer for (swap_images) |si| si.deinit(gc);
|
||||
errdefer {
|
||||
for (swap_images) |si| si.deinit(gc);
|
||||
allocator.free(swap_images);
|
||||
}
|
||||
|
||||
var next_image_acquired = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
|
||||
errdefer gc.vkd.destroySemaphore(gc.dev, next_image_acquired, null);
|
||||
var next_image_acquired = try gc.dev.createSemaphore(&.{}, null);
|
||||
errdefer gc.dev.destroySemaphore(next_image_acquired, null);
|
||||
|
||||
const result = try gc.vkd.acquireNextImageKHR(gc.dev, handle, std.math.maxInt(u64), next_image_acquired, .null_handle);
|
||||
if (result.result != .success) {
|
||||
const result = try gc.dev.acquireNextImageKHR(handle, std.math.maxInt(u64), next_image_acquired, .null_handle);
|
||||
// Even with .suboptimal_khr we can still go on to present.
// If we returned an error for .suboptimal_khr, the example would crash and segfault
// on resize, since even the recreated swapchain can be suboptimal during a resize.
if (result.result == .not_ready or result.result == .timeout) {
|
||||
return error.ImageAcquireFailed;
|
||||
}
|
||||
|
||||
@@ -95,7 +106,8 @@ pub const Swapchain = struct {
|
||||
|
||||
fn deinitExceptSwapchain(self: Swapchain) void {
|
||||
for (self.swap_images) |si| si.deinit(self.gc);
|
||||
self.gc.vkd.destroySemaphore(self.gc.dev, self.next_image_acquired, null);
|
||||
self.allocator.free(self.swap_images);
|
||||
self.gc.dev.destroySemaphore(self.next_image_acquired, null);
|
||||
}
|
||||
|
||||
pub fn waitForAllFences(self: Swapchain) !void {
|
||||
@@ -103,8 +115,10 @@ pub const Swapchain = struct {
|
||||
}
|
||||
|
||||
pub fn deinit(self: Swapchain) void {
|
||||
// If we have no swapchain, none of these resources should exist and we can just return.
if (self.handle == .null_handle) return;
|
||||
self.deinitExceptSwapchain();
|
||||
self.gc.vkd.destroySwapchainKHR(self.gc.dev, self.handle, null);
|
||||
self.gc.dev.destroySwapchainKHR(self.handle, null);
|
||||
}
|
||||
|
||||
pub fn recreate(self: *Swapchain, new_extent: vk.Extent2D) !void {
|
||||
@@ -112,7 +126,18 @@ pub const Swapchain = struct {
|
||||
const allocator = self.allocator;
|
||||
const old_handle = self.handle;
|
||||
self.deinitExceptSwapchain();
|
||||
self.* = try initRecycle(gc, allocator, new_extent, old_handle);
|
||||
// Set the current handle to .null_handle to signal that the current swapchain no longer
// needs to be deinitialized if we fail to recreate it.
self.handle = .null_handle;
|
||||
self.* = initRecycle(gc, allocator, new_extent, old_handle) catch |err| switch (err) {
|
||||
error.SwapchainCreationFailed => {
|
||||
// We failed while recreating, so the old handle still exists, but the deferred
// deinit of this object will no longer destroy it; destroy it here instead.
gc.dev.destroySwapchainKHR(old_handle, null);
|
||||
return err;
|
||||
},
|
||||
else => return err,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn currentImage(self: Swapchain) vk.Image {
|
||||
@@ -144,33 +169,31 @@ pub const Swapchain = struct {
|
||||
// Step 1: Make sure the current frame has finished rendering
|
||||
const current = self.currentSwapImage();
|
||||
try current.waitForFence(self.gc);
|
||||
try self.gc.vkd.resetFences(self.gc.dev, 1, @ptrCast([*]const vk.Fence, ¤t.frame_fence));
|
||||
try self.gc.dev.resetFences(1, @ptrCast(¤t.frame_fence));
|
||||
|
||||
// Step 2: Submit the command buffer
|
||||
const wait_stage = [_]vk.PipelineStageFlags{.{.top_of_pipe_bit = true}};
|
||||
try self.gc.vkd.queueSubmit(self.gc.graphics_queue.handle, 1, &[_]vk.SubmitInfo{.{
|
||||
const wait_stage = [_]vk.PipelineStageFlags{.{ .top_of_pipe_bit = true }};
|
||||
try self.gc.dev.queueSubmit(self.gc.graphics_queue.handle, 1, &[_]vk.SubmitInfo{.{
|
||||
.wait_semaphore_count = 1,
|
||||
.p_wait_semaphores = @ptrCast([*]const vk.Semaphore, ¤t.image_acquired),
|
||||
.p_wait_semaphores = @ptrCast(¤t.image_acquired),
|
||||
.p_wait_dst_stage_mask = &wait_stage,
|
||||
.command_buffer_count = 1,
|
||||
.p_command_buffers = @ptrCast([*]const vk.CommandBuffer, &cmdbuf),
|
||||
.p_command_buffers = @ptrCast(&cmdbuf),
|
||||
.signal_semaphore_count = 1,
|
||||
.p_signal_semaphores = @ptrCast([*]const vk.Semaphore, ¤t.render_finished),
|
||||
.p_signal_semaphores = @ptrCast(¤t.render_finished),
|
||||
}}, current.frame_fence);
|
||||
|
||||
// Step 3: Present the current frame
|
||||
_ = try self.gc.vkd.queuePresentKHR(self.gc.present_queue.handle, .{
|
||||
_ = try self.gc.dev.queuePresentKHR(self.gc.present_queue.handle, &.{
|
||||
.wait_semaphore_count = 1,
|
||||
.p_wait_semaphores = @ptrCast([*]const vk.Semaphore, ¤t.render_finished),
|
||||
.p_wait_semaphores = @ptrCast(¤t.render_finished),
|
||||
.swapchain_count = 1,
|
||||
.p_swapchains = @ptrCast([*]const vk.SwapchainKHR, &self.handle),
|
||||
.p_image_indices = @ptrCast([*]const u32, &self.image_index),
|
||||
.p_results = null,
|
||||
.p_swapchains = @ptrCast(&self.handle),
|
||||
.p_image_indices = @ptrCast(&self.image_index),
|
||||
});
|
||||
|
||||
// Step 4: Acquire next frame
|
||||
const result = try self.gc.vkd.acquireNextImageKHR(
|
||||
self.gc.dev,
|
||||
const result = try self.gc.dev.acquireNextImageKHR(
|
||||
self.handle,
|
||||
std.math.maxInt(u64),
|
||||
self.next_image_acquired,
|
||||
@@ -196,30 +219,29 @@ const SwapImage = struct {
|
||||
frame_fence: vk.Fence,
|
||||
|
||||
fn init(gc: *const GraphicsContext, image: vk.Image, format: vk.Format) !SwapImage {
|
||||
const view = try gc.vkd.createImageView(gc.dev, .{
|
||||
.flags = .{},
|
||||
const view = try gc.dev.createImageView(&.{
|
||||
.image = image,
|
||||
.view_type = .@"2d",
|
||||
.format = format,
|
||||
.components = .{.r = .identity, .g = .identity, .b = .identity, .a = .identity},
|
||||
.components = .{ .r = .identity, .g = .identity, .b = .identity, .a = .identity },
|
||||
.subresource_range = .{
|
||||
.aspect_mask = .{.color_bit = true},
|
||||
.aspect_mask = .{ .color_bit = true },
|
||||
.base_mip_level = 0,
|
||||
.level_count = 1,
|
||||
.base_array_layer = 0,
|
||||
.layer_count = 1,
|
||||
},
|
||||
}, null);
|
||||
errdefer gc.vkd.destroyImageView(gc.dev, view, null);
|
||||
errdefer gc.dev.destroyImageView(view, null);
|
||||
|
||||
const image_acquired = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
|
||||
errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);
|
||||
const image_acquired = try gc.dev.createSemaphore(&.{}, null);
|
||||
errdefer gc.dev.destroySemaphore(image_acquired, null);
|
||||
|
||||
const render_finished = try gc.vkd.createSemaphore(gc.dev, .{.flags = .{}}, null);
|
||||
errdefer gc.vkd.destroySemaphore(gc.dev, image_acquired, null);
|
||||
const render_finished = try gc.dev.createSemaphore(&.{}, null);
|
||||
errdefer gc.dev.destroySemaphore(render_finished, null);
|
||||
|
||||
const frame_fence = try gc.vkd.createFence(gc.dev, .{.flags = .{.signaled_bit = true}}, null);
|
||||
errdefer gc.vkd.destroyFence(gc.dev, frame_fence, null);
|
||||
const frame_fence = try gc.dev.createFence(&.{ .flags = .{ .signaled_bit = true } }, null);
|
||||
errdefer gc.dev.destroyFence(frame_fence, null);
|
||||
|
||||
return SwapImage{
|
||||
.image = image,
|
||||
@@ -232,29 +254,26 @@ const SwapImage = struct {
|
||||
|
||||
fn deinit(self: SwapImage, gc: *const GraphicsContext) void {
|
||||
self.waitForFence(gc) catch return;
|
||||
gc.vkd.destroyImageView(gc.dev, self.view, null);
|
||||
gc.vkd.destroySemaphore(gc.dev, self.image_acquired, null);
|
||||
gc.vkd.destroySemaphore(gc.dev, self.render_finished, null);
|
||||
gc.vkd.destroyFence(gc.dev, self.frame_fence, null);
|
||||
gc.dev.destroyImageView(self.view, null);
|
||||
gc.dev.destroySemaphore(self.image_acquired, null);
|
||||
gc.dev.destroySemaphore(self.render_finished, null);
|
||||
gc.dev.destroyFence(self.frame_fence, null);
|
||||
}
|
||||
|
||||
fn waitForFence(self: SwapImage, gc: *const GraphicsContext) !void {
|
||||
_ = try gc.vkd.waitForFences(gc.dev, 1, @ptrCast([*]const vk.Fence, &self.frame_fence), vk.TRUE, std.math.maxInt(u64));
|
||||
_ = try gc.dev.waitForFences(1, @ptrCast(&self.frame_fence), .true, std.math.maxInt(u64));
|
||||
}
|
||||
};
|
||||
|
||||
fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: *Allocator) ![]SwapImage {
|
||||
var count: u32 = undefined;
|
||||
_ = try gc.vkd.getSwapchainImagesKHR(gc.dev, swapchain, &count, null);
|
||||
const images = try allocator.alloc(vk.Image, count);
|
||||
fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, format: vk.Format, allocator: Allocator) ![]SwapImage {
|
||||
const images = try gc.dev.getSwapchainImagesAllocKHR(swapchain, allocator);
|
||||
defer allocator.free(images);
|
||||
_ = try gc.vkd.getSwapchainImagesKHR(gc.dev, swapchain, &count, images.ptr);
|
||||
|
||||
const swap_images = try allocator.alloc(SwapImage, count);
|
||||
errdefer allocator.free(images);
|
||||
const swap_images = try allocator.alloc(SwapImage, images.len);
|
||||
errdefer allocator.free(swap_images);
|
||||
|
||||
var i: usize = 0;
|
||||
errdefer for (swap_images[0 .. i]) |si| si.deinit(gc);
|
||||
errdefer for (swap_images[0..i]) |si| si.deinit(gc);
|
||||
|
||||
for (images) |image| {
|
||||
swap_images[i] = try SwapImage.init(gc, image, format);
|
||||
@@ -264,17 +283,14 @@ fn initSwapchainImages(gc: *const GraphicsContext, swapchain: vk.SwapchainKHR, f
|
||||
return swap_images;
|
||||
}
|
||||
|
||||
fn findSurfaceFormat(gc: *const GraphicsContext, allocator: *Allocator) !vk.SurfaceFormatKHR {
|
||||
fn findSurfaceFormat(gc: *const GraphicsContext, allocator: Allocator) !vk.SurfaceFormatKHR {
|
||||
const preferred = vk.SurfaceFormatKHR{
|
||||
.format = .b8g8r8a8_srgb,
|
||||
.color_space = .srgb_nonlinear_khr,
|
||||
};
|
||||
|
||||
var count: u32 = undefined;
|
||||
_ = try gc.vki.getPhysicalDeviceSurfaceFormatsKHR(gc.pdev, gc.surface, &count, null);
|
||||
const surface_formats = try allocator.alloc(vk.SurfaceFormatKHR, count);
|
||||
const surface_formats = try gc.instance.getPhysicalDeviceSurfaceFormatsAllocKHR(gc.pdev, gc.surface, allocator);
|
||||
defer allocator.free(surface_formats);
|
||||
_ = try gc.vki.getPhysicalDeviceSurfaceFormatsKHR(gc.pdev, gc.surface, &count, surface_formats.ptr);
|
||||
|
||||
for (surface_formats) |sfmt| {
|
||||
if (std.meta.eql(sfmt, preferred)) {
|
||||
@@ -285,12 +301,9 @@ fn findSurfaceFormat(gc: *const GraphicsContext, allocator: *Allocator) !vk.Surf
|
||||
return surface_formats[0]; // There must always be at least one supported surface format
|
||||
}
|
||||
|
||||
fn findPresentMode(gc: *const GraphicsContext, allocator: *Allocator) !vk.PresentModeKHR {
|
||||
var count: u32 = undefined;
|
||||
_ = try gc.vki.getPhysicalDeviceSurfacePresentModesKHR(gc.pdev, gc.surface, &count, null);
|
||||
const present_modes = try allocator.alloc(vk.PresentModeKHR, count);
|
||||
fn findPresentMode(gc: *const GraphicsContext, allocator: Allocator) !vk.PresentModeKHR {
|
||||
const present_modes = try gc.instance.getPhysicalDeviceSurfacePresentModesAllocKHR(gc.pdev, gc.surface, allocator);
|
||||
defer allocator.free(present_modes);
|
||||
_ = try gc.vki.getPhysicalDeviceSurfacePresentModesKHR(gc.pdev, gc.surface, &count, present_modes.ptr);
|
||||
|
||||
const preferred = [_]vk.PresentModeKHR{
|
||||
.mailbox_khr,
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
const std = @import("std");
|
||||
const vk = @import("vulkan");
|
||||
const c = @import("c.zig");
|
||||
const resources = @import("resources");
|
||||
const GraphicsContext = @import("graphics_context.zig").GraphicsContext;
|
||||
const Swapchain = @import("swapchain.zig").Swapchain;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const vert_spv align(@alignOf(u32)) = @embedFile("vertex_shader").*;
|
||||
const frag_spv align(@alignOf(u32)) = @embedFile("fragment_shader").*;
|
||||
|
||||
const app_name = "vulkan-zig triangle example";
|
||||
|
||||
const Vertex = struct {
|
||||
@@ -20,13 +22,13 @@ const Vertex = struct {
|
||||
.binding = 0,
|
||||
.location = 0,
|
||||
.format = .r32g32_sfloat,
|
||||
.offset = @byteOffsetOf(Vertex, "pos"),
|
||||
.offset = @offsetOf(Vertex, "pos"),
|
||||
},
|
||||
.{
|
||||
.binding = 0,
|
||||
.location = 1,
|
||||
.format = .r32g32b32_sfloat,
|
||||
.offset = @byteOffsetOf(Vertex, "color"),
|
||||
.offset = @offsetOf(Vertex, "color"),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -35,76 +37,96 @@ const Vertex = struct {
|
||||
};
|
||||
|
||||
const vertices = [_]Vertex{
|
||||
.{.pos = .{0, -0.5}, .color = .{1, 0, 0}},
|
||||
.{.pos = .{0.5, 0.5}, .color = .{0, 1, 0}},
|
||||
.{.pos = .{-0.5, 0.5}, .color = .{0, 0, 1}},
|
||||
.{ .pos = .{ 0, -0.5 }, .color = .{ 1, 0, 0 } },
|
||||
.{ .pos = .{ 0.5, 0.5 }, .color = .{ 0, 1, 0 } },
|
||||
.{ .pos = .{ -0.5, 0.5 }, .color = .{ 0, 0, 1 } },
|
||||
};
|
||||
|
||||
pub fn main() !void {
|
||||
if (c.glfwInit() != c.GLFW_TRUE) return error.GlfwInitFailed;
|
||||
defer c.glfwTerminate();
|
||||
|
||||
var extent = vk.Extent2D{.width = 800, .height = 600};
|
||||
if (c.glfwVulkanSupported() != c.GLFW_TRUE) {
|
||||
std.log.err("GLFW could not find libvulkan", .{});
|
||||
return error.NoVulkan;
|
||||
}
|
||||
|
||||
var extent = vk.Extent2D{ .width = 800, .height = 600 };
|
||||
|
||||
c.glfwWindowHint(c.GLFW_CLIENT_API, c.GLFW_NO_API);
|
||||
const window = c.glfwCreateWindow(
|
||||
@intCast(c_int, extent.width),
|
||||
@intCast(c_int, extent.height),
|
||||
@intCast(extent.width),
|
||||
@intCast(extent.height),
|
||||
app_name,
|
||||
null,
|
||||
null
|
||||
null,
|
||||
) orelse return error.WindowInitFailed;
|
||||
defer c.glfwDestroyWindow(window);
|
||||
|
||||
const allocator = std.heap.page_allocator;
|
||||
// According to the GLFW docs:
|
||||
//
|
||||
// > Window systems put limits on window sizes. Very large or very small window dimensions
|
||||
// > may be overridden by the window system on creation. Check the actual size after creation.
|
||||
// -- https://www.glfw.org/docs/3.3/group__window.html#ga3555a418df92ad53f917597fe2f64aeb
|
||||
//
|
||||
// This happens in practice, for example, when using Wayland with a scaling factor that is not a
|
||||
// divisor of the initial window size (see https://github.com/Snektron/vulkan-zig/pull/192).
|
||||
// To fix it, just fetch the actual size here, after the windowing system has had the time to
|
||||
// update the window.
|
||||
extent.width, extent.height = blk: {
|
||||
var w: c_int = undefined;
|
||||
var h: c_int = undefined;
|
||||
c.glfwGetFramebufferSize(window, &w, &h);
|
||||
break :blk .{ @intCast(w), @intCast(h) };
|
||||
};
|
||||
|
||||
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
|
||||
defer _ = gpa.deinit();
|
||||
const allocator = gpa.allocator();
|
||||
|
||||
const gc = try GraphicsContext.init(allocator, app_name, window);
|
||||
defer gc.deinit();
|
||||
|
||||
std.debug.print("Using device: {s}\n", .{ gc.deviceName() });
|
||||
std.log.debug("Using device: {s}", .{gc.deviceName()});
|
||||
|
||||
var swapchain = try Swapchain.init(&gc, allocator, extent);
|
||||
defer swapchain.deinit();
|
||||
|
||||
const pipeline_layout = try gc.vkd.createPipelineLayout(gc.dev, .{
|
||||
const pipeline_layout = try gc.dev.createPipelineLayout(&.{
|
||||
.flags = .{},
|
||||
.set_layout_count = 0,
|
||||
.p_set_layouts = undefined,
|
||||
.push_constant_range_count = 0,
|
||||
.p_push_constant_ranges = undefined,
|
||||
}, null);
|
||||
defer gc.vkd.destroyPipelineLayout(gc.dev, pipeline_layout, null);
|
||||
defer gc.dev.destroyPipelineLayout(pipeline_layout, null);
|
||||
|
||||
const render_pass = try createRenderPass(&gc, swapchain);
|
||||
defer gc.vkd.destroyRenderPass(gc.dev, render_pass, null);
|
||||
defer gc.dev.destroyRenderPass(render_pass, null);
|
||||
|
||||
var pipeline = try createPipeline(&gc, extent, pipeline_layout, render_pass);
|
||||
defer gc.vkd.destroyPipeline(gc.dev, pipeline, null);
|
||||
const pipeline = try createPipeline(&gc, pipeline_layout, render_pass);
|
||||
defer gc.dev.destroyPipeline(pipeline, null);
|
||||
|
||||
var framebuffers = try createFramebuffers(&gc, allocator, render_pass, swapchain);
|
||||
defer destroyFramebuffers(&gc, allocator, framebuffers);
|
||||
|
||||
const pool = try gc.vkd.createCommandPool(gc.dev, .{
|
||||
.flags = .{},
|
||||
const pool = try gc.dev.createCommandPool(&.{
|
||||
.queue_family_index = gc.graphics_queue.family,
|
||||
}, null);
|
||||
defer gc.vkd.destroyCommandPool(gc.dev, pool, null);
|
||||
defer gc.dev.destroyCommandPool(pool, null);
|
||||
|
||||
const buffer = try gc.vkd.createBuffer(gc.dev, .{
|
||||
.flags = .{},
|
||||
const buffer = try gc.dev.createBuffer(&.{
|
||||
.size = @sizeOf(@TypeOf(vertices)),
|
||||
.usage = .{.transfer_dst_bit = true, .vertex_buffer_bit = true},
|
||||
.usage = .{ .transfer_dst_bit = true, .vertex_buffer_bit = true },
|
||||
.sharing_mode = .exclusive,
|
||||
.queue_family_index_count = 0,
|
||||
.p_queue_family_indices = undefined,
|
||||
}, null);
|
||||
defer gc.vkd.destroyBuffer(gc.dev, buffer, null);
|
||||
const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, buffer);
|
||||
const memory = try gc.allocate(mem_reqs, .{.device_local_bit = true});
|
||||
defer gc.vkd.freeMemory(gc.dev, memory, null);
|
||||
try gc.vkd.bindBufferMemory(gc.dev, buffer, memory, 0);
|
||||
defer gc.dev.destroyBuffer(buffer, null);
|
||||
const mem_reqs = gc.dev.getBufferMemoryRequirements(buffer);
|
||||
const memory = try gc.allocate(mem_reqs, .{ .device_local_bit = true });
|
||||
defer gc.dev.freeMemory(memory, null);
|
||||
try gc.dev.bindBufferMemory(buffer, memory, 0);
|
||||
|
||||
try uploadVertices(&gc, pool, buffer, memory);
|
||||
try uploadVertices(&gc, pool, buffer);
|
||||
|
||||
var cmdbufs = try createCommandBuffers(
|
||||
&gc,
|
||||
@@ -114,24 +136,27 @@ pub fn main() !void {
|
||||
swapchain.extent,
|
||||
render_pass,
|
||||
pipeline,
|
||||
framebuffers
|
||||
framebuffers,
|
||||
);
|
||||
defer destroyCommandBuffers(&gc, pool, allocator, cmdbufs);
|
||||
|
||||
var state: Swapchain.PresentState = .optimal;
|
||||
while (c.glfwWindowShouldClose(window) == c.GLFW_FALSE) {
|
||||
const cmdbuf = cmdbufs[swapchain.image_index];
|
||||
|
||||
const state = swapchain.present(cmdbuf) catch |err| switch (err) {
|
||||
error.OutOfDateKHR => Swapchain.PresentState.suboptimal,
|
||||
else => |narrow| return narrow,
|
||||
};
|
||||
|
||||
if (state == .suboptimal) {
|
||||
var w: c_int = undefined;
|
||||
var h: c_int = undefined;
|
||||
c.glfwGetWindowSize(window, &w, &h);
|
||||
extent.width = @intCast(u32, w);
|
||||
extent.height = @intCast(u32, h);
|
||||
c.glfwGetFramebufferSize(window, &w, &h);
|
||||
|
||||
// Don't present or resize swapchain while the window is minimized
|
||||
if (w == 0 or h == 0) {
|
||||
c.glfwPollEvents();
|
||||
continue;
|
||||
}
|
||||
|
||||
const cmdbuf = cmdbufs[swapchain.image_index];
|
||||
|
||||
if (state == .suboptimal or extent.width != @as(u32, @intCast(w)) or extent.height != @as(u32, @intCast(h))) {
|
||||
extent.width = @intCast(w);
|
||||
extent.height = @intCast(h);
|
||||
try swapchain.recreate(extent);
|
||||
|
||||
destroyFramebuffers(&gc, allocator, framebuffers);
|
||||
@@ -146,58 +171,57 @@ pub fn main() !void {
|
||||
swapchain.extent,
|
||||
render_pass,
|
||||
pipeline,
|
||||
framebuffers
|
||||
framebuffers,
|
||||
);
|
||||
}
|
||||
state = swapchain.present(cmdbuf) catch |err| switch (err) {
|
||||
error.OutOfDateKHR => Swapchain.PresentState.suboptimal,
|
||||
else => |narrow| return narrow,
|
||||
};
|
||||
|
||||
c.glfwSwapBuffers(window);
|
||||
c.glfwPollEvents();
|
||||
|
||||
}
|
||||
|
||||
try swapchain.waitForAllFences();
|
||||
try gc.dev.deviceWaitIdle();
|
||||
}
|
||||
|
||||
fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer, memory: vk.DeviceMemory) !void {
|
||||
const staging_buffer = try gc.vkd.createBuffer(gc.dev, .{
|
||||
.flags = .{},
|
||||
fn uploadVertices(gc: *const GraphicsContext, pool: vk.CommandPool, buffer: vk.Buffer) !void {
|
||||
const staging_buffer = try gc.dev.createBuffer(&.{
|
||||
.size = @sizeOf(@TypeOf(vertices)),
|
||||
.usage = .{.transfer_src_bit = true},
|
||||
.usage = .{ .transfer_src_bit = true },
|
||||
.sharing_mode = .exclusive,
|
||||
.queue_family_index_count = 0,
|
||||
.p_queue_family_indices = undefined,
|
||||
}, null);
|
||||
defer gc.vkd.destroyBuffer(gc.dev, staging_buffer, null);
|
||||
const mem_reqs = gc.vkd.getBufferMemoryRequirements(gc.dev, staging_buffer);
|
||||
const staging_memory = try gc.allocate(mem_reqs, .{.host_visible_bit = true, .host_coherent_bit = true});
|
||||
defer gc.vkd.freeMemory(gc.dev, staging_memory, null);
|
||||
try gc.vkd.bindBufferMemory(gc.dev, staging_buffer, staging_memory, 0);
|
||||
defer gc.dev.destroyBuffer(staging_buffer, null);
|
||||
const mem_reqs = gc.dev.getBufferMemoryRequirements(staging_buffer);
|
||||
const staging_memory = try gc.allocate(mem_reqs, .{ .host_visible_bit = true, .host_coherent_bit = true });
|
||||
defer gc.dev.freeMemory(staging_memory, null);
|
||||
try gc.dev.bindBufferMemory(staging_buffer, staging_memory, 0);
|
||||
|
||||
{
|
||||
const data = try gc.vkd.mapMemory(gc.dev, staging_memory, 0, vk.WHOLE_SIZE, .{});
|
||||
defer gc.vkd.unmapMemory(gc.dev, staging_memory);
|
||||
const data = try gc.dev.mapMemory(staging_memory, 0, vk.WHOLE_SIZE, .{});
|
||||
defer gc.dev.unmapMemory(staging_memory);
|
||||
|
||||
const gpu_vertices = @ptrCast([*]Vertex, @alignCast(@alignOf(Vertex), data));
|
||||
for (vertices) |vertex, i| {
|
||||
gpu_vertices[i] = vertex;
|
||||
}
|
||||
const gpu_vertices: [*]Vertex = @ptrCast(@alignCast(data));
|
||||
@memcpy(gpu_vertices, vertices[0..]);
|
||||
}
|
||||
|
||||
try copyBuffer(gc, pool, buffer, staging_buffer, @sizeOf(@TypeOf(vertices)));
|
||||
}
|
||||
|
||||
fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer, src: vk.Buffer, size: vk.DeviceSize) !void {
|
||||
var cmdbuf: vk.CommandBuffer = undefined;
|
||||
try gc.vkd.allocateCommandBuffers(gc.dev, .{
|
||||
var cmdbuf_handle: vk.CommandBuffer = undefined;
|
||||
try gc.dev.allocateCommandBuffers(&.{
|
||||
.command_pool = pool,
|
||||
.level = .primary,
|
||||
.command_buffer_count = 1,
|
||||
}, @ptrCast([*]vk.CommandBuffer, &cmdbuf));
|
||||
defer gc.vkd.freeCommandBuffers(gc.dev, pool, 1, @ptrCast([*]const vk.CommandBuffer, &cmdbuf));
|
||||
}, @ptrCast(&cmdbuf_handle));
|
||||
defer gc.dev.freeCommandBuffers(pool, 1, @ptrCast(&cmdbuf_handle));
|
||||
|
||||
try gc.vkd.beginCommandBuffer(cmdbuf, .{
|
||||
.flags = .{.one_time_submit_bit = true},
|
||||
.p_inheritance_info = null,
|
||||
const cmdbuf = GraphicsContext.CommandBuffer.init(cmdbuf_handle, gc.dev.wrapper);
|
||||
|
||||
try cmdbuf.beginCommandBuffer(&.{
|
||||
.flags = .{ .one_time_submit_bit = true },
|
||||
});
|
||||
|
||||
const region = vk.BufferCopy{
|
||||
@@ -205,27 +229,23 @@ fn copyBuffer(gc: *const GraphicsContext, pool: vk.CommandPool, dst: vk.Buffer,
|
||||
.dst_offset = 0,
|
||||
.size = size,
|
||||
};
|
||||
gc.vkd.cmdCopyBuffer(cmdbuf, src, dst, 1, @ptrCast([*]const vk.BufferCopy, ®ion));
|
||||
cmdbuf.copyBuffer(src, dst, 1, @ptrCast(®ion));
|
||||
|
||||
try gc.vkd.endCommandBuffer(cmdbuf);
|
||||
try cmdbuf.endCommandBuffer();
|
||||
|
||||
const si = vk.SubmitInfo{
|
||||
.wait_semaphore_count = 0,
|
||||
.p_wait_semaphores = undefined,
|
||||
.p_wait_dst_stage_mask = undefined,
|
||||
.command_buffer_count = 1,
|
||||
.p_command_buffers = @ptrCast([*]const vk.CommandBuffer, &cmdbuf),
|
||||
.signal_semaphore_count = 0,
|
||||
.p_signal_semaphores = undefined,
|
||||
.p_command_buffers = (&cmdbuf.handle)[0..1],
|
||||
.p_wait_dst_stage_mask = undefined,
|
||||
};
|
||||
try gc.vkd.queueSubmit(gc.graphics_queue.handle, 1, @ptrCast([*]const vk.SubmitInfo, &si), .null_handle);
|
||||
try gc.vkd.queueWaitIdle(gc.graphics_queue.handle);
|
||||
try gc.dev.queueSubmit(gc.graphics_queue.handle, 1, @ptrCast(&si), .null_handle);
|
||||
try gc.dev.queueWaitIdle(gc.graphics_queue.handle);
|
||||
}
|
||||
|
||||
fn createCommandBuffers(
|
||||
gc: *const GraphicsContext,
|
||||
pool: vk.CommandPool,
|
||||
allocator: *Allocator,
|
||||
allocator: Allocator,
|
||||
buffer: vk.Buffer,
|
||||
extent: vk.Extent2D,
|
||||
render_pass: vk.RenderPass,
|
||||
@@ -235,86 +255,80 @@ fn createCommandBuffers(
|
||||
const cmdbufs = try allocator.alloc(vk.CommandBuffer, framebuffers.len);
|
||||
errdefer allocator.free(cmdbufs);
|
||||
|
||||
try gc.vkd.allocateCommandBuffers(gc.dev, .{
|
||||
try gc.dev.allocateCommandBuffers(&.{
|
||||
.command_pool = pool,
|
||||
.level = .primary,
|
||||
.command_buffer_count = @truncate(u32, cmdbufs.len),
|
||||
.command_buffer_count = @intCast(cmdbufs.len),
|
||||
}, cmdbufs.ptr);
|
||||
errdefer gc.vkd.freeCommandBuffers(gc.dev, pool, @truncate(u32, cmdbufs.len), cmdbufs.ptr);
|
||||
errdefer gc.dev.freeCommandBuffers(pool, @intCast(cmdbufs.len), cmdbufs.ptr);
|
||||
|
||||
const clear = vk.ClearValue{
|
||||
.color = .{.float_32 = .{0, 0, 0, 1}},
|
||||
.color = .{ .float_32 = .{ 0, 0, 0, 1 } },
|
||||
};
|
||||
|
||||
const viewport = vk.Viewport{
|
||||
.x = 0,
|
||||
.y = 0,
|
||||
.width = @intToFloat(f32, extent.width),
|
||||
.height = @intToFloat(f32, extent.height),
|
||||
.width = @floatFromInt(extent.width),
|
||||
.height = @floatFromInt(extent.height),
|
||||
.min_depth = 0,
|
||||
.max_depth = 1,
|
||||
};
|
||||
|
||||
const scissor = vk.Rect2D{
|
||||
.offset = .{.x = 0, .y = 0},
|
||||
.offset = .{ .x = 0, .y = 0 },
|
||||
.extent = extent,
|
||||
};
|
||||
|
||||
for (cmdbufs) |cmdbuf, i| {
|
||||
try gc.vkd.beginCommandBuffer(cmdbuf, .{
|
||||
.flags = .{},
|
||||
.p_inheritance_info = null,
|
||||
});
|
||||
for (cmdbufs, framebuffers) |cmdbuf, framebuffer| {
|
||||
try gc.dev.beginCommandBuffer(cmdbuf, &.{});
|
||||
|
||||
gc.vkd.cmdSetViewport(cmdbuf, 0, 1, @ptrCast([*]const vk.Viewport, &viewport));
|
||||
gc.vkd.cmdSetScissor(cmdbuf, 0, 1, @ptrCast([*]const vk.Rect2D, &scissor));
|
||||
gc.dev.cmdSetViewport(cmdbuf, 0, 1, @ptrCast(&viewport));
|
||||
gc.dev.cmdSetScissor(cmdbuf, 0, 1, @ptrCast(&scissor));
|
||||
|
||||
gc.vkd.cmdBeginRenderPass(cmdbuf, .{
|
||||
.render_pass = render_pass,
|
||||
.framebuffer = framebuffers[i],
|
||||
.render_area = .{
|
||||
.offset = .{.x = 0, .y = 0},
|
||||
// This needs to be a separate definition - see https://github.com/ziglang/zig/issues/7627.
|
||||
const render_area = vk.Rect2D{
|
||||
.offset = .{ .x = 0, .y = 0 },
|
||||
.extent = extent,
|
||||
},
|
||||
};
|
||||
|
||||
gc.dev.cmdBeginRenderPass(cmdbuf, &.{
|
||||
.render_pass = render_pass,
|
||||
.framebuffer = framebuffer,
|
||||
.render_area = render_area,
|
||||
.clear_value_count = 1,
|
||||
.p_clear_values = @ptrCast([*]const vk.ClearValue, &clear),
|
||||
.p_clear_values = @ptrCast(&clear),
|
||||
}, .@"inline");
|
||||
|
||||
gc.vkd.cmdBindPipeline(cmdbuf, .graphics, pipeline);
|
||||
gc.dev.cmdBindPipeline(cmdbuf, .graphics, pipeline);
|
||||
const offset = [_]vk.DeviceSize{0};
|
||||
gc.vkd.cmdBindVertexBuffers(cmdbuf, 0, 1, @ptrCast([*]const vk.Buffer, &buffer), &offset);
|
||||
gc.vkd.cmdDraw(cmdbuf, vertices.len, 1, 0, 0);
|
||||
gc.dev.cmdBindVertexBuffers(cmdbuf, 0, 1, @ptrCast(&buffer), &offset);
|
||||
gc.dev.cmdDraw(cmdbuf, vertices.len, 1, 0, 0);
|
||||
|
||||
gc.vkd.cmdEndRenderPass(cmdbuf);
|
||||
try gc.vkd.endCommandBuffer(cmdbuf);
|
||||
gc.dev.cmdEndRenderPass(cmdbuf);
|
||||
try gc.dev.endCommandBuffer(cmdbuf);
|
||||
}
|
||||
|
||||
return cmdbufs;
|
||||
}
|
||||
|
||||
fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: *Allocator, cmdbufs: []vk.CommandBuffer) void {
|
||||
gc.vkd.freeCommandBuffers(gc.dev, pool, @truncate(u32, cmdbufs.len), cmdbufs.ptr);
|
||||
fn destroyCommandBuffers(gc: *const GraphicsContext, pool: vk.CommandPool, allocator: Allocator, cmdbufs: []vk.CommandBuffer) void {
|
||||
gc.dev.freeCommandBuffers(pool, @truncate(cmdbufs.len), cmdbufs.ptr);
|
||||
allocator.free(cmdbufs);
|
||||
}
|
||||
|
||||
fn createFramebuffers(
|
||||
gc: *const GraphicsContext,
|
||||
allocator: *Allocator,
|
||||
render_pass: vk.RenderPass,
|
||||
swapchain: Swapchain
|
||||
) ![]vk.Framebuffer {
|
||||
fn createFramebuffers(gc: *const GraphicsContext, allocator: Allocator, render_pass: vk.RenderPass, swapchain: Swapchain) ![]vk.Framebuffer {
|
||||
const framebuffers = try allocator.alloc(vk.Framebuffer, swapchain.swap_images.len);
|
||||
errdefer allocator.free(framebuffers);
|
||||
|
||||
var i: usize = 0;
|
||||
errdefer for (framebuffers[0 .. i]) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);
|
||||
errdefer for (framebuffers[0..i]) |fb| gc.dev.destroyFramebuffer(fb, null);
|
||||
|
||||
for (framebuffers) |*fb| {
|
||||
fb.* = try gc.vkd.createFramebuffer(gc.dev, .{
|
||||
.flags = .{},
|
||||
fb.* = try gc.dev.createFramebuffer(&.{
|
||||
.render_pass = render_pass,
|
||||
.attachment_count = 1,
|
||||
.p_attachments = @ptrCast([*]const vk.ImageView, &swapchain.swap_images[i].view),
|
||||
.p_attachments = @ptrCast(&swapchain.swap_images[i].view),
|
||||
.width = swapchain.extent.width,
|
||||
.height = swapchain.extent.height,
|
||||
.layers = 1,
|
||||
@@ -325,21 +339,20 @@ fn createFramebuffers(
|
||||
return framebuffers;
|
||||
}
|
||||
|
||||
fn destroyFramebuffers(gc: *const GraphicsContext, allocator: *Allocator, framebuffers: []const vk.Framebuffer) void {
|
||||
for (framebuffers) |fb| gc.vkd.destroyFramebuffer(gc.dev, fb, null);
|
||||
fn destroyFramebuffers(gc: *const GraphicsContext, allocator: Allocator, framebuffers: []const vk.Framebuffer) void {
|
||||
for (framebuffers) |fb| gc.dev.destroyFramebuffer(fb, null);
|
||||
allocator.free(framebuffers);
|
||||
}
|
||||
|
||||
fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.RenderPass {
|
||||
const color_attachment = vk.AttachmentDescription{
|
||||
.flags = .{},
|
||||
.format = swapchain.surface_format.format,
|
||||
.samples = .{.@"1_bit" = true},
|
||||
.samples = .{ .@"1_bit" = true },
|
||||
.load_op = .clear,
|
||||
.store_op = .store,
|
||||
.stencil_load_op = .dont_care,
|
||||
.stencil_store_op = .dont_care,
|
||||
.initial_layout = .@"undefined",
|
||||
.initial_layout = .undefined,
|
||||
.final_layout = .present_src_khr,
|
||||
};
|
||||
|
||||
@@ -349,82 +362,62 @@ fn createRenderPass(gc: *const GraphicsContext, swapchain: Swapchain) !vk.Render
|
||||
};
|
||||
|
||||
const subpass = vk.SubpassDescription{
|
||||
.flags = .{},
|
||||
.pipeline_bind_point = .graphics,
|
||||
.input_attachment_count = 0,
|
||||
.p_input_attachments = undefined,
|
||||
.color_attachment_count = 1,
|
||||
.p_color_attachments = @ptrCast([*]const vk.AttachmentReference, &color_attachment_ref),
|
||||
.p_resolve_attachments = null,
|
||||
.p_depth_stencil_attachment = null,
|
||||
.preserve_attachment_count = 0,
|
||||
.p_preserve_attachments = undefined,
|
||||
.p_color_attachments = @ptrCast(&color_attachment_ref),
|
||||
};
|
||||
|
||||
return try gc.vkd.createRenderPass(gc.dev, .{
|
||||
.flags = .{},
|
||||
return try gc.dev.createRenderPass(&.{
|
||||
.attachment_count = 1,
|
||||
.p_attachments = @ptrCast([*]const vk.AttachmentDescription, &color_attachment),
|
||||
.p_attachments = @ptrCast(&color_attachment),
|
||||
.subpass_count = 1,
|
||||
.p_subpasses = @ptrCast([*]const vk.SubpassDescription, &subpass),
|
||||
.dependency_count = 0,
|
||||
.p_dependencies = undefined,
|
||||
.p_subpasses = @ptrCast(&subpass),
|
||||
}, null);
|
||||
}
|
||||
|
||||
fn createPipeline(
|
||||
gc: *const GraphicsContext,
|
||||
extent: vk.Extent2D,
|
||||
layout: vk.PipelineLayout,
|
||||
render_pass: vk.RenderPass,
|
||||
) !vk.Pipeline {
|
||||
const vert = try gc.vkd.createShaderModule(gc.dev, .{
|
||||
.flags = .{},
|
||||
.code_size = resources.triangle_vert.len,
|
||||
.p_code = @ptrCast([*]const u32, resources.triangle_vert),
|
||||
const vert = try gc.dev.createShaderModule(&.{
|
||||
.code_size = vert_spv.len,
|
||||
.p_code = @ptrCast(&vert_spv),
|
||||
}, null);
|
||||
defer gc.vkd.destroyShaderModule(gc.dev, vert, null);
|
||||
defer gc.dev.destroyShaderModule(vert, null);
|
||||
|
||||
const frag = try gc.vkd.createShaderModule(gc.dev, .{
|
||||
.flags = .{},
|
||||
.code_size = resources.triangle_frag.len,
|
||||
.p_code = @ptrCast([*]const u32, resources.triangle_frag),
|
||||
const frag = try gc.dev.createShaderModule(&.{
|
||||
.code_size = frag_spv.len,
|
||||
.p_code = @ptrCast(&frag_spv),
|
||||
}, null);
|
||||
defer gc.vkd.destroyShaderModule(gc.dev, frag, null);
|
||||
defer gc.dev.destroyShaderModule(frag, null);
|
||||
|
||||
const pssci = [_]vk.PipelineShaderStageCreateInfo{
|
||||
.{
|
||||
.flags = .{},
|
||||
.stage = .{.vertex_bit = true},
|
||||
.stage = .{ .vertex_bit = true },
|
||||
.module = vert,
|
||||
.p_name = "main",
|
||||
.p_specialization_info = null,
|
||||
},
|
||||
.{
|
||||
.flags = .{},
|
||||
.stage = .{.fragment_bit = true},
|
||||
.stage = .{ .fragment_bit = true },
|
||||
.module = frag,
|
||||
.p_name = "main",
|
||||
.p_specialization_info = null,
|
||||
},
|
||||
};
|
||||
|
||||
const pvisci = vk.PipelineVertexInputStateCreateInfo{
|
||||
.flags = .{},
|
||||
.vertex_binding_description_count = 1,
|
||||
.p_vertex_binding_descriptions = @ptrCast([*]const vk.VertexInputBindingDescription, &Vertex.binding_description),
|
||||
.p_vertex_binding_descriptions = @ptrCast(&Vertex.binding_description),
|
||||
.vertex_attribute_description_count = Vertex.attribute_description.len,
|
||||
.p_vertex_attribute_descriptions = &Vertex.attribute_description,
|
||||
};
|
||||
|
||||
const piasci = vk.PipelineInputAssemblyStateCreateInfo{
|
||||
.flags = .{},
|
||||
.topology = .triangle_list,
|
||||
.primitive_restart_enable = vk.FALSE,
|
||||
.primitive_restart_enable = .false,
|
||||
};
|
||||
|
||||
const pvsci = vk.PipelineViewportStateCreateInfo{
|
||||
.flags = .{},
|
||||
.viewport_count = 1,
|
||||
.p_viewports = undefined, // set in createCommandBuffers with cmdSetViewport
|
||||
.scissor_count = 1,
|
||||
@@ -432,13 +425,12 @@ fn createPipeline(
|
||||
};
|
||||
|
||||
const prsci = vk.PipelineRasterizationStateCreateInfo{
|
||||
.flags = .{},
|
||||
.depth_clamp_enable = vk.FALSE,
|
||||
.rasterizer_discard_enable = vk.FALSE,
|
||||
.depth_clamp_enable = .false,
|
||||
.rasterizer_discard_enable = .false,
|
||||
.polygon_mode = .fill,
|
||||
.cull_mode = .{.back_bit = true},
|
||||
.cull_mode = .{ .back_bit = true },
|
||||
.front_face = .clockwise,
|
||||
.depth_bias_enable = vk.FALSE,
|
||||
.depth_bias_enable = .false,
|
||||
.depth_bias_constant_factor = 0,
|
||||
.depth_bias_clamp = 0,
|
||||
.depth_bias_slope_factor = 0,
|
||||
@@ -446,36 +438,33 @@ fn createPipeline(
|
||||
};
|
||||
|
||||
const pmsci = vk.PipelineMultisampleStateCreateInfo{
|
||||
.flags = .{},
|
||||
.rasterization_samples = .{.@"1_bit" = true},
|
||||
.sample_shading_enable = vk.FALSE,
|
||||
.rasterization_samples = .{ .@"1_bit" = true },
|
||||
.sample_shading_enable = .false,
|
||||
.min_sample_shading = 1,
|
||||
.p_sample_mask = null,
|
||||
.alpha_to_coverage_enable = vk.FALSE,
|
||||
.alpha_to_one_enable = vk.FALSE,
|
||||
.alpha_to_coverage_enable = .false,
|
||||
.alpha_to_one_enable = .false,
|
||||
};
|
||||
|
||||
const pcbas = vk.PipelineColorBlendAttachmentState{
|
||||
.blend_enable = vk.FALSE,
|
||||
.blend_enable = .false,
|
||||
.src_color_blend_factor = .one,
|
||||
.dst_color_blend_factor = .zero,
|
||||
.color_blend_op = .add,
|
||||
.src_alpha_blend_factor = .one,
|
||||
.dst_alpha_blend_factor = .zero,
|
||||
.alpha_blend_op = .add,
|
||||
.color_write_mask = .{.r_bit = true, .g_bit = true, .b_bit = true, .a_bit = true},
|
||||
.color_write_mask = .{ .r_bit = true, .g_bit = true, .b_bit = true, .a_bit = true },
|
||||
};
|
||||
|
||||
const pcbsci = vk.PipelineColorBlendStateCreateInfo{
|
||||
.flags = .{},
|
||||
.logic_op_enable = vk.FALSE,
|
||||
.logic_op_enable = .false,
|
||||
.logic_op = .copy,
|
||||
.attachment_count = 1,
|
||||
.p_attachments = @ptrCast([*]const vk.PipelineColorBlendAttachmentState, &pcbas),
|
||||
.blend_constants = [_]f32{0, 0, 0, 0},
|
||||
.p_attachments = @ptrCast(&pcbas),
|
||||
.blend_constants = [_]f32{ 0, 0, 0, 0 },
|
||||
};
|
||||
|
||||
const dynstate = [_]vk.DynamicState{.viewport, .scissor};
|
||||
const dynstate = [_]vk.DynamicState{ .viewport, .scissor };
|
||||
const pdsci = vk.PipelineDynamicStateCreateInfo{
|
||||
.flags = .{},
|
||||
.dynamic_state_count = dynstate.len,
|
||||
@@ -503,12 +492,12 @@ fn createPipeline(
|
||||
};
|
||||
|
||||
var pipeline: vk.Pipeline = undefined;
|
||||
_ = try gc.vkd.createGraphicsPipelines(
|
||||
gc.dev,
|
||||
_ = try gc.dev.createGraphicsPipelines(
|
||||
.null_handle,
|
||||
1, @ptrCast([*]const vk.GraphicsPipelineCreateInfo, &gpci),
|
||||
1,
|
||||
@ptrCast(&gpci),
|
||||
null,
|
||||
@ptrCast([*]vk.Pipeline, &pipeline),
|
||||
@ptrCast(&pipeline),
|
||||
);
|
||||
return pipeline;
|
||||
}
|
||||
|
||||
examples/vk.xml (16562): file diff suppressed because it is too large.
@@ -1,75 +0,0 @@
const std = @import("std");
const path = std.fs.path;
const Builder = std.build.Builder;
const Step = std.build.Step;

/// Utility functionality to help with compiling shaders from build.zig.
/// Invokes glslc (or another shader compiler passed to `init`) for each shader
/// added via `add`.
pub const ShaderCompileStep = struct {
/// Structure representing a shader to be compiled.
const Shader = struct {
/// The path to the shader, relative to the current build root.
source_path: []const u8,

/// The full output path where the compiled shader binary is placed.
full_out_path: []const u8,
};

step: Step,
builder: *Builder,

/// The command and optional arguments used to invoke the shader compiler.
glslc_cmd: []const []const u8,

/// List of shaders that are to be compiled.
shaders: std.ArrayList(Shader),

/// Create a ShaderCompileStep for `builder`. When this step is invoked by the build
/// system, `<glslc_cmd...> <shader_source> -o <full_out_path>` is invoked for each shader.
pub fn init(builder: *Builder, glslc_cmd: []const []const u8) *ShaderCompileStep {
const self = builder.allocator.create(ShaderCompileStep) catch unreachable;
self.* = .{
.step = Step.init(.Custom, "shader-compile", builder.allocator, make),
.builder = builder,
.glslc_cmd = glslc_cmd,
.shaders = std.ArrayList(Shader).init(builder.allocator),
};
return self;
}

/// Add a shader to be compiled. `src` is the shader source path, relative to the project root.
/// Returns the full path where the compiled binary will be stored upon successful compilation.
/// This path can then be used to include the binary into an executable, for example by passing it
/// to @embedFile via an additional generated file.
pub fn add(self: *ShaderCompileStep, src: []const u8) []const u8 {
const full_out_path = path.join(self.builder.allocator, &[_][]const u8{
self.builder.build_root,
self.builder.cache_root,
"shaders",
src,
}) catch unreachable;
self.shaders.append(.{.source_path = src, .full_out_path = full_out_path}) catch unreachable;
return full_out_path;
}

/// Internal build function.
fn make(step: *Step) !void {
const self = @fieldParentPtr(ShaderCompileStep, "step", step);
const cwd = std.fs.cwd();

const cmd = try self.builder.allocator.alloc([]const u8, self.glslc_cmd.len + 3);
for (self.glslc_cmd) |part, i| {
cmd[i] = part;
}
cmd[cmd.len - 2] = "-o";

for (self.shaders.items) |shader| {
const dir = path.dirname(shader.full_out_path).?;
try cwd.makePath(dir);
cmd[cmd.len - 3] = shader.source_path;
cmd[cmd.len - 1] = shader.full_out_path;
try self.builder.spawnChild(cmd);
}
}
};
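A hedged build.zig sketch of how this step was typically wired up; the executable name, shader paths, and the `generator/index.zig` import path are assumptions for illustration, not part of the diff:

const std = @import("std");
const vkgen = @import("generator/index.zig"); // assumed location of the old package root

pub fn build(b: *std.build.Builder) void {
    const exe = b.addExecutable("triangle", "examples/triangle.zig");

    // Compile GLSL shaders with glslc; each add() returns the path of the
    // compiled SPIR-V binary under zig-cache/shaders/, which can then be
    // @embedFile'd through a small generated file.
    const shaders = vkgen.ShaderCompileStep.init(b, &[_][]const u8{ "glslc", "--target-env=vulkan1.2" });
    const vert_spv = shaders.add("examples/shaders/triangle.vert");
    _ = vert_spv;
    exe.step.dependOn(&shaders.step);
}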
@@ -1,9 +0,0 @@
pub const generateVk = @import("vulkan/generator.zig").generate;
pub const VkGenerateStep = @import("vulkan/build_integration.zig").GenerateStep;
pub const generateSpirv = @import("spirv/generator.zig").generate;
pub const ShaderCompileStep = @import("build_integration.zig").ShaderCompileStep;

test "main" {
_ = @import("xml.zig");
_ = @import("vulkan/c_parse.zig");
}
@@ -1,81 +0,0 @@
const std = @import("std");
const generate = @import("vulkan/generator.zig").generate;

const usage = "Usage: {s} [-h|--help] <spec xml path> <output zig source>\n";

pub fn main() !void {
const stderr = std.io.getStdErr();
const stdout = std.io.getStdOut();

var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
const allocator = &arena.allocator;

var args = std.process.args();
const prog_name = try args.next(allocator) orelse return error.ExecutableNameMissing;

var maybe_xml_path: ?[]const u8 = null;
var maybe_out_path: ?[]const u8 = null;

while (args.next(allocator)) |err_or_arg| {
const arg = try err_or_arg;

if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) {
@setEvalBranchQuota(2000);
try stderr.writer().print(
\\Utility to generate a Zig binding from the Vulkan XML API registry.
\\
\\The most recent Vulkan XML API registry can be obtained from
\\https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml/vk.xml,
\\and the most recent LunarG Vulkan SDK version can be found at
\\$VULKAN_SDK/x86_64/share/vulkan/registry/vk.xml.
\\
\\
++ usage,
.{ prog_name },
);
return;
} else if (maybe_xml_path == null) {
maybe_xml_path = arg;
} else if (maybe_out_path == null) {
maybe_out_path = arg;
} else {
try stderr.writer().print("Error: Superficial argument '{s}'\n", .{ arg });
}
}

const xml_path = maybe_xml_path orelse {
try stderr.writer().print("Error: Missing required argument <spec xml path>\n" ++ usage, .{ prog_name });
return;
};

const out_path = maybe_out_path orelse {
try stderr.writer().print("Error: Missing required argument <output zig source>\n" ++ usage, .{ prog_name });
return;
};

const cwd = std.fs.cwd();
const xml_src = cwd.readFileAlloc(allocator, xml_path, std.math.maxInt(usize)) catch |err| {
try stderr.writer().print("Error: Failed to open input file '{s}' ({s})\n", .{ xml_path, @errorName(err) });
return;
};

var out_buffer = std.ArrayList(u8).init(allocator);
try generate(allocator, xml_src, out_buffer.writer());

const tree = try std.zig.parse(allocator, out_buffer.items);
const formatted = try tree.render(allocator);
defer allocator.free(formatted);

if (std.fs.path.dirname(out_path)) |dir| {
cwd.makePath(dir) catch |err| {
try stderr.writer().print("Error: Failed to create output directory '{s}' ({s})\n", .{ dir, @errorName(err) });
return;
};
}

cwd.writeFile(out_path, formatted) catch |err| {
try stderr.writer().print("Error: Failed to write to output file '{s}' ({s})\n", .{ out_path, @errorName(err) });
return;
};
}
@@ -1,82 +0,0 @@
const std = @import("std");
const generate = @import("generator.zig").generate;
const path = std.fs.path;
const Builder = std.build.Builder;
const Step = std.build.Step;

/// build.zig integration for Vulkan binding generation. This step can be used to generate
/// Vulkan bindings at compile time from vk.xml, by providing the path to vk.xml and the output
/// path relative to zig-cache. The final package can then be obtained from `package`, which
/// can be added to the project using `std.build.Builder.addPackage`.
pub const GenerateStep = struct {
step: Step,
builder: *Builder,

/// The path to vk.xml
spec_path: []const u8,

/// The package representing the generated bindings. The generated bindings will be placed
/// in `package.path`. When using this step, this member should be passed to
/// `std.build.Builder.addPackage`, which causes the bindings to become available under the
/// name `vulkan`.
package: std.build.Pkg,

/// Initialize a Vulkan generation step, for `builder`. `spec_path` is the path to
/// vk.xml, relative to the project root. The generated bindings will be placed at
/// `out_path`, which is relative to the zig-cache directory.
pub fn init(builder: *Builder, spec_path: []const u8, out_path: []const u8) *GenerateStep {
const self = builder.allocator.create(GenerateStep) catch unreachable;
const full_out_path = path.join(builder.allocator, &[_][]const u8{
builder.build_root,
builder.cache_root,
out_path,
}) catch unreachable;

self.* = .{
.step = Step.init(.Custom, "vulkan-generate", builder.allocator, make),
.builder = builder,
.spec_path = spec_path,
.package = .{
.name = "vulkan",
.path = full_out_path,
.dependencies = null,
}
};
return self;
}

/// Initialize a Vulkan generation step for `builder`, by extracting vk.xml from the LunarG installation
/// root. Typically, the location of the LunarG SDK root can be retrieved by querying for the VULKAN_SDK
/// environment variable, set by activating the environment setup script located in the SDK root.
/// `builder` and `out_path` are used in the same manner as `init`.
pub fn initFromSdk(builder: *Builder, sdk_path: []const u8, out_path: []const u8) *GenerateStep {
const spec_path = std.fs.path.join(
builder.allocator,
&[_][]const u8{sdk_path, "share/vulkan/registry/vk.xml"},
) catch unreachable;

return init(builder, spec_path, out_path);
}

/// Internal build function. This reads `vk.xml`, and passes it to `generate`, which then generates
/// the final bindings. The resulting generated bindings are not formatted, which is why an ArrayList
/// writer is passed instead of a file writer. The output is then brought into standard formatting
/// by parsing it with `std.zig.parse` and rendering the resulting tree.
fn make(step: *Step) !void {
const self = @fieldParentPtr(GenerateStep, "step", step);
const cwd = std.fs.cwd();

const spec = try cwd.readFileAlloc(self.builder.allocator, self.spec_path, std.math.maxInt(usize));

var out_buffer = std.ArrayList(u8).init(self.builder.allocator);
try generate(self.builder.allocator, spec, out_buffer.writer());

const tree = try std.zig.parse(self.builder.allocator, out_buffer.items);

var formatted = try tree.render(self.builder.allocator);

const dir = path.dirname(self.package.path).?;
try cwd.makePath(dir);
try cwd.writeFile(self.package.path, formatted);
}
};
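A hedged build.zig sketch of how this step was consumed; the executable name and the `generator/index.zig` import path are assumptions for illustration:

const std = @import("std");
const vkgen = @import("generator/index.zig"); // assumed location of the old package root

pub fn build(b: *std.build.Builder) void {
    const exe = b.addExecutable("my-app", "src/main.zig");

    // Generate bindings from a vk.xml checked into the repository and expose
    // them to the executable as the `vulkan` package.
    const gen = vkgen.VkGenerateStep.init(b, "examples/vk.xml", "vk.zig");
    exe.step.dependOn(&gen.step);
    exe.addPackage(gen.package);
}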
@@ -1,174 +0,0 @@
const std = @import("std");
const reg = @import("registry.zig");
const xml = @import("../xml.zig");
const renderRegistry = @import("render.zig").render;
const parseXml = @import("parse.zig").parseXml;
const IdRenderer = @import("../id_render.zig").IdRenderer;
const mem = std.mem;
const Allocator = mem.Allocator;
const FeatureLevel = reg.FeatureLevel;

const EnumFieldMerger = struct {
const EnumExtensionMap = std.StringArrayHashMap(std.ArrayListUnmanaged(reg.Enum.Field));
const FieldSet = std.StringArrayHashMap(void);

gpa: *Allocator,
reg_arena: *Allocator,
registry: *reg.Registry,
enum_extensions: EnumExtensionMap,
field_set: FieldSet,

fn init(gpa: *Allocator, reg_arena: *Allocator, registry: *reg.Registry) EnumFieldMerger {
return .{
.gpa = gpa,
.reg_arena = reg_arena,
.registry = registry,
.enum_extensions = EnumExtensionMap.init(gpa),
.field_set = FieldSet.init(gpa),
};
}

fn deinit(self: *EnumFieldMerger) void {
for (self.enum_extensions.values()) |*value| {
value.deinit(self.gpa);
}

self.field_set.deinit();
self.enum_extensions.deinit();
}

fn putEnumExtension(self: *EnumFieldMerger, enum_name: []const u8, field: reg.Enum.Field) !void {
const res = try self.enum_extensions.getOrPut(enum_name);
if (!res.found_existing) {
res.value_ptr.* = std.ArrayListUnmanaged(reg.Enum.Field){};
}

try res.value_ptr.append(self.gpa, field);
}

fn addRequires(self: *EnumFieldMerger, reqs: []const reg.Require) !void {
for (reqs) |req| {
for (req.extends) |enum_ext| {
try self.putEnumExtension(enum_ext.extends, enum_ext.field);
}
}
}

fn mergeEnumFields(self: *EnumFieldMerger, name: []const u8, base_enum: *reg.Enum) !void {
// If there are no extensions for this enum, assume it's valid.
const extensions = self.enum_extensions.get(name) orelse return;

self.field_set.clearRetainingCapacity();

const n_fields_upper_bound = base_enum.fields.len + extensions.items.len;
const new_fields = try self.reg_arena.alloc(reg.Enum.Field, n_fields_upper_bound);
var i: usize = 0;

for (base_enum.fields) |field| {
const res = try self.field_set.getOrPut(field.name);
if (!res.found_existing) {
new_fields[i] = field;
i += 1;
}
}

// Assume that if a field name clobbers, the value is the same
for (extensions.items) |field| {
const res = try self.field_set.getOrPut(field.name);
if (!res.found_existing) {
new_fields[i] = field;
i += 1;
}
}

// Existing base_enum.fields was allocated by `self.reg_arena`, so
// it gets cleaned up whenever that is deinited.
base_enum.fields = self.reg_arena.shrink(new_fields, i);
}

fn merge(self: *EnumFieldMerger) !void {
for (self.registry.features) |feature| {
try self.addRequires(feature.requires);
}

for (self.registry.extensions) |ext| {
try self.addRequires(ext.requires);
}

// Merge all the enum fields.
// Assume that all keys of enum_extensions appear in `self.registry.decls`
for (self.registry.decls) |*decl| {
if (decl.decl_type == .enumeration) {
try self.mergeEnumFields(decl.name, &decl.decl_type.enumeration);
}
}
}
};

pub const Generator = struct {
gpa: *Allocator,
reg_arena: std.heap.ArenaAllocator,
registry: reg.Registry,
id_renderer: IdRenderer,

fn init(allocator: *Allocator, spec: *xml.Element) !Generator {
const result = try parseXml(allocator, spec);

const tags = try allocator.alloc([]const u8, result.registry.tags.len);
for (tags) |*tag, i| tag.* = result.registry.tags[i].name;

return Generator{
.gpa = allocator,
.reg_arena = result.arena,
.registry = result.registry,
.id_renderer = IdRenderer.init(allocator, tags),
};
}

fn deinit(self: Generator) void {
self.gpa.free(self.id_renderer.tags);
self.reg_arena.deinit();
}

fn stripFlagBits(self: Generator, name: []const u8) []const u8 {
const tagless = self.id_renderer.stripAuthorTag(name);
return tagless[0 .. tagless.len - "FlagBits".len];
}

fn stripFlags(self: Generator, name: []const u8) []const u8 {
const tagless = self.id_renderer.stripAuthorTag(name);
return tagless[0 .. tagless.len - "Flags".len];
}

// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
fn mergeEnumFields(self: *Generator) !void {
var merger = EnumFieldMerger.init(self.gpa, &self.reg_arena.allocator, &self.registry);
defer merger.deinit();
try merger.merge();
}

fn fixupTags(self: *Generator) !void {
var fixer_upper = TagFixerUpper.init(self.gpa, &self.registry, &self.id_renderer);
defer fixer_upper.deinit();
try fixer_upper.fixup();
}

fn render(self: *Generator, writer: anytype) !void {
try renderRegistry(writer, &self.reg_arena.allocator, &self.registry, &self.id_renderer);
}
};

/// Main function for generating the Vulkan bindings. vk.xml is to be provided via `spec_xml`,
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
/// internal data structures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
/// directly.
pub fn generate(allocator: *Allocator, spec_xml: []const u8, writer: anytype) !void {
const spec = try xml.parse(allocator, spec_xml);
defer spec.deinit();

var gen = try Generator.init(allocator, spec.root);
defer gen.deinit();

try gen.mergeEnumFields();
try gen.render(writer);
}
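A minimal caller sketch for the `generate` entry point above, written against the old `*Allocator`-based std APIs this file targets; the helper function is hypothetical:

const std = @import("std");
const generate = @import("vulkan/generator.zig").generate;

/// Hypothetical helper: produce the (still unformatted) bindings for a vk.xml blob.
fn emitBindings(allocator: *std.mem.Allocator, vk_xml: []const u8) ![]u8 {
    var out = std.ArrayList(u8).init(allocator);
    errdefer out.deinit();
    try generate(allocator, vk_xml, out.writer());
    return out.toOwnedSlice();
}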
File diff suppressed because it is too large
@@ -1,667 +0,0 @@
|
||||
const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const testing = std.testing;
|
||||
const Allocator = mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
|
||||
pub const Attribute = struct {
|
||||
name: []const u8,
|
||||
value: []const u8
|
||||
};
|
||||
|
||||
pub const Content = union(enum) {
|
||||
CharData: []const u8,
|
||||
Comment: []const u8,
|
||||
Element: *Element
|
||||
};
|
||||
|
||||
pub const Element = struct {
|
||||
pub const AttributeList = ArrayList(*Attribute);
|
||||
pub const ContentList = ArrayList(Content);
|
||||
|
||||
tag: []const u8,
|
||||
attributes: AttributeList,
|
||||
children: ContentList,
|
||||
|
||||
fn init(tag: []const u8, alloc: *Allocator) Element {
|
||||
return .{
|
||||
.tag = tag,
|
||||
.attributes = AttributeList.init(alloc),
|
||||
.children = ContentList.init(alloc),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getAttribute(self: *Element, attrib_name: []const u8) ?[]const u8 {
|
||||
for (self.attributes.items) |child| {
|
||||
if (mem.eql(u8, child.name, attrib_name)) {
|
||||
return child.value;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getCharData(self: *Element, child_tag: []const u8) ?[]const u8 {
|
||||
const child = self.findChildByTag(child_tag) orelse return null;
|
||||
if (child.children.items.len != 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return switch (child.children.items[0]) {
|
||||
.CharData => |char_data| char_data,
|
||||
else => null
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: *Element) ChildIterator {
|
||||
return .{
|
||||
.items = self.children.items,
|
||||
.i = 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn elements(self: *Element) ChildElementIterator {
|
||||
return .{
|
||||
.inner = self.iterator(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn findChildByTag(self: *Element, tag: []const u8) ?*Element {
|
||||
return self.findChildrenByTag(tag).next();
|
||||
}
|
||||
|
||||
pub fn findChildrenByTag(self: *Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
return .{
|
||||
.inner = self.elements(),
|
||||
.tag = tag
|
||||
};
|
||||
}
|
||||
|
||||
pub const ChildIterator = struct {
|
||||
items: []Content,
|
||||
i: usize,
|
||||
|
||||
pub fn next(self: *ChildIterator) ?*Content {
|
||||
if (self.i < self.items.len) {
|
||||
self.i += 1;
|
||||
return &self.items[self.i - 1];
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
pub const ChildElementIterator = struct {
|
||||
inner: ChildIterator,
|
||||
|
||||
pub fn next(self: *ChildElementIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (child.* != .Element) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child.*.Element;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
pub const FindChildrenByTagIterator = struct {
|
||||
inner: ChildElementIterator,
|
||||
tag: []const u8,
|
||||
|
||||
pub fn next(self: *FindChildrenByTagIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (!mem.eql(u8, child.tag, self.tag)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
pub const XmlDecl = struct {
|
||||
version: []const u8,
|
||||
encoding: ?[]const u8,
|
||||
standalone: ?bool
|
||||
};
|
||||
|
||||
pub const Document = struct {
|
||||
arena: ArenaAllocator,
|
||||
xml_decl: ?*XmlDecl,
|
||||
root: *Element,
|
||||
|
||||
pub fn deinit(self: Document) void {
|
||||
var arena = self.arena; // Copy to stack so self can be taken by value.
|
||||
arena.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
const ParseContext = struct {
|
||||
source: []const u8,
|
||||
offset: usize,
|
||||
line: usize,
|
||||
column: usize,
|
||||
|
||||
fn init(source: []const u8) ParseContext {
|
||||
return .{
|
||||
.source = source,
|
||||
.offset = 0,
|
||||
.line = 0,
|
||||
.column = 0
|
||||
};
|
||||
}
|
||||
|
||||
fn peek(self: *ParseContext) ?u8 {
|
||||
return if (self.offset < self.source.len) self.source[self.offset] else null;
|
||||
}
|
||||
|
||||
fn consume(self: *ParseContext) !u8 {
|
||||
if (self.offset < self.source.len) {
|
||||
return self.consumeNoEof();
|
||||
}
|
||||
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn consumeNoEof(self: *ParseContext) u8 {
|
||||
std.debug.assert(self.offset < self.source.len);
|
||||
const c = self.source[self.offset];
|
||||
self.offset += 1;
|
||||
|
||||
if (c == '\n') {
|
||||
self.line += 1;
|
||||
self.column = 0;
|
||||
} else {
|
||||
self.column += 1;
|
||||
}
|
||||
|
||||
return c;
|
||||
}
|
||||
|
||||
fn eat(self: *ParseContext, char: u8) bool {
|
||||
self.expect(char) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expect(self: *ParseContext, expected: u8) !void {
|
||||
if (self.peek()) |actual| {
|
||||
if (expected != actual) {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
_ = self.consumeNoEof();
|
||||
return;
|
||||
}
|
||||
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn eatStr(self: *ParseContext, text: []const u8) bool {
|
||||
self.expectStr(text) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expectStr(self: *ParseContext, text: []const u8) !void {
|
||||
if (self.source.len < self.offset + text.len) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (std.mem.startsWith(u8, self.source[self.offset ..], text)) {
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (i += 1) {
|
||||
_ = self.consumeNoEof();
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn eatWs(self: *ParseContext) bool {
|
||||
var ws = false;
|
||||
|
||||
while (self.peek()) |ch| {
|
||||
switch (ch) {
|
||||
' ', '\t', '\n', '\r' => {
|
||||
ws = true;
|
||||
_ = self.consumeNoEof();
|
||||
},
|
||||
else => break
|
||||
}
|
||||
}
|
||||
|
||||
return ws;
|
||||
}
|
||||
|
||||
fn expectWs(self: *ParseContext) !void {
|
||||
if (!self.eatWs()) return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn currentLine(self: ParseContext) []const u8 {
|
||||
var begin: usize = 0;
|
||||
if (mem.lastIndexOfScalar(u8, self.source[0 .. self.offset], '\n')) |prev_nl| {
|
||||
begin = prev_nl + 1;
|
||||
}
|
||||
|
||||
var end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse self.source.len;
|
||||
return self.source[begin .. end];
|
||||
}
|
||||
};
|
||||
|
||||
test "ParseContext" {
|
||||
{
|
||||
var ctx = ParseContext.init("I like pythons");
|
||||
try testing.expectEqual(@as(?u8, 'I'), ctx.peek());
|
||||
try testing.expectEqual(@as(u8, 'I'), ctx.consumeNoEof());
|
||||
try testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
try testing.expectEqual(@as(u8, ' '), try ctx.consume());
|
||||
|
||||
try testing.expect(ctx.eat('l'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
try testing.expectEqual(false, ctx.eat('a'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), ctx.peek());
|
||||
|
||||
try ctx.expect('i');
|
||||
try testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, ctx.expect('a'));
|
||||
try testing.expectEqual(@as(?u8, 'k'), ctx.peek());
|
||||
|
||||
try testing.expect(ctx.eatStr("ke"));
|
||||
try testing.expectEqual(@as(?u8, ' '), ctx.peek());
|
||||
|
||||
try testing.expect(ctx.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expectEqual(false, ctx.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
|
||||
try testing.expectEqual(false, ctx.eatStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
|
||||
try testing.expectError(error.UnexpectedEof, ctx.expectStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, ctx.expectStr("pytn"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), ctx.peek());
|
||||
try ctx.expectStr("python");
|
||||
try testing.expectEqual(@as(?u8, 's'), ctx.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("");
|
||||
try testing.expectEqual(ctx.peek(), null);
|
||||
try testing.expectError(error.UnexpectedEof, ctx.consume());
|
||||
try testing.expectEqual(ctx.eat('p'), false);
|
||||
try testing.expectError(error.UnexpectedEof, ctx.expect('p'));
|
||||
}
|
||||
}
|
||||
|
||||
pub const ParseError = error {
|
||||
IllegalCharacter,
|
||||
UnexpectedEof,
|
||||
UnexpectedCharacter,
|
||||
UnclosedValue,
|
||||
UnclosedComment,
|
||||
InvalidName,
|
||||
InvalidEntity,
|
||||
InvalidStandaloneValue,
|
||||
NonMatchingClosingTag,
|
||||
InvalidDocument,
|
||||
OutOfMemory
|
||||
};
|
||||
|
||||
pub fn parse(backing_allocator: *Allocator, source: []const u8) !Document {
|
||||
var ctx = ParseContext.init(source);
|
||||
return try parseDocument(&ctx, backing_allocator);
|
||||
}
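A hedged sketch of how this small XML module is typically driven by the registry parser; the element names are only illustrative and follow the vk.xml layout:

const std = @import("std");
const xml = @import("xml.zig");

// Count <extension> entries under <extensions> in a parsed registry document.
fn countExtensions(allocator: *std.mem.Allocator, source: []const u8) !usize {
    const doc = try xml.parse(allocator, source);
    defer doc.deinit();

    const extensions = doc.root.findChildByTag("extensions") orelse return 0;
    var it = extensions.findChildrenByTag("extension");
    var count: usize = 0;
    while (it.next()) |_| count += 1;
    return count;
}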
|
||||
|
||||
fn parseDocument(ctx: *ParseContext, backing_allocator: *Allocator) !Document {
|
||||
var doc = Document{
|
||||
.arena = ArenaAllocator.init(backing_allocator),
|
||||
.xml_decl = null,
|
||||
.root = undefined
|
||||
};
|
||||
|
||||
errdefer doc.deinit();
|
||||
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
doc.xml_decl = try tryParseProlog(ctx, &doc.arena.allocator);
|
||||
_ = ctx.eatWs();
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
doc.root = (try tryParseElement(ctx, &doc.arena.allocator)) orelse return error.InvalidDocument;
|
||||
_ = ctx.eatWs();
|
||||
try trySkipComments(ctx, &doc.arena.allocator);
|
||||
|
||||
if (ctx.peek() != null) return error.InvalidDocument;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
fn parseAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
|
||||
const quote = try ctx.consume();
|
||||
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;
|
||||
|
||||
const begin = ctx.offset;
|
||||
|
||||
while (true) {
|
||||
const c = ctx.consume() catch return error.UnclosedValue;
|
||||
if (c == quote) break;
|
||||
}
|
||||
|
||||
const end = ctx.offset - 1;
|
||||
|
||||
return try dupeAndUnescape(alloc, ctx.source[begin .. end]);
|
||||
}
|
||||
|
||||
fn parseEqAttrValue(ctx: *ParseContext, alloc: *Allocator) ![]const u8 {
|
||||
_ = ctx.eatWs();
|
||||
try ctx.expect('=');
|
||||
_ = ctx.eatWs();
|
||||
|
||||
return try parseAttrValue(ctx, alloc);
|
||||
}
|
||||
|
||||
fn parseNameNoDupe(ctx: *ParseContext) ![]const u8 {
|
||||
// XML's spec on names is very long, so to make this easier
|
||||
// we just take any character that is not special and not whitespace
|
||||
const begin = ctx.offset;
|
||||
|
||||
while (ctx.peek()) |ch| {
|
||||
switch (ch) {
|
||||
' ', '\t', '\n', '\r' => break,
|
||||
'&', '"', '\'', '<', '>', '?', '=', '/' => break,
|
||||
else => _ = ctx.consumeNoEof()
|
||||
}
|
||||
}
|
||||
|
||||
const end = ctx.offset;
|
||||
if (begin == end) return error.InvalidName;
|
||||
|
||||
return ctx.source[begin .. end];
|
||||
}
|
||||
|
||||
fn tryParseCharData(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
|
||||
const begin = ctx.offset;
|
||||
|
||||
while (ctx.peek()) |ch| {
|
||||
switch (ch) {
|
||||
'<' => break,
|
||||
else => _ = ctx.consumeNoEof()
|
||||
}
|
||||
}
|
||||
|
||||
const end = ctx.offset;
|
||||
if (begin == end) return null;
|
||||
|
||||
return try dupeAndUnescape(alloc, ctx.source[begin .. end]);
|
||||
}
|
||||
|
||||
fn parseContent(ctx: *ParseContext, alloc: *Allocator) ParseError!Content {
|
||||
if (try tryParseCharData(ctx, alloc)) |cd| {
|
||||
return Content{.CharData = cd};
|
||||
} else if (try tryParseComment(ctx, alloc)) |comment| {
|
||||
return Content{.Comment = comment};
|
||||
} else if (try tryParseElement(ctx, alloc)) |elem| {
|
||||
return Content{.Element = elem};
|
||||
} else {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
}
|
||||
|
||||
fn tryParseAttr(ctx: *ParseContext, alloc: *Allocator) !?*Attribute {
|
||||
const name = parseNameNoDupe(ctx) catch return null;
|
||||
_ = ctx.eatWs();
|
||||
try ctx.expect('=');
|
||||
_ = ctx.eatWs();
|
||||
const value = try parseAttrValue(ctx, alloc);
|
||||
|
||||
const attr = try alloc.create(Attribute);
|
||||
attr.name = try mem.dupe(alloc, u8, name);
|
||||
attr.value = value;
|
||||
return attr;
|
||||
}
|
||||
|
||||
fn tryParseElement(ctx: *ParseContext, alloc: *Allocator) !?*Element {
|
||||
const start = ctx.offset;
|
||||
if (!ctx.eat('<')) return null;
|
||||
const tag = parseNameNoDupe(ctx) catch {
|
||||
ctx.offset = start;
|
||||
return null;
|
||||
};
|
||||
|
||||
const element = try alloc.create(Element);
|
||||
element.* = Element.init(try std.mem.dupe(alloc, u8, tag), alloc);
|
||||
|
||||
while (ctx.eatWs()) {
|
||||
const attr = (try tryParseAttr(ctx, alloc)) orelse break;
|
||||
try element.attributes.append(attr);
|
||||
}
|
||||
|
||||
if (ctx.eatStr("/>")) {
|
||||
return element;
|
||||
}
|
||||
|
||||
try ctx.expect('>');
|
||||
|
||||
while (true) {
|
||||
if (ctx.peek() == null) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (ctx.eatStr("</")) {
|
||||
break;
|
||||
}
|
||||
|
||||
const content = try parseContent(ctx, alloc);
|
||||
try element.children.append(content);
|
||||
}
|
||||
|
||||
const closing_tag = try parseNameNoDupe(ctx);
|
||||
if (!std.mem.eql(u8, tag, closing_tag)) {
|
||||
return error.NonMatchingClosingTag;
|
||||
}
|
||||
|
||||
_ = ctx.eatWs();
|
||||
try ctx.expect('>');
|
||||
return element;
|
||||
}
|
||||
|
||||
test "tryParseElement" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<= a='b'/>");
|
||||
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
|
||||
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
|
||||
const size_attr = elem.?.attributes.items[0];
|
||||
try testing.expectEqualSlices(u8, size_attr.name, "size");
|
||||
try testing.expectEqualSlices(u8, size_attr.value, "15");
|
||||
|
||||
const color_attr = elem.?.attributes.items[1];
|
||||
try testing.expectEqualSlices(u8, color_attr.name, "color");
|
||||
try testing.expectEqualSlices(u8, color_attr.value, "green");
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<python>test</python>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
|
||||
const elem = try tryParseElement(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "a");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[2].CharData, "d");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[3].Element.tag, "e");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[4].CharData, "f");
|
||||
try testing.expectEqualSlices(u8, elem.?.children.items[5].Comment, "g");
|
||||
}
|
||||
}
|
||||
|
||||
fn tryParseProlog(ctx: *ParseContext, alloc: *Allocator) !?*XmlDecl {
|
||||
const start = ctx.offset;
|
||||
if (!ctx.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(ctx), "xml")) {
|
||||
ctx.offset = start;
|
||||
return null;
|
||||
}
|
||||
|
||||
const decl = try alloc.create(XmlDecl);
|
||||
decl.encoding = null;
|
||||
decl.standalone = null;
|
||||
|
||||
// Version info is mandatory
|
||||
try ctx.expectWs();
|
||||
try ctx.expectStr("version");
|
||||
decl.version = try parseEqAttrValue(ctx, alloc);
|
||||
|
||||
if (ctx.eatWs()) {
|
||||
// Optional encoding and standalone info
|
||||
var require_ws = false;
|
||||
|
||||
if (ctx.eatStr("encoding")) {
|
||||
decl.encoding = try parseEqAttrValue(ctx, alloc);
|
||||
require_ws = true;
|
||||
}
|
||||
|
||||
if (require_ws == ctx.eatWs() and ctx.eatStr("standalone")) {
|
||||
const standalone = try parseEqAttrValue(ctx, alloc);
|
||||
if (std.mem.eql(u8, standalone, "yes")) {
|
||||
decl.standalone = true;
|
||||
} else if (std.mem.eql(u8, standalone, "no")) {
|
||||
decl.standalone = false;
|
||||
} else {
|
||||
return error.InvalidStandaloneValue;
|
||||
}
|
||||
}
|
||||
|
||||
_ = ctx.eatWs();
|
||||
}
|
||||
|
||||
try ctx.expectStr("?>");
|
||||
return decl;
|
||||
}
|
||||
|
||||
test "tryParseProlog" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<?xmla version='aa'?>");
|
||||
try testing.expectEqual(@as(?*XmlDecl, null), try tryParseProlog(&ctx, alloc));
|
||||
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<?xml version='aa'?>");
|
||||
const decl = try tryParseProlog(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.version);
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.encoding);
|
||||
try testing.expectEqual(@as(?bool, null), decl.?.standalone);
|
||||
}
|
||||
|
||||
{
|
||||
var ctx = ParseContext.init("<?xml version=\"aa\" encoding = 'bbb' standalone \t = 'yes'?>");
|
||||
const decl = try tryParseProlog(&ctx, alloc);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.version);
|
||||
try testing.expectEqualSlices(u8, "bbb", decl.?.encoding.?);
|
||||
try testing.expectEqual(@as(?bool, true), decl.?.standalone.?);
|
||||
}
|
||||
}
|
||||
|
||||
fn trySkipComments(ctx: *ParseContext, alloc: *Allocator) !void {
|
||||
while (try tryParseComment(ctx, alloc)) |_| {
|
||||
_ = ctx.eatWs();
|
||||
}
|
||||
}
|
||||
|
||||
fn tryParseComment(ctx: *ParseContext, alloc: *Allocator) !?[]const u8 {
|
||||
if (!ctx.eatStr("<!--")) return null;
|
||||
|
||||
const begin = ctx.offset;
|
||||
while (!ctx.eatStr("-->")) {
|
||||
_ = ctx.consume() catch return error.UnclosedComment;
|
||||
}
|
||||
|
||||
const end = ctx.offset - "-->".len;
|
||||
return try mem.dupe(alloc, u8, ctx.source[begin .. end]);
|
||||
}
|
||||
|
||||
fn unescapeEntity(text: []const u8) !u8 {
|
||||
const EntitySubstition = struct {
|
||||
text: []const u8,
|
||||
replacement: u8
|
||||
};
|
||||
|
||||
const entities = [_]EntitySubstition{
|
||||
.{.text = "<", .replacement = '<'},
|
||||
.{.text = ">", .replacement = '>'},
|
||||
.{.text = "&", .replacement = '&'},
|
||||
.{.text = "'", .replacement = '\''},
|
||||
.{.text = """, .replacement = '"'}
|
||||
};
|
||||
|
||||
for (entities) |entity| {
|
||||
if (std.mem.eql(u8, text, entity.text)) return entity.replacement;
|
||||
}
|
||||
|
||||
return error.InvalidEntity;
|
||||
}
|
||||
|
||||
fn dupeAndUnescape(alloc: *Allocator, text: []const u8) ![]const u8 {
|
||||
const str = try alloc.alloc(u8, text.len);
|
||||
|
||||
var j: usize = 0;
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (j += 1) {
|
||||
if (text[i] == '&') {
|
||||
const entity_end = 1 + (mem.indexOfScalarPos(u8, text, i, ';') orelse return error.InvalidEntity);
|
||||
str[j] = try unescapeEntity(text[i .. entity_end]);
|
||||
i = entity_end;
|
||||
} else {
|
||||
str[j] = text[i];
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return alloc.shrink(str, j);
|
||||
}
|
||||
|
||||
test "dupeAndUnescape" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
try testing.expectEqualSlices(u8, "test", try dupeAndUnescape(alloc, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try dupeAndUnescape(alloc, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, dupeAndUnescape(alloc, "python&boa"));
|
||||
}
|
||||
|
||||
test "Top level comments" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
var alloc = &arena.allocator;
|
||||
|
||||
const doc = try parse(alloc, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
try testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
|
||||
@@ -2,6 +2,60 @@ const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const Allocator = mem.Allocator;
|
||||
|
||||
pub fn isZigPrimitiveType(name: []const u8) bool {
|
||||
if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) {
|
||||
for (name[1..]) |c| {
|
||||
switch (c) {
|
||||
'0'...'9' => {},
|
||||
else => break,
|
||||
}
|
||||
} else return true;
|
||||
}
|
||||
|
||||
const primitives = [_][]const u8{
|
||||
"void",
|
||||
"comptime_float",
|
||||
"comptime_int",
|
||||
"bool",
|
||||
"isize",
|
||||
"usize",
|
||||
"f16",
|
||||
"f32",
|
||||
"f64",
|
||||
"f128",
|
||||
"noreturn",
|
||||
"type",
|
||||
"anyerror",
|
||||
"c_short",
|
||||
"c_ushort",
|
||||
"c_int",
|
||||
"c_uint",
|
||||
"c_long",
|
||||
"c_ulong",
|
||||
"c_longlong",
|
||||
"c_ulonglong",
|
||||
"c_longdouble",
|
||||
// Removed in stage 2 in https://github.com/ziglang/zig/commit/05cf44933d753f7a5a53ab289ea60fd43761de57,
|
||||
// but these are still invalid identifiers in stage 1.
|
||||
"undefined",
|
||||
"true",
|
||||
"false",
|
||||
"null",
|
||||
};
|
||||
|
||||
for (primitives) |reserved| {
|
||||
if (mem.eql(u8, reserved, name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
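A small test sketch, not part of the diff, that spells out what the primitive-type check above accepts and rejects:

test "isZigPrimitiveType" {
    try std.testing.expect(isZigPrimitiveType("u32"));
    try std.testing.expect(isZigPrimitiveType("i128"));
    try std.testing.expect(isZigPrimitiveType("bool"));
    try std.testing.expect(!isZigPrimitiveType("u32x4"));
    try std.testing.expect(!isZigPrimitiveType("VkInstance"));
}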
|
||||
pub fn writeIdentifier(w: *std.Io.Writer, id: []const u8) !void {
|
||||
try w.print("{f}", .{std.zig.fmtId(id)});
|
||||
}
|
||||
|
||||
pub const CaseStyle = enum {
|
||||
snake,
|
||||
screaming_snake,
|
||||
@@ -51,7 +105,7 @@ pub const SegmentIterator = struct {
|
||||
}
|
||||
|
||||
const end = self.nextBoundary();
|
||||
const word = self.text[self.offset .. end];
|
||||
const word = self.text[self.offset..end];
|
||||
self.offset = end;
|
||||
return word;
|
||||
}
|
||||
@@ -67,12 +121,12 @@ pub const SegmentIterator = struct {
|
||||
|
||||
pub const IdRenderer = struct {
|
||||
tags: []const []const u8,
|
||||
text_cache: std.ArrayList(u8),
|
||||
text_cache: std.Io.Writer.Allocating,
|
||||
|
||||
pub fn init(allocator: *Allocator, tags: []const []const u8) IdRenderer {
|
||||
pub fn init(allocator: Allocator, tags: []const []const u8) IdRenderer {
|
||||
return .{
|
||||
.tags = tags,
|
||||
.text_cache = std.ArrayList(u8).init(allocator),
|
||||
.text_cache = .init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -83,25 +137,24 @@ pub const IdRenderer = struct {
|
||||
fn renderSnake(self: *IdRenderer, screaming: bool, id: []const u8, tag: ?[]const u8) !void {
|
||||
var it = SegmentIterator.init(id);
|
||||
var first = true;
|
||||
const transform = if (screaming) std.ascii.toUpper else std.ascii.toLower;
|
||||
|
||||
while (it.next()) |segment| {
|
||||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
try self.text_cache.append('_');
|
||||
try self.text_cache.writer.writeByte('_');
|
||||
}
|
||||
|
||||
for (segment) |c| {
|
||||
try self.text_cache.append(transform(c));
|
||||
try self.text_cache.writer.writeByte(if (screaming) std.ascii.toUpper(c) else std.ascii.toLower(c));
|
||||
}
|
||||
}
|
||||
|
||||
if (tag) |name| {
|
||||
try self.text_cache.append('_');
|
||||
try self.text_cache.writer.writeByte('_');
|
||||
|
||||
for (name) |c| {
|
||||
try self.text_cache.append(transform(c));
|
||||
try self.text_cache.writer.writeByte(if (screaming) std.ascii.toUpper(c) else std.ascii.toLower(c));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -113,7 +166,7 @@ pub const IdRenderer = struct {
|
||||
while (it.next()) |segment| {
|
||||
var i: usize = 0;
|
||||
while (i < segment.len and std.ascii.isDigit(segment[i])) {
|
||||
try self.text_cache.append(segment[i]);
|
||||
try self.text_cache.writer.writeByte(segment[i]);
|
||||
i += 1;
|
||||
}
|
||||
|
||||
@@ -122,38 +175,34 @@ pub const IdRenderer = struct {
|
||||
}
|
||||
|
||||
if (i == 0 and lower_first) {
|
||||
try self.text_cache.append(std.ascii.toLower(segment[i]));
|
||||
try self.text_cache.writer.writeByte(std.ascii.toLower(segment[i]));
|
||||
} else {
|
||||
try self.text_cache.append(std.ascii.toUpper(segment[i]));
|
||||
try self.text_cache.writer.writeByte(std.ascii.toUpper(segment[i]));
|
||||
}
|
||||
lower_first = false;
|
||||
|
||||
for (segment[i + 1..]) |c| {
|
||||
try self.text_cache.append(std.ascii.toLower(c));
|
||||
for (segment[i + 1 ..]) |c| {
|
||||
try self.text_cache.writer.writeByte(std.ascii.toLower(c));
|
||||
}
|
||||
}
|
||||
|
||||
if (tag) |name| {
|
||||
try self.text_cache.appendSlice(name);
|
||||
try self.text_cache.writer.writeAll(name);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(self: IdRenderer, out: anytype, id: []const u8) !void {
|
||||
try out.print("{}", .{ std.zig.fmtId(id) });
|
||||
pub fn renderFmt(self: *IdRenderer, out: *std.Io.Writer, comptime fmt: []const u8, args: anytype) !void {
|
||||
_ = self.text_cache.writer.consumeAll();
|
||||
try self.text_cache.writer.print(fmt, args);
|
||||
try writeIdentifier(out, self.text_cache.writer.buffered());
|
||||
}
|
||||
|
||||
pub fn renderFmt(self: *IdRenderer, out: anytype, comptime fmt: []const u8, args: anytype) !void {
|
||||
self.text_cache.items.len = 0;
|
||||
try std.fmt.format(self.text_cache.writer(), fmt, args);
|
||||
try out.print("{}", .{ std.zig.fmtId(self.text_cache.items) });
|
||||
}
|
||||
|
||||
pub fn renderWithCase(self: *IdRenderer, out: anytype, case_style: CaseStyle, id: []const u8) !void {
|
||||
pub fn renderWithCase(self: *IdRenderer, out: *std.Io.Writer, case_style: CaseStyle, id: []const u8) !void {
|
||||
const tag = self.getAuthorTag(id);
|
||||
// The trailing underscore doesn't need to be removed here as it's removed by the SegmentIterator.
|
||||
const adjusted_id = if (tag) |name| id[0 .. id.len - name.len] else id;
|
||||
|
||||
self.text_cache.items.len = 0;
|
||||
_ = self.text_cache.writer.consumeAll();
|
||||
|
||||
switch (case_style) {
|
||||
.snake => try self.renderSnake(false, adjusted_id, tag),
|
||||
@@ -162,7 +211,7 @@ pub const IdRenderer = struct {
|
||||
.camel => try self.renderCamel(false, adjusted_id, tag),
|
||||
}
|
||||
|
||||
try out.print("{}", .{ std.zig.fmtId(self.text_cache.items) });
|
||||
try writeIdentifier(out, self.text_cache.writer.buffered());
|
||||
}
|
||||
|
||||
pub fn getAuthorTag(self: IdRenderer, id: []const u8) ?[]const u8 {
|
||||
190 src/main.zig (new file)
@@ -0,0 +1,190 @@
|
||||
const std = @import("std");
|
||||
|
||||
const generator = @import("vulkan/generator.zig");
|
||||
|
||||
fn invalidUsage(prog_name: []const u8, comptime fmt: []const u8, args: anytype) noreturn {
|
||||
std.log.err(fmt, args);
|
||||
std.log.err("see {s} --help for usage", .{prog_name});
|
||||
std.process.exit(1);
|
||||
}
|
||||
|
||||
fn reportParseErrors(tree: std.zig.Ast) !void {
|
||||
var buf: [1024]u8 = undefined;
|
||||
var stderr = std.fs.File.stderr().writer(&buf);
|
||||
const w = &stderr.interface;
|
||||
for (tree.errors) |err| {
|
||||
const loc = tree.tokenLocation(0, err.token);
|
||||
try w.print("(vulkan-zig error):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
|
||||
try tree.renderError(err, w);
|
||||
try w.print("\n{s}\n", .{tree.source[loc.line_start..loc.line_end]});
|
||||
for (0..loc.column) |_| {
|
||||
try w.writeAll(" ");
|
||||
}
|
||||
try w.writeAll("^\n");
|
||||
}
|
||||
try w.flush();
|
||||
}
|
||||
|
||||
fn oomPanic() noreturn {
|
||||
@panic("Out of memory");
|
||||
}
|
||||
|
||||
pub fn main() !void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = arena.allocator();
|
||||
|
||||
var args = std.process.argsWithAllocator(allocator) catch |err| switch (err) {
|
||||
error.OutOfMemory => oomPanic(),
|
||||
};
|
||||
const prog_name = args.next() orelse "vulkan-zig-generator";
|
||||
|
||||
var maybe_xml_path: ?[]const u8 = null;
|
||||
var maybe_out_path: ?[]const u8 = null;
|
||||
var maybe_video_xml_path: ?[]const u8 = null;
|
||||
var debug: bool = false;
|
||||
var api = generator.Api.vulkan;
|
||||
|
||||
while (args.next()) |arg| {
|
||||
if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) {
|
||||
@setEvalBranchQuota(2000);
|
||||
var buf: [1024]u8 = undefined;
|
||||
var w = std.fs.File.stdout().writer(&buf);
|
||||
w.interface.print(
|
||||
\\Utility to generate a Zig binding from the Vulkan XML API registry.
|
||||
\\
|
||||
\\The most recent Vulkan XML API registry can be obtained from
|
||||
\\https://github.com/KhronosGroup/Vulkan-Docs/blob/master/xml/vk.xml,
|
||||
\\and the most recent LunarG Vulkan SDK version can be found at
|
||||
\\$VULKAN_SDK/x86_64/share/vulkan/registry/vk.xml.
|
||||
\\
|
||||
\\Usage: {s} [options] <spec xml path> <output zig source>
|
||||
\\Options:
|
||||
\\-h --help show this message and exit.
|
||||
\\-a --api <api> Generate API for 'vulkan' or 'vulkansc'. Defaults to 'vulkan'.
|
||||
\\--debug Write out unformatted source if it does not parse correctly.
|
||||
\\--video <path> Also generate Vulkan Video API bindings from video.xml
|
||||
\\ registry at <path>.
|
||||
\\
|
||||
,
|
||||
.{prog_name},
|
||||
) catch |err| {
|
||||
std.process.fatal("failed to write to stdout: {s}", .{@errorName(err)});
|
||||
};
|
||||
w.interface.flush() catch |err| {
|
||||
std.process.fatal("failed to flush stdout: {s}", .{@errorName(err)});
|
||||
};
|
||||
return;
|
||||
} else if (std.mem.eql(u8, arg, "-a") or std.mem.eql(u8, arg, "--api")) {
|
||||
const api_str = args.next() orelse {
|
||||
invalidUsage(prog_name, "{s} expects argument <api>", .{arg});
|
||||
};
|
||||
api = std.meta.stringToEnum(generator.Api, api_str) orelse {
|
||||
invalidUsage(prog_name, "invalid api '{s}'", .{api_str});
|
||||
};
|
||||
} else if (std.mem.eql(u8, arg, "--debug")) {
|
||||
debug = true;
|
||||
} else if (std.mem.eql(u8, arg, "--video")) {
|
||||
maybe_video_xml_path = args.next() orelse {
|
||||
invalidUsage(prog_name, "{s} expects argument <path>", .{arg});
|
||||
};
|
||||
} else if (maybe_xml_path == null) {
|
||||
maybe_xml_path = arg;
|
||||
} else if (maybe_out_path == null) {
|
||||
maybe_out_path = arg;
|
||||
} else {
|
||||
invalidUsage(prog_name, "superficial argument '{s}'", .{arg});
|
||||
}
|
||||
}
|
||||
|
||||
const xml_path = maybe_xml_path orelse {
|
||||
invalidUsage(prog_name, "missing required argument <spec xml path>", .{});
|
||||
};
|
||||
|
||||
const out_path = maybe_out_path orelse {
|
||||
invalidUsage(prog_name, "missing required argument <output zig source>", .{});
|
||||
};
|
||||
|
||||
const cwd = std.fs.cwd();
|
||||
const xml_src = cwd.readFileAlloc(xml_path, allocator, .unlimited) catch |err| {
|
||||
std.process.fatal("failed to open input file '{s}' ({s})", .{ xml_path, @errorName(err) });
|
||||
};
|
||||
|
||||
const maybe_video_xml_src = if (maybe_video_xml_path) |video_xml_path|
|
||||
cwd.readFileAlloc(video_xml_path, allocator, .unlimited) catch |err| {
|
||||
std.process.fatal("failed to open input file '{s}' ({s})", .{ video_xml_path, @errorName(err) });
|
||||
}
|
||||
else
|
||||
null;
|
||||
|
||||
var aw: std.Io.Writer.Allocating = .init(allocator);
|
||||
generator.generate(allocator, api, xml_src, maybe_video_xml_src, &aw.writer) catch |err| {
|
||||
if (debug) {
|
||||
return err;
|
||||
}
|
||||
|
||||
switch (err) {
|
||||
error.InvalidXml => {
|
||||
std.log.err("invalid vulkan registry - invalid xml", .{});
|
||||
std.log.err("please check that the correct vk.xml file is passed", .{});
|
||||
std.process.exit(1);
|
||||
},
|
||||
error.InvalidRegistry => {
|
||||
std.log.err("invalid vulkan registry - registry is valid xml but contents are invalid", .{});
|
||||
std.log.err("please check that the correct vk.xml file is passed", .{});
|
||||
std.process.exit(1);
|
||||
},
|
||||
error.UnhandledBitfieldStruct => {
|
||||
std.log.err("unhandled struct with bit fields detected in vk.xml", .{});
|
||||
std.log.err("this is a bug in vulkan-zig", .{});
|
||||
std.log.err("please make a bug report at https://github.com/Snektron/vulkan-zig/issues/", .{});
|
||||
std.process.exit(1);
|
||||
},
|
||||
error.OutOfMemory, error.WriteFailed => oomPanic(),
|
||||
}
|
||||
};
|
||||
|
||||
aw.writer.writeByte(0) catch oomPanic();
|
||||
|
||||
const buffered = aw.writer.buffered();
|
||||
const src = buffered[0 .. buffered.len - 1 :0];
|
||||
const tree = std.zig.Ast.parse(allocator, src, .zig) catch |err| switch (err) {
|
||||
error.OutOfMemory => oomPanic(),
|
||||
};
|
||||
|
||||
const formatted = if (tree.errors.len > 0) blk: {
|
||||
std.log.err("generated invalid zig code", .{});
|
||||
std.log.err("this is a bug in vulkan-zig", .{});
|
||||
std.log.err("please make a bug report at https://github.com/Snektron/vulkan-zig/issues/", .{});
|
||||
std.log.err("or run with --debug to write out unformatted source", .{});
|
||||
|
||||
reportParseErrors(tree) catch |err| {
|
||||
std.process.fatal("failed to dump ast errors: {s}", .{@errorName(err)});
|
||||
};
|
||||
|
||||
if (debug) {
|
||||
break :blk src;
|
||||
}
|
||||
std.process.exit(1);
|
||||
} else tree.renderAlloc(allocator) catch |err| switch (err) {
|
||||
error.OutOfMemory => oomPanic(),
|
||||
};
|
||||
|
||||
if (std.fs.path.dirname(out_path)) |dir| {
|
||||
cwd.makePath(dir) catch |err| {
|
||||
std.process.fatal("failed to create output directory '{s}' ({s})", .{ dir, @errorName(err) });
|
||||
};
|
||||
}
|
||||
|
||||
cwd.writeFile(.{
|
||||
.sub_path = out_path,
|
||||
.data = formatted,
|
||||
}) catch |err| {
|
||||
std.process.fatal("failed to write to output file '{s}' ({s})", .{ out_path, @errorName(err) });
|
||||
};
|
||||
}
|
||||
|
||||
test "main" {
|
||||
_ = @import("xml.zig");
|
||||
_ = @import("vulkan/c_parse.zig");
|
||||
}
|
||||
@@ -70,7 +70,7 @@ pub const CTokenizer = struct {
|
||||
}
|
||||
}
|
||||
|
||||
const token_text = self.source[start .. self.offset];
|
||||
const token_text = self.source[start..self.offset];
|
||||
|
||||
const kind = if (mem.eql(u8, token_text, "typedef"))
|
||||
Token.Kind.kw_typedef
|
||||
@@ -83,24 +83,32 @@ pub const CTokenizer = struct {
|
||||
else
|
||||
Token.Kind.id;
|
||||
|
||||
return .{.kind = kind, .text = token_text};
|
||||
return .{ .kind = kind, .text = token_text };
|
||||
}
|
||||
|
||||
fn int(self: *CTokenizer) Token {
|
||||
const start = self.offset;
|
||||
_ = self.consumeNoEof();
|
||||
|
||||
const hex = self.peek() == 'x';
|
||||
if (hex) {
|
||||
_ = self.consumeNoEof();
|
||||
}
|
||||
|
||||
while (true) {
|
||||
const c = self.peek() orelse break;
|
||||
switch (c) {
|
||||
switch (self.peek() orelse break) {
|
||||
'0'...'9' => _ = self.consumeNoEof(),
|
||||
'A'...'F', 'a'...'f' => {
|
||||
if (!hex) break;
|
||||
_ = self.consumeNoEof();
|
||||
},
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
.kind = .int,
|
||||
.text = self.source[start .. self.offset],
|
||||
.text = self.source[start..self.offset],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -115,7 +123,7 @@ pub const CTokenizer = struct {
|
||||
|
||||
pub fn next(self: *CTokenizer) !?Token {
|
||||
self.skipws();
|
||||
if (mem.startsWith(u8, self.source[self.offset ..], "//") or self.in_comment) {
|
||||
if (mem.startsWith(u8, self.source[self.offset..], "//") or self.in_comment) {
|
||||
const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse {
|
||||
self.offset = self.source.len;
|
||||
self.in_comment = true;
|
||||
@@ -143,15 +151,12 @@ pub const CTokenizer = struct {
|
||||
']' => kind = .rbracket,
|
||||
'(' => kind = .lparen,
|
||||
')' => kind = .rparen,
|
||||
else => return error.UnexpectedCharacter
|
||||
else => return error.UnexpectedCharacter,
|
||||
}
|
||||
|
||||
const start = self.offset;
|
||||
_ = self.consumeNoEof();
|
||||
return Token{
|
||||
.kind = kind,
|
||||
.text = self.source[start .. self.offset]
|
||||
};
|
||||
return Token{ .kind = kind, .text = self.source[start..self.offset] };
|
||||
}
|
||||
};
|
||||
|
||||
@@ -167,19 +172,22 @@ pub const XmlCTokenizer = struct {
|
||||
}
|
||||
|
||||
fn elemToToken(elem: *xml.Element) !?Token {
|
||||
if (elem.children.items.len != 1 or elem.children.items[0] != .CharData) {
|
||||
// Sometimes we encounter empty comment tags. Filter those out
|
||||
// by early returning here, otherwise the next check will
|
||||
// determine that the input is not valid XML.
|
||||
if (mem.eql(u8, elem.tag, "comment")) {
|
||||
return null;
|
||||
} else if (elem.children.len != 1 or elem.children[0] != .char_data) {
|
||||
return error.InvalidXml;
|
||||
}
|
||||
|
||||
const text = elem.children.items[0].CharData;
|
||||
const text = elem.children[0].char_data;
|
||||
if (mem.eql(u8, elem.tag, "type")) {
|
||||
return Token{.kind = .type_name, .text = text};
|
||||
return Token{ .kind = .type_name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "enum")) {
|
||||
return Token{.kind = .enum_name, .text = text};
|
||||
return Token{ .kind = .enum_name, .text = text };
|
||||
} else if (mem.eql(u8, elem.tag, "name")) {
|
||||
return Token{.kind = .name, .text = text};
|
||||
} else if (mem.eql(u8, elem.tag, "comment")) {
|
||||
return null;
|
||||
return Token{ .kind = .name, .text = text };
|
||||
} else {
|
||||
return error.InvalidTag;
|
||||
}
|
||||
@@ -206,9 +214,9 @@ pub const XmlCTokenizer = struct {
|
||||
|
||||
if (self.it.next()) |child| {
|
||||
switch (child.*) {
|
||||
.CharData => |cdata| self.ctok = CTokenizer{.source = cdata, .in_comment = in_comment},
|
||||
.Comment => {}, // xml comment
|
||||
.Element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
.char_data => |cdata| self.ctok = CTokenizer{ .source = cdata, .in_comment = in_comment },
|
||||
.comment => {}, // xml comment
|
||||
.element => |elem| if (!in_comment) if (try elemToToken(elem)) |tok| return tok,
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
@@ -244,9 +252,9 @@ pub const XmlCTokenizer = struct {
|
||||
};
|
||||
|
||||
// TYPEDEF = kw_typedef DECLARATION ';'
|
||||
pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
pub fn parseTypedef(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
_ = try xctok.expect(.kw_typedef);
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
_ = try xctok.expect(.semicolon);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
@@ -254,18 +262,19 @@ pub fn parseTypedef(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Decl
|
||||
|
||||
return registry.Declaration{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.decl_type = .{.typedef = decl.decl_type},
|
||||
.decl_type = .{ .typedef = decl.decl_type },
|
||||
};
|
||||
}
|
||||
|
||||
// MEMBER = DECLARATION (':' int)?
|
||||
pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
var field = registry.Container.Field {
|
||||
pub fn parseMember(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Container.Field {
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
var field = registry.Container.Field{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.field_type = decl.decl_type,
|
||||
.bits = null,
|
||||
.is_buffer_len = false,
|
||||
.is_optional = false,
|
||||
};
|
||||
|
||||
if (try xctok.peek()) |tok| {
|
||||
@@ -287,20 +296,40 @@ pub fn parseMember(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Conta
|
||||
return field;
|
||||
}
|
||||
|
||||
pub fn parseParamOrProto(allocator: *Allocator, xctok: *XmlCTokenizer) !registry.Declaration {
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
pub fn parseParamOrProto(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) !registry.Declaration {
|
||||
var decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
if (try xctok.peek()) |_| {
|
||||
return error.InvalidSyntax;
|
||||
}
|
||||
|
||||
// Decay pointers
|
||||
switch (decl.decl_type) {
|
||||
.array => {
|
||||
const child = try allocator.create(TypeInfo);
|
||||
child.* = decl.decl_type;
|
||||
|
||||
decl.decl_type = .{
|
||||
.pointer = .{
|
||||
.is_const = decl.is_const,
|
||||
.is_optional = false,
|
||||
.size = .one,
|
||||
.child = child,
|
||||
},
|
||||
};
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
return registry.Declaration{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.decl_type = .{.typedef = decl.decl_type},
|
||||
.decl_type = .{ .typedef = decl.decl_type },
|
||||
};
|
||||
}
|
||||
|
||||
pub const Declaration = struct {
|
||||
name: ?[]const u8, // Parameter names may be optional, especially in case of func(void)
|
||||
decl_type: TypeInfo,
|
||||
is_const: bool,
|
||||
};
|
||||
|
||||
pub const ParseError = error{
|
||||
@@ -318,7 +347,7 @@ pub const ParseError = error{
|
||||
// DECLARATION = kw_const? type_name DECLARATOR
|
||||
// DECLARATOR = POINTERS (id | name)? ('[' ARRAY_DECLARATOR ']')*
|
||||
// | POINTERS '(' FNPTRSUFFIX
|
||||
fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Declaration {
|
||||
fn parseDeclaration(allocator: Allocator, xctok: *XmlCTokenizer, ptrs_optional: bool) ParseError!Declaration {
|
||||
// Parse declaration constness
|
||||
var tok = try xctok.nextNoEof();
|
||||
const inner_is_const = tok.kind == .kw_const;
|
||||
@@ -333,15 +362,19 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
if (tok.kind != .type_name and tok.kind != .id) return error.InvalidSyntax;
|
||||
const type_name = tok.text;
|
||||
|
||||
var type_info = TypeInfo{.name = type_name};
|
||||
var type_info = TypeInfo{ .name = type_name };
|
||||
|
||||
// Parse pointers
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info);
|
||||
type_info = try parsePointers(allocator, xctok, inner_is_const, type_info, ptrs_optional);
|
||||
|
||||
// Parse name / fn ptr
|
||||
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info)) |decl| {
|
||||
return decl;
|
||||
if (try parseFnPtrSuffix(allocator, xctok, type_info, ptrs_optional)) |decl| {
|
||||
return Declaration{
|
||||
.name = decl.name,
|
||||
.decl_type = decl.decl_type,
|
||||
.is_const = inner_is_const,
|
||||
};
|
||||
}
|
||||
|
||||
const name = blk: {
|
||||
@@ -364,8 +397,10 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
inner_type.* = .{
|
||||
.array = .{
|
||||
.size = array_size,
|
||||
.valid_size = .all, // Refined later
|
||||
.is_optional = true,
|
||||
.child = child,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// update the inner_type pointer so it points to the proper
|
||||
@@ -376,11 +411,12 @@ fn parseDeclaration(allocator: *Allocator, xctok: *XmlCTokenizer) ParseError!Dec
|
||||
return Declaration{
|
||||
.name = name,
|
||||
.decl_type = type_info,
|
||||
.is_const = inner_is_const,
|
||||
};
|
||||
}
|
||||
|
||||
// FNPTRSUFFIX = kw_vkapi_ptr '*' name ')' '(' ('void' | (DECLARATION (',' DECLARATION)*)?) ')'
|
||||
fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo) !?Declaration {
|
||||
fn parseFnPtrSuffix(allocator: Allocator, xctok: *XmlCTokenizer, return_type: TypeInfo, ptrs_optional: bool) !?Declaration {
|
||||
const lparen = try xctok.peek();
|
||||
if (lparen == null or lparen.?.kind != .lparen) {
|
||||
return null;
|
||||
@@ -403,11 +439,12 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
.return_type = return_type_heap,
|
||||
.success_codes = &[_][]const u8{},
|
||||
.error_codes = &[_][]const u8{},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
.is_const = false,
|
||||
};
|
||||
|
||||
const first_param = try parseDeclaration(allocator, xctok);
|
||||
const first_param = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
if (first_param.name == null) {
|
||||
if (first_param.decl_type != .name or !mem.eql(u8, first_param.decl_type.name, "void")) {
|
||||
return error.InvalidSyntax;
|
||||
@@ -420,11 +457,12 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
// There is no good way to estimate the number of parameters beforehand.
|
||||
// Fortunately, there is usually a relatively low number of parameters to a function pointer,
|
||||
// so an ArrayList backed by an arena allocator is good enough.
|
||||
var params = std.ArrayList(registry.Command.Param).init(allocator);
|
||||
try params.append(.{
|
||||
var params: std.ArrayList(registry.Command.Param) = .empty;
|
||||
try params.append(allocator, .{
|
||||
.name = first_param.name.?,
|
||||
.param_type = first_param.decl_type,
|
||||
.is_buffer_len = false,
|
||||
.is_optional = false,
|
||||
});
|
||||
|
||||
while (true) {
|
||||
@@ -434,21 +472,22 @@ fn parseFnPtrSuffix(allocator: *Allocator, xctok: *XmlCTokenizer, return_type: T
|
||||
else => return error.InvalidSyntax,
|
||||
}
|
||||
|
||||
const decl = try parseDeclaration(allocator, xctok);
|
||||
try params.append(.{
|
||||
const decl = try parseDeclaration(allocator, xctok, ptrs_optional);
|
||||
try params.append(allocator, .{
|
||||
.name = decl.name orelse return error.MissingTypeIdentifier,
|
||||
.param_type = decl.decl_type,
|
||||
.is_buffer_len = false,
|
||||
.is_optional = false,
|
||||
});
|
||||
}
|
||||
|
||||
_ = try xctok.nextNoEof();
|
||||
command_ptr.decl_type.command_ptr.params = params.toOwnedSlice();
|
||||
command_ptr.decl_type.command_ptr.params = try params.toOwnedSlice(allocator);
|
||||
return command_ptr;
|
||||
}
|
||||
|
||||
// POINTERS = (kw_const? '*')*
|
||||
fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo) !TypeInfo {
|
||||
fn parsePointers(allocator: Allocator, xctok: *XmlCTokenizer, inner_const: bool, inner: TypeInfo, ptrs_optional: bool) !TypeInfo {
|
||||
var type_info = inner;
|
||||
var first_const = inner_const;
|
||||
|
||||
@@ -477,7 +516,7 @@ fn parsePointers(allocator: *Allocator, xctok: *XmlCTokenizer, inner_const: bool
|
||||
type_info = .{
|
||||
.pointer = .{
|
||||
.is_const = is_const or first_const,
|
||||
.is_optional = false, // set elsewhere
|
||||
.is_optional = ptrs_optional, // set elsewhere
|
||||
.size = .one, // set elsewhere
|
||||
.child = child,
|
||||
},
|
||||
@@ -500,32 +539,45 @@ fn parseArrayDeclarator(xctok: *XmlCTokenizer) !?ArraySize {
|
||||
.int = std.fmt.parseInt(usize, size_tok.text, 10) catch |err| switch (err) {
|
||||
error.Overflow => return error.Overflow,
|
||||
error.InvalidCharacter => unreachable,
|
||||
}
|
||||
},
|
||||
.enum_name => .{.alias = size_tok.text},
|
||||
else => return error.InvalidSyntax
|
||||
},
|
||||
// Sometimes, arrays are declared as `<type>T</type> <name>aa</name>[<enum>SIZE</enum>]`,
|
||||
// and sometimes just as `<type>T</type> <name>aa</name>[SIZE]`, so we have to account
|
||||
// for both `.enum_name` and `.id` here.
|
||||
.enum_name, .id => .{ .alias = size_tok.text },
|
||||
else => return error.InvalidSyntax,
|
||||
};
|
||||
|
||||
_ = try xctok.expect(.rbracket);
|
||||
return size;
|
||||
}
|
||||
|
||||
pub fn parseVersion(xctok: *XmlCTokenizer) ![4][]const u8 {
|
||||
pub fn parseVersion(xctok: *XmlCTokenizer) !registry.ApiConstant.Value {
|
||||
_ = try xctok.expect(.hash);
|
||||
const define = try xctok.expect(.id);
|
||||
if (!mem.eql(u8, define.text, "define")) {
|
||||
return error.InvalidVersion;
|
||||
}
|
||||
|
||||
const name = try xctok.expect(.name);
|
||||
_ = try xctok.expect(.name);
|
||||
const vk_make_version = try xctok.expect(.type_name);
|
||||
if (!mem.eql(u8, vk_make_version.text, "VK_MAKE_API_VERSION")) {
|
||||
if (mem.eql(u8, vk_make_version.text, "VK_MAKE_API_VERSION")) {
|
||||
return .{
|
||||
.version = try parseVersionValues(xctok, 4),
|
||||
};
|
||||
} else if (mem.eql(u8, vk_make_version.text, "VK_MAKE_VIDEO_STD_VERSION")) {
|
||||
return .{
|
||||
.video_std_version = try parseVersionValues(xctok, 3),
|
||||
};
|
||||
} else {
|
||||
return error.NotVersion;
|
||||
}
|
||||
}
|
||||
|
||||
fn parseVersionValues(xctok: *XmlCTokenizer, comptime count: usize) ![count][]const u8 {
|
||||
_ = try xctok.expect(.lparen);
|
||||
var version: [4][]const u8 = undefined;
|
||||
for (version) |*part, i| {
|
||||
var version: [count][]const u8 = undefined;
|
||||
for (&version, 0..) |*part, i| {
|
||||
if (i != 0) {
|
||||
_ = try xctok.expect(.comma);
|
||||
}
|
||||
@@ -551,33 +603,27 @@ fn testTokenizer(tokenizer: anytype, expected_tokens: []const Token) !void {
|
||||
}
|
||||
|
||||
test "CTokenizer" {
|
||||
var ctok = CTokenizer {
|
||||
.source = \\typedef ([const)]** VKAPI_PTR 123,;aaaa
|
||||
};
|
||||
var ctok = CTokenizer{ .source = "typedef ([const)]** VKAPI_PTR 123,;aaaa" };
|
||||
|
||||
try testTokenizer(
|
||||
&ctok,
|
||||
&[_]Token{
|
||||
.{.kind = .kw_typedef, .text = "typedef"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .lbracket, .text = "["},
|
||||
.{.kind = .kw_const, .text = "const"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .rbracket, .text = "]"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
|
||||
.{.kind = .int, .text = "123"},
|
||||
.{.kind = .comma, .text = ","},
|
||||
.{.kind = .semicolon, .text = ";"},
|
||||
.{.kind = .id, .text = "aaaa"},
|
||||
}
|
||||
);
|
||||
try testTokenizer(&ctok, &[_]Token{
|
||||
.{ .kind = .kw_typedef, .text = "typedef" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .lbracket, .text = "[" },
|
||||
.{ .kind = .kw_const, .text = "const" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .rbracket, .text = "]" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
|
||||
.{ .kind = .int, .text = "123" },
|
||||
.{ .kind = .comma, .text = "," },
|
||||
.{ .kind = .semicolon, .text = ";" },
|
||||
.{ .kind = .id, .text = "aaaa" },
|
||||
});
|
||||
}
|
||||
|
||||
test "XmlCTokenizer" {
|
||||
const document = try xml.parse(
|
||||
testing.allocator,
|
||||
const document = try xml.parse(testing.allocator,
|
||||
\\<root>// comment <name>commented name</name> <type>commented type</type> trailing
|
||||
\\ typedef void (VKAPI_PTR *<name>PFN_vkVoidFunction</name>)(void);
|
||||
\\</root>
|
||||
@@ -586,27 +632,23 @@ test "XmlCTokenizer" {
|
||||
|
||||
var xctok = XmlCTokenizer.init(document.root);
|
||||
|
||||
try testTokenizer(
|
||||
&xctok,
|
||||
&[_]Token{
|
||||
.{.kind = .kw_typedef, .text = "typedef"},
|
||||
.{.kind = .id, .text = "void"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .kw_vkapi_ptr, .text = "VKAPI_PTR"},
|
||||
.{.kind = .star, .text = "*"},
|
||||
.{.kind = .name, .text = "PFN_vkVoidFunction"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .lparen, .text = "("},
|
||||
.{.kind = .id, .text = "void"},
|
||||
.{.kind = .rparen, .text = ")"},
|
||||
.{.kind = .semicolon, .text = ";"},
|
||||
}
|
||||
);
|
||||
try testTokenizer(&xctok, &[_]Token{
|
||||
.{ .kind = .kw_typedef, .text = "typedef" },
|
||||
.{ .kind = .id, .text = "void" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .kw_vkapi_ptr, .text = "VKAPI_PTR" },
|
||||
.{ .kind = .star, .text = "*" },
|
||||
.{ .kind = .name, .text = "PFN_vkVoidFunction" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .lparen, .text = "(" },
|
||||
.{ .kind = .id, .text = "void" },
|
||||
.{ .kind = .rparen, .text = ")" },
|
||||
.{ .kind = .semicolon, .text = ";" },
|
||||
});
|
||||
}
|
||||
|
||||
test "parseTypedef" {
|
||||
const document = try xml.parse(
|
||||
testing.allocator,
|
||||
const document = try xml.parse(testing.allocator,
|
||||
\\<root> // comment <name>commented name</name> trailing
|
||||
\\ typedef const struct <type>Python</type>* pythons[4];
|
||||
\\ // more comments
|
||||
@@ -619,11 +661,11 @@ test "parseTypedef" {
|
||||
defer arena.deinit();
|
||||
|
||||
var xctok = XmlCTokenizer.init(document.root);
|
||||
const decl = try parseTypedef(&arena.allocator, &xctok);
|
||||
const decl = try parseTypedef(arena.allocator(), &xctok, false);
|
||||
|
||||
try testing.expectEqualSlices(u8, "pythons", decl.name);
|
||||
const array = decl.decl_type.typedef.array;
|
||||
try testing.expectEqual(ArraySize{.int = 4}, array.size);
|
||||
try testing.expectEqual(ArraySize{ .int = 4 }, array.size);
|
||||
const ptr = array.child.pointer;
|
||||
try testing.expectEqual(true, ptr.is_const);
|
||||
try testing.expectEqualSlices(u8, "Python", ptr.child.name);
|
||||
273
src/vulkan/generator.zig
Normal file
@@ -0,0 +1,273 @@
|
||||
const std = @import("std");
|
||||
const reg = @import("registry.zig");
|
||||
const xml = @import("../xml.zig");
|
||||
const renderRegistry = @import("render.zig").render;
|
||||
const parseXml = @import("parse.zig").parseXml;
|
||||
const IdRenderer = @import("../id_render.zig").IdRenderer;
|
||||
const mem = std.mem;
|
||||
const Allocator = mem.Allocator;
|
||||
const FeatureLevel = reg.FeatureLevel;
|
||||
|
||||
const EnumFieldMerger = struct {
|
||||
const EnumExtensionMap = std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(reg.Enum.Field));
|
||||
const ApiConstantMap = std.StringArrayHashMapUnmanaged(reg.ApiConstant);
|
||||
const FieldSet = std.StringArrayHashMapUnmanaged(void);
|
||||
|
||||
arena: Allocator,
|
||||
registry: *reg.Registry,
|
||||
enum_extensions: EnumExtensionMap,
|
||||
api_constants: ApiConstantMap,
|
||||
field_set: FieldSet,
|
||||
|
||||
fn init(arena: Allocator, registry: *reg.Registry) EnumFieldMerger {
|
||||
return .{
|
||||
.arena = arena,
|
||||
.registry = registry,
|
||||
.enum_extensions = .{},
|
||||
.api_constants = .{},
|
||||
.field_set = .{},
|
||||
};
|
||||
}
|
||||
|
||||
fn putEnumExtension(self: *EnumFieldMerger, enum_name: []const u8, field: reg.Enum.Field) !void {
|
||||
const res = try self.enum_extensions.getOrPut(self.arena, enum_name);
|
||||
if (!res.found_existing) {
|
||||
res.value_ptr.* = .empty;
|
||||
}
|
||||
|
||||
try res.value_ptr.append(self.arena, field);
|
||||
}
|
||||
|
||||
fn addRequires(self: *EnumFieldMerger, reqs: []const reg.Require) !void {
|
||||
for (reqs) |req| {
|
||||
for (req.extends) |enum_ext| {
|
||||
switch (enum_ext.value) {
|
||||
.field => try self.putEnumExtension(enum_ext.extends, enum_ext.value.field),
|
||||
.new_api_constant_expr => |expr| try self.api_constants.put(
|
||||
self.arena,
|
||||
enum_ext.extends,
|
||||
.{
|
||||
.name = enum_ext.extends,
|
||||
.value = .{ .expr = expr },
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn mergeEnumFields(self: *EnumFieldMerger, name: []const u8, base_enum: *reg.Enum) !void {
|
||||
// If there are no extensions for this enum, assume it's valid.
|
||||
const extensions = self.enum_extensions.get(name) orelse return;
|
||||
|
||||
self.field_set.clearRetainingCapacity();
|
||||
|
||||
const n_fields_upper_bound = base_enum.fields.len + extensions.items.len;
|
||||
const new_fields = try self.arena.alloc(reg.Enum.Field, n_fields_upper_bound);
|
||||
var i: usize = 0;
|
||||
|
||||
for (base_enum.fields) |field| {
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Assume that if a field name clobbers, the value is the same
|
||||
for (extensions.items) |field| {
|
||||
const res = try self.field_set.getOrPut(self.arena, field.name);
|
||||
if (!res.found_existing) {
|
||||
new_fields[i] = field;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Existing base_enum.fields was allocated by `self.arena`, so
|
||||
// it gets cleaned up whenever that is deinited.
|
||||
base_enum.fields = new_fields[0..i];
|
||||
}
|
||||
|
||||
fn merge(self: *EnumFieldMerger) !void {
|
||||
for (self.registry.api_constants) |api_constant| {
|
||||
try self.api_constants.put(self.arena, api_constant.name, api_constant);
|
||||
}
|
||||
|
||||
for (self.registry.features) |feature| {
|
||||
try self.addRequires(feature.requires);
|
||||
}
|
||||
|
||||
for (self.registry.extensions) |ext| {
|
||||
try self.addRequires(ext.requires);
|
||||
}
|
||||
|
||||
// Merge all the enum fields.
|
||||
// Assume that all keys of enum_extensions appear in `self.registry.decls`
|
||||
for (self.registry.decls) |*decl| {
|
||||
if (decl.decl_type == .enumeration) {
|
||||
try self.mergeEnumFields(decl.name, &decl.decl_type.enumeration);
|
||||
}
|
||||
}
|
||||
|
||||
self.registry.api_constants = self.api_constants.values();
|
||||
}
|
||||
};
|
||||
|
||||
pub const Generator = struct {
|
||||
arena: std.heap.ArenaAllocator,
|
||||
registry: reg.Registry,
|
||||
id_renderer: IdRenderer,
|
||||
have_video: bool,
|
||||
|
||||
fn init(allocator: Allocator, spec: *xml.Element, maybe_video_spec: ?*xml.Element, api: reg.Api) !Generator {
|
||||
const result = try parseXml(allocator, spec, maybe_video_spec, api);
|
||||
|
||||
const tags = try allocator.alloc([]const u8, result.registry.tags.len);
|
||||
for (tags, result.registry.tags) |*tag, registry_tag| tag.* = registry_tag.name;
|
||||
|
||||
return Generator{
|
||||
.arena = result.arena,
|
||||
.registry = result.registry,
|
||||
.id_renderer = IdRenderer.init(allocator, tags),
|
||||
.have_video = maybe_video_spec != null,
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: Generator) void {
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
fn stripFlagBits(self: Generator, name: []const u8) []const u8 {
|
||||
const tagless = self.id_renderer.stripAuthorTag(name);
|
||||
return tagless[0 .. tagless.len - "FlagBits".len];
|
||||
}
|
||||
|
||||
fn stripFlags(self: Generator, name: []const u8) []const u8 {
|
||||
const tagless = self.id_renderer.stripAuthorTag(name);
|
||||
return tagless[0 .. tagless.len - "Flags".len];
|
||||
}
|
||||
|
||||
// Solve `registry.declarations` according to `registry.extensions` and `registry.features`.
|
||||
fn mergeEnumFields(self: *Generator) !void {
|
||||
var merger = EnumFieldMerger.init(self.arena.allocator(), &self.registry);
|
||||
try merger.merge();
|
||||
}
|
||||
|
||||
// https://github.com/KhronosGroup/Vulkan-Docs/pull/1556
|
||||
fn fixupBitFlags(self: *Generator) !void {
|
||||
var seen_bits = std.StringArrayHashMap(void).init(self.arena.allocator());
|
||||
defer seen_bits.deinit();
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
const bitmask = switch (decl.decl_type) {
|
||||
.bitmask => |bm| bm,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
if (bitmask.bits_enum) |bits_enum| {
|
||||
try seen_bits.put(bits_enum, {});
|
||||
}
|
||||
}
|
||||
|
||||
var i: usize = 0;
|
||||
|
||||
for (self.registry.decls) |decl| {
|
||||
switch (decl.decl_type) {
|
||||
.enumeration => |e| {
|
||||
if (e.is_bitmask and seen_bits.get(decl.name) == null)
|
||||
continue;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
self.registry.decls[i] = decl;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
self.registry.decls.len = i;
|
||||
}
|
||||
|
||||
fn render(self: *Generator, writer: *std.Io.Writer) !void {
|
||||
try renderRegistry(writer, self.arena.allocator(), &self.registry, &self.id_renderer, self.have_video);
|
||||
}
|
||||
};
|
||||
|
||||
/// The vulkan registry contains the specification for multiple APIs: Vulkan and VulkanSC. This enum
|
||||
/// describes applicable APIs.
|
||||
pub const Api = reg.Api;
|
||||
|
||||
/// Main function for generating the Vulkan bindings. vk.xml is to be provided via `spec_xml`,
|
||||
/// and the resulting binding is written to `writer`. `allocator` will be used to allocate temporary
|
||||
/// internal data structures - mostly via an ArenaAllocator, but sometimes a hashmap uses this allocator
|
||||
/// directly. `api` is the API to generate the bindings for, usually `.vulkan`.
|
||||
pub fn generate(
|
||||
allocator: Allocator,
|
||||
api: Api,
|
||||
spec_xml: []const u8,
|
||||
maybe_video_spec_xml: ?[]const u8,
|
||||
writer: *std.Io.Writer,
|
||||
) !void {
|
||||
const spec = xml.parse(allocator, spec_xml) catch |err| switch (err) {
|
||||
error.InvalidDocument,
|
||||
error.UnexpectedEof,
|
||||
error.UnexpectedCharacter,
|
||||
error.IllegalCharacter,
|
||||
error.InvalidEntity,
|
||||
error.InvalidName,
|
||||
error.InvalidStandaloneValue,
|
||||
error.NonMatchingClosingTag,
|
||||
error.UnclosedComment,
|
||||
error.UnclosedValue,
|
||||
=> return error.InvalidXml,
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
};
|
||||
defer spec.deinit();
|
||||
|
||||
const maybe_video_spec_root = if (maybe_video_spec_xml) |video_spec_xml| blk: {
|
||||
const video_spec = xml.parse(allocator, video_spec_xml) catch |err| switch (err) {
|
||||
error.InvalidDocument,
|
||||
error.UnexpectedEof,
|
||||
error.UnexpectedCharacter,
|
||||
error.IllegalCharacter,
|
||||
error.InvalidEntity,
|
||||
error.InvalidName,
|
||||
error.InvalidStandaloneValue,
|
||||
error.NonMatchingClosingTag,
|
||||
error.UnclosedComment,
|
||||
error.UnclosedValue,
|
||||
=> return error.InvalidXml,
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
};
|
||||
|
||||
break :blk video_spec.root;
|
||||
} else null;
|
||||
|
||||
var gen = Generator.init(allocator, spec.root, maybe_video_spec_root, api) catch |err| switch (err) {
|
||||
error.InvalidXml,
|
||||
error.InvalidCharacter,
|
||||
error.Overflow,
|
||||
error.InvalidFeatureLevel,
|
||||
error.InvalidSyntax,
|
||||
error.InvalidTag,
|
||||
error.MissingTypeIdentifier,
|
||||
error.UnexpectedCharacter,
|
||||
error.UnexpectedEof,
|
||||
error.UnexpectedToken,
|
||||
error.InvalidRegistry,
|
||||
=> return error.InvalidRegistry,
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
};
|
||||
defer gen.deinit();
|
||||
|
||||
try gen.mergeEnumFields();
|
||||
try gen.fixupBitFlags();
|
||||
gen.render(writer) catch |err| switch (err) {
|
||||
error.InvalidApiConstant,
|
||||
error.InvalidConstantExpr,
|
||||
error.InvalidRegistry,
|
||||
error.UnexpectedCharacter,
|
||||
error.InvalidCharacter,
|
||||
error.Overflow,
|
||||
=> return error.InvalidRegistry,
|
||||
else => |others| return others,
|
||||
};
|
||||
}
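A hypothetical usage sketch, not part of this diff: it assumes the caller already has a *std.Io.Writer (for example a buffered file writer), that the generator module is reachable under the import path shown, and that std.fs.Dir.readFileAlloc is available with its usual (allocator, path, max_bytes) signature; the "vk.xml" path is made up.

const std = @import("std");
const generator = @import("vulkan/generator.zig"); // assumed import path

fn generateBindings(gpa: std.mem.Allocator, out: *std.Io.Writer) !void {
    // Read the whole registry into memory; generate() takes the XML as a slice.
    const spec_xml = try std.fs.cwd().readFileAlloc(gpa, "vk.xml", std.math.maxInt(usize));
    defer gpa.free(spec_xml);

    // Pass null for the video spec to skip the video std bindings.
    try generator.generate(gpa, .vulkan, spec_xml, null, out);
}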
|
||||
File diff suppressed because it is too large
@@ -1,5 +1,9 @@
|
||||
pub const Api = enum {
|
||||
vulkan,
|
||||
vulkansc,
|
||||
};
|
||||
|
||||
pub const Registry = struct {
|
||||
copyright: []const u8,
|
||||
decls: []Declaration,
|
||||
api_constants: []ApiConstant,
|
||||
tags: []Tag,
|
||||
@@ -38,6 +42,7 @@ pub const ApiConstant = struct {
|
||||
pub const Value = union(enum) {
|
||||
expr: []const u8,
|
||||
version: [4][]const u8,
|
||||
video_std_version: [3][]const u8,
|
||||
};
|
||||
|
||||
name: []const u8,
|
||||
@@ -62,9 +67,11 @@ pub const Container = struct {
|
||||
field_type: TypeInfo,
|
||||
bits: ?usize,
|
||||
is_buffer_len: bool,
|
||||
is_optional: bool,
|
||||
};
|
||||
|
||||
stype: ?[]const u8,
|
||||
extends: ?[]const []const u8,
|
||||
fields: []Field,
|
||||
is_union: bool,
|
||||
};
|
||||
@@ -77,7 +84,7 @@ pub const Enum = struct {
|
||||
alias: struct {
|
||||
name: []const u8,
|
||||
is_compat_alias: bool,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
pub const Field = struct {
|
||||
@@ -105,20 +112,23 @@ pub const Command = struct {
|
||||
name: []const u8,
|
||||
param_type: TypeInfo,
|
||||
is_buffer_len: bool,
|
||||
is_optional: bool,
|
||||
};
|
||||
|
||||
params: []Param,
|
||||
return_type: *TypeInfo,
|
||||
success_codes: [][]const u8,
|
||||
error_codes: [][]const u8,
|
||||
success_codes: []const []const u8,
|
||||
error_codes: []const []const u8,
|
||||
};
|
||||
|
||||
pub const Pointer = struct {
|
||||
pub const PointerSize = union(enum) {
|
||||
one,
|
||||
many, // The length is given by some complex expression, possibly involving another field
|
||||
other_field: []const u8, // The length is given by some other field or parameter
|
||||
zero_terminated
|
||||
/// The length is given by some complex expression, possibly involving another field
|
||||
many,
|
||||
/// The length is given by some other field or parameter
|
||||
other_field: []const u8,
|
||||
zero_terminated,
|
||||
};
|
||||
|
||||
is_const: bool,
|
||||
@@ -133,7 +143,26 @@ pub const Array = struct {
|
||||
alias: []const u8, // Field size is given by an api constant
|
||||
};
|
||||
|
||||
pub const ArrayValidSize = union(enum) {
|
||||
/// All elements are valid.
|
||||
all,
|
||||
/// The length is given by some complex expression, possibly involving another field
|
||||
many,
|
||||
/// The length is given by some other field or parameter
|
||||
other_field: []const u8,
|
||||
/// The valid elements are terminated by a 0, or by the bounds of the array.
|
||||
zero_terminated,
|
||||
};
|
||||
|
||||
/// This is the total size of the array
|
||||
size: ArraySize,
|
||||
/// The number of items that are actually filled with valid values
|
||||
valid_size: ArrayValidSize,
|
||||
/// Some members may indicate that an array is optional. This happens with
|
||||
/// VkPhysicalDeviceHostImageCopyPropertiesEXT::optimalTilingLayoutUUID for example.
|
||||
/// The spec is not entirely clear about what this means, but presumably it should
|
||||
/// be filled with all zeroes.
|
||||
is_optional: bool,
|
||||
child: *TypeInfo,
|
||||
};
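A hypothetical illustration, not part of this diff: roughly how a fixed-size member such as uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE] (the optional-array example mentioned above) might be modelled with these types. The variable names and the import path are made up.

const std = @import("std");
const reg = @import("registry.zig"); // assumed import path

test "array type info sketch" {
    var uuid_child: reg.TypeInfo = .{ .name = "uint8_t" };
    const uuid_array = reg.Array{
        .size = .{ .alias = "VK_UUID_SIZE" }, // total size comes from an API constant
        .valid_size = .all, // every element carries a valid value
        .is_optional = true, // the member is marked optional in the spec
        .child = &uuid_child,
    };
    try std.testing.expect(uuid_array.valid_size == .all);
    try std.testing.expectEqualStrings("VK_UUID_SIZE", uuid_array.size.alias);
}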
|
||||
|
||||
@@ -151,6 +180,7 @@ pub const Extension = struct {
|
||||
pub const ExtensionType = enum {
|
||||
instance,
|
||||
device,
|
||||
video,
|
||||
};
|
||||
|
||||
pub const Promotion = union(enum) {
|
||||
@@ -159,9 +189,15 @@ pub const Extension = struct {
|
||||
extension: []const u8,
|
||||
};
|
||||
|
||||
pub const Version = union(enum) {
|
||||
int: u32,
|
||||
alias: []const u8,
|
||||
unknown,
|
||||
};
|
||||
|
||||
name: []const u8,
|
||||
number: u31,
|
||||
version: u32,
|
||||
version: Version,
|
||||
extension_type: ?ExtensionType,
|
||||
depends: []const []const u8, // Other extensions
|
||||
promoted_to: Promotion,
|
||||
@@ -172,9 +208,13 @@ pub const Extension = struct {
|
||||
|
||||
pub const Require = struct {
|
||||
pub const EnumExtension = struct {
|
||||
pub const Value = union(enum) {
|
||||
field: Enum.Field,
|
||||
new_api_constant_expr: []const u8,
|
||||
};
|
||||
extends: []const u8,
|
||||
extnumber: ?u31,
|
||||
field: Enum.Field,
|
||||
value: Value,
|
||||
};
|
||||
|
||||
extends: []EnumExtension,
|
||||
2337
src/vulkan/render.zig
Normal file
File diff suppressed because it is too large
638
src/xml.zig
Normal file
@@ -0,0 +1,638 @@
|
||||
const std = @import("std");
|
||||
const mem = std.mem;
|
||||
const testing = std.testing;
|
||||
const Allocator = mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
pub const Attribute = struct {
|
||||
name: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
pub const Content = union(enum) {
|
||||
char_data: []const u8,
|
||||
comment: []const u8,
|
||||
element: *Element,
|
||||
};
|
||||
|
||||
pub const Element = struct {
|
||||
tag: []const u8,
|
||||
attributes: []Attribute = &.{},
|
||||
children: []Content = &.{},
|
||||
|
||||
pub fn getAttribute(self: Element, attrib_name: []const u8) ?[]const u8 {
|
||||
for (self.attributes) |child| {
|
||||
if (mem.eql(u8, child.name, attrib_name)) {
|
||||
return child.value;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getCharData(self: Element, child_tag: []const u8) ?[]const u8 {
|
||||
const child = self.findChildByTag(child_tag) orelse return null;
|
||||
if (child.children.len != 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return switch (child.children[0]) {
|
||||
.char_data => |char_data| char_data,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: Element) ChildIterator {
|
||||
return .{
|
||||
.items = self.children,
|
||||
.i = 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn elements(self: Element) ChildElementIterator {
|
||||
return .{
|
||||
.inner = self.iterator(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn findChildByTag(self: Element, tag: []const u8) ?*Element {
|
||||
var it = self.findChildrenByTag(tag);
|
||||
return it.next();
|
||||
}
|
||||
|
||||
pub fn findChildrenByTag(self: Element, tag: []const u8) FindChildrenByTagIterator {
|
||||
return .{
|
||||
.inner = self.elements(),
|
||||
.tag = tag,
|
||||
};
|
||||
}
|
||||
|
||||
pub const ChildIterator = struct {
|
||||
items: []Content,
|
||||
i: usize,
|
||||
|
||||
pub fn next(self: *ChildIterator) ?*Content {
|
||||
if (self.i < self.items.len) {
|
||||
self.i += 1;
|
||||
return &self.items[self.i - 1];
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
pub const ChildElementIterator = struct {
|
||||
inner: ChildIterator,
|
||||
|
||||
pub fn next(self: *ChildElementIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (child.* != .element) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child.*.element;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
pub const FindChildrenByTagIterator = struct {
|
||||
inner: ChildElementIterator,
|
||||
tag: []const u8,
|
||||
|
||||
pub fn next(self: *FindChildrenByTagIterator) ?*Element {
|
||||
while (self.inner.next()) |child| {
|
||||
if (!mem.eql(u8, child.tag, self.tag)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return child;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
};
|
||||
};
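A brief usage sketch, not part of this diff: how the parse entry point further down and these Element helpers fit together. The document text and tag names are made up, and the test assumes it lives next to src/xml.zig.

const std = @import("std");
const xml = @import("xml.zig"); // assumed import path

test "xml: element helper usage sketch" {
    const doc = try xml.parse(std.testing.allocator,
        \\<?xml version='1.0'?>
        \\<registry><type name="VkBool32">uint32_t</type></registry>
    );
    defer doc.deinit();

    // Look up the child element by tag, then read an attribute and its character data.
    const type_elem = doc.root.findChildByTag("type").?;
    try std.testing.expectEqualStrings("VkBool32", type_elem.getAttribute("name").?);
    try std.testing.expectEqualStrings("uint32_t", doc.root.getCharData("type").?);
}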
|
||||
|
||||
pub const Document = struct {
|
||||
arena: ArenaAllocator,
|
||||
xml_decl: ?*Element,
|
||||
root: *Element,
|
||||
|
||||
pub fn deinit(self: Document) void {
|
||||
var arena = self.arena; // Copy to stack so self can be taken by value.
|
||||
arena.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
const Parser = struct {
|
||||
source: []const u8,
|
||||
offset: usize,
|
||||
line: usize,
|
||||
column: usize,
|
||||
|
||||
fn init(source: []const u8) Parser {
|
||||
return .{
|
||||
.source = source,
|
||||
.offset = 0,
|
||||
.line = 0,
|
||||
.column = 0,
|
||||
};
|
||||
}
|
||||
|
||||
fn peek(self: *Parser) ?u8 {
|
||||
return if (self.offset < self.source.len) self.source[self.offset] else null;
|
||||
}
|
||||
|
||||
fn consume(self: *Parser) !u8 {
|
||||
if (self.offset < self.source.len) {
|
||||
return self.consumeNoEof();
|
||||
}
|
||||
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn consumeNoEof(self: *Parser) u8 {
|
||||
std.debug.assert(self.offset < self.source.len);
|
||||
const c = self.source[self.offset];
|
||||
self.offset += 1;
|
||||
|
||||
if (c == '\n') {
|
||||
self.line += 1;
|
||||
self.column = 0;
|
||||
} else {
|
||||
self.column += 1;
|
||||
}
|
||||
|
||||
return c;
|
||||
}
|
||||
|
||||
fn eat(self: *Parser, char: u8) bool {
|
||||
self.expect(char) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expect(self: *Parser, expected: u8) !void {
|
||||
if (self.peek()) |actual| {
|
||||
if (expected != actual) {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
_ = self.consumeNoEof();
|
||||
return;
|
||||
}
|
||||
|
||||
return error.UnexpectedEof;
|
||||
}
|
||||
|
||||
fn eatStr(self: *Parser, text: []const u8) bool {
|
||||
self.expectStr(text) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn expectStr(self: *Parser, text: []const u8) !void {
|
||||
if (self.source.len < self.offset + text.len) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (mem.startsWith(u8, self.source[self.offset..], text)) {
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (i += 1) {
|
||||
_ = self.consumeNoEof();
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn eatWs(self: *Parser) bool {
|
||||
var ws = false;
|
||||
|
||||
while (self.peek()) |ch| {
|
||||
switch (ch) {
|
||||
' ', '\t', '\n', '\r' => {
|
||||
ws = true;
|
||||
_ = self.consumeNoEof();
|
||||
},
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
|
||||
return ws;
|
||||
}
|
||||
|
||||
fn expectWs(self: *Parser) !void {
|
||||
if (!self.eatWs()) return error.UnexpectedCharacter;
|
||||
}
|
||||
|
||||
fn currentLine(self: Parser) []const u8 {
|
||||
var begin: usize = 0;
|
||||
if (mem.lastIndexOfScalar(u8, self.source[0..self.offset], '\n')) |prev_nl| {
|
||||
begin = prev_nl + 1;
|
||||
}
|
||||
|
||||
const end = mem.indexOfScalarPos(u8, self.source, self.offset, '\n') orelse self.source.len;
|
||||
return self.source[begin..end];
|
||||
}
|
||||
};
|
||||
|
||||
test "xml: Parser" {
|
||||
{
|
||||
var parser = Parser.init("I like pythons");
|
||||
try testing.expectEqual(@as(?u8, 'I'), parser.peek());
|
||||
try testing.expectEqual(@as(u8, 'I'), parser.consumeNoEof());
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
try testing.expectEqual(@as(u8, ' '), try parser.consume());
|
||||
|
||||
try testing.expect(parser.eat('l'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eat('a'));
|
||||
try testing.expectEqual(@as(?u8, 'i'), parser.peek());
|
||||
|
||||
try parser.expect('i');
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expect('a'));
|
||||
try testing.expectEqual(@as(?u8, 'k'), parser.peek());
|
||||
|
||||
try testing.expect(parser.eatStr("ke"));
|
||||
try testing.expectEqual(@as(?u8, ' '), parser.peek());
|
||||
|
||||
try testing.expect(parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectEqual(false, parser.eatWs());
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
|
||||
try testing.expectEqual(false, parser.eatStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
|
||||
try testing.expectError(error.UnexpectedEof, parser.expectStr("aaaaaaaaa"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try testing.expectError(error.UnexpectedCharacter, parser.expectStr("pytn"));
|
||||
try testing.expectEqual(@as(?u8, 'p'), parser.peek());
|
||||
try parser.expectStr("python");
|
||||
try testing.expectEqual(@as(?u8, 's'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("");
|
||||
try testing.expectEqual(parser.peek(), null);
|
||||
try testing.expectError(error.UnexpectedEof, parser.consume());
|
||||
try testing.expectEqual(parser.eat('p'), false);
|
||||
try testing.expectError(error.UnexpectedEof, parser.expect('p'));
|
||||
}
|
||||
}
|
||||
|
||||
pub const ParseError = error{
|
||||
IllegalCharacter,
|
||||
UnexpectedEof,
|
||||
UnexpectedCharacter,
|
||||
UnclosedValue,
|
||||
UnclosedComment,
|
||||
InvalidName,
|
||||
InvalidEntity,
|
||||
InvalidStandaloneValue,
|
||||
NonMatchingClosingTag,
|
||||
InvalidDocument,
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
pub fn parse(backing_allocator: Allocator, source: []const u8) !Document {
|
||||
var parser = Parser.init(source);
|
||||
return try parseDocument(&parser, backing_allocator);
|
||||
}
|
||||
|
||||
fn parseDocument(parser: *Parser, backing_allocator: Allocator) !Document {
|
||||
var doc = Document{
|
||||
.arena = ArenaAllocator.init(backing_allocator),
|
||||
.xml_decl = null,
|
||||
.root = undefined,
|
||||
};
|
||||
|
||||
errdefer doc.deinit();
|
||||
|
||||
const allocator = doc.arena.allocator();
|
||||
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
doc.xml_decl = try parseElement(parser, allocator, .xml_decl);
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
doc.root = (try parseElement(parser, allocator, .element)) orelse return error.InvalidDocument;
|
||||
_ = parser.eatWs();
|
||||
try skipComments(parser, allocator);
|
||||
|
||||
if (parser.peek() != null) return error.InvalidDocument;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
fn parseAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
|
||||
const quote = try parser.consume();
|
||||
if (quote != '"' and quote != '\'') return error.UnexpectedCharacter;
|
||||
|
||||
const begin = parser.offset;
|
||||
|
||||
while (true) {
|
||||
const c = parser.consume() catch return error.UnclosedValue;
|
||||
if (c == quote) break;
|
||||
}
|
||||
|
||||
const end = parser.offset - 1;
|
||||
|
||||
return try unescape(alloc, parser.source[begin..end]);
|
||||
}
|
||||
|
||||
fn parseEqAttrValue(parser: *Parser, alloc: Allocator) ![]const u8 {
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('=');
|
||||
_ = parser.eatWs();
|
||||
|
||||
return try parseAttrValue(parser, alloc);
|
||||
}
|
||||
|
||||
fn parseNameNoDupe(parser: *Parser) ![]const u8 {
|
||||
// XML's spec on names is very long, so to make this easier
|
||||
// we just take any character that is not special and not whitespace
|
||||
const begin = parser.offset;
|
||||
|
||||
while (parser.peek()) |ch| {
|
||||
switch (ch) {
|
||||
' ', '\t', '\n', '\r' => break,
|
||||
'&', '"', '\'', '<', '>', '?', '=', '/' => break,
|
||||
else => _ = parser.consumeNoEof(),
|
||||
}
|
||||
}
|
||||
|
||||
const end = parser.offset;
|
||||
if (begin == end) return error.InvalidName;
|
||||
|
||||
return parser.source[begin..end];
|
||||
}
|
||||
|
||||
fn parseCharData(parser: *Parser, alloc: Allocator) !?[]const u8 {
|
||||
const begin = parser.offset;
|
||||
|
||||
while (parser.peek()) |ch| {
|
||||
switch (ch) {
|
||||
'<' => break,
|
||||
else => _ = parser.consumeNoEof(),
|
||||
}
|
||||
}
|
||||
|
||||
const end = parser.offset;
|
||||
if (begin == end) return null;
|
||||
|
||||
return try unescape(alloc, parser.source[begin..end]);
|
||||
}
|
||||
|
||||
fn parseContent(parser: *Parser, alloc: Allocator) ParseError!Content {
|
||||
if (try parseCharData(parser, alloc)) |cd| {
|
||||
return Content{ .char_data = cd };
|
||||
} else if (try parseComment(parser, alloc)) |comment| {
|
||||
return Content{ .comment = comment };
|
||||
} else if (try parseElement(parser, alloc, .element)) |elem| {
|
||||
return Content{ .element = elem };
|
||||
} else {
|
||||
return error.UnexpectedCharacter;
|
||||
}
|
||||
}
|
||||
|
||||
fn parseAttr(parser: *Parser, alloc: Allocator) !?Attribute {
|
||||
const name = parseNameNoDupe(parser) catch return null;
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('=');
|
||||
_ = parser.eatWs();
|
||||
const value = try parseAttrValue(parser, alloc);
|
||||
|
||||
const attr = Attribute{
|
||||
.name = try alloc.dupe(u8, name),
|
||||
.value = value,
|
||||
};
|
||||
return attr;
|
||||
}
|
||||
|
||||
const ElementKind = enum {
|
||||
xml_decl,
|
||||
element,
|
||||
};
|
||||
|
||||
fn parseElement(parser: *Parser, alloc: Allocator, comptime kind: ElementKind) !?*Element {
|
||||
const start = parser.offset;
|
||||
|
||||
const tag = switch (kind) {
|
||||
.xml_decl => blk: {
|
||||
if (!parser.eatStr("<?") or !mem.eql(u8, try parseNameNoDupe(parser), "xml")) {
|
||||
parser.offset = start;
|
||||
return null;
|
||||
}
|
||||
break :blk "xml";
|
||||
},
|
||||
.element => blk: {
|
||||
if (!parser.eat('<')) return null;
|
||||
const tag = parseNameNoDupe(parser) catch {
|
||||
parser.offset = start;
|
||||
return null;
|
||||
};
|
||||
break :blk tag;
|
||||
},
|
||||
};
|
||||
|
||||
var attributes: std.ArrayList(Attribute) = .empty;
|
||||
defer attributes.deinit(alloc);
|
||||
|
||||
var children: std.ArrayList(Content) = .empty;
|
||||
defer children.deinit(alloc);
|
||||
|
||||
while (parser.eatWs()) {
|
||||
const attr = (try parseAttr(parser, alloc)) orelse break;
|
||||
try attributes.append(alloc, attr);
|
||||
}
|
||||
|
||||
switch (kind) {
|
||||
.xml_decl => try parser.expectStr("?>"),
|
||||
.element => {
|
||||
if (!parser.eatStr("/>")) {
|
||||
try parser.expect('>');
|
||||
|
||||
while (true) {
|
||||
if (parser.peek() == null) {
|
||||
return error.UnexpectedEof;
|
||||
} else if (parser.eatStr("</")) {
|
||||
break;
|
||||
}
|
||||
|
||||
const content = try parseContent(parser, alloc);
|
||||
try children.append(alloc, content);
|
||||
}
|
||||
|
||||
const closing_tag = try parseNameNoDupe(parser);
|
||||
if (!mem.eql(u8, tag, closing_tag)) {
|
||||
return error.NonMatchingClosingTag;
|
||||
}
|
||||
|
||||
_ = parser.eatWs();
|
||||
try parser.expect('>');
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const element = try alloc.create(Element);
|
||||
element.* = .{
|
||||
.tag = try alloc.dupe(u8, tag),
|
||||
.attributes = try attributes.toOwnedSlice(alloc),
|
||||
.children = try children.toOwnedSlice(alloc),
|
||||
};
|
||||
return element;
|
||||
}
|
||||
|
||||
test "xml: parseElement" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const alloc = arena.allocator();
|
||||
|
||||
{
|
||||
var parser = Parser.init("<= a='b'/>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, alloc, .element));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<python size='15' color = \"green\"/>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
|
||||
const size_attr = elem.?.attributes[0];
|
||||
try testing.expectEqualSlices(u8, size_attr.name, "size");
|
||||
try testing.expectEqualSlices(u8, size_attr.value, "15");
|
||||
|
||||
const color_attr = elem.?.attributes[1];
|
||||
try testing.expectEqualSlices(u8, color_attr.name, "color");
|
||||
try testing.expectEqualSlices(u8, color_attr.value, "green");
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<python>test</python>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "python");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "test");
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<a>b<c/>d<e/>f<!--g--></a>");
|
||||
const elem = try parseElement(&parser, alloc, .element);
|
||||
try testing.expectEqualSlices(u8, elem.?.tag, "a");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[0].char_data, "b");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[1].element.tag, "c");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[2].char_data, "d");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[3].element.tag, "e");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[4].char_data, "f");
|
||||
try testing.expectEqualSlices(u8, elem.?.children[5].comment, "g");
|
||||
}
|
||||
}
|
||||
|
||||
test "xml: parse prolog" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xmla version='aa'?>");
|
||||
try testing.expectEqual(@as(?*Element, null), try parseElement(&parser, a, .xml_decl));
|
||||
try testing.expectEqual(@as(?u8, '<'), parser.peek());
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version='aa'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "aa", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("encoding"));
|
||||
try testing.expectEqual(@as(?[]const u8, null), decl.?.getAttribute("standalone"));
|
||||
}
|
||||
|
||||
{
|
||||
var parser = Parser.init("<?xml version=\"ccc\" encoding = 'bbb' standalone \t = 'yes'?>");
|
||||
const decl = try parseElement(&parser, a, .xml_decl);
|
||||
try testing.expectEqualSlices(u8, "ccc", decl.?.getAttribute("version").?);
|
||||
try testing.expectEqualSlices(u8, "bbb", decl.?.getAttribute("encoding").?);
|
||||
try testing.expectEqualSlices(u8, "yes", decl.?.getAttribute("standalone").?);
|
||||
}
|
||||
}
|
||||
|
||||
fn skipComments(parser: *Parser, alloc: Allocator) !void {
|
||||
while ((try parseComment(parser, alloc)) != null) {
|
||||
_ = parser.eatWs();
|
||||
}
|
||||
}
|
||||
|
||||
fn parseComment(parser: *Parser, alloc: Allocator) !?[]const u8 {
|
||||
if (!parser.eatStr("<!--")) return null;
|
||||
|
||||
const begin = parser.offset;
|
||||
while (!parser.eatStr("-->")) {
|
||||
_ = parser.consume() catch return error.UnclosedComment;
|
||||
}
|
||||
|
||||
const end = parser.offset - "-->".len;
|
||||
return try alloc.dupe(u8, parser.source[begin..end]);
|
||||
}
|
||||
|
||||
fn unescapeEntity(text: []const u8) !u8 {
|
||||
const EntitySubstition = struct { text: []const u8, replacement: u8 };
|
||||
|
||||
const entities = [_]EntitySubstition{
|
||||
.{ .text = "<", .replacement = '<' },
|
||||
.{ .text = ">", .replacement = '>' },
|
||||
.{ .text = "&", .replacement = '&' },
|
||||
.{ .text = "'", .replacement = '\'' },
|
||||
.{ .text = """, .replacement = '"' },
|
||||
};
|
||||
|
||||
for (entities) |entity| {
|
||||
if (mem.eql(u8, text, entity.text)) return entity.replacement;
|
||||
}
|
||||
|
||||
return error.InvalidEntity;
|
||||
}
|
||||
|
||||
fn unescape(arena: Allocator, text: []const u8) ![]const u8 {
|
||||
const unescaped = try arena.alloc(u8, text.len);
|
||||
|
||||
var j: usize = 0;
|
||||
var i: usize = 0;
|
||||
while (i < text.len) : (j += 1) {
|
||||
if (text[i] == '&') {
|
||||
const entity_end = 1 + (mem.indexOfScalarPos(u8, text, i, ';') orelse return error.InvalidEntity);
|
||||
unescaped[j] = try unescapeEntity(text[i..entity_end]);
|
||||
i = entity_end;
|
||||
} else {
|
||||
unescaped[j] = text[i];
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return unescaped[0..j];
|
||||
}
|
||||
|
||||
test "xml: unescape" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
try testing.expectEqualSlices(u8, "test", try unescape(a, "test"));
|
||||
try testing.expectEqualSlices(u8, "a<b&c>d\"e'f<", try unescape(a, "a<b&c>d"e'f<"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&&"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&test;"));
|
||||
try testing.expectError(error.InvalidEntity, unescape(a, "python&boa"));
|
||||
}
|
||||
|
||||
test "xml: top level comments" {
|
||||
var arena = ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
||||
const doc = try parse(a, "<?xml version='aa'?><!--comment--><python color='green'/><!--another comment-->");
|
||||
try testing.expectEqualSlices(u8, "python", doc.root.tag);
|
||||
}
|
||||
121
test/ref_all_decls.zig
Normal file
@@ -0,0 +1,121 @@
|
||||
const std = @import("std");
|
||||
const vk = @import("vulkan");
|
||||
|
||||
// Provide bogus defaults for unknown platform types
|
||||
// The actual type does not really matter here...
|
||||
pub const GgpStreamDescriptor = u32;
|
||||
pub const GgpFrameToken = u32;
|
||||
pub const _screen_buffer = u32;
|
||||
pub const NvSciSyncAttrList = u32;
|
||||
pub const NvSciSyncObj = u32;
|
||||
pub const NvSciSyncFence = u32;
|
||||
pub const NvSciBufAttrList = u32;
|
||||
pub const NvSciBufObj = u32;
|
||||
pub const ANativeWindow = u32;
|
||||
pub const AHardwareBuffer = u32;
|
||||
pub const CAMetalLayer = u32;
|
||||
pub const MTLDevice_id = u32;
|
||||
pub const MTLCommandQueue_id = u32;
|
||||
pub const MTLBuffer_id = u32;
|
||||
pub const MTLTexture_id = u32;
|
||||
pub const MTLSharedEvent_id = u32;
|
||||
pub const IOSurfaceRef = u32;
|
||||
|
||||
pub const StdVideoH264ProfileIdc = u32;
|
||||
pub const StdVideoH264LevelIdc = u32;
|
||||
pub const StdVideoH264ChromaFormatIdc = u32;
|
||||
pub const StdVideoH264PocType = u32;
|
||||
pub const StdVideoH264SpsFlags = u32;
|
||||
pub const StdVideoH264ScalingLists = u32;
|
||||
pub const StdVideoH264SequenceParameterSetVui = u32;
|
||||
pub const StdVideoH264AspectRatioIdc = u32;
|
||||
pub const StdVideoH264HrdParameters = u32;
|
||||
pub const StdVideoH264SpsVuiFlags = u32;
|
||||
pub const StdVideoH264WeightedBipredIdc = u32;
|
||||
pub const StdVideoH264PpsFlags = u32;
|
||||
pub const StdVideoH264SliceType = u32;
|
||||
pub const StdVideoH264CabacInitIdc = u32;
|
||||
pub const StdVideoH264DisableDeblockingFilterIdc = u32;
|
||||
pub const StdVideoH264PictureType = u32;
|
||||
pub const StdVideoH264ModificationOfPicNumsIdc = u32;
|
||||
pub const StdVideoH264MemMgmtControlOp = u32;
|
||||
pub const StdVideoDecodeH264PictureInfo = u32;
|
||||
pub const StdVideoDecodeH264ReferenceInfo = u32;
|
||||
pub const StdVideoDecodeH264PictureInfoFlags = u32;
|
||||
pub const StdVideoDecodeH264ReferenceInfoFlags = u32;
|
||||
pub const StdVideoH264SequenceParameterSet = u32;
|
||||
pub const StdVideoH264PictureParameterSet = u32;
|
||||
pub const StdVideoH265ProfileIdc = u32;
|
||||
pub const StdVideoH265VideoParameterSet = u32;
|
||||
pub const StdVideoH265SequenceParameterSet = u32;
|
||||
pub const StdVideoH265PictureParameterSet = u32;
|
||||
pub const StdVideoH265DecPicBufMgr = u32;
|
||||
pub const StdVideoH265HrdParameters = u32;
|
||||
pub const StdVideoH265VpsFlags = u32;
|
||||
pub const StdVideoH265LevelIdc = u32;
|
||||
pub const StdVideoH265SpsFlags = u32;
|
||||
pub const StdVideoH265ScalingLists = u32;
|
||||
pub const StdVideoH265SequenceParameterSetVui = u32;
|
||||
pub const StdVideoH265PredictorPaletteEntries = u32;
|
||||
pub const StdVideoH265PpsFlags = u32;
|
||||
pub const StdVideoH265SubLayerHrdParameters = u32;
|
||||
pub const StdVideoH265HrdFlags = u32;
|
||||
pub const StdVideoH265SpsVuiFlags = u32;
|
||||
pub const StdVideoH265SliceType = u32;
|
||||
pub const StdVideoH265PictureType = u32;
|
||||
pub const StdVideoDecodeH265PictureInfo = u32;
|
||||
pub const StdVideoDecodeH265ReferenceInfo = u32;
|
||||
pub const StdVideoDecodeH265PictureInfoFlags = u32;
|
||||
pub const StdVideoDecodeH265ReferenceInfoFlags = u32;
|
||||
pub const StdVideoAV1Profile = u32;
|
||||
pub const StdVideoAV1Level = u32;
|
||||
pub const StdVideoAV1SequenceHeader = u32;
|
||||
pub const StdVideoDecodeAV1PictureInfo = u32;
|
||||
pub const StdVideoDecodeAV1ReferenceInfo = u32;
|
||||
pub const StdVideoEncodeH264SliceHeader = u32;
|
||||
pub const StdVideoEncodeH264PictureInfo = u32;
|
||||
pub const StdVideoEncodeH264ReferenceInfo = u32;
|
||||
pub const StdVideoEncodeH264SliceHeaderFlags = u32;
|
||||
pub const StdVideoEncodeH264ReferenceListsInfo = u32;
|
||||
pub const StdVideoEncodeH264PictureInfoFlags = u32;
|
||||
pub const StdVideoEncodeH264ReferenceInfoFlags = u32;
|
||||
pub const StdVideoEncodeH264RefMgmtFlags = u32;
|
||||
pub const StdVideoEncodeH264RefListModEntry = u32;
|
||||
pub const StdVideoEncodeH264RefPicMarkingEntry = u32;
|
||||
pub const StdVideoEncodeH265PictureInfoFlags = u32;
|
||||
pub const StdVideoEncodeH265PictureInfo = u32;
|
||||
pub const StdVideoEncodeH265SliceSegmentHeader = u32;
|
||||
pub const StdVideoEncodeH265ReferenceInfo = u32;
|
||||
pub const StdVideoEncodeH265ReferenceListsInfo = u32;
|
||||
pub const StdVideoEncodeH265SliceSegmentHeaderFlags = u32;
|
||||
pub const StdVideoEncodeH265ReferenceInfoFlags = u32;
|
||||
pub const StdVideoEncodeH265ReferenceModificationFlags = u32;
|
||||
pub const StdVideoEncodeAV1OperatingPointInfo = u32;
|
||||
|
||||
comptime {
|
||||
@setEvalBranchQuota(1000000);
|
||||
reallyRefAllDecls(vk);
|
||||
}
|
||||
|
||||
fn reallyRefAllDecls(comptime T: type) void {
|
||||
switch (@typeInfo(T)) {
|
||||
.@"struct", .@"union" => {
|
||||
reallyRefAllContainerDecls(T);
|
||||
inline for (std.meta.fields(T)) |field| {
|
||||
reallyRefAllDecls(field.type);
|
||||
}
|
||||
},
|
||||
.@"enum", .@"opaque" => {
|
||||
reallyRefAllContainerDecls(T);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn reallyRefAllContainerDecls(comptime T: type) void {
|
||||
inline for (comptime std.meta.declarations(T)) |decl| {
|
||||
if (@TypeOf(@field(T, decl.name)) == type) {
|
||||
reallyRefAllDecls(@field(T, decl.name));
|
||||
}
|
||||
}
|
||||
}
|
||||