/// Vulkan command module
module gfx.vulkan.cmd;

package:

import gfx.bindings.vulkan;

import gfx.core.rc;
import gfx.core.typecons;
import gfx.graal.cmd;
import gfx.graal.renderpass;
import gfx.vulkan.buffer;
import gfx.vulkan.conv;
import gfx.vulkan.device;
import gfx.vulkan.error;
import gfx.vulkan.image;
import gfx.vulkan.renderpass;

import std.exception : enforce;
import std.typecons : Flag;

/// Vulkan implementation of the graal CommandPool interface.
class VulkanCommandPool : VulkanDevObj!(VkCommandPool, "DestroyCommandPool"), CommandPool
{
    mixin(atomicRcCode);

    this (VkCommandPool pool, VulkanDevice dev) {
        super(pool, dev);
    }

    override void reset() {
        vulkanEnforce(vk.ResetCommandPool(vkDev, vkObj, 0), "Could not reset command pool");
    }

    override CommandBuffer[] allocate(size_t count) {
        VkCommandBufferAllocateInfo cbai;
        cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        cbai.commandPool = vkObj;
        cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        cbai.commandBufferCount = cast(uint)count;

        auto vkBufs = new VkCommandBuffer[count];
        vulkanEnforce(
            vk.AllocateCommandBuffers(vkDev, &cbai, &vkBufs[0]),
            "Could not allocate command buffers"
        );

        import std.algorithm : map;
        import std.array : array;

        return vkBufs
            .map!(vkBuf => cast(CommandBuffer)new VulkanCommandBuffer(vkBuf, this))
            .array;
    }

    override void free(CommandBuffer[] bufs) {
        import std.algorithm : map;
        import std.array : array;

        auto vkBufs = bufs.map!(
            b => enforce(cast(VulkanCommandBuffer)b, "Did not pass a Vulkan command buffer").vkObj
        ).array;
        vk.FreeCommandBuffers(vkDev, vkObj, cast(uint)bufs.length, &vkBufs[0]);
    }
}

/// Vulkan implementation of the graal CommandBuffer interface.
final class VulkanCommandBuffer : CommandBuffer
{
    this (VkCommandBuffer vkObj, VulkanCommandPool pool) {
        _vkObj = vkObj;
        _pool = pool;
        _vk = pool.vk;
    }

    @property VkCommandBuffer vkObj() {
        return _vkObj;
    }

    override @property CommandPool pool() {
        return _pool;
    }

    @property VkDeviceCmds vk() {
        return _vk;
    }

    override void reset() {
        vulkanEnforce(
            vk.ResetCommandBuffer(vkObj, 0), "Could not reset vulkan command buffer"
        );
    }

    override void begin(Flag!"persistent" persistent) {
        VkCommandBufferBeginInfo cbbi;
        cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        // persistent buffers map to simultaneous use, others to one-time submit
        cbbi.flags = persistent ?
                VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT :
                VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        vulkanEnforce(
            vk.BeginCommandBuffer(vkObj, &cbbi), "Could not begin vulkan command buffer"
        );
    }

    override void end() {
        vulkanEnforce(
            vk.EndCommandBuffer(vkObj), "Could not end vulkan command buffer"
        );
    }

    override void pipelineBarrier(Trans!PipelineStage stageTrans,
                                  BufferMemoryBarrier[] bufMbs,
                                  ImageMemoryBarrier[] imgMbs)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkBufMbs = bufMbs.map!((BufferMemoryBarrier bufMb) {
            VkBufferMemoryBarrier vkBufMb;
            vkBufMb.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
            vkBufMb.srcAccessMask = accessToVk(bufMb.accessMaskTrans.from);
            vkBufMb.dstAccessMask = accessToVk(bufMb.accessMaskTrans.to);
            vkBufMb.srcQueueFamilyIndex = bufMb.queueFamIndexTrans.from;
            vkBufMb.dstQueueFamilyIndex = bufMb.queueFamIndexTrans.to;
            vkBufMb.buffer = enforce(cast(VulkanBuffer)bufMb.buffer, "Did not pass a Vulkan buffer").vkObj;
            vkBufMb.offset = bufMb.offset;
            vkBufMb.size = bufMb.size;
            return vkBufMb;
        }).array;

        auto vkImgMbs = imgMbs.map!((ImageMemoryBarrier imgMb) {
            VkImageMemoryBarrier vkImgMb;
            vkImgMb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
            vkImgMb.srcAccessMask = accessToVk(imgMb.accessMaskTrans.from);
            vkImgMb.dstAccessMask = accessToVk(imgMb.accessMaskTrans.to);
            vkImgMb.oldLayout = imgMb.layoutTrans.from.toVk();
            vkImgMb.newLayout = imgMb.layoutTrans.to.toVk();
            vkImgMb.srcQueueFamilyIndex = imgMb.queueFamIndexTrans.from;
            vkImgMb.dstQueueFamilyIndex = imgMb.queueFamIndexTrans.to;
            vkImgMb.image = enforce(cast(VulkanImageBase)imgMb.image, "Did not pass a Vulkan image").vkObj;
            vkImgMb.subresourceRange = imgMb.range.toVk();
            return vkImgMb;
        }).array;

        vk.CmdPipelineBarrier(vkObj,
            pipelineStageToVk(stageTrans.from), pipelineStageToVk(stageTrans.to),
            0, 0, null,
            cast(uint)vkBufMbs.length, vkBufMbs.ptr,
            cast(uint)vkImgMbs.length, vkImgMbs.ptr
        );
    }

    override void clearColorImage(ImageBase image, ImageLayout layout,
                                  in ClearColorValues clearValues, ImageSubresourceRange[] ranges)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkImg = enforce(cast(VulkanImageBase)image, "Did not pass a vulkan image").vkObj;
        auto vkLayout = layout.toVk();
        // reinterpret the graal clear color union in place as VkClearColorValue
        auto vkClear = cast(const(VkClearColorValue)*) cast(const(void)*) &clearValues.values;
        auto vkRanges = ranges.map!(r => r.toVk()).array;

        vk.CmdClearColorImage(vkObj, vkImg, vkLayout, vkClear, cast(uint)vkRanges.length, &vkRanges[0]);
    }

    override void clearDepthStencilImage(ImageBase image, ImageLayout layout,
                                         in ClearDepthStencilValues clearValues,
                                         ImageSubresourceRange[] ranges)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkImg = enforce(cast(VulkanImageBase)image, "Did not pass a vulkan image").vkObj;
        auto vkLayout = layout.toVk();
        auto vkClear = VkClearDepthStencilValue(clearValues.depth, clearValues.stencil);
        auto vkRanges = ranges.map!(r => r.toVk()).array;

        vk.CmdClearDepthStencilImage(vkObj, vkImg, vkLayout, &vkClear, cast(uint)vkRanges.length, &vkRanges[0]);
    }

    override void copyBuffer(Trans!Buffer buffers, CopyRegion[] regions)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkRegions = regions.map!(
            r => VkBufferCopy(r.offset.from,
                r.offset.to, r.size)
        ).array;

        vk.CmdCopyBuffer(vkObj,
            enforce(cast(VulkanBuffer)buffers.from).vkObj,
            enforce(cast(VulkanBuffer)buffers.to).vkObj,
            cast(uint)vkRegions.length, vkRegions.ptr
        );
    }

    void copyBufferToImage(Buffer srcBuffer, ImageBase dstImage,
                           in ImageLayout dstLayout, in BufferImageCopy[] regions)
    {
        import gfx.core.util : transmute;
        import std.algorithm : map;
        import std.array : array;

        auto vkRegions = regions.map!(
            bic => transmute!VkBufferImageCopy(bic)
        ).array;

        vk.CmdCopyBufferToImage(
            vkObj,
            enforce(cast(VulkanBuffer)srcBuffer).vkObj,
            enforce(cast(VulkanImageBase)dstImage).vkObj,
            dstLayout.toVk(),
            cast(uint)vkRegions.length, vkRegions.ptr
        );
    }

    override void setViewport(in uint firstViewport, in Viewport[] viewports)
    {
        import gfx.core.util : transmute;
        const vkVp = transmute!(const(VkViewport)[])(viewports);
        vk.CmdSetViewport(vkObj, firstViewport, cast(uint)vkVp.length, vkVp.ptr);
    }

    override void setScissor(in uint firstScissor, in Rect[] scissors)
    {
        import gfx.core.util : transmute;
        const vkSc = transmute!(const(VkRect2D)[])(scissors);
        vk.CmdSetScissor(vkObj, firstScissor, cast(uint)vkSc.length, vkSc.ptr);
    }

    override void setDepthBounds(in float minDepth, in float maxDepth)
    {
        vk.CmdSetDepthBounds(vkObj, minDepth, maxDepth);
    }

    override void setLineWidth(in float lineWidth)
    {
        vk.CmdSetLineWidth(vkObj, lineWidth);
    }

    override void setDepthBias(in float constFactor, in float clamp, in float slopeFactor)
    {
        vk.CmdSetDepthBias(vkObj, constFactor, clamp, slopeFactor);
    }

    override void setStencilCompareMask(in StencilFace faceMask, in uint compareMask)
    {
        vk.CmdSetStencilCompareMask(vkObj, cast(VkStencilFaceFlags)faceMask, compareMask);
    }

    override void setStencilWriteMask(in StencilFace faceMask, in uint writeMask)
    {
        vk.CmdSetStencilWriteMask(vkObj, cast(VkStencilFaceFlags)faceMask, writeMask);
    }

    override void setStencilReference(in StencilFace faceMask, in uint reference)
    {
        vk.CmdSetStencilReference(vkObj, cast(VkStencilFaceFlags)faceMask, reference);
    }

    override void setBlendConstants(in float[4] blendConstants)
    {
        vk.CmdSetBlendConstants(vkObj, blendConstants);
    }

    override void beginRenderPass(RenderPass rp, Framebuffer fb,
                                  Rect area, ClearValues[] clearValues)
    {
        import std.algorithm : map;
        import std.array : array;
        auto vkCvs = clearValues.map!(
            (ClearValues cv) {
                VkClearValue vkCv;
                if (cv.type == ClearValues.Type.color) {
                    const ccv = cv.values.color;
                    VkClearColorValue vkCcv;
                    switch (ccv.type) {
                    case ClearColorValues.Type.f32:
                        vkCcv.float32 = ccv.values.f32;
                        break;
                    case ClearColorValues.Type.i32:
                        vkCcv.int32 = ccv.values.i32;
                        break;
                    case ClearColorValues.Type.u32:
                        vkCcv.uint32 = ccv.values.u32;
                        break;
                    default:
                        break;
                    }
                    vkCv.color = vkCcv;
                }
                else if (cv.type == ClearValues.Type.depthStencil) {
                    const dscv = cv.values.depthStencil;
                    vkCv.depthStencil = VkClearDepthStencilValue(
                        dscv.depth, dscv.stencil
                    );
                }
                return vkCv;
            }
        ).array;

        VkRenderPassBeginInfo bi;
        bi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
        bi.renderPass = enforce(cast(VulkanRenderPass)rp, "did not supply a valid Vulkan render pass").vkObj;
        bi.framebuffer =
            enforce(cast(VulkanFramebuffer)fb, "did not supply a valid Vulkan frame buffer").vkObj;
        bi.renderArea = area.toVk();
        bi.clearValueCount = cast(uint)vkCvs.length;
        bi.pClearValues = vkCvs.ptr;

        vk.CmdBeginRenderPass(vkObj, &bi, VK_SUBPASS_CONTENTS_INLINE);
    }

    override void nextSubpass() {
        vk.CmdNextSubpass(vkObj, VK_SUBPASS_CONTENTS_INLINE);
    }

    override void endRenderPass() {
        vk.CmdEndRenderPass(vkObj);
    }

    override void bindPipeline(Pipeline pipeline)
    {
        vk.CmdBindPipeline(vkObj, VK_PIPELINE_BIND_POINT_GRAPHICS, enforce(
            cast(VulkanPipeline)pipeline, "did not pass a valid Vulkan pipeline"
        ).vkObj);
    }

    override void bindVertexBuffers(uint firstBinding, VertexBinding[] bindings) {
        import std.algorithm : map;
        import std.array : array;
        auto vkBufs = bindings
            .map!(b => enforce(cast(VulkanBuffer)b.buffer).vkObj)
            .array;
        auto vkOffsets = bindings
            .map!(b => cast(VkDeviceSize)b.offset)
            .array;
        vk.CmdBindVertexBuffers(vkObj, firstBinding, cast(uint)bindings.length, vkBufs.ptr, vkOffsets.ptr);
    }

    override void bindIndexBuffer(Buffer indexBuf, size_t offset, IndexType type) {
        auto vkBuf = enforce(cast(VulkanBuffer)indexBuf).vkObj;
        vk.CmdBindIndexBuffer(vkObj, vkBuf, offset, type.toVk());
    }

    override void bindDescriptorSets(PipelineBindPoint bindPoint, PipelineLayout layout,
                                     uint firstSet, DescriptorSet[] sets,
                                     in size_t[] dynamicOffsets)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkSets = sets.map!(s => enforce(cast(VulkanDescriptorSet)s).vkObj).array;
        // Vulkan expects uint dynamic offsets; convert only when size_t is wider
        static if (size_t.sizeof == uint.sizeof) {
            const vkOffsets = dynamicOffsets;
        }
        else {
            const vkOffsets = dynamicOffsets.map!(o => cast(uint)o).array;
        }

        vk.CmdBindDescriptorSets(vkObj, bindPoint.toVk(),
            enforce(cast(VulkanPipelineLayout)layout).vkObj,
            firstSet, cast(uint)vkSets.length, vkSets.ptr,
            cast(uint)vkOffsets.length, vkOffsets.ptr);
    }

    override void pushConstants(PipelineLayout layout, ShaderStage stages,
                                size_t offset, size_t size, const(void)* data)
    {
        auto vkPl = enforce(cast(VulkanPipelineLayout)layout).vkObj;
        vk.CmdPushConstants(vkObj, vkPl, shaderStageToVk(stages), cast(uint)offset, cast(uint)size, data);
    }

    override void draw(uint vertexCount, uint instanceCount, uint firstVertex, uint firstInstance)
    {
        vk.CmdDraw(vkObj, vertexCount, instanceCount, firstVertex, firstInstance);
    }

    override void drawIndexed(uint indexCount, uint instanceCount, uint firstIndex, int vertexOffset, uint firstInstance)
    {
        vk.CmdDrawIndexed(vkObj, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
    }

    private VkCommandBuffer _vkObj;
    private VulkanCommandPool _pool;
    private VkDeviceCmds _vk;
}
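
// Usage sketch (not part of the library build): shows how the pool and command
// buffer API above fit together. The version identifier and function name are
// illustrative assumptions; real code would record actual commands and submit
// the buffer through a graal queue before freeing it.
version (GfxVulkanCmdUsageExample)
{
    private void recordOneTimeCommands(VulkanCommandPool pool)
    {
        import std.typecons : No;

        // allocate a single primary command buffer from the pool
        auto bufs = pool.allocate(1);
        scope(exit) pool.free(bufs);

        auto cmd = bufs[0];
        // No.persistent maps to VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT in begin()
        cmd.begin(No.persistent);
        // record commands here: pipelineBarrier, beginRenderPass, draw, ...
        cmd.end();
    }
}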