/// Vulkan device module
module gfx.vulkan.device;

package:

import core.time : Duration;

import gfx.bindings.vulkan;

import gfx.core.rc;
import gfx.graal.cmd;
import gfx.graal.device;
import gfx.graal.image;
import gfx.graal.memory;
import gfx.graal.presentation;
import gfx.graal.queue;
import gfx.graal.pipeline;
import gfx.graal.sync;
import gfx.vulkan;
import gfx.vulkan.buffer;
import gfx.vulkan.cmd;
import gfx.vulkan.conv;
import gfx.vulkan.error;
import gfx.vulkan.image;
import gfx.vulkan.memory;
import gfx.vulkan.pipeline;
import gfx.vulkan.queue;
import gfx.vulkan.renderpass;
import gfx.vulkan.sync;
import gfx.vulkan.wsi;

import std.typecons : Flag;

/// Common base for device-owned Vulkan objects.
///
/// Holds the raw Vulkan handle together with a retained reference to the
/// owning `VulkanDevice`, and destroys the handle on `dispose` by mixing in
/// a call to the device-command named by `destroyFn`
/// (e.g. `"DestroySampler"`).
class VulkanDevObj(VkType, string destroyFn) : Disposable
{
    this (VkType vkObj, VulkanDevice dev)
    {
        _vkObj = vkObj;
        _dev = dev;
        // Keep the device alive at least as long as this object.
        _dev.retain();
        _vk = _dev.vk;
    }

    override void dispose() {
        // Expands to e.g. `vk.DestroySampler(vkDev, vkObj, null);`.
        mixin("vk."~destroyFn~"(vkDev, vkObj, null);");
        _dev.release();
        _dev = null;
    }

    /// The raw Vulkan handle.
    final @property VkType vkObj() {
        return _vkObj;
    }

    /// The owning device wrapper.
    final @property VulkanDevice dev() {
        return _dev;
    }

    /// The owning device's raw `VkDevice` handle.
    final @property VkDevice vkDev() {
        return _dev.vkObj;
    }

    /// The device-level command table.
    final @property VkDeviceCmds vk() {
        return _vk;
    }

    private VkType _vkObj;
    private VulkanDevice _dev;
    private VkDeviceCmds _vk;
}

/// Vulkan implementation of the graal `Device` interface.
///
/// Wraps a `VkDevice` and creates/owns all device-level resources
/// (queues, pools, memory, images, pipelines, ...).
final class VulkanDevice : VulkanObj!(VkDevice), Device
{
    mixin(atomicRcCode);

    this (VkDevice vkObj, VulkanPhysicalDevice pd)
    {
        super(vkObj);
        _pd = pd;
        // The physical device (and through it the instance) must outlive
        // this logical device.
        _pd.retain();
        _vk = new VkDeviceCmds(vkObj, pd.vk);
    }

    override void dispose() {
        vk.DestroyDevice(vkObj, null);
        _pd.release();
        _pd = null;
    }

    /// The physical device this device was created from.
    @property VulkanPhysicalDevice pd() {
        return _pd;
    }

    /// The device-level command table.
    @property VkDeviceCmds vk() {
        return _vk;
    }

    override void waitIdle() {
        vulkanEnforce(
            vk.DeviceWaitIdle(vkObj),
            "Problem waiting for device"
        );
    }

    /// Fetches the queue at (`queueFamilyIndex`, `queueIndex`).
    /// The same `VulkanQueue` wrapper is returned for repeated calls with
    /// the same underlying `VkQueue`.
    override Queue getQueue(uint queueFamilyIndex, uint queueIndex) {
        VkQueue vkQ;
        vk.GetDeviceQueue(vkObj, queueFamilyIndex, queueIndex, &vkQ);

        // Reuse an existing wrapper if this queue was already fetched.
        foreach (q; _queues) {
            if (q.vkObj is vkQ) {
                return q;
            }
        }

        auto q = new VulkanQueue(vkQ, this);
        _queues ~= q;
        return q;
    }

    override CommandPool createCommandPool(uint queueFamilyIndex) {
        VkCommandPoolCreateInfo cci;
        cci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        cci.queueFamilyIndex = queueFamilyIndex;
        // Allow individual command buffers to be reset.
        cci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

        VkCommandPool vkPool;
        vulkanEnforce(
            vk.CreateCommandPool(vkObj, &cci, null, &vkPool),
            "Could not create vulkan command pool"
        );

        return new VulkanCommandPool(vkPool, this);
    }

    /// Allocates `size` bytes of device memory from memory type
    /// `memTypeIndex` of the physical device.
    override DeviceMemory allocateMemory(uint memTypeIndex, size_t size)
    {
        VkMemoryAllocateInfo mai;
        mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        mai.allocationSize = size;
        mai.memoryTypeIndex = memTypeIndex;

        VkDeviceMemory vkMem;
        vulkanEnforce(vk.AllocateMemory(vkObj, &mai, null, &vkMem), "Could not allocate device memory");

        const props = pd.memoryProperties.types[memTypeIndex].props;

        return new VulkanDeviceMemory(vkMem, this, props, size, memTypeIndex);
    }

    override void flushMappedMemory(MappedMemorySet set)
    {
        import std.algorithm : map;
        import std.array : array;

        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vkObj;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        // Vulkan disallows zero-count range operations.
        if (!mmrs.length) return;

        // Previously the VkResult was silently dropped; enforce it like
        // every other Vulkan call in this module.
        vulkanEnforce(
            vk.FlushMappedMemoryRanges(vkObj, cast(uint)mmrs.length, mmrs.ptr),
            "Could not flush mapped memory ranges"
        );
    }

    override void invalidateMappedMemory(MappedMemorySet set) {
        import std.algorithm : map;
        import std.array : array;

        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vkObj;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        // Vulkan disallows zero-count range operations.
        if (!mmrs.length) return;

        // Previously the VkResult was silently dropped; enforce it like
        // every other Vulkan call in this module.
        vulkanEnforce(
            vk.InvalidateMappedMemoryRanges(vkObj, cast(uint)mmrs.length, mmrs.ptr),
            "Could not invalidate mapped memory ranges"
        );
    }

    override Buffer createBuffer(BufferUsage usage, size_t size)
    {
        VkBufferCreateInfo bci;
        bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        bci.size = size;
        bci.usage = bufferUsageToVk(usage);

        VkBuffer vkBuf;
        vulkanEnforce(vk.CreateBuffer(vkObj, &bci, null, &vkBuf), "Could not create a Vulkan buffer");

        return new VulkanBuffer(vkBuf, this, usage, size);
    }

    override Image createImage(in ImageInfo info)
    {
        import gfx.core.util : transmute;

        VkImageCreateInfo ici;
        ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
        if (info.type.isCube) ici.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
        ici.imageType = info.type.toVk();
        ici.format = info.format.toVk();
        // ImageDims and VkExtent3D share the same layout.
        ici.extent = info.dims.transmute!VkExtent3D;
        ici.mipLevels = info.levels;
        ici.arrayLayers = info.layers;
        ici.samples = cast(typeof(ici.samples))info.samples;
        ici.tiling = info.tiling.toVk();
        ici.usage = imageUsageToVk(info.usage);
        ici.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

        VkImage vkImg;
        vulkanEnforce(vk.CreateImage(vkObj, &ici, null, &vkImg), "Could not create a Vulkan image");

        return new VulkanImage(vkImg, this, info);
    }

    // `override` added for consistency with the other Device interface
    // implementations in this class.
    override Sampler createSampler(in SamplerInfo info) {
        import gfx.core.typecons : ifNone, ifSome;
        import std.algorithm : each;

        VkSamplerCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
        sci.minFilter = info.minFilter.toVk();
        sci.magFilter = info.magFilter.toVk();
        sci.mipmapMode = info.mipmapFilter.toVkMipmapMode();
        sci.addressModeU = info.wrapMode[0].toVk();
        sci.addressModeV = info.wrapMode[1].toVk();
        sci.addressModeW = info.wrapMode[2].toVk();
        sci.mipLodBias = info.lodBias;
        // Anisotropy is optional: enabled with the given max when present.
        info.anisotropy.save.ifSome!((float max) {
            sci.anisotropyEnable = VK_TRUE;
            sci.maxAnisotropy = max;
        }).ifNone!({
            sci.anisotropyEnable = VK_FALSE;
            sci.maxAnisotropy = 1f;
        });
        // Compare op is optional: only enabled when present.
        info.compare.save.each!((CompareOp op) {
            sci.compareEnable = VK_TRUE;
            sci.compareOp = op.toVk();
        });
        sci.minLod = info.lodRange[0];
        sci.maxLod = info.lodRange[1];
        sci.borderColor = info.borderColor.toVk();
        sci.unnormalizedCoordinates = info.unnormalizeCoords ? VK_TRUE : VK_FALSE;

        VkSampler vkS;
        vulkanEnforce(
            vk.CreateSampler(vkObj, &sci, null, &vkS),
            "Could not create Vulkan sampler"
        );

        return new VulkanSampler(vkS, this);
    }

    override Semaphore createSemaphore()
    {
        VkSemaphoreCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

        VkSemaphore vkSem;
        vulkanEnforce(vk.CreateSemaphore(vkObj, &sci, null, &vkSem), "Could not create a Vulkan semaphore");

        return new VulkanSemaphore(vkSem, this);
    }

    override Fence createFence(Flag!"signaled" signaled)
    {
        VkFenceCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        if (signaled) {
            fci.flags = VK_FENCE_CREATE_SIGNALED_BIT;
        }
        VkFence vkF;
        vulkanEnforce(vk.CreateFence(vkObj, &fci, null, &vkF), "Could not create a Vulkan fence");

        return new VulkanFence(vkF, this);
    }

    override void resetFences(Fence[] fences) {
        import std.algorithm : map;
        import std.array : array;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vkObj
        ).array;

        // `&vkFs[0]` would throw a RangeError on an empty array, and Vulkan
        // forbids a zero fence count anyway.
        if (!vkFs.length) return;

        vulkanEnforce(
            vk.ResetFences(vkObj, cast(uint)vkFs.length, vkFs.ptr),
            "Could not reset vulkan fences"
        );
    }

    /// Waits until all (or any, per `waitAll`) of `fences` are signaled,
    /// or until `timeout` elapses. A negative timeout means "wait forever".
    override void waitForFences(Fence[] fences, Flag!"waitAll" waitAll, Duration timeout)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vkObj
        ).array;

        // `&vkFs[0]` would throw a RangeError on an empty array, and Vulkan
        // forbids a zero fence count anyway.
        if (!vkFs.length) return;

        const vkWaitAll = waitAll ? VK_TRUE : VK_FALSE;
        const nsecs = timeout.total!"nsecs";
        // Map negative durations to an infinite wait.
        const vkTimeout = nsecs < 0 ? ulong.max : cast(ulong)nsecs;

        vulkanEnforce(
            vk.WaitForFences(vkObj, cast(uint)vkFs.length, vkFs.ptr, vkWaitAll, vkTimeout),
            "could not wait for vulkan fences"
        );
    }


    override Swapchain createSwapchain(Surface graalSurface, PresentMode pm, uint numImages,
                                       Format format, uint[2] size, ImageUsage usage,
                                       CompositeAlpha alpha, Swapchain old=null)
    {
        auto surf = enforce(
            cast(VulkanSurface)graalSurface,
            "Did not pass a Vulkan surface"
        );

        auto oldSc = old ? enforce(
            cast(VulkanSwapchain)old, "Did not pass a vulkan swapchain"
        ) : null;

        VkSwapchainCreateInfoKHR sci;
        sci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
        sci.surface = surf.vkObj;
        sci.minImageCount = numImages;
        sci.imageFormat = format.toVk;
        sci.imageExtent = VkExtent2D(size[0], size[1]);
        sci.imageArrayLayers = 1;
        sci.imageUsage = imageUsageToVk(usage);
        sci.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
        sci.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
        sci.clipped = VK_TRUE;
        sci.presentMode = pm.toVk;
        sci.compositeAlpha = compositeAlphaToVk(alpha);
        // Passing the old swapchain lets the driver recycle its resources.
        sci.oldSwapchain = oldSc ? oldSc.vkObj : VK_NULL_ND_HANDLE;

        VkSwapchainKHR vkSc;
        vulkanEnforce(
            vk.CreateSwapchainKHR(vkObj, &sci, null, &vkSc),
            "Could not create a Vulkan Swap chain"
        );

        return new VulkanSwapchain(vkSc, this, size, format);
    }

    override RenderPass createRenderPass(in AttachmentDescription[] attachments,
                                         in SubpassDescription[] subpasses,
                                         in SubpassDependency[] dependencies)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkAttachments = attachments.map!((ref const(AttachmentDescription) ad) {
            VkAttachmentDescription vkAd;
            if (ad.mayAlias) {
                vkAd.flags = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT;
            }
            vkAd.format = ad.format.toVk();
            vkAd.loadOp = ad.colorDepthOps.load.toVk();
            vkAd.storeOp = ad.colorDepthOps.store.toVk();
            vkAd.stencilLoadOp = ad.stencilOps.load.toVk();
            vkAd.stencilStoreOp = ad.stencilOps.store.toVk();
            vkAd.initialLayout = ad.layoutTrans.from.toVk();
            vkAd.finalLayout = ad.layoutTrans.to.toVk();
            return vkAd;
        }).array;

        static VkAttachmentReference mapRef (in AttachmentRef ar) {
            return VkAttachmentReference(ar.attachment, ar.layout.toVk());
        }
        static VkAttachmentReference[] mapRefs(in AttachmentRef[] ars) {
            return ars.map!mapRef.array;
        }
        // NOTE(review): the per-subpass attachment arrays are referenced
        // only through raw pointers in VkSubpassDescription; they must stay
        // alive until the CreateRenderPass call below (the GC keeps them
        // alive through the scanned vkSubpasses array).
        auto vkSubpasses = subpasses.map!((ref const(SubpassDescription) sd) {
            auto vkInputs = mapRefs(sd.inputs);
            auto vkColors = mapRefs(sd.colors);
            auto vkDepthStencil = sd.depthStencil.save.map!(mapRef).array;
            VkSubpassDescription vkSd;
            vkSd.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
            vkSd.inputAttachmentCount = cast(uint)vkInputs.length;
            vkSd.pInputAttachments = vkInputs.ptr;
            vkSd.colorAttachmentCount = cast(uint)vkColors.length;
            vkSd.pColorAttachments = vkColors.ptr;
            vkSd.pDepthStencilAttachment = vkDepthStencil.length ?
                    vkDepthStencil.ptr : null;
            vkSd.preserveAttachmentCount = cast(uint)sd.preserves.length;
            vkSd.pPreserveAttachments = sd.preserves.ptr;
            return vkSd;
        }).array;

        auto vkDeps = dependencies.map!((ref const(SubpassDependency) sd) {
            VkSubpassDependency vkSd;
            vkSd.srcSubpass = sd.subpass.from;
            vkSd.dstSubpass = sd.subpass.to;
            vkSd.srcStageMask = pipelineStageToVk(sd.stageMask.from);
            vkSd.dstStageMask = pipelineStageToVk(sd.stageMask.to);
            vkSd.srcAccessMask = accessToVk(sd.accessMask.from);
            vkSd.dstAccessMask = accessToVk(sd.accessMask.to);
            return vkSd;
        }).array;

        VkRenderPassCreateInfo rpci;
        rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
        rpci.attachmentCount = cast(uint)vkAttachments.length;
        rpci.pAttachments = vkAttachments.ptr;
        rpci.subpassCount = cast(uint)vkSubpasses.length;
        rpci.pSubpasses = vkSubpasses.ptr;
        rpci.dependencyCount = cast(uint)vkDeps.length;
        rpci.pDependencies = vkDeps.ptr;

        VkRenderPass vkRp;
        vulkanEnforce(
            vk.CreateRenderPass(vkObj, &rpci, null, &vkRp),
            "Could not create a Vulkan render pass"
        );

        return new VulkanRenderPass(vkRp, this);
    }


    override Framebuffer createFramebuffer(RenderPass rp, ImageView[] attachments,
                                           uint width, uint height, uint layers)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkRp = enforce(cast(VulkanRenderPass)rp, "Did not pass a Vulkan render pass").vkObj;
        auto vkAttachments = attachments.map!(
            iv => enforce(cast(VulkanImageView)iv, "Did not pass a Vulkan image view").vkObj
        ).array;

        VkFramebufferCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
        fci.renderPass = vkRp;
        fci.attachmentCount = cast(uint)vkAttachments.length;
        fci.pAttachments = vkAttachments.ptr;
        fci.width = width;
        fci.height = height;
        fci.layers = layers;

        VkFramebuffer vkFb;
        vulkanEnforce(
            vk.CreateFramebuffer(vkObj, &fci, null, &vkFb),
            "Could not create a Vulkan Framebuffer"
        );

        // The framebuffer wrapper retains the attachments.
        return new VulkanFramebuffer(vkFb, this, attachments);
    }

    /// Creates a shader module from SPIR-V `code` (array of 32-bit words).
    override ShaderModule createShaderModule(const(uint)[] code, string entryPoint)
    {
        VkShaderModuleCreateInfo smci;
        smci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        // codeSize is expressed in bytes, code.length in 32-bit words.
        smci.codeSize = cast(uint)code.length * 4;
        smci.pCode = code.ptr;

        VkShaderModule vkSm;
        vulkanEnforce(
            vk.CreateShaderModule(vkObj, &smci, null, &vkSm),
            "Could not create Vulkan shader module"
        );

        return new VulkanShaderModule(vkSm, this, entryPoint);
    }

    override DescriptorSetLayout createDescriptorSetLayout(in PipelineLayoutBinding[] bindings)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkBindings = bindings.map!(b => VkDescriptorSetLayoutBinding(
            b.binding, b.descriptorType.toVk(), b.descriptorCount, shaderStageToVk(b.stages), null
        )).array;

        VkDescriptorSetLayoutCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        ci.bindingCount = cast(uint)vkBindings.length;
        ci.pBindings = vkBindings.ptr;

        VkDescriptorSetLayout vkL;
        vulkanEnforce(
            vk.CreateDescriptorSetLayout(vkObj, &ci, null, &vkL),
            "Could not create Vulkan descriptor set layout"
        );

        return new VulkanDescriptorSetLayout(vkL, this);
    }

    override PipelineLayout createPipelineLayout(DescriptorSetLayout[] layouts,
                                                 PushConstantRange[] ranges)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkLayouts = layouts.map!(
            l => enforce(
                cast(VulkanDescriptorSetLayout)l,
                "VulkanDevice.createPipelineLayout: Did not supply a Vulkan DescriptorSetLayout"
            ).vkObj
        ).array;
        auto vkRanges = ranges.map!(
            r => VkPushConstantRange( shaderStageToVk(r.stages), r.offset, r.size )
        ).array;

        VkPipelineLayoutCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
        ci.setLayoutCount = cast(uint)vkLayouts.length;
        ci.pSetLayouts = vkLayouts.ptr;
        ci.pushConstantRangeCount = cast(uint)vkRanges.length;
        ci.pPushConstantRanges = vkRanges.ptr;

        VkPipelineLayout vkPl;
        vulkanEnforce(
            vk.CreatePipelineLayout(vkObj, &ci, null, &vkPl),
            "Could not create Vulkan pipeline layout"
        );
        return new VulkanPipelineLayout(vkPl, this);
    }

    override DescriptorPool createDescriptorPool(in uint maxSets, in DescriptorPoolSize[] sizes)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkSizes = sizes.map!(
            s => VkDescriptorPoolSize(s.type.toVk(), s.count)
        ).array;

        VkDescriptorPoolCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        ci.maxSets = maxSets;
        ci.poolSizeCount = cast(uint)vkSizes.length;
        ci.pPoolSizes = vkSizes.ptr;

        VkDescriptorPool vkP;
        vulkanEnforce(
            vk.CreateDescriptorPool(vkObj, &ci, null, &vkP),
            "Could not create Vulkan Descriptor Pool"
        );

        return new VulkanDescriptorPool(vkP, this);
    }

    /// Performs the given descriptor write and copy operations.
    ///
    /// Each write is dispatched on its descriptor type to build the matching
    /// image / buffer / texel-buffer info array.
    override void updateDescriptorSets(WriteDescriptorSet[] writeOps, CopyDescritporSet[] copyOps)
    {
        import gfx.core.util : unsafeCast;
        import std.algorithm : map;
        import std.array : array;

        // NOTE(review): the vkArr temporaries below are referenced only
        // through raw pointers inside VkWriteDescriptorSet; they must stay
        // alive until the UpdateDescriptorSets call at the end of this
        // method (the GC keeps them alive through the scanned vkWrites
        // array).
        auto vkWrites = writeOps.map!((WriteDescriptorSet wds) {
            VkWriteDescriptorSet vkWds;
            vkWds.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            vkWds.dstSet = enforce(cast(VulkanDescriptorSet)wds.dstSet).vkObj;
            vkWds.dstBinding = wds.dstBinding;
            vkWds.dstArrayElement = wds.dstArrayElem;
            vkWds.descriptorCount = cast(uint)wds.writes.count;
            vkWds.descriptorType = wds.writes.type.toVk();

            switch (wds.writes.type) {
            case DescriptorType.sampler:
                auto w = unsafeCast!(SamplerDescWrites)(wds.writes);
                auto vkArr = w.descs.map!((Sampler s) {
                    VkDescriptorImageInfo dii;
                    dii.sampler = enforce(cast(VulkanSampler)s).vkObj;
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.combinedImageSampler:
                auto w = unsafeCast!(CombinedImageSamplerDescWrites)(wds.writes);
                auto vkArr = w.descs.map!((CombinedImageSampler cis) {
                    VkDescriptorImageInfo dii;
                    dii.sampler = enforce(cast(VulkanSampler)cis.sampler).vkObj;
                    dii.imageView = enforce(cast(VulkanImageView)cis.view).vkObj;
                    dii.imageLayout = cis.layout.toVk();
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.sampledImage:
            case DescriptorType.storageImage:
            case DescriptorType.inputAttachment:
                auto w = unsafeCast!(TDescWritesBase!(ImageViewLayout))(wds.writes);
                auto vkArr = w.descs.map!((ImageViewLayout ivl) {
                    VkDescriptorImageInfo dii;
                    dii.imageView = enforce(cast(VulkanImageView)ivl.view).vkObj;
                    dii.imageLayout = ivl.layout.toVk();
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.uniformBuffer:
            case DescriptorType.storageBuffer:
            case DescriptorType.uniformBufferDynamic:
            case DescriptorType.storageBufferDynamic:
                auto w = unsafeCast!(TDescWritesBase!(BufferRange))(wds.writes);
                auto vkArr = w.descs.map!((BufferRange br) {
                    VkDescriptorBufferInfo dbi;
                    dbi.buffer = enforce(cast(VulkanBuffer)br.buffer).vkObj;
                    dbi.offset = br.offset;
                    dbi.range = br.range;
                    return dbi;
                }).array;
                vkWds.pBufferInfo = vkArr.ptr;
                break;
            case DescriptorType.uniformTexelBuffer:
            case DescriptorType.storageTexelBuffer:
                auto w = unsafeCast!(TDescWritesBase!(BufferView))(wds.writes);
                auto vkArr = w.descs.map!((BufferView bv) {
                    return enforce(cast(VulkanBufferView)bv).vkObj;
                }).array;
                vkWds.pTexelBufferView = vkArr.ptr;
                break;
            default:
                // Unknown descriptor type: emit a no-op write.
                vkWds.descriptorCount = 0;
                break;
            }

            return vkWds;
        }).array;

        auto vkCopies = copyOps.map!((CopyDescritporSet cds) {
            VkCopyDescriptorSet vkCds;
            vkCds.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
            vkCds.srcSet = enforce(cast(VulkanDescriptorSet)cds.set.from).vkObj;
            vkCds.srcBinding = cds.binding.from;
            vkCds.srcArrayElement = cds.arrayElem.from;
            vkCds.dstSet = enforce(cast(VulkanDescriptorSet)cds.set.to).vkObj;
            vkCds.dstBinding = cds.binding.to;
            vkCds.dstArrayElement = cds.arrayElem.to;
            return vkCds;
        }).array;

        vk.UpdateDescriptorSets(vkObj,
            cast(uint)vkWrites.length, vkWrites.ptr,
            cast(uint)vkCopies.length, vkCopies.ptr
        );
    }

    /// Creates one graphics pipeline per entry of `infos`, in a single
    /// `vkCreateGraphicsPipelines` call.
    override Pipeline[] createPipelines(PipelineInfo[] infos) {
        import gfx.core.util : transmute;
        import std.algorithm : map, max;
        import std.array : array;
        import std.string : toStringz;

        auto pcis = new VkGraphicsPipelineCreateInfo[infos.length];

        // NOTE(review): all the state structs below are GC-allocated and
        // referenced through raw pointers in pcis; they must stay alive
        // until the CreateGraphicsPipelines call after this loop.
        foreach (i; 0 .. infos.length) {
            VkPipelineShaderStageCreateInfo[] sscis;
            void addShaderStage(ShaderModule sm, ShaderStage ss) {
                VkPipelineShaderStageCreateInfo ssci;
                ssci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
                ssci.stage = shaderStageToVk(ss);
                ssci.module_ = enforce(
                    cast(VulkanShaderModule)sm,
                    "did not pass a Vulkan shader module"
                ).vkObj;
                ssci.pName = toStringz(sm.entryPoint);
                sscis ~= ssci;
            }
            auto shaders = infos[i].shaders;
            enforce(shaders.vertex, "Vertex input shader is mandatory");
            addShaderStage(shaders.vertex, ShaderStage.vertex);
            if (shaders.tessControl)
                addShaderStage(shaders.tessControl, ShaderStage.tessellationControl);
            if (shaders.tessEval)
                addShaderStage(shaders.tessEval, ShaderStage.tessellationEvaluation);
            if (shaders.geometry)
                addShaderStage(shaders.geometry, ShaderStage.geometry);
            if (shaders.fragment)
                addShaderStage(shaders.fragment, ShaderStage.fragment);


            auto vkInputBindings = infos[i].inputBindings.map!(
                ib => VkVertexInputBindingDescription(
                    ib.binding, cast(uint)ib.stride,
                    ib.instanced ?
                            VK_VERTEX_INPUT_RATE_INSTANCE :
                            VK_VERTEX_INPUT_RATE_VERTEX
                )
            ).array;

            auto vkInputAttribs = infos[i].inputAttribs.map!(
                ia => VkVertexInputAttributeDescription(
                    ia.location, ia.binding, ia.format.toVk(), cast(uint)ia.offset
                )
            ).array;

            auto vkVtxInput = new VkPipelineVertexInputStateCreateInfo;
            vkVtxInput.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
            vkVtxInput.vertexBindingDescriptionCount = cast(uint)vkInputBindings.length;
            vkVtxInput.pVertexBindingDescriptions = vkInputBindings.ptr;
            vkVtxInput.vertexAttributeDescriptionCount = cast(uint)vkInputAttribs.length;
            vkVtxInput.pVertexAttributeDescriptions = vkInputAttribs.ptr;

            auto vkAssy = new VkPipelineInputAssemblyStateCreateInfo;
            vkAssy.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
            vkAssy.topology = infos[i].assembly.primitive.toVk();
            vkAssy.primitiveRestartEnable = infos[i].assembly.primitiveRestart ? VK_TRUE : VK_FALSE;

            auto vkViewport = new VkPipelineViewportStateCreateInfo;
            vkViewport.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
            if (infos[i].viewports.length) {
                auto vkViewports = infos[i].viewports.map!(vc => vc.viewport).map!(
                    vp => VkViewport(vp.x, vp.y, vp.width, vp.height, vp.minDepth, vp.maxDepth)
                ).array;
                auto vkScissors = infos[i].viewports.map!(vc => vc.scissors).map!(
                    r => VkRect2D(VkOffset2D(r.x, r.y), VkExtent2D(r.width, r.height))
                ).array;
                vkViewport.viewportCount = cast(uint)infos[i].viewports.length;
                vkViewport.pViewports = vkViewports.ptr;
                vkViewport.scissorCount = cast(uint)infos[i].viewports.length;
                vkViewport.pScissors = vkScissors.ptr;
            }
            else {
                // No viewport given: supply a dummy one (presumably meant
                // to be set dynamically at command-buffer recording time).
                static const dummyVp = VkViewport(0f, 0f, 1f, 1f, 0f, 1f);
                static const dummySc = VkRect2D(VkOffset2D(0, 0), VkExtent2D(1, 1));
                vkViewport.viewportCount = 1;
                vkViewport.pViewports = &dummyVp;
                vkViewport.scissorCount = 1;
                vkViewport.pScissors = &dummySc;
            }

            auto vkRasterizer = new VkPipelineRasterizationStateCreateInfo;
            vkRasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
            // Without a fragment shader, rasterization output is discarded.
            vkRasterizer.rasterizerDiscardEnable = shaders.fragment ? VK_FALSE : VK_TRUE;
            vkRasterizer.polygonMode = infos[i].rasterizer.mode.toVk();
            vkRasterizer.cullMode = cullModeToVk(infos[i].rasterizer.cull);
            vkRasterizer.frontFace = infos[i].rasterizer.front.toVk();
            vkRasterizer.lineWidth = infos[i].rasterizer.lineWidth;
            vkRasterizer.depthClampEnable = infos[i].rasterizer.depthClamp ? VK_TRUE : VK_FALSE;
            if (infos[i].rasterizer.depthBias.isSome) {
                DepthBias db = infos[i].rasterizer.depthBias.get;
                vkRasterizer.depthBiasEnable = VK_TRUE;
                vkRasterizer.depthBiasConstantFactor = db.constantFactor;
                vkRasterizer.depthBiasClamp = db.clamp;
                vkRasterizer.depthBiasSlopeFactor = db.slopeFactor;
            }
            else {
                vkRasterizer.depthBiasConstantFactor = 0f;
                vkRasterizer.depthBiasClamp = 0f;
                vkRasterizer.depthBiasSlopeFactor = 0f;
            }

            const depthInfo = infos[i].depthInfo;
            const stencilInfo = infos[i].stencilInfo;
            auto vkDepthStencil = new VkPipelineDepthStencilStateCreateInfo;
            vkDepthStencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
            vkDepthStencil.depthTestEnable = flagToVk(depthInfo.enabled);
            vkDepthStencil.depthWriteEnable = flagToVk(depthInfo.write);
            vkDepthStencil.depthCompareOp = depthInfo.compareOp.toVk();
            vkDepthStencil.depthBoundsTestEnable = flagToVk(depthInfo.boundsTest);
            vkDepthStencil.stencilTestEnable = flagToVk(stencilInfo.enabled);
            // graal StencilOpState and VkStencilOpState share the same layout.
            vkDepthStencil.front = transmute!VkStencilOpState(stencilInfo.front);
            vkDepthStencil.back = transmute!VkStencilOpState(stencilInfo.back);
            vkDepthStencil.minDepthBounds = depthInfo.minBounds;
            vkDepthStencil.maxDepthBounds = depthInfo.maxBounds;

            const blendInfo = infos[i].blendInfo;
            auto vkColorAttachments = blendInfo.attachments.map!(
                cba => VkPipelineColorBlendAttachmentState (
                    cba.enabled ? VK_TRUE : VK_FALSE,
                    cba.colorBlend.factor.from.toVk(),
                    cba.colorBlend.factor.to.toVk(),
                    cba.colorBlend.op.toVk(),
                    cba.alphaBlend.factor.from.toVk(),
                    cba.alphaBlend.factor.to.toVk(),
                    cba.alphaBlend.op.toVk(),
                    cast(VkColorComponentFlags)cba.colorMask
                )
            ).array;
            auto vkBlend = new VkPipelineColorBlendStateCreateInfo;
            vkBlend.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
            if (blendInfo.logicOp.isSome) {
                vkBlend.logicOpEnable = VK_TRUE;
                vkBlend.logicOp = blendInfo.logicOp.get.toVk();
            }
            vkBlend.attachmentCount = cast(uint)vkColorAttachments.length;
            vkBlend.pAttachments = vkColorAttachments.ptr;
            vkBlend.blendConstants = blendInfo.blendConstants;

            // Dynamic state is optional; pDynamicState stays null when none.
            VkPipelineDynamicStateCreateInfo *vkDynStatesInfo;
            if (infos[i].dynamicStates) {
                auto vkDynStates = infos[i].dynamicStates.map!(ds => ds.toVk()).array;
                vkDynStatesInfo = new VkPipelineDynamicStateCreateInfo;
                vkDynStatesInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
                vkDynStatesInfo.dynamicStateCount = cast(uint)vkDynStates.length;
                vkDynStatesInfo.pDynamicStates = vkDynStates.ptr;
            }

            auto rp = infos[i].renderPass;
            auto vkRp = rp ? enforce(
                cast(VulkanRenderPass)rp,
                "did not supply a Vulkan render pass"
            ).vkObj : VK_NULL_ND_HANDLE;

            // following bindings are not implemented yet
            auto vkTess = new VkPipelineTessellationStateCreateInfo;
            vkTess.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
            auto vkMs = new VkPipelineMultisampleStateCreateInfo;
            vkMs.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
            vkMs.minSampleShading = 1f;

            pcis[i].sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
            pcis[i].stageCount = cast(uint)sscis.length;
            pcis[i].pStages = sscis.ptr;
            pcis[i].pVertexInputState = vkVtxInput;
            pcis[i].pInputAssemblyState = vkAssy;
            pcis[i].pTessellationState = vkTess;
            pcis[i].pViewportState = vkViewport;
            pcis[i].pRasterizationState = vkRasterizer;
            pcis[i].pMultisampleState = vkMs;
            pcis[i].pDepthStencilState = vkDepthStencil;
            pcis[i].pColorBlendState = vkBlend;
            pcis[i].pDynamicState = vkDynStatesInfo;
            pcis[i].layout = enforce(
                cast(VulkanPipelineLayout)infos[i].layout,
                "did not pass a valid vulkan pipeline layout"
            ).vkObj;
            pcis[i].renderPass = vkRp;
            pcis[i].subpass = infos[i].subpassIndex;
            // No pipeline derivation.
            pcis[i].basePipelineIndex = -1;
        }

        auto vkPls = new VkPipeline[infos.length];
        vulkanEnforce(
            vk.CreateGraphicsPipelines(vkObj, VK_NULL_ND_HANDLE, cast(uint)pcis.length, pcis.ptr, null, vkPls.ptr),
            "Could not create Vulkan graphics pipeline"
        );

        auto pls = new Pipeline[infos.length];
        foreach (i; 0 .. vkPls.length) {
            pls[i] = new VulkanPipeline(vkPls[i], this, infos[i].layout);
        }
        return pls;
    }

    private VulkanPhysicalDevice _pd;
    private VkDeviceCmds _vk;
    private VulkanQueue[] _queues;
}