/// Vulkan device module
module gfx.vulkan.device;

package:

import core.time : Duration;

import gfx.bindings.vulkan;

import gfx.core.rc;
import gfx.graal;
import gfx.graal.cmd;
import gfx.graal.device;
import gfx.graal.image;
import gfx.graal.memory;
import gfx.graal.presentation;
import gfx.graal.queue;
import gfx.graal.pipeline;
import gfx.graal.sync;
import gfx.vulkan;
import gfx.vulkan.buffer;
import gfx.vulkan.cmd;
import gfx.vulkan.conv;
import gfx.vulkan.error;
import gfx.vulkan.image;
import gfx.vulkan.memory;
import gfx.vulkan.pipeline;
import gfx.vulkan.queue;
import gfx.vulkan.renderpass;
import gfx.vulkan.sync;
import gfx.vulkan.wsi;

import std.typecons : Flag;

/// Generic wrapper for a Vulkan handle owned by a device.
/// Retains the device for its whole lifetime and destroys the wrapped handle
/// on dispose by calling the VkDeviceCmds member named by destroyFn.
class VulkanDevObj(VkType, string destroyFn) : Disposable
{
    this (VkType vkObj, VulkanDevice dev)
    {
        _vkObj = vkObj;
        _dev = dev;
        _dev.retain();
        _vk = _dev.vk;
    }

    override void dispose() {
        // destroyFn names the device command that releases _vkObj
        // (e.g. "DestroySampler"); signature is always (device, handle, allocator)
        mixin("vk."~destroyFn~"(vkDev, vkObj, null);");
        _dev.release();
        _dev = null;
    }

    /// The wrapped Vulkan handle.
    final @property VkType vkObj() {
        return _vkObj;
    }

    /// The device that owns this object.
    final @property VulkanDevice dev() {
        return _dev;
    }

    /// Shortcut to the owning device's Vulkan handle.
    final @property VkDevice vkDev() {
        return _dev.vkObj;
    }

    /// Shortcut to the device-level command table.
    final @property VkDeviceCmds vk() {
        return _vk;
    }

    private VkType _vkObj;
    private VulkanDevice _dev;
    private VkDeviceCmds _vk;
}

/// Vulkan implementation of the graal Device interface.
/// Owns a VkDevice and creates/tracks all device-scoped resources.
final class VulkanDevice : VulkanObj!(VkDevice), Device
{
    mixin(atomicRcCode);

    this (VkDevice vkObj, VulkanPhysicalDevice pd, Instance inst)
    {
        super(vkObj);
        _pd = pd;
        _inst = inst;
        _inst.retain();
        _vk = new VkDeviceCmds(vkObj, pd.vk);
    }

    override void dispose() {
        vk.DestroyDevice(vkObj, null);
        _pd = null;
        _inst.release();
        _inst = null;
    }

    /// The instance this device was created from.
    override @property Instance instance() {
        return _inst;
    }

    /// The physical device this logical device was created on.
    override @property PhysicalDevice physicalDevice() {
        return _pd;
    }

    /// Same as physicalDevice, with the concrete Vulkan type.
    @property VulkanPhysicalDevice pd() {
        return _pd;
    }

    /// Device-level command table.
    @property VkDeviceCmds vk() {
        return _vk;
    }

    /// Blocks until the device has finished all submitted work.
    override void waitIdle() {
        vulkanEnforce(
            vk.DeviceWaitIdle(vkObj),
            "Problem waiting for device"
        );
    }

    /// Fetches a device queue, caching the wrapper so repeated calls
    /// for the same VkQueue return the same object.
    override Queue getQueue(uint queueFamilyIndex, uint queueIndex) {
        VkQueue vkQ;
        vk.GetDeviceQueue(vkObj, queueFamilyIndex, queueIndex, &vkQ);

        foreach (q; _queues) {
            if (q.vkObj is vkQ) {
                return q;
            }
        }

        auto q = new VulkanQueue(vkQ, this, queueIndex);
        _queues ~= q;
        return q;
    }

    /// Creates a command pool for the given queue family.
    /// Buffers allocated from it can be reset individually.
    override CommandPool createCommandPool(uint queueFamilyIndex) {
        VkCommandPoolCreateInfo cci;
        cci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        cci.queueFamilyIndex = queueFamilyIndex;
        cci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

        VkCommandPool vkPool;
        vulkanEnforce(
            vk.CreateCommandPool(vkObj, &cci, null, &vkPool),
            "Could not create vulkan command pool"
        );

        return new VulkanCommandPool(vkPool, this);
    }

    /// Allocates size bytes of device memory from the given memory type.
    override DeviceMemory allocateMemory(uint memTypeIndex, size_t size)
    {
        VkMemoryAllocateInfo mai;
        mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        mai.allocationSize = size;
        mai.memoryTypeIndex = memTypeIndex;

        VkDeviceMemory vkMem;
        vulkanEnforce(vk.AllocateMemory(vkObj, &mai, null, &vkMem), "Could not allocate device memory");

        const props = pd.memoryProperties.types[memTypeIndex].props;

        return new VulkanDeviceMemory(vkMem, this, props, size, memTypeIndex);
    }

    /// Flushes host writes of the given mapped ranges to the device.
    override void flushMappedMemory(MappedMemorySet set)
    {
        import std.algorithm : map;
        import std.array : array;
        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vkObj;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        // vkFlushMappedMemoryRanges requires memoryRangeCount > 0
        if (!mmrs.length) return;

        vk.FlushMappedMemoryRanges(vkObj, cast(uint)mmrs.length, mmrs.ptr);
    }

    /// Makes device writes of the given mapped ranges visible to the host.
    override void invalidateMappedMemory(MappedMemorySet set) {
        import std.algorithm : map;
        import std.array : array;
        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vkObj;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        // vkInvalidateMappedMemoryRanges requires memoryRangeCount > 0
        if (!mmrs.length) return;

        vk.InvalidateMappedMemoryRanges(vkObj, cast(uint)mmrs.length, mmrs.ptr);
    }

    /// Creates an exclusive-sharing buffer of the given usage and size.
    override Buffer createBuffer(BufferUsage usage, size_t size)
    {
        VkBufferCreateInfo bci;
        bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        bci.size = size;
        bci.usage = bufferUsageToVk(usage);

        VkBuffer vkBuf;
        vulkanEnforce(vk.CreateBuffer(vkObj, &bci, null, &vkBuf), "Could not create a Vulkan buffer");

        return new VulkanBuffer(vkBuf, this, usage, size);
    }

    /// Creates an image described by info (exclusive sharing mode).
    override Image createImage(in ImageInfo info)
    {
        import gfx.core.util : transmute;

        VkImageCreateInfo ici;
        ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
        if (info.type.isCube) ici.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
        ici.imageType = info.type.toVk();
        ici.format = info.format.toVk();
        ici.extent = info.dims.transmute!VkExtent3D;
        ici.mipLevels = info.levels;
        ici.arrayLayers = info.layers;
        ici.samples = cast(typeof(ici.samples))info.samples;
        ici.tiling = info.tiling.toVk();
        ici.usage = imageUsageToVk(info.usage);
        ici.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

        VkImage vkImg;
        vulkanEnforce(vk.CreateImage(vkObj, &ici, null, &vkImg), "Could not create a Vulkan image");

        return new VulkanImage(vkImg, this, info);
    }

    /// Creates a sampler described by info.
    /// Anisotropy and compare are optional: absent means disabled.
    Sampler createSampler(in SamplerInfo info) {
        import gfx.core.typecons : ifNone, ifSome;
        import std.algorithm : each;

        VkSamplerCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
        sci.minFilter = info.minFilter.toVk();
        sci.magFilter = info.magFilter.toVk();
        sci.mipmapMode = info.mipmapFilter.toVkMipmapMode();
        sci.addressModeU = info.wrapMode[0].toVk();
        sci.addressModeV = info.wrapMode[1].toVk();
        sci.addressModeW = info.wrapMode[2].toVk();
        sci.mipLodBias = info.lodBias;
        info.anisotropy.save.ifSome!((float max) {
            sci.anisotropyEnable = VK_TRUE;
            sci.maxAnisotropy = max;
        }).ifNone!({
            sci.anisotropyEnable = VK_FALSE;
            sci.maxAnisotropy = 1f;
        });
        info.compare.save.each!((CompareOp op) {
            sci.compareEnable = VK_TRUE;
            sci.compareOp = op.toVk();
        });
        sci.minLod = info.lodRange[0];
        sci.maxLod = info.lodRange[1];
        sci.borderColor = info.borderColor.toVk();
        sci.unnormalizedCoordinates = info.unnormalizeCoords ? VK_TRUE : VK_FALSE;

        VkSampler vkS;
        vulkanEnforce(
            vk.CreateSampler(vkObj, &sci, null, &vkS),
            "Could not create Vulkan sampler"
        );

        return new VulkanSampler(vkS, this);
    }

    /// Creates a binary semaphore.
    override Semaphore createSemaphore()
    {
        VkSemaphoreCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

        VkSemaphore vkSem;
        vulkanEnforce(vk.CreateSemaphore(vkObj, &sci, null, &vkSem), "Could not create a Vulkan semaphore");

        return new VulkanSemaphore(vkSem, this);
    }

    /// Creates a fence, optionally starting in the signaled state.
    override Fence createFence(Flag!"signaled" signaled)
    {
        VkFenceCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        if (signaled) {
            fci.flags = VK_FENCE_CREATE_SIGNALED_BIT;
        }
        VkFence vkF;
        vulkanEnforce(vk.CreateFence(vkObj, &fci, null, &vkF), "Could not create a Vulkan fence");

        return new VulkanFence(vkF, this);
    }

    /// Resets the given fences to the unsignaled state.
    /// A no-op for an empty array (vkResetFences requires fenceCount > 0).
    override void resetFences(Fence[] fences) {
        import std.algorithm : map;
        import std.array : array;

        if (!fences.length) return;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vkObj
        ).array;

        vulkanEnforce(
            vk.ResetFences(vkObj, cast(uint)vkFs.length, vkFs.ptr),
            "Could not reset vulkan fences"
        );
    }

    /// Waits until some (or all, if waitAll) of the fences are signaled.
    /// A negative timeout means wait forever. No-op for an empty array.
    override void waitForFences(Fence[] fences, Flag!"waitAll" waitAll, Duration timeout)
    {
        import std.algorithm : map;
        import std.array : array;

        if (!fences.length) return;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vkObj
        ).array;

        const vkWaitAll = waitAll ? VK_TRUE : VK_FALSE;
        const nsecs = timeout.total!"nsecs";
        // negative duration means no timeout
        const vkTimeout = nsecs < 0 ? ulong.max : cast(ulong)nsecs;

        vulkanEnforce(
            vk.WaitForFences(vkObj, cast(uint)vkFs.length, vkFs.ptr, vkWaitAll, vkTimeout),
            "could not wait for vulkan fences"
        );
    }


    /// Creates a swapchain for graalSurface, optionally recycling old.
    /// Color space and pre-transform are currently fixed to
    /// SRGB_NONLINEAR and IDENTITY respectively.
    override Swapchain createSwapchain(Surface graalSurface, PresentMode pm, uint numImages,
                                       Format format, uint[2] size, ImageUsage usage,
                                       CompositeAlpha alpha, Swapchain old=null)
    {
        auto surf = enforce(
            cast(VulkanSurface)graalSurface,
            "Did not pass a Vulkan surface"
        );

        auto oldSc = old ? enforce(
            cast(VulkanSwapchain)old, "Did not pass a vulkan swapchain"
        ) : null;

        VkSwapchainCreateInfoKHR sci;
        sci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
        sci.surface = surf.vkObj;
        sci.minImageCount = numImages;
        sci.imageFormat = format.toVk;
        sci.imageExtent = VkExtent2D(size[0], size[1]);
        sci.imageArrayLayers = 1;
        sci.imageUsage = imageUsageToVk(usage);
        sci.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
        sci.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
        sci.clipped = VK_TRUE;
        sci.presentMode = pm.toVk;
        sci.compositeAlpha = compositeAlphaToVk(alpha);
        sci.oldSwapchain = oldSc ? oldSc.vkObj : VK_NULL_ND_HANDLE;

        VkSwapchainKHR vkSc;
        vulkanEnforce(
            vk.CreateSwapchainKHR(vkObj, &sci, null, &vkSc),
            "Could not create a Vulkan Swap chain"
        );

        return new VulkanSwapchain(vkSc, this, graalSurface, size, format, usage);
    }

    /// Creates a render pass from graal attachment/subpass/dependency
    /// descriptions. Only graphics subpasses are supported.
    override RenderPass createRenderPass(in AttachmentDescription[] attachments,
                                         in SubpassDescription[] subpasses,
                                         in SubpassDependency[] dependencies)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkAttachments = attachments.map!((ref const(AttachmentDescription) ad) {
            VkAttachmentDescription vkAd;
            if (ad.mayAlias) {
                vkAd.flags = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT;
            }
            vkAd.format = ad.format.toVk();
            vkAd.loadOp = ad.ops.load.toVk();
            vkAd.storeOp = ad.ops.store.toVk();
            vkAd.stencilLoadOp = ad.stencilOps.load.toVk();
            vkAd.stencilStoreOp = ad.stencilOps.store.toVk();
            vkAd.initialLayout = ad.layoutTrans.from.toVk();
            vkAd.finalLayout = ad.layoutTrans.to.toVk();
            return vkAd;
        }).array;

        static VkAttachmentReference mapRef (in AttachmentRef ar) {
            return VkAttachmentReference(ar.attachment, ar.layout.toVk());
        }
        static VkAttachmentReference[] mapRefs(in AttachmentRef[] ars) {
            return ars.map!mapRef.array;
        }
        auto vkSubpasses = subpasses.map!((ref const(SubpassDescription) sd) {
            auto vkInputs = mapRefs(sd.inputs);
            auto vkColors = mapRefs(sd.colors);
            auto vkDepthStencil = sd.depthStencil.save.map!(mapRef).array;
            VkSubpassDescription vkSd;
            vkSd.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
            vkSd.inputAttachmentCount = cast(uint)vkInputs.length;
            vkSd.pInputAttachments = vkInputs.ptr;
            vkSd.colorAttachmentCount = cast(uint)vkColors.length;
            vkSd.pColorAttachments = vkColors.ptr;
            vkSd.pDepthStencilAttachment = vkDepthStencil.length ?
                    vkDepthStencil.ptr : null;
            vkSd.preserveAttachmentCount = cast(uint)sd.preserves.length;
            vkSd.pPreserveAttachments = sd.preserves.ptr;
            return vkSd;
        }).array;

        auto vkDeps = dependencies.map!((ref const(SubpassDependency) sd) {
            VkSubpassDependency vkSd;
            vkSd.srcSubpass = sd.subpass.from;
            vkSd.dstSubpass = sd.subpass.to;
            vkSd.srcStageMask = pipelineStageToVk(sd.stageMask.from);
            vkSd.dstStageMask = pipelineStageToVk(sd.stageMask.to);
            vkSd.srcAccessMask = accessToVk(sd.accessMask.from);
            vkSd.dstAccessMask = accessToVk(sd.accessMask.to);
            return vkSd;
        }).array;

        VkRenderPassCreateInfo rpci;
        rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
        rpci.attachmentCount = cast(uint)vkAttachments.length;
        rpci.pAttachments = vkAttachments.ptr;
        rpci.subpassCount = cast(uint)vkSubpasses.length;
        rpci.pSubpasses = vkSubpasses.ptr;
        rpci.dependencyCount = cast(uint)vkDeps.length;
        rpci.pDependencies = vkDeps.ptr;

        VkRenderPass vkRp;
        vulkanEnforce(
            vk.CreateRenderPass(vkObj, &rpci, null, &vkRp),
            "Could not create a Vulkan render pass"
        );

        return new VulkanRenderPass(vkRp, this);
    }


    /// Creates a framebuffer binding attachments to rp.
    override Framebuffer createFramebuffer(RenderPass rp, ImageView[] attachments,
                                           uint width, uint height, uint layers)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkRp = enforce(cast(VulkanRenderPass)rp, "Did not pass a Vulkan render pass").vkObj;
        auto vkAttachments = attachments.map!(
            iv => enforce(cast(VulkanImageView)iv, "Did not pass a Vulkan image view").vkObj
        ).array;

        VkFramebufferCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
        fci.renderPass = vkRp;
        fci.attachmentCount = cast(uint)vkAttachments.length;
        fci.pAttachments = vkAttachments.ptr;
        fci.width = width;
        fci.height = height;
        fci.layers = layers;

        VkFramebuffer vkFb;
        vulkanEnforce(
            vk.CreateFramebuffer(vkObj, &fci, null, &vkFb),
            "Could not create a Vulkan Framebuffer"
        );

        return new VulkanFramebuffer(vkFb, this, attachments);
    }

    /// Creates a shader module from SPIR-V code.
    /// code is in 32-bit words; codeSize is expressed in bytes.
    override ShaderModule createShaderModule(const(uint)[] code, string entryPoint)
    {
        VkShaderModuleCreateInfo smci;
        smci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        smci.codeSize = cast(uint)code.length * 4;
        smci.pCode = code.ptr;

        VkShaderModule vkSm;
        vulkanEnforce(
            vk.CreateShaderModule(vkObj, &smci, null, &vkSm),
            "Could not create Vulkan shader module"
        );

        return new VulkanShaderModule(vkSm, this, entryPoint);
    }

    /// Creates a descriptor set layout from the given bindings.
    override DescriptorSetLayout createDescriptorSetLayout(in PipelineLayoutBinding[] bindings)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkBindings = bindings.map!(b => VkDescriptorSetLayoutBinding(
            b.binding, b.descriptorType.toVk(), b.descriptorCount, shaderStageToVk(b.stages), null
        )).array;

        VkDescriptorSetLayoutCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        ci.bindingCount = cast(uint)vkBindings.length;
        ci.pBindings = vkBindings.ptr;

        VkDescriptorSetLayout vkL;
        vulkanEnforce(
            vk.CreateDescriptorSetLayout(vkObj, &ci, null, &vkL),
            "Could not create Vulkan descriptor set layout"
        );

        return new VulkanDescriptorSetLayout(vkL, this);
    }

    /// Creates a pipeline layout from set layouts and push constant ranges.
    override PipelineLayout createPipelineLayout(DescriptorSetLayout[] layouts,
                                                 in PushConstantRange[] ranges)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkLayouts = layouts.map!(
            l => enforce(
                cast(VulkanDescriptorSetLayout)l,
                "VulkanDevice.createPipelineLayout: Did not supply a Vulkan DescriptorSetLayout"
            ).vkObj
        ).array;
        auto vkRanges = ranges.map!(
            r => VkPushConstantRange( shaderStageToVk(r.stages), r.offset, r.size )
        ).array;

        VkPipelineLayoutCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
        ci.setLayoutCount = cast(uint)vkLayouts.length;
        ci.pSetLayouts = vkLayouts.ptr;
        ci.pushConstantRangeCount = cast(uint)vkRanges.length;
        ci.pPushConstantRanges = vkRanges.ptr;

        VkPipelineLayout vkPl;
        vulkanEnforce(
            vk.CreatePipelineLayout(vkObj, &ci, null, &vkPl),
            "Could not create Vulkan pipeline layout"
        );
        return new VulkanPipelineLayout(vkPl, this, layouts, ranges);
    }

    /// Creates a descriptor pool able to allocate maxSets sets
    /// from the given per-type sizes.
    override DescriptorPool createDescriptorPool(in uint maxSets, in DescriptorPoolSize[] sizes)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkSizes = sizes.map!(
            s => VkDescriptorPoolSize(s.type.toVk(), s.count)
        ).array;

        VkDescriptorPoolCreateInfo ci;
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        ci.maxSets = maxSets;
        ci.poolSizeCount = cast(uint)vkSizes.length;
        ci.pPoolSizes = vkSizes.ptr;

        VkDescriptorPool vkP;
        vulkanEnforce(
            vk.CreateDescriptorPool(vkObj, &ci, null, &vkP),
            "Could not create Vulkan Descriptor Pool"
        );

        return new VulkanDescriptorPool(vkP, this);
    }

    /// Performs the given descriptor write and copy operations.
    /// The per-write descriptor arrays are kept alive by the GC until
    /// UpdateDescriptorSets returns.
    override void updateDescriptorSets(WriteDescriptorSet[] writeOps, CopyDescritporSet[] copyOps)
    {
        import gfx.core.util : unsafeCast;
        import std.algorithm : map;
        import std.array : array;

        auto vkWrites = writeOps.map!((WriteDescriptorSet wds) {
            VkWriteDescriptorSet vkWds;
            vkWds.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            vkWds.dstSet = enforce(cast(VulkanDescriptorSet)wds.dstSet).vkObj;
            vkWds.dstBinding = wds.dstBinding;
            vkWds.dstArrayElement = wds.dstArrayElem;
            vkWds.descriptorCount = cast(uint)wds.write.count;
            vkWds.descriptorType = wds.write.type.toVk();

            // each descriptor type fills exactly one of pImageInfo,
            // pBufferInfo or pTexelBufferView
            final switch (wds.write.type) {
            case DescriptorType.sampler:
                auto sds = wds.write.samplers;
                auto vkArr = sds.map!((SamplerDescriptor sd) {
                    VkDescriptorImageInfo dii;
                    dii.sampler = unsafeCast!(VulkanSampler)(sd.sampler).vkObj;
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.combinedImageSampler:
                auto sids = wds.write.imageSamplers;
                auto vkArr = sids.map!((ImageSamplerDescriptor sid) {
                    VkDescriptorImageInfo dii;
                    dii.sampler = unsafeCast!(VulkanSampler)(sid.sampler).vkObj;
                    dii.imageView = unsafeCast!(VulkanImageView)(sid.view).vkObj;
                    dii.imageLayout = sid.layout.toVk();
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.sampledImage:
            case DescriptorType.storageImage:
            case DescriptorType.inputAttachment:
                auto ids = wds.write.images;
                auto vkArr = ids.map!((ImageDescriptor id) {
                    VkDescriptorImageInfo dii;
                    dii.imageView = unsafeCast!(VulkanImageView)(id.view).vkObj;
                    dii.imageLayout = id.layout.toVk();
                    return dii;
                }).array;
                vkWds.pImageInfo = vkArr.ptr;
                break;
            case DescriptorType.uniformBuffer:
            case DescriptorType.storageBuffer:
            case DescriptorType.uniformBufferDynamic:
            case DescriptorType.storageBufferDynamic:
                auto bds = wds.write.buffers;
                auto vkArr = bds.map!((BufferDescriptor bd) {
                    VkDescriptorBufferInfo dbi;
                    dbi.buffer = unsafeCast!(VulkanBuffer)(bd.buffer).vkObj;
                    dbi.offset = bd.offset;
                    dbi.range = bd.size;
                    return dbi;
                }).array;
                vkWds.pBufferInfo = vkArr.ptr;
                break;
            case DescriptorType.uniformTexelBuffer:
            case DescriptorType.storageTexelBuffer:
                auto tbds = wds.write.texelBuffers;
                auto vkArr = tbds.map!((TexelBufferDescriptor tbd) {
                    return unsafeCast!(VulkanTexelBufferView)(tbd.bufferView).vkObj;
                }).array;
                vkWds.pTexelBufferView = vkArr.ptr;
                break;
            }

            return vkWds;
        }).array;

        auto vkCopies = copyOps.map!((CopyDescritporSet cds) {
            VkCopyDescriptorSet vkCds;
            vkCds.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
            vkCds.srcSet = enforce(cast(VulkanDescriptorSet)cds.set.from).vkObj;
            vkCds.srcBinding = cds.binding.from;
            vkCds.srcArrayElement = cds.arrayElem.from;
            vkCds.dstSet = enforce(cast(VulkanDescriptorSet)cds.set.to).vkObj;
            vkCds.dstBinding = cds.binding.to;
            vkCds.dstArrayElement = cds.arrayElem.to;
            return vkCds;
        }).array;

        vk.UpdateDescriptorSets(vkObj,
            cast(uint)vkWrites.length, vkWrites.ptr,
            cast(uint)vkCopies.length, vkCopies.ptr
        );
    }

    /// Creates one graphics pipeline per entry of infos in a single
    /// Vulkan call. A vertex shader is mandatory; tessellation and
    /// multisample states are not implemented yet and left at defaults.
    override Pipeline[] createPipelines(PipelineInfo[] infos) {
        import gfx.core.util : transmute;
        import std.algorithm : map, max;
        import std.array : array;
        import std.string : toStringz;

        auto pcis = new VkGraphicsPipelineCreateInfo[infos.length];

        foreach (i; 0 .. infos.length) {
            VkPipelineShaderStageCreateInfo[] sscis;
            void addShaderStage(ShaderModule sm, ShaderStage ss) {
                VkPipelineShaderStageCreateInfo ssci;
                ssci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
                ssci.stage = shaderStageToVk(ss);
                ssci.module_ = enforce(
                    cast(VulkanShaderModule)sm,
                    "did not pass a Vulkan shader module"
                ).vkObj;
                ssci.pName = toStringz(sm.entryPoint);
                sscis ~= ssci;
            }
            auto shaders = infos[i].shaders;
            enforce(shaders.vertex, "Vertex input shader is mandatory");
            addShaderStage(shaders.vertex, ShaderStage.vertex);
            if (shaders.tessControl)
                addShaderStage(shaders.tessControl, ShaderStage.tessellationControl);
            if (shaders.tessEval)
                addShaderStage(shaders.tessEval, ShaderStage.tessellationEvaluation);
            if (shaders.geometry)
                addShaderStage(shaders.geometry, ShaderStage.geometry);
            if (shaders.fragment)
                addShaderStage(shaders.fragment, ShaderStage.fragment);


            auto vkInputBindings = infos[i].inputBindings.map!(
                ib => VkVertexInputBindingDescription(
                    ib.binding, cast(uint)ib.stride,
                    ib.instanced ?
                            VK_VERTEX_INPUT_RATE_INSTANCE :
                            VK_VERTEX_INPUT_RATE_VERTEX
                )
            ).array;

            auto vkInputAttribs = infos[i].inputAttribs.map!(
                ia => VkVertexInputAttributeDescription(
                    ia.location, ia.binding, ia.format.toVk(), cast(uint)ia.offset
                )
            ).array;

            auto vkVtxInput = new VkPipelineVertexInputStateCreateInfo;
            vkVtxInput.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
            vkVtxInput.vertexBindingDescriptionCount = cast(uint)vkInputBindings.length;
            vkVtxInput.pVertexBindingDescriptions = vkInputBindings.ptr;
            vkVtxInput.vertexAttributeDescriptionCount = cast(uint)vkInputAttribs.length;
            vkVtxInput.pVertexAttributeDescriptions = vkInputAttribs.ptr;

            auto vkAssy = new VkPipelineInputAssemblyStateCreateInfo;
            vkAssy.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
            vkAssy.topology = infos[i].assembly.primitive.toVk();
            vkAssy.primitiveRestartEnable = infos[i].assembly.primitiveRestart ? VK_TRUE : VK_FALSE;

            auto vkViewport = new VkPipelineViewportStateCreateInfo;
            vkViewport.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
            if (infos[i].viewports.length) {
                auto vkViewports = infos[i].viewports.map!(vc => vc.viewport).map!(
                    vp => VkViewport(vp.x, vp.y, vp.width, vp.height, vp.minDepth, vp.maxDepth)
                ).array;
                auto vkScissors = infos[i].viewports.map!(vc => vc.scissors).map!(
                    r => VkRect2D(VkOffset2D(r.x, r.y), VkExtent2D(r.width, r.height))
                ).array;
                vkViewport.viewportCount = cast(uint)infos[i].viewports.length;
                vkViewport.pViewports = vkViewports.ptr;
                vkViewport.scissorCount = cast(uint)infos[i].viewports.length;
                vkViewport.pScissors = vkScissors.ptr;
            }
            else {
                // viewport/scissor must be set dynamically in that case;
                // still one dummy entry is required by the validation layers
                static const dummyVp = VkViewport(0f, 0f, 1f, 1f, 0f, 1f);
                static const dummySc = VkRect2D(VkOffset2D(0, 0), VkExtent2D(1, 1));
                vkViewport.viewportCount = 1;
                vkViewport.pViewports = &dummyVp;
                vkViewport.scissorCount = 1;
                vkViewport.pScissors = &dummySc;
            }

            auto vkRasterizer = new VkPipelineRasterizationStateCreateInfo;
            vkRasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
            vkRasterizer.rasterizerDiscardEnable = shaders.fragment ? VK_FALSE : VK_TRUE;
            vkRasterizer.polygonMode = infos[i].rasterizer.mode.toVk();
            vkRasterizer.cullMode = cullModeToVk(infos[i].rasterizer.cull);
            vkRasterizer.frontFace = infos[i].rasterizer.front.toVk();
            vkRasterizer.lineWidth = infos[i].rasterizer.lineWidth;
            vkRasterizer.depthClampEnable = infos[i].rasterizer.depthClamp ? VK_TRUE : VK_FALSE;
            if (infos[i].rasterizer.depthBias.isSome) {
                DepthBias db = infos[i].rasterizer.depthBias.get;
                vkRasterizer.depthBiasEnable = VK_TRUE;
                vkRasterizer.depthBiasConstantFactor = db.constantFactor;
                vkRasterizer.depthBiasClamp = db.clamp;
                vkRasterizer.depthBiasSlopeFactor = db.slopeFactor;
            }
            else {
                vkRasterizer.depthBiasConstantFactor = 0f;
                vkRasterizer.depthBiasClamp = 0f;
                vkRasterizer.depthBiasSlopeFactor = 0f;
            }

            const depthInfo = infos[i].depthInfo;
            const stencilInfo = infos[i].stencilInfo;
            auto vkDepthStencil = new VkPipelineDepthStencilStateCreateInfo;
            vkDepthStencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
            vkDepthStencil.depthTestEnable = flagToVk(depthInfo.enabled);
            vkDepthStencil.depthWriteEnable = flagToVk(depthInfo.write);
            vkDepthStencil.depthCompareOp = depthInfo.compareOp.toVk();
            vkDepthStencil.depthBoundsTestEnable = flagToVk(depthInfo.boundsTest);
            vkDepthStencil.stencilTestEnable = flagToVk(stencilInfo.enabled);
            vkDepthStencil.front = transmute!VkStencilOpState(stencilInfo.front);
            vkDepthStencil.back = transmute!VkStencilOpState(stencilInfo.back);
            vkDepthStencil.minDepthBounds = depthInfo.minBounds;
            vkDepthStencil.maxDepthBounds = depthInfo.maxBounds;

            const blendInfo = infos[i].blendInfo;
            auto vkColorAttachments = blendInfo.attachments.map!(
                cba => VkPipelineColorBlendAttachmentState (
                    cba.enabled ? VK_TRUE : VK_FALSE,
                    cba.colorBlend.factor.from.toVk(),
                    cba.colorBlend.factor.to.toVk(),
                    cba.colorBlend.op.toVk(),
                    cba.alphaBlend.factor.from.toVk(),
                    cba.alphaBlend.factor.to.toVk(),
                    cba.alphaBlend.op.toVk(),
                    cast(VkColorComponentFlags)cba.colorMask
                )
            ).array;
            auto vkBlend = new VkPipelineColorBlendStateCreateInfo;
            vkBlend.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
            if (blendInfo.logicOp.isSome) {
                vkBlend.logicOpEnable = VK_TRUE;
                vkBlend.logicOp = blendInfo.logicOp.get.toVk();
            }
            vkBlend.attachmentCount = cast(uint)vkColorAttachments.length;
            vkBlend.pAttachments = vkColorAttachments.ptr;
            vkBlend.blendConstants = blendInfo.blendConstants;

            VkPipelineDynamicStateCreateInfo *vkDynStatesInfo;
            if (infos[i].dynamicStates) {
                auto vkDynStates = infos[i].dynamicStates.map!(ds => ds.toVk()).array;
                vkDynStatesInfo = new VkPipelineDynamicStateCreateInfo;
                vkDynStatesInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
                vkDynStatesInfo.dynamicStateCount = cast(uint)vkDynStates.length;
                vkDynStatesInfo.pDynamicStates = vkDynStates.ptr;
            }

            auto rp = infos[i].renderPass;
            auto vkRp = rp ? enforce(
                cast(VulkanRenderPass)rp,
                "did not supply a Vulkan render pass"
            ).vkObj : VK_NULL_ND_HANDLE;

            // following bindings are not implemented yet
            auto vkTess = new VkPipelineTessellationStateCreateInfo;
            vkTess.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
            auto vkMs = new VkPipelineMultisampleStateCreateInfo;
            vkMs.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
            vkMs.minSampleShading = 1f;

            pcis[i].sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
            pcis[i].stageCount = cast(uint)sscis.length;
            pcis[i].pStages = sscis.ptr;
            pcis[i].pVertexInputState = vkVtxInput;
            pcis[i].pInputAssemblyState = vkAssy;
            pcis[i].pTessellationState = vkTess;
            pcis[i].pViewportState = vkViewport;
            pcis[i].pRasterizationState = vkRasterizer;
            pcis[i].pMultisampleState = vkMs;
            pcis[i].pDepthStencilState = vkDepthStencil;
            pcis[i].pColorBlendState = vkBlend;
            pcis[i].pDynamicState = vkDynStatesInfo;
            pcis[i].layout = enforce(
                cast(VulkanPipelineLayout)infos[i].layout,
                "did not pass a valid vulkan pipeline layout"
            ).vkObj;
            pcis[i].renderPass = vkRp;
            pcis[i].subpass = infos[i].subpassIndex;
            pcis[i].basePipelineIndex = -1;
        }

        auto vkPls = new VkPipeline[infos.length];
        vulkanEnforce(
            vk.CreateGraphicsPipelines(vkObj, VK_NULL_ND_HANDLE, cast(uint)pcis.length, pcis.ptr, null, vkPls.ptr),
            "Could not create Vulkan graphics pipeline"
        );

        auto pls = new Pipeline[infos.length];
        foreach (i; 0 .. vkPls.length) {
            pls[i] = new VulkanPipeline(vkPls[i], this, infos[i].layout);
        }
        return pls;
    }

    private Instance _inst;
    private VulkanPhysicalDevice _pd;
    private VkDeviceCmds _vk;
    private VulkanQueue[] _queues;
}