/// Vulkan device module
module gfx.vulkan.device;

package:

import core.time : Duration;

import gfx.bindings.vulkan;

import gfx.core.rc;
import gfx.graal.cmd;
import gfx.graal.device;
import gfx.graal.image;
import gfx.graal.memory;
import gfx.graal.presentation;
import gfx.graal.queue;
import gfx.graal.pipeline;
import gfx.graal.sync;
import gfx.vulkan;
import gfx.vulkan.buffer;
import gfx.vulkan.cmd;
import gfx.vulkan.conv;
import gfx.vulkan.error;
import gfx.vulkan.image;
import gfx.vulkan.memory;
import gfx.vulkan.pipeline;
import gfx.vulkan.queue;
import gfx.vulkan.renderpass;
import gfx.vulkan.sync;
import gfx.vulkan.wsi;

import std.exception : enforce;
import std.typecons : Flag;

/// Base class for device-owned Vulkan objects: keeps the owning device alive
/// and destroys the wrapped handle with the given device command on dispose.
class VulkanDevObj(VkType, string destroyFn) : Disposable
{
    this (VkType vk, VulkanDevice dev)
    {
        _vk = vk;
        _dev = dev;
        _dev.retain();
        _cmds = _dev.cmds;
    }

    override void dispose() {
        mixin("cmds."~destroyFn~"(vkDev, vk, null);");
        _dev.release();
        _dev = null;
    }

    final @property VkType vk() {
        return _vk;
    }

    final @property VulkanDevice dev() {
        return _dev;
    }

    final @property VkDevice vkDev() {
        return _dev.vk;
    }

    final @property VkDeviceCmds cmds() {
        return _cmds;
    }

    private VkType _vk;
    private VulkanDevice _dev;
    private VkDeviceCmds _cmds;
}

/// Vulkan implementation of the graal `Device` interface.
final class VulkanDevice : VulkanObj!(VkDevice), Device
{
    mixin(atomicRcCode);

    this (VkDevice vk, VulkanPhysicalDevice pd)
    {
        super(vk);
        _pd = pd;
        _pd.retain();
        _cmds = new VkDeviceCmds(vk, pd.cmds);
    }

    override void dispose() {
        cmds.destroyDevice(vk, null);
        _pd.release();
        _pd = null;
    }

    @property VulkanPhysicalDevice pd() {
        return _pd;
    }

    @property VkDeviceCmds cmds() {
        return _cmds;
    }

    override void waitIdle() {
        vulkanEnforce(
            cmds.deviceWaitIdle(vk),
            "Problem waiting for device"
        );
    }

    override Queue getQueue(uint queueFamilyIndex, uint queueIndex) {
        VkQueue vkQ;
        cmds.getDeviceQueue(vk, queueFamilyIndex, queueIndex, &vkQ);

        foreach (q; _queues) {
            if (q.vk is vkQ) {
                return q;
            }
        }

        auto q = new VulkanQueue(vkQ, cmds);
        _queues ~= q;
        return q;
    }

    override CommandPool createCommandPool(uint queueFamilyIndex) {
        VkCommandPoolCreateInfo cci;
        cci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        cci.queueFamilyIndex = queueFamilyIndex;
        cci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

        VkCommandPool vkPool;
        vulkanEnforce(
            cmds.createCommandPool(vk, &cci, null, &vkPool),
            "Could not create Vulkan command pool"
        );

        return new VulkanCommandPool(vkPool, this);
    }

    override DeviceMemory allocateMemory(uint memTypeIndex, size_t size)
    {
        VkMemoryAllocateInfo mai;
        mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        mai.allocationSize = size;
        mai.memoryTypeIndex = memTypeIndex;

        VkDeviceMemory vkMem;
        vulkanEnforce(cmds.allocateMemory(vk, &mai, null, &vkMem), "Could not allocate device memory");

        return new VulkanDeviceMemory(vkMem, this, memTypeIndex, size);
    }

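    /// Translates each entry of `set` into a `VkMappedMemoryRange` and flushes
    /// them all with a single call, making host writes visible to the device.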
    override void flushMappedMemory(MappedMemorySet set)
    {
        import std.algorithm : map;
        import std.array : array;
        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vk;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        cmds.flushMappedMemoryRanges(vk, cast(uint)mmrs.length, mmrs.ptr);
    }

    override void invalidateMappedMemory(MappedMemorySet set) {
        import std.algorithm : map;
        import std.array : array;
        VkMappedMemoryRange[] mmrs = set.mms.map!((MappedMemorySet.MM mm) {
            VkMappedMemoryRange mmr;
            mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            mmr.memory = (cast(VulkanDeviceMemory)mm.dm).vk;
            mmr.offset = mm.offset;
            mmr.size = mm.size;
            return mmr;
        }).array;

        cmds.invalidateMappedMemoryRanges(vk, cast(uint)mmrs.length, mmrs.ptr);
    }

    override Buffer createBuffer(BufferUsage usage, size_t size)
    {
        VkBufferCreateInfo bci;
        bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        bci.size = size;
        bci.usage = bufferUsageToVk(usage);

        VkBuffer vkBuf;
        vulkanEnforce(cmds.createBuffer(vk, &bci, null, &vkBuf), "Could not create a Vulkan buffer");

        return new VulkanBuffer(vkBuf, this, usage, size);
    }

    override Image createImage(ImageType type, ImageDims dims, Format format,
                               ImageUsage usage, uint samples, uint levels=1)
    {
        VkImageCreateInfo ici;
        ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
        if (type.isCube) ici.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
        ici.imageType = type.toVk();
        ici.format = format.toVk();
        ici.extent = VkExtent3D(dims.width, dims.height, dims.depth);
        ici.mipLevels = levels;
        ici.arrayLayers = dims.layers;
        ici.samples = cast(typeof(ici.samples))samples;
        ici.tiling = VK_IMAGE_TILING_OPTIMAL;
        ici.usage = imageUsageToVk(usage);
        ici.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

        VkImage vkImg;
        vulkanEnforce(cmds.createImage(vk, &ici, null, &vkImg), "Could not create a Vulkan image");

        return new VulkanImage(vkImg, this, type, dims, format);
    }

    Sampler createSampler(in SamplerInfo info) {
        import std.algorithm : each;
        VkSamplerCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
        sci.minFilter = info.minFilter.toVk();
        sci.magFilter = info.magFilter.toVk();
        sci.mipmapMode = info.mipmapFilter.toVkMipmapMode();
        sci.addressModeU = info.wrapMode[0].toVk();
        sci.addressModeV = info.wrapMode[1].toVk();
        sci.addressModeW = info.wrapMode[2].toVk();
        sci.mipLodBias = info.lodBias;
        info.anisotropy.save.each!((float max) {
            sci.anisotropyEnable = VK_TRUE;
            sci.maxAnisotropy = max;
        });
        info.compare.save.each!((CompareOp op) {
            sci.compareEnable = VK_TRUE;
            sci.compareOp = op.toVk();
        });
        sci.minLod = info.lodRange[0];
        sci.maxLod = info.lodRange[1];
        sci.borderColor = info.borderColor.toVk();
        sci.unnormalizedCoordinates = info.unnormalizeCoords ? VK_TRUE : VK_FALSE;

        VkSampler vkS;
        vulkanEnforce(
            cmds.createSampler(vk, &sci, null, &vkS),
            "Could not create Vulkan sampler"
        );

        return new VulkanSampler(vkS, this);
    }

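    // The factories below create the synchronisation primitives used to order
    // GPU work (semaphores) and to let the CPU wait on it (fences). A minimal
    // usage sketch, assuming `device` is any Device created by this backend:
    //
    //     import core.time : seconds;
    //     import std.typecons : No, Yes;
    //
    //     auto fence = device.createFence(No.signaled);
    //     // ... submit work that signals the fence, then block on it:
    //     device.waitForFences([ fence ], Yes.waitAll, 1.seconds);
    //     device.resetFences([ fence ]);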
    override Semaphore createSemaphore()
    {
        VkSemaphoreCreateInfo sci;
        sci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

        VkSemaphore vkSem;
        vulkanEnforce(cmds.createSemaphore(vk, &sci, null, &vkSem), "Could not create a Vulkan semaphore");

        return new VulkanSemaphore(vkSem, this);
    }

    override Fence createFence(Flag!"signaled" signaled)
    {
        VkFenceCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        if (signaled) {
            fci.flags = VK_FENCE_CREATE_SIGNALED_BIT;
        }
        VkFence vkF;
        vulkanEnforce(cmds.createFence(vk, &fci, null, &vkF), "Could not create a Vulkan fence");

        return new VulkanFence(vkF, this);
    }

    override void resetFences(Fence[] fences) {
        import std.algorithm : map;
        import std.array : array;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vk
        ).array;

        vulkanEnforce(
            cmds.resetFences(vk, cast(uint)vkFs.length, &vkFs[0]),
            "Could not reset Vulkan fences"
        );
    }

    override void waitForFences(Fence[] fences, Flag!"waitAll" waitAll, Duration timeout)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkFs = fences.map!(
            f => enforce(cast(VulkanFence)f, "Did not pass a Vulkan fence").vk
        ).array;

        const vkWaitAll = waitAll ? VK_TRUE : VK_FALSE;
        // a negative timeout means wait without limit
        const nsecs = timeout.total!"nsecs";
        const vkTimeout = nsecs < 0 ? ulong.max : cast(ulong)nsecs;

        vulkanEnforce(
            cmds.waitForFences(vk, cast(uint)vkFs.length, &vkFs[0], vkWaitAll, vkTimeout),
            "Could not wait for Vulkan fences"
        );
    }

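    /// Creates a swapchain for `graalSurface`. The implementation currently fixes
    /// several parameters: sRGB non-linear color space, identity pre-transform,
    /// a single image array layer, exclusive sharing and clipped presentation.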
    override Swapchain createSwapchain(Surface graalSurface, PresentMode pm, uint numImages,
                                       Format format, uint[2] size, ImageUsage usage,
                                       CompositeAlpha alpha, Swapchain old=null)
    {
        auto surf = enforce(
            cast(VulkanSurface)graalSurface,
            "Did not pass a Vulkan surface"
        );

        auto oldSc = old ? enforce(
            cast(VulkanSwapchain)old, "Did not pass a Vulkan swapchain"
        ) : null;

        VkSwapchainCreateInfoKHR sci;
        sci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
        sci.surface = surf.vk;
        sci.minImageCount = numImages;
        sci.imageFormat = format.toVk;
        sci.imageExtent = VkExtent2D(size[0], size[1]);
        sci.imageArrayLayers = 1;
        sci.imageUsage = imageUsageToVk(usage);
        sci.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
        sci.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
        sci.clipped = VK_TRUE;
        sci.presentMode = pm.toVk;
        sci.compositeAlpha = compositeAlphaToVk(alpha);
        sci.oldSwapchain = oldSc ? oldSc.vk : null;

        VkSwapchainKHR vkSc;
        vulkanEnforce(
            cmds.createSwapchainKHR(vk, &sci, null, &vkSc),
            "Could not create a Vulkan swapchain"
        );

        return new VulkanSwapchain(vkSc, this, size, format);
    }

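    /// Builds a render pass by translating the graal attachment, subpass and
    /// dependency descriptions into their Vulkan counterparts before a single
    /// create call.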
    override RenderPass createRenderPass(in AttachmentDescription[] attachments,
                                         in SubpassDescription[] subpasses,
                                         in SubpassDependency[] dependencies)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkAttachments = attachments.map!((ref const(AttachmentDescription) ad) {
            VkAttachmentDescription vkAd;
            if (ad.mayAlias) {
                vkAd.flags = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT;
            }
            vkAd.format = ad.format.toVk();
            vkAd.loadOp = ad.colorDepthOps.load.toVk();
            vkAd.storeOp = ad.colorDepthOps.store.toVk();
            vkAd.stencilLoadOp = ad.stencilOps.load.toVk();
            vkAd.stencilStoreOp = ad.stencilOps.store.toVk();
            vkAd.initialLayout = ad.layoutTrans.from.toVk();
            vkAd.finalLayout = ad.layoutTrans.to.toVk();
            return vkAd;
        }).array;

        static VkAttachmentReference mapRef (in AttachmentRef ar) {
            return VkAttachmentReference(ar.attachment, ar.layout.toVk());
        }
        static VkAttachmentReference[] mapRefs(in AttachmentRef[] ars) {
            return ars.map!mapRef.array;
        }
        auto vkSubpasses = subpasses.map!((ref const(SubpassDescription) sd) {
            auto vkInputs = mapRefs(sd.inputs);
            auto vkColors = mapRefs(sd.colors);
            auto vkDepthStencil = sd.depthStencil.save.map!(mapRef).array;
            VkSubpassDescription vkSd;
            vkSd.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
            vkSd.inputAttachmentCount = cast(uint)vkInputs.length;
            vkSd.pInputAttachments = vkInputs.ptr;
            vkSd.colorAttachmentCount = cast(uint)vkColors.length;
            vkSd.pColorAttachments = vkColors.ptr;
            vkSd.pDepthStencilAttachment = vkDepthStencil.length ? vkDepthStencil.ptr : null;
            vkSd.preserveAttachmentCount = cast(uint)sd.preserves.length;
            vkSd.pPreserveAttachments = sd.preserves.ptr;
            return vkSd;
        }).array;

        auto vkDeps = dependencies.map!((ref const(SubpassDependency) sd) {
            VkSubpassDependency vkSd;
            vkSd.srcSubpass = sd.subpass.from;
            vkSd.dstSubpass = sd.subpass.to;
            vkSd.srcStageMask = pipelineStageToVk(sd.stageMask.from);
            vkSd.dstStageMask = pipelineStageToVk(sd.stageMask.to);
            vkSd.srcAccessMask = accessToVk(sd.accessMask.from);
            vkSd.dstAccessMask = accessToVk(sd.accessMask.to);
            return vkSd;
        }).array;

        VkRenderPassCreateInfo rpci;
        rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
        rpci.attachmentCount = cast(uint)vkAttachments.length;
        rpci.pAttachments = vkAttachments.ptr;
        rpci.subpassCount = cast(uint)vkSubpasses.length;
        rpci.pSubpasses = vkSubpasses.ptr;
        rpci.dependencyCount = cast(uint)vkDeps.length;
        rpci.pDependencies = vkDeps.ptr;

        VkRenderPass vkRp;
        vulkanEnforce(
            cmds.createRenderPass(vk, &rpci, null, &vkRp),
            "Could not create a Vulkan render pass"
        );

        return new VulkanRenderPass(vkRp, this);
    }

    override Framebuffer createFramebuffer(RenderPass rp, ImageView[] attachments,
                                           uint width, uint height, uint layers)
    {
        import std.algorithm : map;
        import std.array : array;

        auto vkRp = enforce(cast(VulkanRenderPass)rp, "Did not pass a Vulkan render pass").vk;
        auto vkAttachments = attachments.map!(
            iv => enforce(cast(VulkanImageView)iv, "Did not pass a Vulkan image view").vk
        ).array;

        VkFramebufferCreateInfo fci;
        fci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
        fci.renderPass = vkRp;
        fci.attachmentCount = cast(uint)vkAttachments.length;
        fci.pAttachments = vkAttachments.ptr;
        fci.width = width;
        fci.height = height;
        fci.layers = layers;

        VkFramebuffer vkFb;
        vulkanEnforce(
            cmds.createFramebuffer(vk, &fci, null, &vkFb),
            "Could not create a Vulkan Framebuffer"
        );

        return new VulkanFramebuffer(vkFb, this, attachments);
    }

    override ShaderModule createShaderModule(ShaderLanguage sl, string code, string entryPoint)
    {
        enforce(sl == ShaderLanguage.spirV, "Vulkan only understands SPIR-V");
        enforce(code.length % 4 == 0, "SPIR-V code size must be a multiple of 4");
        VkShaderModuleCreateInfo smci;
        smci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        smci.codeSize = cast(uint)code.length;
        smci.pCode = cast(const(uint)*)code.ptr;

        VkShaderModule vkSm;
        vulkanEnforce(
            cmds.createShaderModule(vk, &smci, null, &vkSm),
            "Could not create Vulkan shader module"
        );

        return new VulkanShaderModule(vkSm, this, entryPoint);
    }

    override PipelineLayout createPipelineLayout() {
        VkPipelineLayoutCreateInfo plci;
        plci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;

        VkPipelineLayout vkPl;
        vulkanEnforce(
            cmds.createPipelineLayout(vk, &plci, null, &vkPl),
            "Could not create Vulkan pipeline layout"
        );
        return new VulkanPipelineLayout(vkPl, this);
    }

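    /// Builds one `VkGraphicsPipelineCreateInfo` per entry of `infos` and creates
    /// all pipelines with a single `vkCreateGraphicsPipelines` call. The
    /// intermediate state structures are heap-allocated so the pointers stored
    /// in the create infos remain valid until that call returns.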
    override Pipeline[] createPipelines(PipelineInfo[] infos) {
        import std.algorithm : map, max;
        import std.array : array;
        import std.string : toStringz;

        auto pcis = new VkGraphicsPipelineCreateInfo[infos.length];

        foreach (i; 0 .. infos.length) {
            VkPipelineShaderStageCreateInfo[] sscis;
            void addShaderStage(ShaderModule sm, ShaderStage ss) {
                VkPipelineShaderStageCreateInfo ssci;
                ssci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
                ssci.stage = shaderStageToVk(ss);
                ssci.module_ = enforce(
                    cast(VulkanShaderModule)sm,
                    "Did not pass a Vulkan shader module"
                ).vk;
                ssci.pName = toStringz(sm.entryPoint);
                sscis ~= ssci;
            }
            auto shaders = infos[i].shaders;
            enforce(shaders.vertex, "Vertex input shader is mandatory");
            addShaderStage(shaders.vertex, ShaderStage.vertex);
            if (shaders.tessControl)
                addShaderStage(shaders.tessControl, ShaderStage.tessellationControl);
            if (shaders.tessEval)
                addShaderStage(shaders.tessEval, ShaderStage.tessellationEvaluation);
            if (shaders.geometry)
                addShaderStage(shaders.geometry, ShaderStage.geometry);
            if (shaders.fragment)
                addShaderStage(shaders.fragment, ShaderStage.fragment);

            auto vkInputBindings = infos[i].inputBindings.map!(
                ib => VkVertexInputBindingDescription(
                    ib.binding, cast(uint)ib.stride,
                    ib.instanced ?
                        VK_VERTEX_INPUT_RATE_INSTANCE :
                        VK_VERTEX_INPUT_RATE_VERTEX
                )
            ).array;

            auto vkInputAttribs = infos[i].inputAttribs.map!(
                ia => VkVertexInputAttributeDescription(
                    ia.location, ia.binding, ia.format.toVk(), cast(uint)ia.offset
                )
            ).array;

            auto vkVtxInput = new VkPipelineVertexInputStateCreateInfo;
            vkVtxInput.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
            vkVtxInput.vertexBindingDescriptionCount = cast(uint)vkInputBindings.length;
            vkVtxInput.pVertexBindingDescriptions = vkInputBindings.ptr;
            vkVtxInput.vertexAttributeDescriptionCount = cast(uint)vkInputAttribs.length;
            vkVtxInput.pVertexAttributeDescriptions = vkInputAttribs.ptr;

            auto vkAssy = new VkPipelineInputAssemblyStateCreateInfo;
            vkAssy.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
            vkAssy.topology = infos[i].assembly.primitive.toVk();
            vkAssy.primitiveRestartEnable = infos[i].assembly.primitiveRestart ? VK_TRUE : VK_FALSE;

            auto vkViewports = infos[i].viewports.map!(vc => vc.viewport).map!(
                vp => VkViewport(vp.x, vp.y, vp.width, vp.height, vp.minDepth, vp.maxDepth)
            ).array;
            auto vkScissors = infos[i].viewports.map!(vc => vc.scissors).map!(
                r => VkRect2D(VkOffset2D(r.x, r.y), VkExtent2D(r.width, r.height))
            ).array;
            auto vkViewport = new VkPipelineViewportStateCreateInfo;
            vkViewport.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
            vkViewport.viewportCount = cast(uint)max(1, infos[i].viewports.length);
            vkViewport.pViewports = vkViewports.ptr;
            vkViewport.scissorCount = cast(uint)max(1, infos[i].viewports.length);
            vkViewport.pScissors = vkScissors.ptr;

            auto vkRasterizer = new VkPipelineRasterizationStateCreateInfo;
            vkRasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
            // without a fragment shader, rasterizer output is discarded entirely
            vkRasterizer.rasterizerDiscardEnable = shaders.fragment ? VK_FALSE : VK_TRUE;
            vkRasterizer.polygonMode = infos[i].rasterizer.mode.toVk();
            vkRasterizer.cullMode = cullModeToVk(infos[i].rasterizer.cull);
            vkRasterizer.frontFace = infos[i].rasterizer.front.toVk();
            vkRasterizer.lineWidth = infos[i].rasterizer.lineWidth;
            vkRasterizer.depthClampEnable = infos[i].rasterizer.depthClamp ? VK_TRUE : VK_FALSE;
            if (infos[i].rasterizer.depthBias.isSome) {
                DepthBias db = infos[i].rasterizer.depthBias.get;
                vkRasterizer.depthBiasEnable = VK_TRUE;
                vkRasterizer.depthBiasConstantFactor = db.constantFactor;
                vkRasterizer.depthBiasClamp = db.clamp;
                vkRasterizer.depthBiasSlopeFactor = db.slopeFactor;
            }
            else {
                vkRasterizer.depthBiasConstantFactor = 0f;
                vkRasterizer.depthBiasClamp = 0f;
                vkRasterizer.depthBiasSlopeFactor = 0f;
            }

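            // color blend state: one attachment state per graal blend attachment,
            // with the logic op enabled only when one is provided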
            const blendInfo = infos[i].blendInfo;
            auto vkColorAttachments = blendInfo.attachments.map!(
                cba => VkPipelineColorBlendAttachmentState(
                    cba.enabled ? VK_TRUE : VK_FALSE,
                    cba.colorBlend.factor.from.toVk(),
                    cba.colorBlend.factor.to.toVk(),
                    cba.colorBlend.op.toVk(),
                    cba.alphaBlend.factor.from.toVk(),
                    cba.alphaBlend.factor.to.toVk(),
                    cba.alphaBlend.op.toVk(),
                    cast(VkColorComponentFlags)cba.colorMask
                )
            ).array;
            auto vkBlend = new VkPipelineColorBlendStateCreateInfo;
            vkBlend.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
            if (blendInfo.logicOp.isSome) {
                vkBlend.logicOpEnable = VK_TRUE;
                vkBlend.logicOp = blendInfo.logicOp.get.toVk();
            }
            vkBlend.attachmentCount = cast(uint)vkColorAttachments.length;
            vkBlend.pAttachments = vkColorAttachments.ptr;
            vkBlend.blendConstants = blendInfo.blendConstants;

            // dynamic state is optional: only build the create info when states are requested
            VkPipelineDynamicStateCreateInfo* vkDynStatesInfo;
            if (infos[i].dynamicStates) {
                auto vkDynStates = infos[i].dynamicStates.map!(ds => ds.toVk()).array;
                vkDynStatesInfo = new VkPipelineDynamicStateCreateInfo;
                vkDynStatesInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
                vkDynStatesInfo.dynamicStateCount = cast(uint)vkDynStates.length;
                vkDynStatesInfo.pDynamicStates = vkDynStates.ptr;
            }

            auto rp = infos[i].renderPass;
            auto vkRp = rp ? enforce(
                cast(VulkanRenderPass)rp,
                "Did not supply a Vulkan render pass"
            ).vk : null;

            // the following states are not configurable yet and are left at their defaults
            auto vkTess = new VkPipelineTessellationStateCreateInfo;
            vkTess.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
            auto vkMs = new VkPipelineMultisampleStateCreateInfo;
            vkMs.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
            vkMs.minSampleShading = 1f;
            auto vkDepthStencil = new VkPipelineDepthStencilStateCreateInfo;
            vkDepthStencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;

            pcis[i].sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
            pcis[i].stageCount = cast(uint)sscis.length;
            pcis[i].pStages = sscis.ptr;
            pcis[i].pVertexInputState = vkVtxInput;
            pcis[i].pInputAssemblyState = vkAssy;
            pcis[i].pTessellationState = vkTess;
            pcis[i].pViewportState = vkViewport;
            pcis[i].pRasterizationState = vkRasterizer;
            pcis[i].pMultisampleState = vkMs;
            pcis[i].pDepthStencilState = vkDepthStencil;
            pcis[i].pColorBlendState = vkBlend;
            pcis[i].pDynamicState = vkDynStatesInfo;
            pcis[i].layout = enforce(
                cast(VulkanPipelineLayout)infos[i].layout,
                "Did not pass a valid Vulkan pipeline layout"
            ).vk;
            pcis[i].renderPass = vkRp;
            pcis[i].subpass = infos[i].subpassIndex;
            pcis[i].basePipelineIndex = -1;
        }

        auto vkPls = new VkPipeline[infos.length];
        vulkanEnforce(
            cmds.createGraphicsPipelines(vk, null, cast(uint)pcis.length, pcis.ptr, null, vkPls.ptr),
            "Could not create Vulkan graphics pipeline"
        );

        auto pls = new Pipeline[infos.length];
        foreach (i; 0 .. vkPls.length) {
            pls[i] = new VulkanPipeline(vkPls[i], this, infos[i].layout);
        }
        return pls;
    }

    private VulkanPhysicalDevice _pd;
    private VkDeviceCmds _cmds;
    private VulkanQueue[] _queues;
}