1 module example;
2
3 import gfx.core.log;
4 import gfx.core.rc;
5 import gfx.graal;
6 import gfx.graal.buffer;
7 import gfx.graal.cmd;
8 import gfx.graal.device;
9 import gfx.graal.format;
10 import gfx.graal.image;
11 import gfx.graal.memory;
12 import gfx.graal.presentation;
13 import gfx.graal.queue;
14 import gfx.graal.renderpass;
15 import gfx.graal.sync;
16 import gfx.graal.types;
17 import gfx.window;
18
19 import std.algorithm;
20 import std.exception;
21 import std.stdio;
22 import std.typecons;
23 import std.traits : isArray;
24 import std.datetime : Duration;
25
/// Log tag shared by all example output.
immutable gfxExLog = LogTag("GFX-EX");
27
/// Frame counter reporting frames-per-second between two calls to
/// computeFps().
struct FpsProbe
{
    import std.datetime.stopwatch : StopWatch;

    private StopWatch sw;
    private size_t lastUsecs;   // elapsed usecs at the previous computeFps() call
    private size_t lastFc;      // frame count at the previous computeFps() call
    private size_t fc;          // total frames since start()

    /// Start the probe's stopwatch.
    void start() { sw.start(); }

    /// Whether the stopwatch is running.
    bool running() { return sw.running(); }

    /// Stop the stopwatch and reset all counters.
    void stop()
    {
        sw.stop();
        lastUsecs = 0;
        lastFc = 0;
        fc = 0;
    }

    /// Count one frame.
    void tick() { fc += 1; }

    /// Number of frames counted since start().
    size_t framecount() { return fc; }

    /// Average FPS since the previous call (or since start() for the
    /// first call). Returns 0 when no time has elapsed between calls,
    /// instead of the infinity a naked division would produce.
    float computeFps() {
        const usecs = sw.peek().total!"usecs"();
        const elapsed = usecs - lastUsecs;
        const frames = fc - lastFc;
        lastFc = fc;
        lastUsecs = usecs;
        if (elapsed == 0) return 0f;    // guard against division by zero
        return 1_000_000f * frames / elapsed;
    }
}
61
/// Accumulates the time spent in scoped frames and reports total and
/// average durations.
struct Timer
{
    import std.datetime.stopwatch : StopWatch;

    private StopWatch sw;
    private size_t fc;  // number of frames timed

    void start() { sw.start(); }
    void stop() { sw.stop(); }

    /// Return a RAII guard that runs the stopwatch for the duration of
    /// the current scope and counts one frame.
    auto frame() {
        static struct FrameTimer
        {
            StopWatch* sw;
            this(StopWatch* sw)
            {
                this.sw = sw;
                sw.start();
            }
            ~this()
            {
                // sw is null for a default-initialized guard
                if (this.sw) {
                    sw.stop();
                }
            }
        }
        ++fc;
        return FrameTimer(&sw);
    }

    /// Total time spent inside frame() scopes.
    @property Duration totalDur()
    {
        return sw.peek;
    }

    /// Average time per frame. Returns Duration.zero when no frame was
    /// timed yet, instead of dividing by zero.
    @property Duration avgDur()
    {
        return fc == 0 ? Duration.zero : sw.peek / fc;
    }

    /// Number of frames timed.
    @property size_t framecount()
    {
        return fc;
    }
}
107
/// Module constructor: raise log verbosity to trace for the examples,
/// and, when compiled with debug=rc, enable ref-count tracing for the
/// GlDevice type.
shared static this()
{
    import gfx.core.log : Severity, severity;
    severity = Severity.trace;

    debug(rc) {
        import gfx.core.rc : rcPrintStack, rcTypeRegex;
        rcPrintStack = true;
        rcTypeRegex = "^GlDevice$";
    }
}
119
120 class Example : Disposable
121 {
122 import gfx.math.proj : NDC;
123
124 string title;
125 string[] args;
126 Rc!Display display;
127 Window window;
128 Rc!Instance instance;
129 NDC ndc;
130 PhysicalDevice physicalDevice;
131 Rc!Device device;
132 Queue graphicsQueue;
133 Queue presentQueue;
134 uint[2] surfaceSize;
135 bool hasAlpha;
136 Rc!Swapchain swapchain;
137 Rc!Semaphore imageAvailableSem;
138 Rc!Semaphore renderingFinishSem;
139 uint frameNumber;
140 FrameData[] frameDatas;
141 FpsProbe probe;
142
143 // garbage collection
144 // it is sometimes desirable to delete objects still in use in a command
145 // buffer (this happens when rebuilding the swapchain for example)
146 // each entry is given an optional fence to check for the command buffer
147 // completion, and if the fence is null, it checks that the garbage
148 // was not emitted more than maxFcAge frames ago.
149 GarbageEntry garbageEntries;
150 GarbageEntry lastGarbageEntry;
151 enum maxFcAge = 4;
152
/// Node of the singly-linked garbage list. The resource stays retained
/// until either its fence signals or the entry is older than maxFcAge
/// frames (when fence is null).
class GarbageEntry
{
    GarbageEntry next;          // next entry in FIFO order
    uint fc;                    // frame number when the entry was emitted
    Fence fence;                // optional fence guarding command completion
    IAtomicRefCounted resource; // retained resource awaiting release
}
160
/// Build an example with a window title and the program's command line
/// arguments (parsed later by prepare() to select backend options).
this (string title, string[] args=[])
{
    this.title = title;
    this.args = args;
}
166
/// Release all GPU resources in dependency order.
/// Waits for device idleness and flushes the garbage list (waiting on
/// its fences) before releasing anything else.
override void dispose()
{
    probe.stop();
    if (device) {
        device.waitIdle();
    }
    // flush the garbage list: wait on fences so the resources are no
    // longer referenced by in-flight command buffers
    while (garbageEntries) {
        if (garbageEntries.fence) {
            garbageEntries.fence.wait();
            releaseObj(garbageEntries.fence);
        }
        releaseObj(garbageEntries.resource);
        garbageEntries = garbageEntries.next;
    }
    releaseArr(frameDatas);
    // the rest is checked with Rc, so it is safe to call unload even
    // if object is invalid
    imageAvailableSem.unload();
    renderingFinishSem.unload();
    swapchain.unload();
    device.unload();
    // if (window) window.close();
    instance.unload();
    display.unload();
}
192
/// Parse the relevant command line arguments, then create the display,
/// window, instance, device and every resource needed before rendering.
/// Recognized args: --no-vulkan/nv, --no-gl3/ng, --force-xcb.
void prepare()
{
    bool noVulkan = false;
    bool noGl3 = false;
    bool forceXcb = false;
    foreach (a; args) {
        if (a == "--no-vulkan" || a == "nv") {
            noVulkan = true;
        }
        else if (a == "--no-gl3" || a == "ng") {
            noGl3 = true;
        }
        else if (a == "--force-xcb") {
            forceXcb = true;
        }
    }

    import std.algorithm : remove;

    DisplayCreateInfo createInfo;
    // NOTE(review): std.algorithm.remove removes by *index*, not value;
    // this relies on the Backend enum values matching their position in
    // the default backendCreateOrder — TODO confirm
    if (noVulkan) {
        createInfo.backendCreateOrder =
            createInfo.backendCreateOrder.remove(Backend.vulkan);
    }
    if (noGl3) {
        createInfo.backendCreateOrder =
            createInfo.backendCreateOrder.remove(Backend.gl3);
    }
    version (linux)
    {
        if (forceXcb) {
            createInfo.linuxDisplayCreateOrder = [ LinuxDisplay.xcb ];
        }
    }

    // Create a display for the running platform
    // The instance is created by the display. Backend is chosen at runtime
    // depending on detected API support. (i.e. Vulkan is preferred)
    display = createDisplay(createInfo);
    instance = display.instance;
    ndc = instance.apiProps.ndc;

    // Create a window. The surface is created during the call to show.
    window = display.createWindow(title);
    window.show(640, 480);
    surfaceSize = [640, 480];

    // rebuild the swapchain whenever the window size actually changes
    window.onResize = (uint w, uint h)
    {
        if (w != surfaceSize[0] || h != surfaceSize[1]) {
            surfaceSize = [ w, h ];
            rebuildSwapchain();
        }
    };

    debug {
        // log backend warnings/errors; break into the debugger on error
        alias Sev = gfx.graal.Severity;
        instance.setDebugCallback((Sev sev, string msg) {
            import std.stdio : writefln;
            if (sev >= Sev.warning) {
                writefln("Gfx backend %s message: %s", sev, msg);
            }
            if (sev == Sev.error) {
                // debug break;
                asm { int 0x03; }
            }
        });
    }

    // The rest of the preparation.
    prepareDevice();
    prepareSync();
    prepareSwapchain(null);
    prepareRenderPass();
    prepareFramebuffers();

    probe.start();
}
271
/// Time elapsed since the probe was started in prepare().
Duration timeElapsed()
{
    assert(probe.sw.running(), "stopwatch isn't running!!");
    return probe.sw.peek();
}
277
/// Select a hardware physical device and open a logical device with
/// queues able to render graphics and present to the window surface.
/// Throws when no suitable device is found.
void prepareDevice()
{
    auto graphicsQueueIndex = uint.max;
    auto presentQueueIndex = uint.max;

    bool checkDevice(PhysicalDevice dev) {
        // reset for each candidate device: without this, indices found
        // on a previously rejected device could make a later device
        // appear suitable even though its own families don't qualify
        graphicsQueueIndex = uint.max;
        presentQueueIndex = uint.max;

        if (dev.softwareRendering) return false;

        foreach (size_t i, qf; dev.queueFamilies) {
            const qi = cast(uint)i;
            const graphics = qf.cap & QueueCap.graphics;
            const present = dev.supportsSurface(qi, window.surface);
            // best case: a single family does both
            if (graphics && present) {
                graphicsQueueIndex = qi;
                presentQueueIndex = qi;
                return true;
            }
            if (graphics) graphicsQueueIndex = qi;
            if (present) presentQueueIndex = qi;
        }
        return graphicsQueueIndex != uint.max && presentQueueIndex != uint.max;
    }

    foreach (pd; instance.devices) {
        if (checkDevice(pd)) {
            auto qrs = [ QueueRequest(graphicsQueueIndex, [ 0.5f ]) ];
            if (graphicsQueueIndex != presentQueueIndex) {
                qrs ~= QueueRequest(presentQueueIndex, [ 0.5f ]);
            }
            physicalDevice = pd;
            device = pd.open(qrs);
            graphicsQueue = device.getQueue(graphicsQueueIndex, 0);
            presentQueue = device.getQueue(presentQueueIndex, 0);
            break;
        }
    }
    // fail early with a clear message rather than a null dereference later
    enforce(device, "Could not find a suitable graphics device");
}
313
/// Create the semaphores synchronizing swapchain image acquisition
/// (imageAvailableSem) and presentation (renderingFinishSem).
void prepareSync()
{
    imageAvailableSem = device.createSemaphore();
    renderingFinishSem = device.createSemaphore();
}
319
/// Build (or rebuild) the swapchain for the current surfaceSize.
/// former, when non-null, is the previous swapchain whose resources
/// may be recycled by the backend.
void prepareSwapchain(Swapchain former=null)
{
    gfxExLog.infof("building swapchain %sx%s", surfaceSize[0], surfaceSize[1]);

    const surfCaps = physicalDevice.surfaceCaps(window.surface);
    enforce(surfCaps.usage & ImageUsage.transferDst, "TransferDst not supported by surface");
    enforce(surfCaps.usage & ImageUsage.colorAttachment, "ColorAttachment not supported by surface");
    const usage = ImageUsage.colorAttachment | ImageUsage.transferDst;
    const numImages = max(2, surfCaps.minImages);
    enforce(surfCaps.maxImages == 0 || surfCaps.maxImages >= numImages);
    const chosenFormat = chooseFormat(physicalDevice, window.surface);

    // pick the first supported composite alpha mode in preference
    // order, falling back to opaque
    auto ca = CompositeAlpha.opaque;
    foreach (mode; [ CompositeAlpha.preMultiplied, CompositeAlpha.inherit,
                     CompositeAlpha.postMultiplied ])
    {
        if (surfCaps.supportedAlpha & mode) {
            ca = mode;
            break;
        }
    }
    hasAlpha = ca != CompositeAlpha.opaque;

    // clamp the requested size to what the surface supports
    foreach (i; 0 .. 2) {
        surfaceSize[i] = clamp(surfaceSize[i], surfCaps.minSize[i], surfCaps.maxSize[i]);
    }
    const pm = choosePresentMode(physicalDevice, window.surface);

    swapchain = device.createSwapchain(
        window.surface, pm, numImages, chosenFormat, surfaceSize, usage, ca, former
    );
}
354
/// Hook for subclasses that need a render pass.
/// The base implementation does nothing.
void prepareRenderPass()
{}
357
/// Data that is duplicated for every frame in the swapchain
/// This typically include framebuffer and command pool.
abstract class FrameData : AtomicRefCounted
{
    Rc!Fence fence; // to keep track of when command processing is done
    Rc!CommandPool cmdPool;

    ImageBase swcColor; // color image owned by the swapchain
    uint[2] size;       // dimensions of swcColor

    /// Build from a swapchain color image.
    /// The fence is created signaled so the first wait in render()
    /// succeeds immediately.
    this(ImageBase swcColor)
    {
        this.fence = device.createFence(Yes.signaled);
        this.cmdPool = device.createCommandPool(graphicsQueue.index);

        this.swcColor = swcColor;
        const dims = swcColor.info.dims;
        size = [dims.width, dims.height];
    }

    override void dispose()
    {
        fence.unload();
        cmdPool.unload();
    }
}
384
/// Instantiate FrameData implementation for the given swapchain color image.
/// tempBuf is a helper that can be used to transfer data, change images layout...
/// it is submitted and waited for shortly after FrameData is built.
/// Implemented by each concrete example.
abstract FrameData makeFrameData(ImageBase swcColor, CommandBuffer tempBuf);
389
/// Build one FrameData per swapchain image.
/// Each entry is retained; it is released in dispose() or deferred to
/// the garbage list by rebuildSwapchain().
void prepareFramebuffers()
{
    auto swcImages = swapchain.images;
    frameDatas = new FrameData[swcImages.length];

    // RAII command buffer: submitted and waited for at scope exit
    auto tempBuf = rc(new RaiiCmdBuf);

    foreach(i, img; swcImages) {
        frameDatas[i] = retainObj(makeFrameData(img, tempBuf.get));
    }
}
401
/// Record buffer implementation for the current frame.
/// Returns the submissions for the graphics queue:
/// the first submission that renders to the swapchain image must
/// wait for imageAvailableSem;
/// the last submission must signal renderingFinishSem.
/// Implemented by each concrete example (see simpleSubmission for the
/// common single-submission case).
abstract Submission[] recordCmds(FrameData frameData);
408
/// build a submission for the simplest cases with one submission
/// waits for imageAvailableSem at the transfer stage and signals
/// renderingFinishSem when the command buffers complete
final Submission[] simpleSubmission(PrimaryCommandBuffer[] cmdBufs)
{
    return [
        Submission (
            [ StageWait(imageAvailableSem, PipelineStage.transfer) ],
            [ renderingFinishSem.obj ], cmdBufs
        )
    ];
}
419
/// Acquire the next swapchain image, record and submit the commands
/// for it, then present it. Rebuilds the swapchain when it is reported
/// out of date.
void render()
{
    import gfx.graal.error : OutOfDateException;

    const acq = swapchain.acquireNextImage(imageAvailableSem.obj);

    if (acq.hasIndex) {
        auto frameData = frameDatas[acq.index];
        // wait until the commands previously submitted for this image
        // have completed before re-recording over them
        frameData.fence.wait();
        frameData.fence.reset();

        auto submissions = recordCmds(frameData);

        // the fence signals once these submissions are done
        graphicsQueue.submit(submissions, frameData.fence);

        try {
            presentQueue.present(
                [ renderingFinishSem.obj ],
                [ PresentRequest(swapchain, acq.index) ]
            );
        }
        catch (OutOfDateException ex) {
            // The swapchain became out of date between acquire and present.
            // Rare, but can happen
            gfxExLog.errorf("error during presentation: %s", ex.msg);
            gfxExLog.errorf("acquisition was %s", acq.state);
            rebuildSwapchain();
            return;
        }
    }

    if (acq.swapchainNeedsRebuild) {
        rebuildSwapchain();
    }
}
455
/// Rebuild the swapchain and its per-frame data, typically after a
/// resize or an out-of-date report.
void rebuildSwapchain()
{
    // frame data may still be referenced by in-flight command buffers:
    // defer release until each frame's fence signals
    foreach (imgData; frameDatas) {
        gc(imgData, imgData.fence);
    }
    releaseArr(frameDatas);
    prepareSwapchain(swapchain.obj); // pass former swapchain for reuse
    prepareFramebuffers();
}
465
/// Per-frame bookkeeping: advance the frame counter, collect garbage,
/// and log the FPS periodically.
void frameTick()
{
    ++frameNumber;
    collectGarbage();

    probe.tick();
    // report FPS once every reportPeriod frames
    enum reportPeriod = 300;
    if (probe.framecount % reportPeriod == 0) {
        gfxExLog.infof("FPS = %s", probe.computeFps());
    }
}
477
/// Defer the release of resource until it is safe: either when fence
/// signals, or (when fence is null) maxFcAge frames after emission.
void gc(IAtomicRefCounted resource, Fence fence=null)
{
    auto entry = new GarbageEntry;
    entry.resource = retainObj(resource);
    if (fence !is null) {
        entry.fence = retainObj(fence);
    }
    entry.fc = frameNumber;

    // append the entry at the tail of the singly linked list
    if (lastGarbageEntry is null) {
        assert(!garbageEntries);
        garbageEntries = entry;
    }
    else {
        lastGarbageEntry.next = entry;
    }
    lastGarbageEntry = entry;
}
495
/// Release garbage entries from the head of the list as long as they
/// are collectable (fence signaled, or entry older than maxFcAge
/// frames). Stops at the first entry that is not yet collectable.
void collectGarbage()
{
    while (garbageEntries) {
        auto entry = garbageEntries;
        const fenceDone = entry.fence && entry.fence.signaled;
        const tooOld = entry.fc + maxFcAge < frameNumber;
        if (!fenceDone && !tooOld) break;

        if (entry.fence) releaseObj(entry.fence);
        releaseObj(entry.resource);
        garbageEntries = entry.next;
    }
    if (!garbageEntries) lastGarbageEntry = null;
}
512
// The following functions are general utilities that can be used by
// examples subclassing this class.
515
/// Find a format supported by the device for the given tiling and features.
/// The first candidate whose format properties include all requested
/// features for the requested tiling wins; throws when none qualifies.
Format findSupportedFormat(in Format[] candidates, in ImageTiling tiling, in FormatFeatures features)
{
    foreach (fmt; candidates) {
        const props = physicalDevice.formatProperties(fmt);
        const supported =
            (tiling == ImageTiling.optimal && (props.optimalTiling & features) == features) ||
            (tiling == ImageTiling.linear && (props.linearTiling & features) == features);
        if (supported) {
            return fmt;
        }
    }
    throw new Exception("could not find supported format");
}
532
/// Find a supported depth format
/// Candidates are tried in order of decreasing depth precision.
Format findDepthFormat() {
    return findSupportedFormat(
        [ Format.d32_sFloat, Format.d32s8_sFloat, Format.d24s8_uNorm, Format.d16_uNorm, Format.d16s8_uNorm ],
        ImageTiling.optimal, FormatFeatures.depthStencilAttachment
    );
}
540
/// Find a supported stencil format
/// A pure stencil format is preferred over combined depth/stencil ones.
Format findStencilFormat() {
    return findSupportedFormat(
        [ Format.s8_uInt, Format.d16s8_uNorm, Format.d24s8_uNorm, Format.d32s8_sFloat ],
        ImageTiling.optimal, FormatFeatures.depthStencilAttachment
    );
}
548
/// Return the index of a memory type supporting all of props,
/// or uint.max if none was found.
uint findMemType(MemoryRequirements mr, MemProps props)
{
    const devMemProps = physicalDevice.memoryProperties;
    foreach (i, mt; devMemProps.types) {
        const allowedByRequirements = (mr.memTypeMask & (1 << i)) != 0;
        const hasAllProps = (mt.props & props) == props;
        if (allowedByRequirements && hasAllProps) {
            return cast(uint)i;
        }
    }
    return uint.max;
}
561
562
/// Create a buffer for usage, bind memory of dataSize with memProps
/// Return null if no memory type can be found
final Buffer createBuffer(size_t dataSize, BufferUsage usage, MemProps props)
{
    auto buf = device.createBuffer( usage, dataSize ).rc;

    const mr = buf.memoryRequirements;
    const memTypeInd = findMemType(mr, props);
    if (memTypeInd == uint.max) return null;

    auto mem = device.allocateMemory(memTypeInd, mr.size).rc;
    buf.bindMemory(mem, 0);
    // mem's local Rc is released at scope exit; presumably the buffer
    // keeps the memory alive through the binding — verify in gfx.core.rc

    return buf.giveAway();  // transfer ownership to the caller
}
578
/// Create a buffer, binds memory to it, and leave content undefined
/// The buffer will be host visible and host coherent such as content
/// can be updated without staging buffer
final Buffer createDynamicBuffer(size_t dataSize, BufferUsage usage)
{
    return createBuffer(dataSize, usage, MemProps.hostVisible | MemProps.hostCoherent);
}
586
/// Create a buffer, and bind it with memory filled with data.
/// The bound memory will be deviceLocal, without guarantee to be host visible.
/// Returns null when no suitable memory type exists for the direct path
/// (the staging path enforces instead).
final Buffer createStaticBuffer(const(void)[] data, BufferUsage usage)
{
    const dataSize = data.length;

    // On embedded gpus, device local memory is often also host visible.
    // Attempting to create one that way.
    if (physicalDevice.type != DeviceType.discreteGpu) {
        auto buf = createBuffer(
            dataSize, usage,
            MemProps.hostVisible | MemProps.hostCoherent | MemProps.deviceLocal
        ).rc;
        if (buf) {
            // direct map and copy: no staging buffer needed
            auto mm = buf.boundMemory.map(0, dataSize);
            mm[] = data;
            return buf.giveAway();
        }
    }

    // did not happen :-(
    // will go the usual way: staging buffer then device local buffer

    // create staging buffer
    auto stagingBuf = enforce(createBuffer(
        dataSize, BufferUsage.transferSrc, MemProps.hostVisible | MemProps.hostCoherent
    )).rc;

    // populate data
    {
        auto mm = stagingBuf.boundMemory.map(0, dataSize);
        mm[] = data;
    }

    // create actual buffer
    auto buf = enforce(createBuffer(
        dataSize, usage | BufferUsage.transferDst, MemProps.deviceLocal
    )).rc;

    // RAII command buffer: submitted and waited for at scope exit
    auto b = rc(new RaiiCmdBuf);

    // copy from staging buffer
    copyBuffer(stagingBuf, buf, dataSize, b.cmdBuf);

    // return data
    return buf.giveAway();
}
634
/// ditto
/// Typed-slice convenience overload: reinterprets data as raw bytes.
Buffer createStaticBuffer(T)(const(T)[] data, BufferUsage usage)
if (!is(T == void))
{
    return createStaticBuffer(untypeSlice(data), usage);
}
641
/// ditto
/// Single-value convenience overload: uploads the raw bytes of data.
Buffer createStaticBuffer(T)(in T data, BufferUsage usage)
if (!isArray!T)
{
    const start = cast(const(void)*)&data;
    return createStaticBuffer(start[0 .. data.sizeof], usage);
}
649
/// Allocate memory matching the image requirements and props, and bind
/// it to img. Returns false when no suitable memory type is found.
bool bindImageMemory(Image img, MemProps props=MemProps.deviceLocal) {
    const mr = img.memoryRequirements;
    const memTypeInd = findMemType(mr, props);
    if (memTypeInd == uint.max) return false;

    auto mem = device.allocateMemory(memTypeInd, mr.size).rc;
    img.bindMemory(mem, 0);
    return true;
}
659
/// create an image to be used as texture
/// data is uploaded through a staging buffer after the image is
/// transitioned to transferDstOptimal layout.
/// Returns null when no suitable memory type is found for the image.
Image createTextureImage(const(void)[] data, in ImageInfo info)
{
    // the format must support sampling with optimal tiling
    const FormatFeatures requirement = FormatFeatures.sampledImage;
    const formatProps = physicalDevice.formatProperties(info.format);
    enforce( (formatProps.optimalTiling & requirement) == requirement );

    // create staging buffer
    auto stagingBuf = enforce(createBuffer(
        data.length, BufferUsage.transferSrc, MemProps.hostVisible | MemProps.hostCoherent
    )).rc;

    // populate data to buffer
    {
        auto mm = stagingBuf.boundMemory.map(0, data.length);
        mm[] = data;
    }

    // create an image
    auto img = enforce(device.createImage(
        info.withUsage(info.usage | ImageUsage.sampled | ImageUsage.transferDst)
    )).rc;

    // allocate memory image
    if (!bindImageMemory(img)) return null;

    {
        // scoped RAII command buffer: submitted and waited for at the
        // end of this block
        auto b = rc(new RaiiCmdBuf);

        // transition the image to transferDstOptimal before the copy
        b.cmdBuf.pipelineBarrier(
            trans(PipelineStage.topOfPipe, PipelineStage.transfer), [], [
                ImageMemoryBarrier(
                    trans(Access.none, Access.transferWrite),
                    trans(ImageLayout.undefined, ImageLayout.transferDstOptimal),
                    trans(queueFamilyIgnored, queueFamilyIgnored),
                    img, ImageSubresourceRange(ImageAspect.color)
                )
            ]
        );
        copyBufferToImage(stagingBuf, img, b.cmdBuf);
    }

    return img.giveAway();
}
704
/// Create an image for depth attachment usage.
/// Returns null when no suitable memory type can be found.
Image createDepthImage(uint width, uint height)
{
    // pick a depth format the device supports
    const depthFormat = findDepthFormat();

    auto image = enforce(device.createImage(
        ImageInfo.d2(width, height)
            .withFormat(depthFormat)
            .withUsage(ImageUsage.depthStencilAttachment)
    )).rc;

    // bind device-local memory to the image
    if (!bindImageMemory(image)) return null;

    return image.giveAway();
}
721
/// Create an image for stencil attachment usage.
/// Returns null when no suitable memory type can be found.
Image createStencilImage(uint width, uint height)
{
    // assume s8_uInt is supported
    const stencilFormat = findStencilFormat();

    auto image = enforce(device.createImage(
        ImageInfo.d2(width, height)
            .withFormat(stencilFormat)
            .withUsage(ImageUsage.depthStencilAttachment)
    )).rc;

    // bind device-local memory to the image
    if (!bindImageMemory(image)) return null;

    return image.giveAway();
}
737
/// Record a pipeline barrier on cmdBuf transitioning img between the
/// two layouts in layout. Stages and access masks are derived from the
/// image usage (color attachment or depth/stencil attachment).
/// Throws for usages this helper does not handle.
final void recordImageLayoutBarrier(CommandBuffer cmdBuf, ImageBase img, Trans!ImageLayout layout)
{
    const info = img.info;

    if (info.usage & ImageUsage.colorAttachment)
    {
        cmdBuf.pipelineBarrier(
            trans(PipelineStage.colorAttachmentOutput, PipelineStage.colorAttachmentOutput), [],
            [ ImageMemoryBarrier(
                trans(Access.none, Access.colorAttachmentWrite),
                layout,
                trans(queueFamilyIgnored, queueFamilyIgnored),
                img, ImageSubresourceRange(ImageAspect.color)
            ) ]
        );
    }
    else if (info.usage & ImageUsage.depthStencilAttachment)
    {
        // the aspect mask must match the format's actual components
        const hasDepth = formatDesc(info.format).surfaceType.depthBits > 0;
        const hasStencil = formatDesc(info.format).surfaceType.stencilBits > 0;
        auto aspect = ImageAspect.none;
        if (hasDepth) aspect |= ImageAspect.depth;
        if (hasStencil) aspect |= ImageAspect.stencil;
        cmdBuf.pipelineBarrier(
            trans(PipelineStage.topOfPipe, PipelineStage.earlyFragmentTests), [], [
                ImageMemoryBarrier(
                    trans(
                        Access.none,
                        Access.depthStencilAttachmentRead | Access.depthStencilAttachmentWrite
                    ),
                    layout,
                    trans(queueFamilyIgnored, queueFamilyIgnored),
                    img, ImageSubresourceRange(aspect)
                )
            ]
        );
    }
    else {
        import std.format : format;
        throw new Exception(
            format("don't know how to record memory barrier for image usage %s", info.usage)
        );
    }
}
782
/// copy the content of one buffer to another
/// srcBuf and dstBuf must support transferSrc and transferDst respectively.
/// Copies size bytes from offset 0 of srcBuf to offset 0 of dstBuf.
final void copyBuffer(Buffer srcBuf, Buffer dstBuf, size_t size, CommandBuffer cmdBuf)
{
    cmdBuf.copyBuffer(trans(srcBuf, dstBuf), [CopyRegion(trans!size_t(0, 0), size)]);
}
789
/// copy the content of one buffer to an image.
/// the image layout must be transferDstOptimal before the call
final void copyBufferToImage(Buffer srcBuf, Image dstImg, CommandBuffer cmdBuf)
{
    const dims = dstImg.info.dims;

    BufferImageCopy region;
    region.extent = [dims.width, dims.height, dims.depth];
    // single-element slice over the local region
    // (restores the "&region" token that was mis-encoded as "®ion")
    const regions = (&region)[0 .. 1];
    cmdBuf.copyBufferToImage(srcBuf, dstImg, ImageLayout.transferDstOptimal, regions);
}
801
/// Utility command buffer for a one time submission that automatically submit
/// when disposed.
/// Generally used for transfer operations, or image layout change.
final class RaiiCmdBuf : AtomicRefCounted
{
    Rc!CommandPool pool;
    PrimaryCommandBuffer cmdBuf;

    /// Allocate a dedicated pool and one primary buffer, then start
    /// recording with the one-time-submit usage flag.
    this() {
        this.pool = device.createCommandPool(graphicsQueue.index);
        this.cmdBuf = this.pool.allocatePrimary(1)[0];
        this.cmdBuf.begin(CommandBufferUsage.oneTimeSubmit);
    }

    /// End recording, submit to the graphics queue, wait for the queue
    /// to become idle, then free the buffer and pool.
    override void dispose() {
        this.cmdBuf.end();
        graphicsQueue.submit([
            Submission([], [], (&this.cmdBuf)[0 .. 1])
        ], null);
        // blocking wait: acceptable for setup-time transfers only
        graphicsQueue.waitIdle();
        auto cb = cast(CommandBuffer)this.cmdBuf;
        this.pool.free((&cb)[0 .. 1]);
        this.pool.unload();
    }

    /// The command buffer being recorded.
    @property CommandBuffer get()
    {
        return cmdBuf;
    }
}
832 }
833
/// Return a format suitable for the surface.
/// - if supported by the surface Format.rgba8_uNorm
/// - otherwise the first format with uNorm numeric format
/// - otherwise the first format
Format chooseFormat(PhysicalDevice pd, Surface surface)
{
    const formats = pd.surfaceFormats(surface);
    enforce(formats.length, "Could not get surface formats");

    // a single undefined entry means the surface expresses no preference
    if (formats.length == 1 && formats[0] == Format.undefined) {
        return Format.rgba8_uNorm;
    }

    // first choice: the preferred format itself
    if (formats.canFind(Format.rgba8_uNorm)) {
        return Format.rgba8_uNorm;
    }

    // second choice: any format with uNorm numeric representation
    auto uNorms = formats.find!(f => f.formatDesc.numFormat == NumFormat.uNorm);
    if (uNorms.length) {
        return uNorms[0];
    }

    // last resort: whatever comes first
    return formats[0];
}
857
/// Choose a present mode for the surface.
/// fifo is used unconditionally; the assert below documents the
/// expectation that it is always reported as supported.
PresentMode choosePresentMode(PhysicalDevice pd, Surface surface)
{
    // mailbox would allow lower latency, but is deliberately disabled:
    // auto modes = pd.surfacePresentModes(surface);
    // if (modes.canFind(PresentMode.mailbox)) {
    //     return PresentMode.mailbox;
    // }
    assert(pd.surfacePresentModes(surface).canFind(PresentMode.fifo));
    return PresentMode.fifo;
}