Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hrydgard
GitHub Repository: hrydgard/ppsspp
Path: blob/master/Common/GPU/Vulkan/VulkanBarrier.cpp
5659 views
1
#include "VulkanLoader.h"
2
#include "VulkanContext.h"
3
#include "VulkanBarrier.h"
4
#include "VulkanFramebuffer.h"
5
6
#include "Common/Log.h"
7
8
VulkanBarrierBatch::~VulkanBarrierBatch() {
	// A batch should always be flushed before it's destroyed; leftover barriers
	// here mean a Flush() call was missed somewhere.
	// _dbg_assert_(imageBarriers_.empty());
	const size_t leftover = imageBarriers_.size();
	if (leftover != 0) {
		ERROR_LOG(Log::G3D, "~VulkanBarrierBatch: %d barriers remaining", (int)leftover);
	}
}
14
15
void VulkanBarrierBatch::Flush(VkCommandBuffer cmd) {
	// Records every queued image barrier with a single vkCmdPipelineBarrier call,
	// then resets the batch so it can be reused.
	const uint32_t barrierCount = (uint32_t)imageBarriers_.size();
	if (barrierCount != 0) {
		vkCmdPipelineBarrier(
			cmd, srcStageMask_, dstStageMask_, dependencyFlags_,
			0, nullptr,  // no global memory barriers
			0, nullptr,  // no buffer barriers
			barrierCount, imageBarriers_.data());
	}
	// Reset accumulated state for the next batch (even if nothing was recorded).
	imageBarriers_.clear();
	srcStageMask_ = 0;
	dstStageMask_ = 0;
	dependencyFlags_ = 0;
}
24
25
void VulkanBarrierBatch::TransitionImage(
26
VkImage image, int baseMip, int numMipLevels, int numLayers, VkImageAspectFlags aspectMask,
27
VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
28
VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask,
29
VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask
30
) {
31
_dbg_assert_(image != VK_NULL_HANDLE);
32
33
srcStageMask_ |= srcStageMask;
34
dstStageMask_ |= dstStageMask;
35
dependencyFlags_ |= VK_DEPENDENCY_BY_REGION_BIT;
36
37
VkImageMemoryBarrier &imageBarrier = imageBarriers_.push_uninitialized();
38
imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
39
imageBarrier.pNext = nullptr;
40
imageBarrier.srcAccessMask = srcAccessMask;
41
imageBarrier.dstAccessMask = dstAccessMask;
42
imageBarrier.oldLayout = oldImageLayout;
43
imageBarrier.newLayout = newImageLayout;
44
imageBarrier.image = image;
45
imageBarrier.subresourceRange.aspectMask = aspectMask;
46
imageBarrier.subresourceRange.baseMipLevel = baseMip;
47
imageBarrier.subresourceRange.levelCount = numMipLevels;
48
imageBarrier.subresourceRange.layerCount = numLayers; // NOTE: We could usually use VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS, but really old Mali drivers have problems with those.
49
imageBarrier.subresourceRange.baseArrayLayer = 0;
50
imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
51
imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
52
}
53
54
void VulkanBarrierBatch::TransitionColorImageAuto(
55
VkImage image, VkImageLayout *imageLayout, VkImageLayout newImageLayout, int baseMip, int numMipLevels, int numLayers) {
56
_dbg_assert_(image != VK_NULL_HANDLE);
57
VkAccessFlags srcAccessMask = 0;
58
VkAccessFlags dstAccessMask = 0;
59
switch (*imageLayout) {
60
case VK_IMAGE_LAYOUT_UNDEFINED:
61
srcAccessMask = 0;
62
srcStageMask_ |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
63
break;
64
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
65
srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
66
srcStageMask_ |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
67
break;
68
case VK_IMAGE_LAYOUT_GENERAL:
69
// We came from the Mali workaround, and are transitioning back to COLOR_ATTACHMENT_OPTIMAL.
70
// Alternatively, we're doing an intra-buffer copy. Let's cover both bases if needed.
71
srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
72
srcStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
73
// TODO: Add a check for the mali bug presence.
74
srcAccessMask |= VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
75
srcStageMask_ |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
76
break;
77
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
78
// We only texture from images in the fragment shader, so can do this simply.
79
srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
80
srcStageMask_ |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
81
break;
82
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
83
srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
84
srcStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
85
break;
86
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
87
srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
88
srcStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
89
break;
90
default:
91
_assert_msg_(false, "Unexpected oldLayout: %s", VulkanImageLayoutToString(*imageLayout));
92
break;
93
}
94
95
switch (newImageLayout) {
96
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
97
dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
98
dstStageMask_ |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
99
break;
100
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
101
dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
102
dstStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
103
break;
104
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
105
dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
106
dstStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
107
break;
108
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
109
dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
110
dstStageMask_ |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
111
break;
112
case VK_IMAGE_LAYOUT_GENERAL:
113
// Used in intra-buffer framebuffer copies. We should add some better metadata...
114
dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
115
dstStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
116
break;
117
default:
118
_assert_msg_(false, "Unexpected newLayout: %s", VulkanImageLayoutToString(newImageLayout));
119
break;
120
}
121
122
VkImageMemoryBarrier &imageBarrier = imageBarriers_.push_uninitialized();
123
imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
124
imageBarrier.pNext = nullptr;
125
imageBarrier.srcAccessMask = srcAccessMask;
126
imageBarrier.dstAccessMask = dstAccessMask;
127
imageBarrier.oldLayout = *imageLayout;
128
imageBarrier.newLayout = newImageLayout;
129
imageBarrier.image = image;
130
imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
131
imageBarrier.subresourceRange.baseMipLevel = baseMip;
132
imageBarrier.subresourceRange.levelCount = numMipLevels;
133
imageBarrier.subresourceRange.layerCount = numLayers; // NOTE: We could usually use VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS, but really old Mali drivers have problems with those.
134
imageBarrier.subresourceRange.baseArrayLayer = 0;
135
imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
136
imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
137
138
*imageLayout = newImageLayout;
139
}
140
141
void VulkanBarrierBatch::TransitionDepthStencilImageAuto(
142
VkImage image, VkImageLayout *imageLayout, VkImageLayout newImageLayout, int baseMip, int numMipLevels, int numLayers) {
143
_dbg_assert_(image != VK_NULL_HANDLE);
144
145
VkAccessFlags srcAccessMask = 0;
146
VkAccessFlags dstAccessMask = 0;
147
switch (*imageLayout) {
148
case VK_IMAGE_LAYOUT_UNDEFINED:
149
srcAccessMask = 0;
150
srcStageMask_ |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
151
break;
152
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
153
srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
154
srcStageMask_ |= VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
155
break;
156
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
157
// We only texture from images in the fragment shader, so can do this simply.
158
srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
159
srcStageMask_ |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
160
break;
161
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
162
srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
163
srcStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
164
break;
165
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
166
srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
167
srcStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
168
break;
169
default:
170
_assert_msg_(false, "Unexpected oldLayout: %s", VulkanImageLayoutToString(*imageLayout));
171
break;
172
}
173
174
switch (newImageLayout) {
175
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
176
dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
177
dstStageMask_ |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
178
break;
179
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
180
dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
181
dstStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
182
break;
183
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
184
dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
185
dstStageMask_ |= VK_PIPELINE_STAGE_TRANSFER_BIT;
186
break;
187
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
188
dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
189
dstStageMask_ |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
190
break;
191
default:
192
_assert_msg_(false, "Unexpected newLayout: %s", VulkanImageLayoutToString(newImageLayout));
193
break;
194
}
195
196
VkImageMemoryBarrier &imageBarrier = imageBarriers_.push_uninitialized();
197
imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
198
imageBarrier.pNext = nullptr;
199
imageBarrier.srcAccessMask = srcAccessMask;
200
imageBarrier.dstAccessMask = dstAccessMask;
201
imageBarrier.oldLayout = *imageLayout;
202
imageBarrier.newLayout = newImageLayout;
203
imageBarrier.image = image;
204
imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
205
imageBarrier.subresourceRange.baseMipLevel = baseMip;
206
imageBarrier.subresourceRange.levelCount = numMipLevels;
207
imageBarrier.subresourceRange.layerCount = numLayers; // NOTE: We could usually use VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS, but really old Mali drivers have problems with those.
208
imageBarrier.subresourceRange.baseArrayLayer = 0;
209
imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
210
imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
211
212
*imageLayout = newImageLayout;
213
}
214
215
// Convenience overload for a VKRImage: transitions mip 0 across all of the
// image's layers, reading and updating the layout tracked inside the VKRImage.
void VulkanBarrierBatch::TransitionColorImageAuto(VKRImage *image, VkImageLayout newImageLayout) {
	TransitionColorImageAuto(image->image, &image->layout, newImageLayout, 0, 1, image->numLayers);
}
218
219
// Convenience overload for a VKRImage: transitions mip 0 across all of the
// image's layers, reading and updating the layout tracked inside the VKRImage.
void VulkanBarrierBatch::TransitionDepthStencilImageAuto(VKRImage *image, VkImageLayout newImageLayout) {
	TransitionDepthStencilImageAuto(image->image, &image->layout, newImageLayout, 0, 1, image->numLayers);
}
222
223