/src/skia/tools/gpu/vk/VkYcbcrSamplerHelper.cpp
/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/gpu/vk/VkYcbcrSamplerHelper.h"

#ifdef SK_VULKAN

#include "include/gpu/GrDirectContext.h"
#include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
#include "src/gpu/ganesh/GrDirectContextPriv.h"
#include "src/gpu/ganesh/vk/GrVkGpu.h"
#include "src/gpu/ganesh/vk/GrVkUtil.h"
#include "src/gpu/vk/VulkanInterface.h"

#if defined(SK_GRAPHITE)
#include "include/gpu/GpuTypes.h"
#include "include/gpu/graphite/BackendTexture.h"
#include "include/gpu/graphite/Recorder.h"
#include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
#include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"
#include "src/gpu/graphite/vk/VulkanSharedContext.h"
#endif

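// Test pattern written by the create*BackendTexture() methods below: the Y plane holds a
// diagonal gradient covering the narrow (video) range [16, 235], and the U/V planes hold
// horizontal/vertical ramps covering [16, 240], so tests can check sampled output against
// these expected values.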
int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
    return 16 + (x + y) * 219 / (width + height - 2);
}

std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
    return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
}

namespace {

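// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is a two-plane 4:2:0 format: a full-resolution Y (G) plane
// followed by a half-resolution interleaved CbCr (BR) plane. Linear tiling is used so both planes
// can be filled directly through a host mapping of the image memory.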
void populate_ycbcr_image_info(VkImageCreateInfo* outImageInfo, uint32_t width, uint32_t height) {
    outImageInfo->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    outImageInfo->pNext = nullptr;
    outImageInfo->flags = 0;
    outImageInfo->imageType = VK_IMAGE_TYPE_2D;
    outImageInfo->format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    outImageInfo->extent = VkExtent3D{width, height, 1};
    outImageInfo->mipLevels = 1;
    outImageInfo->arrayLayers = 1;
    outImageInfo->samples = VK_SAMPLE_COUNT_1_BIT;
    outImageInfo->tiling = VK_IMAGE_TILING_LINEAR;
    outImageInfo->usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                          VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    outImageInfo->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    outImageInfo->queueFamilyIndexCount = 0;
    outImageInfo->pQueueFamilyIndices = nullptr;
    outImageInfo->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
}

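// Picks the first memory type allowed by the image's memory requirements that is also
// host-visible, since the planes are written through vkMapMemory rather than a staging upload.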
bool find_memory_type_index(const VkPhysicalDeviceMemoryProperties& phyDevMemProps,
                            const VkMemoryRequirements& memoryRequirements,
                            uint32_t* memoryTypeIndex) {
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount; ++i) {
        if (memoryRequirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                *memoryTypeIndex = i;
                return true;
            }
        }
    }
    return false;
}

}

#ifdef SK_GRAPHITE
// TODO(b/339211930): When graphite and ganesh can share a macro for certain Vulkan driver calls,
// much more code can be shared between this method and createGrBackendTexture.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    VkResult result;
    VULKAN_CALL_RESULT(fSharedCtxt, result, CreateImage(fSharedCtxt->device(),
                                                        &vkImageInfo,
                                                        /*pAllocator=*/nullptr,
                                                        &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkMemoryRequirements requirements;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageMemoryRequirements(fSharedCtxt->device(),
                                                                     fImage,
                                                                     &requirements));
    uint32_t memoryTypeIndex = 0;
    const VkPhysicalDeviceMemoryProperties& phyDevMemProps =
            fSharedCtxt->vulkanCaps().physicalDeviceMemoryProperties2().memoryProperties;
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo;
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = nullptr;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    VULKAN_CALL_RESULT(fSharedCtxt, result, AllocateMemory(fSharedCtxt->device(),
                                                           &allocInfo,
                                                           nullptr,
                                                           &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    void* mappedBuffer;
    VULKAN_CALL_RESULT(fSharedCtxt, result, MapMemory(fSharedCtxt->device(),
                                                      fImageMemory,
                                                      /*offset=*/0u,
                                                      requirements.size,
                                                      /*flags=*/0u,
                                                      &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    VULKAN_CALL(fSharedCtxt->interface(),
                GetImageSubresourceLayout(fSharedCtxt->device(), fImage, &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageSubresourceLayout(fSharedCtxt->device(),
                                                                    fImage,
                                                                    &subresource,
                                                                    &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

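    // The memory type was only required to be HOST_VISIBLE, not HOST_COHERENT, so flush the
    // mapped range before unmapping to make the CPU writes visible to the device.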
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    VULKAN_CALL_RESULT(fSharedCtxt, result, FlushMappedMemoryRanges(fSharedCtxt->device(),
                                                                    /*memoryRangeCount=*/1,
                                                                    &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    VULKAN_CALL(fSharedCtxt->interface(), UnmapMemory(fSharedCtxt->device(), fImageMemory));

    // Bind image memory.
    VULKAN_CALL_RESULT(fSharedCtxt, result, BindImageMemory(fSharedCtxt->device(),
                                                            fImage,
                                                            fImageMemory,
                                                            /*memoryOffset=*/0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    VkFormatProperties formatProperties;
    SkASSERT(fPhysDev != VK_NULL_HANDLE);
    VULKAN_CALL(fSharedCtxt->interface(),
                GetPhysicalDeviceFormatProperties(fPhysDev,
                                                  VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                  &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

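    // Describe the sampler conversion for this image: BT.709 matrix, narrow (ITU) range, and
    // cosited chroma with linear filtering, matching the format features asserted above.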
    skgpu::VulkanYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                                  /*externalFormat=*/0,
                                                  VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                                  VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_FILTER_LINEAR,
                                                  false,
                                                  formatProperties.linearTilingFeatures};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    skgpu::graphite::VulkanTextureInfo imageInfo = {
            static_cast<uint32_t>(vkImageInfo.samples),
            skgpu::Mipmapped::kNo,
            VK_IMAGE_CREATE_PROTECTED_BIT,
            vkImageInfo.format,
            vkImageInfo.tiling,
            vkImageInfo.usage,
            vkImageInfo.sharingMode,
            VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT,
            ycbcrInfo};

    fTexture = skgpu::graphite::BackendTexture{{(int32_t)width, (int32_t)height},
                                               imageInfo,
                                               VK_IMAGE_LAYOUT_UNDEFINED,
                                               /*queueFamilyIndex=*/0,
                                               fImage,
                                               alloc};
    return true;
}
#endif // SK_GRAPHITE

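// The ganesh path below mirrors createBackendTexture() above, but issues the Vulkan calls through
// GrVkGpu / GR_VK_CALL and wraps the result in a GrBackendTexture instead of a graphite
// BackendTexture.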
bool VkYcbcrSamplerHelper::createGrBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    uint32_t memoryTypeIndex = 0;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures,
                                         /*fComponents=*/{}};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fGrTexture = GrBackendTextures::MakeVk(width, height, imageInfo);
    return true;
}

GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
    return (GrVkGpu*) fDContext->priv().getGpu();
}

VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
    SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
}

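// The helper owns the raw VkImage and VkDeviceMemory it created, so the destructor releases them
// through whichever backend (graphite or ganesh) was used to create them.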
VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
#ifdef SK_GRAPHITE
    if (fSharedCtxt) {
        if (fImage != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        DestroyImage(fSharedCtxt->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        FreeMemory(fSharedCtxt->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    } else
#endif // SK_GRAPHITE
    {
        GrVkGpu* vkGpu = this->vkGpu();

        if (fImage != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    }
}

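// Reports whether the device supports sampler YCbCr conversion at all and, if so, whether
// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM has the linear-tiling features this helper relies on.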
bool VkYcbcrSamplerHelper::isYCbCrSupported() {
    VkFormatProperties formatProperties;
#ifdef SK_GRAPHITE
    if (fSharedCtxt) {
        if (!fSharedCtxt->vulkanCaps().supportsYcbcrConversion()) {
            return false;
        }

        SkASSERT(fPhysDev != VK_NULL_HANDLE);
        VULKAN_CALL(fSharedCtxt->interface(),
                    GetPhysicalDeviceFormatProperties(fPhysDev,
                                                      VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                      &formatProperties));
    } else
#endif
    {
        GrVkGpu* vkGpu = this->vkGpu();
        if (!vkGpu->vkCaps().supportsYcbcrConversion()) {
            return false;
        }

        GR_VK_CALL(vkGpu->vkInterface(),
                   GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                     VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                     &formatProperties));
    }

    // The createBackendTexture call (which is the point of this helper class) requires linear
    // support for VK_FORMAT_G8_B8R8_2PLANE_420_UNORM including sampling and cosited chroma.
    // Verify that the image format is supported.
    auto linFlags = formatProperties.linearTilingFeatures;
    if (!(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return false;
    }
    return true;
}
#endif // SK_VULKAN