| /* |
| * Copyright 2015, The Android Open Source Project |
| * |
| * Licensed under the Apache License, Version 2.0 (the "License"); |
| * you may not use this file except in compliance with the License. |
| * You may obtain a copy of the License at |
| * |
| * http://www.apache.org/licenses/LICENSE-2.0 |
| * |
| * Unless required by applicable law or agreed to in writing, software |
| * distributed under the License is distributed on an "AS IS" BASIS, |
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| * See the License for the specific language governing permissions and |
| * limitations under the License. |
| * |
| * THIS FILE WAS GENERATED BY apic. DO NOT EDIT. |
| */ |
| |
| |
| #include "abort_exception.h" |
| #include "vulkan_imports.h" |
| #include "vulkan_types.h" |
| |
| #include "vulkan_spy.h" |
| |
| #include <gapic/log.h> |
| #include <gapic/coder/memory.h> |
| #include <gapic/coder/atom.h> |
| #include <gapic/coder/vulkan.h> |
| |
| #define __STDC_FORMAT_MACROS |
| #include <inttypes.h> |
| |
| #include <stdint.h> |
| |
| #include <memory> |
| #include <string> |
| |
| namespace gapii { |
| |
// Intercepts vkCreateInstance. Forwards the call to SpyOverride_vkCreateInstance,
// brackets it with read/write memory observations, and encodes the resulting
// command atom (plus extras) into the capture stream. Returns the VkResult
// produced by the override, as a uint32_t.
uint32_t VulkanSpy::vkCreateInstance(VkInstanceCreateInfo* pCreateInfo, VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
    GAPID_DEBUG("vkCreateInstance(%p, %p, %p)", pCreateInfo, pAllocator, pInstance);

    uint32_t result = 0;
    bool called = false;
    // The actual call is wrapped in a lambda so the AbortException handler
    // below can still invoke it if abort() fired before the fence (i.e. before
    // the driver was actually called).
    auto call = [this, &called, &result, pCreateInfo, pAllocator, pInstance] {
        called = true;
        observeReads();  // record pre-call memory observations
        result = SpyOverride_vkCreateInstance(pCreateInfo, pAllocator, pInstance);
    };


    // Counter values taken immediately before and after the call let the trace
    // consumer detect nested or out-of-order command recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
            break;
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkCreateInstance coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkInstanceCreateInfo__CP >(pCreateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch), toEncoder< gapic::coder::vulkan::VkInstance__P >(pInstance, mScratch), result);
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected
    // monotonic sequence (another command ran in between, or a gap occurred).
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);


    return result;
}
| |
| void VulkanSpy::vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) { |
| GAPID_DEBUG("vkGetPhysicalDeviceQueueFamilyProperties(%p, %p, %p)", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); |
| |
| if (mImports.mPhysicalDevicesToInstances.find(physicalDevice) == mImports.mPhysicalDevicesToInstances.end() || |
| mImports.mInstanceFunctions.find(mImports.mPhysicalDevicesToInstances[physicalDevice]) == mImports.mInstanceFunctions.end() || |
| mImports.mInstanceFunctions[mImports.mPhysicalDevicesToInstances[physicalDevice]].vkGetPhysicalDeviceQueueFamilyProperties == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkGetPhysicalDeviceQueueFamilyProperties"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties] { |
| called = true; |
| observeReads(); |
| mImports.mInstanceFunctions[mImports.mPhysicalDevicesToInstances[physicalDevice]].vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkGetPhysicalDeviceQueueFamilyProperties coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkPhysicalDevice >(physicalDevice, mScratch), toEncoder< gapic::coder::vulkan::U32__P >(pQueueFamilyPropertyCount, mScratch), toEncoder< gapic::coder::vulkan::VkQueueFamilyProperties__P >(pQueueFamilyProperties, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| uint32_t VulkanSpy::vkEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) { |
| GAPID_DEBUG("vkEnumerateInstanceExtensionProperties(%s, %p, %p)", pLayerName, pPropertyCount, pProperties); |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, pLayerName, pPropertyCount, pProperties] { |
| called = true; |
| observeReads(); |
| result = SpyOverride_vkEnumerateInstanceExtensionProperties(pLayerName, pPropertyCount, pProperties); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkEnumerateInstanceExtensionProperties coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< const char* >(pLayerName, mScratch), toEncoder< gapic::coder::vulkan::U32__P >(pPropertyCount, mScratch), toEncoder< gapic::coder::vulkan::VkExtensionProperties__P >(pProperties, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkQueueSubmit(VkQueue queue, uint32_t submitCount, VkSubmitInfo* pSubmits, VkFence fence) { |
| GAPID_DEBUG("vkQueueSubmit(%p, %" PRIu32 ", %p, %" PRIu64 ")", queue, submitCount, pSubmits, fence); |
| |
| if (mImports.mQueuesToDevices.find(queue) == mImports.mQueuesToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mQueuesToDevices[queue]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mQueuesToDevices[queue]].vkQueueSubmit == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkQueueSubmit"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, queue, submitCount, pSubmits, fence] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[mImports.mQueuesToDevices[queue]].vkQueueSubmit(queue, submitCount, pSubmits, fence); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkQueueSubmit coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkQueue >(queue, mScratch), submitCount, toEncoder< gapic::coder::vulkan::VkSubmitInfo__CP >(pSubmits, mScratch), toEncoder< uint64_t >(fence, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, VkMappedMemoryRange* pMemoryRanges) { |
| GAPID_DEBUG("vkInvalidateMappedMemoryRanges(%p, %" PRIu32 ", %p)", device, memoryRangeCount, pMemoryRanges); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkInvalidateMappedMemoryRanges == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkInvalidateMappedMemoryRanges"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, memoryRangeCount, pMemoryRanges] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkInvalidateMappedMemoryRanges coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), memoryRangeCount, toEncoder< gapic::coder::vulkan::VkMappedMemoryRange__CP >(pMemoryRanges, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) { |
| GAPID_DEBUG("vkBindImageMemory(%p, %" PRIu64 ", %" PRIu64 ", %" PRIu64 ")", device, image, memory, memoryOffset); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkBindImageMemory == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkBindImageMemory"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, image, memory, memoryOffset] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkBindImageMemory(device, image, memory, memoryOffset); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkBindImageMemory coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(image, mScratch), toEncoder< uint64_t >(memory, mScratch), toEncoder< uint64_t >(memoryOffset, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkGetFenceStatus(VkDevice device, VkFence fence) { |
| GAPID_DEBUG("vkGetFenceStatus(%p, %" PRIu64 ")", device, fence); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkGetFenceStatus == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkGetFenceStatus"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, fence] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkGetFenceStatus(device, fence); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkGetFenceStatus coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(fence, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
// Intercepts vkCreateBufferView. Resolves the device dispatch table, forwards
// the call bracketed by read/write observations, and encodes the command atom
// into the capture stream. Returns the driver's VkResult, or 0 when no
// dispatch entry is registered for the device.
uint32_t VulkanSpy::vkCreateBufferView(VkDevice device, VkBufferViewCreateInfo* pCreateInfo, VkAllocationCallbacks* pAllocator, VkBufferView* pView) {
    GAPID_DEBUG("vkCreateBufferView(%p, %p, %p, %p)", device, pCreateInfo, pAllocator, pView);

    // Bail out (without calling the driver) if no dispatch entry exists.
    if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() ||
        mImports.mDeviceFunctions[device].vkCreateBufferView == nullptr) {
        GAPID_WARNING("Application called unsupported function vkCreateBufferView");
        return 0;
    }

    uint32_t result = 0;
    bool called = false;
    // Lambda so the abort handler below can still invoke the driver if
    // abort() fired before the fence.
    auto call = [this, &called, &result, device, pCreateInfo, pAllocator, pView] {
        called = true;
        observeReads();  // record pre-call memory observations
        result = mImports.mDeviceFunctions[device].vkCreateBufferView(device, pCreateInfo, pAllocator, pView);
    };


    // Counters bracketing the call expose nested/out-of-order recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
            break;
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkCreateBufferView coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< gapic::coder::vulkan::VkBufferViewCreateInfo__CP >(pCreateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch), toEncoder< gapic::coder::vulkan::VkBufferView__P >(pView, mScratch), result);
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected sequence.
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);


    return result;
}
| |
| void VulkanSpy::vkDestroyImage(VkDevice device, VkImage image, VkAllocationCallbacks* pAllocator) { |
| GAPID_DEBUG("vkDestroyImage(%p, %" PRIu64 ", %p)", device, image, pAllocator); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkDestroyImage == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkDestroyImage"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, device, image, pAllocator] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[device].vkDestroyImage(device, image, pAllocator); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkDestroyImage coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(image, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
// Intercepts vkDestroyShaderModule. Resolves the device dispatch table,
// forwards the call bracketed by read/write observations, and encodes the
// command atom into the capture stream. Warns and returns without calling the
// driver when no dispatch entry is registered for the device.
void VulkanSpy::vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, VkAllocationCallbacks* pAllocator) {
    GAPID_DEBUG("vkDestroyShaderModule(%p, %" PRIu64 ", %p)", device, shaderModule, pAllocator);

    // Bail out (without calling the driver) if no dispatch entry exists.
    if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() ||
        mImports.mDeviceFunctions[device].vkDestroyShaderModule == nullptr) {
        GAPID_WARNING("Application called unsupported function vkDestroyShaderModule");
        return;
    }

    bool called = false;
    // Lambda so the abort handler below can still invoke the driver if
    // abort() fired before the fence.
    auto call = [this, &called, device, shaderModule, pAllocator] {
        called = true;
        observeReads();  // record pre-call memory observations
        mImports.mDeviceFunctions[device].vkDestroyShaderModule(device, shaderModule, pAllocator);
    };


    // Counters bracketing the call expose nested/out-of-order recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkDestroyShaderModule coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(shaderModule, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch));
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected sequence.
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);

}
| |
| void VulkanSpy::vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, VkAllocationCallbacks* pAllocator) { |
| GAPID_DEBUG("vkDestroyDescriptorSetLayout(%p, %" PRIu64 ", %p)", device, descriptorSetLayout, pAllocator); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkDestroyDescriptorSetLayout == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkDestroyDescriptorSetLayout"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, device, descriptorSetLayout, pAllocator] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[device].vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkDestroyDescriptorSetLayout coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(descriptorSetLayout, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
// Intercepts vkAllocateDescriptorSets. Resolves the device dispatch table,
// forwards the call bracketed by read/write observations, and encodes the
// command atom into the capture stream. Returns the driver's VkResult, or 0
// when no dispatch entry is registered for the device.
uint32_t VulkanSpy::vkAllocateDescriptorSets(VkDevice device, VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
    GAPID_DEBUG("vkAllocateDescriptorSets(%p, %p, %p)", device, pAllocateInfo, pDescriptorSets);

    // Bail out (without calling the driver) if no dispatch entry exists.
    if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() ||
        mImports.mDeviceFunctions[device].vkAllocateDescriptorSets == nullptr) {
        GAPID_WARNING("Application called unsupported function vkAllocateDescriptorSets");
        return 0;
    }

    uint32_t result = 0;
    bool called = false;
    // Lambda so the abort handler below can still invoke the driver if
    // abort() fired before the fence.
    auto call = [this, &called, &result, device, pAllocateInfo, pDescriptorSets] {
        called = true;
        observeReads();  // record pre-call memory observations
        result = mImports.mDeviceFunctions[device].vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
    };


    // Counters bracketing the call expose nested/out-of-order recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
            break;
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkAllocateDescriptorSets coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< gapic::coder::vulkan::VkDescriptorSetAllocateInfo__CP >(pAllocateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkDescriptorSet__P >(pDescriptorSets, mScratch), result);
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected sequence.
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);


    return result;
}
| |
| uint32_t VulkanSpy::vkCreateFramebuffer(VkDevice device, VkFramebufferCreateInfo* pCreateInfo, VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) { |
| GAPID_DEBUG("vkCreateFramebuffer(%p, %p, %p, %p)", device, pCreateInfo, pAllocator, pFramebuffer); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkCreateFramebuffer == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCreateFramebuffer"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, pCreateInfo, pAllocator, pFramebuffer] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCreateFramebuffer coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< gapic::coder::vulkan::VkFramebufferCreateInfo__CP >(pCreateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch), toEncoder< gapic::coder::vulkan::VkFramebuffer__P >(pFramebuffer, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
// Intercepts vkDestroyRenderPass. Resolves the device dispatch table,
// forwards the call bracketed by read/write observations, and encodes the
// command atom into the capture stream. Warns and returns without calling the
// driver when no dispatch entry is registered for the device.
void VulkanSpy::vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, VkAllocationCallbacks* pAllocator) {
    GAPID_DEBUG("vkDestroyRenderPass(%p, %" PRIu64 ", %p)", device, renderPass, pAllocator);

    // Bail out (without calling the driver) if no dispatch entry exists.
    if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() ||
        mImports.mDeviceFunctions[device].vkDestroyRenderPass == nullptr) {
        GAPID_WARNING("Application called unsupported function vkDestroyRenderPass");
        return;
    }

    bool called = false;
    // Lambda so the abort handler below can still invoke the driver if
    // abort() fired before the fence.
    auto call = [this, &called, device, renderPass, pAllocator] {
        called = true;
        observeReads();  // record pre-call memory observations
        mImports.mDeviceFunctions[device].vkDestroyRenderPass(device, renderPass, pAllocator);
    };


    // Counters bracketing the call expose nested/out-of-order recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkDestroyRenderPass coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(renderPass, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch));
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected sequence.
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);

}
| |
| void VulkanSpy::vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) { |
| GAPID_DEBUG("vkGetRenderAreaGranularity(%p, %" PRIu64 ", %p)", device, renderPass, pGranularity); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkGetRenderAreaGranularity == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkGetRenderAreaGranularity"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, device, renderPass, pGranularity] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[device].vkGetRenderAreaGranularity(device, renderPass, pGranularity); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkGetRenderAreaGranularity coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< uint64_t >(renderPass, mScratch), toEncoder< gapic::coder::vulkan::VkExtent2D__P >(pGranularity, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
// Intercepts vkAllocateCommandBuffers. Forwards the call to
// SpyOverride_vkAllocateCommandBuffers (no dispatch-table guard here — the
// override handles routing), brackets it with read/write observations, and
// encodes the command atom into the capture stream. Returns the override's
// VkResult.
uint32_t VulkanSpy::vkAllocateCommandBuffers(VkDevice device, VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
    GAPID_DEBUG("vkAllocateCommandBuffers(%p, %p, %p)", device, pAllocateInfo, pCommandBuffers);

    uint32_t result = 0;
    bool called = false;
    // Lambda so the abort handler below can still invoke the override if
    // abort() fired before the fence.
    auto call = [this, &called, &result, device, pAllocateInfo, pCommandBuffers] {
        called = true;
        observeReads();  // record pre-call memory observations
        result = SpyOverride_vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
    };


    // Counters bracketing the call expose nested/out-of-order recording.
    uint64_t counter_at_begin = mCommandStartEndCounter++;

    try {
        do {
            call();
            break;
        } while(false);
    } catch (gapii::AbortException& e) {
        if (!called) {
            call(); // abort() was called before the fence.
        }
        handleAbort(e);
    }
    uint64_t counter_at_end = mCommandStartEndCounter++;
    gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end);

    observeWrites();  // record post-call memory observations

    // Build the encodable command atom and attach the memory observations.
    gapic::coder::vulkan::VkAllocateCommandBuffers coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< gapic::coder::vulkan::VkCommandBufferAllocateInfo__CP >(pAllocateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkCommandBuffer__P >(pCommandBuffers, mScratch), result);
    coder.mextras.append(&mObservations);

    // Only encode the counter pair when it deviates from the expected sequence.
    if (counter_at_end > counter_at_begin + 1 ||
        counter_at_begin != mExpectedNextCommandStartCounterValue) {
        coder.mextras.append(&counter_value_encodable);
    }
    mExpectedNextCommandStartCounterValue = counter_at_end + 1;

    addExtras(coder);
    mEncoder->Variant(&coder);


    return result;
}
| |
| void VulkanSpy::vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) { |
| GAPID_DEBUG("vkCmdSetStencilCompareMask(%p, %" PRIu32 ", %" PRIu32 ")", commandBuffer, faceMask, compareMask); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdSetStencilCompareMask == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdSetStencilCompareMask"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, faceMask, compareMask] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdSetStencilCompareMask coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), toEncoder< uint32_t >(faceMask, mScratch), compareMask); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| void VulkanSpy::vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) { |
| GAPID_DEBUG("vkCmdSetStencilWriteMask(%p, %" PRIu32 ", %" PRIu32 ")", commandBuffer, faceMask, writeMask); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdSetStencilWriteMask == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdSetStencilWriteMask"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, faceMask, writeMask] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdSetStencilWriteMask coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), toEncoder< uint32_t >(faceMask, mScratch), writeMask); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| void VulkanSpy::vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, uint32_t pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, uint32_t* pDynamicOffsets) { |
| GAPID_DEBUG("vkCmdBindDescriptorSets(%p, %u, %" PRIu64 ", %" PRIu32 ", %" PRIu32 ", %p, %" PRIu32 ", %p)", commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdBindDescriptorSets == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdBindDescriptorSets"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdBindDescriptorSets coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), pipelineBindPoint, toEncoder< uint64_t >(layout, mScratch), firstSet, descriptorSetCount, toEncoder< gapic::coder::vulkan::VkDescriptorSet__CP >(pDescriptorSets, mScratch), dynamicOffsetCount, toEncoder< gapic::coder::vulkan::U32__CP >(pDynamicOffsets, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| void VulkanSpy::vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) { |
| GAPID_DEBUG("vkCmdDrawIndexed(%p, %" PRIu32 ", %" PRIu32 ", %" PRIu32 ", %" PRId32 ", %" PRIu32 ")", commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdDrawIndexed == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdDrawIndexed"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdDrawIndexed coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| void VulkanSpy::vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, uint32_t* pData) { |
| GAPID_DEBUG("vkCmdUpdateBuffer(%p, %" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %p)", commandBuffer, dstBuffer, dstOffset, dataSize, pData); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdUpdateBuffer == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdUpdateBuffer"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, dstBuffer, dstOffset, dataSize, pData] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdUpdateBuffer coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), toEncoder< uint64_t >(dstBuffer, mScratch), toEncoder< uint64_t >(dstOffset, mScratch), toEncoder< uint64_t >(dataSize, mScratch), toEncoder< gapic::coder::vulkan::U32__CP >(pData, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| void VulkanSpy::vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) { |
| GAPID_DEBUG("vkCmdFillBuffer(%p, %" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %" PRIu32 ")", commandBuffer, dstBuffer, dstOffset, size, data); |
| |
| if (mImports.mCommandBuffersToDevices.find(commandBuffer) == mImports.mCommandBuffersToDevices.end() || |
| mImports.mDeviceFunctions.find(mImports.mCommandBuffersToDevices[commandBuffer]) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdFillBuffer == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCmdFillBuffer"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, commandBuffer, dstBuffer, dstOffset, size, data] { |
| called = true; |
| observeReads(); |
| mImports.mDeviceFunctions[mImports.mCommandBuffersToDevices[commandBuffer]].vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCmdFillBuffer coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkCommandBuffer >(commandBuffer, mScratch), toEncoder< uint64_t >(dstBuffer, mScratch), toEncoder< uint64_t >(dstOffset, mScratch), toEncoder< uint64_t >(size, mScratch), data); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| |
| uint32_t VulkanSpy::vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) { |
| GAPID_DEBUG("vkGetPhysicalDeviceSurfaceCapabilitiesKHR(%p, %" PRIu64 ", %p)", physicalDevice, surface, pSurfaceCapabilities); |
| |
| if (mImports.mPhysicalDevicesToInstances.find(physicalDevice) == mImports.mPhysicalDevicesToInstances.end() || |
| mImports.mInstanceFunctions.find(mImports.mPhysicalDevicesToInstances[physicalDevice]) == mImports.mInstanceFunctions.end() || |
| mImports.mInstanceFunctions[mImports.mPhysicalDevicesToInstances[physicalDevice]].vkGetPhysicalDeviceSurfaceCapabilitiesKHR == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, physicalDevice, surface, pSurfaceCapabilities] { |
| called = true; |
| observeReads(); |
| result = mImports.mInstanceFunctions[mImports.mPhysicalDevicesToInstances[physicalDevice]].vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkGetPhysicalDeviceSurfaceCapabilitiesKHR coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkPhysicalDevice >(physicalDevice, mScratch), toEncoder< uint64_t >(surface, mScratch), toEncoder< gapic::coder::vulkan::VkSurfaceCapabilitiesKHR__P >(pSurfaceCapabilities, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkCreateSwapchainKHR(VkDevice device, VkSwapchainCreateInfoKHR* pCreateInfo, VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) { |
| GAPID_DEBUG("vkCreateSwapchainKHR(%p, %p, %p, %p)", device, pCreateInfo, pAllocator, pSwapchain); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkCreateSwapchainKHR == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCreateSwapchainKHR"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, pCreateInfo, pAllocator, pSwapchain] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCreateSwapchainKHR coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), toEncoder< gapic::coder::vulkan::VkSwapchainCreateInfoKHR__CP >(pCreateInfo, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch), toEncoder< gapic::coder::vulkan::VkSwapchainKHR__P >(pSwapchain, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| uint32_t VulkanSpy::vkCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, VkSwapchainCreateInfoKHR* pCreateInfos, VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) { |
| GAPID_DEBUG("vkCreateSharedSwapchainsKHR(%p, %" PRIu32 ", %p, %p, %p)", device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); |
| |
| if (mImports.mDeviceFunctions.find(device) == mImports.mDeviceFunctions.end() || |
| mImports.mDeviceFunctions[device].vkCreateSharedSwapchainsKHR == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkCreateSharedSwapchainsKHR"); |
| return 0; |
| } |
| |
| uint32_t result = 0; |
| bool called = false; |
| auto call = [this, &called, &result, device, swapchainCount, pCreateInfos, pAllocator, pSwapchains] { |
| called = true; |
| observeReads(); |
| result = mImports.mDeviceFunctions[device].vkCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| break; |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkCreateSharedSwapchainsKHR coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkDevice >(device, mScratch), swapchainCount, toEncoder< gapic::coder::vulkan::VkSwapchainCreateInfoKHR__CP >(pCreateInfos, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch), toEncoder< gapic::coder::vulkan::VkSwapchainKHR__P >(pSwapchains, mScratch), result); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| |
| return result; |
| } |
| |
| void VulkanSpy::vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, VkAllocationCallbacks* pAllocator) { |
| GAPID_DEBUG("vkDestroyDebugReportCallbackEXT(%p, %" PRIu64 ", %p)", instance, callback, pAllocator); |
| |
| if (mImports.mInstanceFunctions.find(instance) == mImports.mInstanceFunctions.end() || |
| mImports.mInstanceFunctions[instance].vkDestroyDebugReportCallbackEXT == nullptr) { |
| GAPID_WARNING("Application called unsupported function vkDestroyDebugReportCallbackEXT"); |
| return; |
| } |
| |
| bool called = false; |
| auto call = [this, &called, instance, callback, pAllocator] { |
| called = true; |
| observeReads(); |
| mImports.mInstanceFunctions[instance].vkDestroyDebugReportCallbackEXT(instance, callback, pAllocator); |
| }; |
| |
| |
| uint64_t counter_at_begin = mCommandStartEndCounter++; |
| |
| try { |
| do { |
| call(); |
| } while(false); |
| } catch (gapii::AbortException& e) { |
| if (!called) { |
| call(); // abort() was called before the fence. |
| } |
| handleAbort(e); |
| } |
| uint64_t counter_at_end = mCommandStartEndCounter++; |
| gapic::coder::atom::CommandCounter counter_value_encodable(counter_at_begin, counter_at_end); |
| |
| observeWrites(); |
| |
| gapic::coder::vulkan::VkDestroyDebugReportCallbackEXT coder(mScratch.vector<gapic::Encodable*>(kMaxExtras), toEncoder< gapic::coder::vulkan::VkInstance >(instance, mScratch), toEncoder< uint64_t >(callback, mScratch), toEncoder< gapic::coder::vulkan::VkAllocationCallbacks__CP >(pAllocator, mScratch)); |
| coder.mextras.append(&mObservations); |
| |
| if (counter_at_end > counter_at_begin + 1 || |
| counter_at_begin != mExpectedNextCommandStartCounterValue) { |
| coder.mextras.append(&counter_value_encodable); |
| } |
| mExpectedNextCommandStartCounterValue = counter_at_end + 1; |
| |
| addExtras(coder); |
| mEncoder->Variant(&coder); |
| |
| } |
| } // namespace gapii |