/*
* Copyright 2015, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* THIS FILE WAS GENERATED BY apic. DO NOT EDIT.
*/
#include "abort_exception.h"
#include "vulkan_imports.h"
#include "vulkan_types.h"
#include "vulkan_spy.h"
#include <gapic/log.h>
#include <gapic/coder/memory.h>
#include <gapic/coder/atom.h>
#include <gapic/coder/vulkan.h>
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include <stdint.h>
#include <memory>
#include <string>
namespace gapii {
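// Observes the VkAndroidSurfaceCreateInfoKHR pointed to by |data| and returns a fresh
// SurfaceObject for the newly created Android surface.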
std::shared_ptr<SurfaceObject> VulkanSpy::subCreateAndroidSurfaceObject(CallObserver* observer, const std::function<void()>& call, VkAndroidSurfaceCreateInfoKHR* data) {
std::shared_ptr<SurfaceObject> l_object = std::shared_ptr<SurfaceObject>(new SurfaceObject(0, 0));
(void)observer->read(slice(data, 0ULL, 1ULL), 0ULL);
return l_object;
}
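// Observes the VkXcbSurfaceCreateInfoKHR pointed to by |data| and returns a fresh
// SurfaceObject for the newly created XCB surface.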
std::shared_ptr<SurfaceObject> VulkanSpy::subCreateXCBSurfaceObject(CallObserver* observer, const std::function<void()>& call, VkXcbSurfaceCreateInfoKHR* data) {
std::shared_ptr<SurfaceObject> l_object = std::shared_ptr<SurfaceObject>(new SurfaceObject(0, 0));
(void)observer->read(slice(data, 0ULL, 1ULL), 0ULL);
return l_object;
}
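// Returns true if the memory type backing |memory| advertises
// VK_MEMORY_PROPERTY_HOST_COHERENT_BIT on the physical device that owns it.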
bool VulkanSpy::subIsMemoryCoherent(CallObserver* observer, const std::function<void()>& call, std::shared_ptr<DeviceMemoryObject> memory) {
std::shared_ptr<PhysicalDeviceObject> l_physical_device = findOrZero(this->PhysicalDevices, checkNotNull(findOrZero(this->Devices, checkNotNull(memory).mDevice)).mPhysicalDevice);
return (0UL) != (((uint32_t)(checkNotNull(l_physical_device).mMemoryProperties.mmemoryTypes[checkNotNull(memory).mMemoryTypeIndex].mpropertyFlags)) & ((uint32_t)(VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)));
}
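// Records the buffers bound by vkCmdBindVertexBuffers into CurrentVertexBuffers,
// keyed by binding index.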
void VulkanSpy::subDoCmdBindVertexBuffers(CallObserver* observer, const std::function<void()>& call, CmdBindBuffer bind) {
{
int32_t l__ = 0;
for (U32ToBoundBuffer::iterator it = bind.mBuffers.begin(); it != bind.mBuffers.end(); ++it, ++l__) {
uint32_t l_k = it->first;
BoundBuffer l_v = it->second;
this->CurrentVertexBuffers[l_k] = l_v;
}
}
return;
}
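// For every buffer in |bind|, observes the bound memory range if the backing
// device memory is host-coherent.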
void VulkanSpy::subDoCmdBindBuffers(CallObserver* observer, const std::function<void()>& call, CmdBindBuffer bind) {
{
int32_t l__ = 0;
for (U32ToBoundBuffer::iterator it = bind.mBuffers.begin(); it != bind.mBuffers.end(); ++it, ++l__) {
uint32_t l__ = it->first;
BoundBuffer l_v = it->second;
subReadMemoryIfCoherent(observer, call, checkNotNull(l_v.mBuffer).mMemory, (checkNotNull(l_v.mBuffer).mMemoryOffset) + (l_v.mOffset), l_v.mRange);
}
}
return;
}
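// Returns true if any vertex binding of the current graphics pipeline refers to a
// currently bound vertex buffer whose backing memory is host-coherent.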
bool VulkanSpy::subAreAnyVertexBuffersCoherent(CallObserver* observer, const std::function<void()>& call) {
std::shared_ptr<GraphicsPipelineObject> l_pipeline = findOrZero(this->GraphicsPipelines, this->CurrentGraphicsPipeline);
MutableBool l_b = MutableBool(false);
{
int32_t l__ = 0;
for (U32ToVkVertexInputBindingDescription::iterator it = checkNotNull(l_pipeline).mVertexBindings.begin(); it != checkNotNull(l_pipeline).mVertexBindings.end(); ++it, ++l__) {
uint32_t l__ = it->first;
VkVertexInputBindingDescription l_binding = it->second;
if (this->CurrentVertexBuffers.count(l_binding.mbinding) > 0) {
bool l__res_0 = subIsMemoryCoherent(observer, call, checkNotNull(findOrZero(this->CurrentVertexBuffers, l_binding.mbinding).mBuffer).mMemory);
l_b.mB = (l_b.mB) || (l__res_0);
}
}
}
return l_b.mB;
}
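// For each vertex binding of the current graphics pipeline whose buffer is backed by
// host-coherent memory, computes the byte range referenced by the draw (per-vertex or
// per-instance, depending on the binding's input rate) and observes that memory.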
void VulkanSpy::subReadCoherentVertexBuffers(CallObserver* observer, const std::function<void()>& call, uint32_t firstVertex, uint32_t firstInstance, uint32_t vertexCount, uint32_t instanceCount) {
std::shared_ptr<GraphicsPipelineObject> l_pipeline = findOrZero(this->GraphicsPipelines, this->CurrentGraphicsPipeline);
{
int32_t l__ = 0;
for (U32ToVkVertexInputBindingDescription::iterator it = checkNotNull(l_pipeline).mVertexBindings.begin(); it != checkNotNull(l_pipeline).mVertexBindings.end(); ++it, ++l__) {
uint32_t l__ = it->first;
VkVertexInputBindingDescription l_binding = it->second;
if (this->CurrentVertexBuffers.count(l_binding.mbinding) > 0) {
bool l__res_0 = subIsMemoryCoherent(observer, call, checkNotNull(findOrZero(this->CurrentVertexBuffers, l_binding.mbinding).mBuffer).mMemory);
if (l__res_0) {
BoundBuffer l_currentBoundBuffer = findOrZero(this->CurrentVertexBuffers, l_binding.mbinding);
std::shared_ptr<BufferObject> l_currentBufferObject = l_currentBoundBuffer.mBuffer;
uint32_t l_startVertexOffset = /* switch(l_binding.minputRate) */
/* case VkVertexInputRate::VK_VERTEX_INPUT_RATE_VERTEX: */(((l_binding.minputRate) == (VkVertexInputRate::VK_VERTEX_INPUT_RATE_VERTEX))) ? (firstVertex) :
/* case VkVertexInputRate::VK_VERTEX_INPUT_RATE_INSTANCE: */(((l_binding.minputRate) == (VkVertexInputRate::VK_VERTEX_INPUT_RATE_INSTANCE))) ? (firstInstance) :
/* default: */ 0;
uint32_t l_numVertices = /* switch(l_binding.minputRate) */
/* case VkVertexInputRate::VK_VERTEX_INPUT_RATE_VERTEX: */(((l_binding.minputRate) == (VkVertexInputRate::VK_VERTEX_INPUT_RATE_VERTEX))) ? (vertexCount) :
/* case VkVertexInputRate::VK_VERTEX_INPUT_RATE_INSTANCE: */(((l_binding.minputRate) == (VkVertexInputRate::VK_VERTEX_INPUT_RATE_INSTANCE))) ? (instanceCount) :
/* default: */ 0;
VkDeviceSize l_startVertexOffsetBytes = (l_currentBoundBuffer.mOffset) + ((checkNotNull(l_currentBufferObject).mMemoryOffset) + ((VkDeviceSize)((l_startVertexOffset) * (l_binding.mstride))));
VkDeviceSize l_numBytes = (VkDeviceSize)((l_numVertices) * (l_binding.mstride));
subReadMemoryIfCoherent(observer, call, checkNotNull(l_currentBufferObject).mMemory, l_startVertexOffsetBytes, l_numBytes);
}
}
}
}
return;
}
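// If any bound vertex buffer is host-coherent, observes the vertex and instance
// ranges covered by the draw before it executes.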
void VulkanSpy::subDoCmdDraw(CallObserver* observer, const std::function<void()>& call, CmdDraw draw) {
bool l__res_0 = subAreAnyVertexBuffersCoherent(observer, call);
if (l__res_0) {
subReadCoherentVertexBuffers(observer, call, draw.mFirstVertex, draw.mFirstInstance, draw.mVertexCount, draw.mInstanceCount);
}
return;
}
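// Mirrors vkCmdCopyImage on the shadow image data: for every region, layer, depth slice
// and row of texel blocks, observes the source memory if it is host-coherent and copies
// the row from the source level's data into the destination level's data.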
void VulkanSpy::subDoCmdCopyImage(CallObserver* observer, const std::function<void()>& call, CmdCopyImage args) {
std::shared_ptr<ImageObject> l_srcImageObject = findOrZero(this->Images, args.mSrcImage);
std::shared_ptr<ImageObject> l_dstImageObject = findOrZero(this->Images, args.mDstImage);
uint32_t l_srcFormat = checkNotNull(l_srcImageObject).mFormat;
ElementAndTexelBlockSize l_srcElementAndTexelBlockSize = subGetElementAndTexelBlockSize(observer, call, l_srcFormat);
uint32_t l_dstFormat = checkNotNull(l_dstImageObject).mFormat;
ElementAndTexelBlockSize l_dstElementAndTexelBlockSize = subGetElementAndTexelBlockSize(observer, call, l_dstFormat);
for (int32_t l_r = 0L; l_r < int32_t((args.mRegions.size())); ++l_r) {
VkImageCopy l_region = findOrZero(args.mRegions, (uint64_t)(l_r));
uint32_t l_srcBaseLayer = l_region.msrcSubresource.mbaseArrayLayer;
uint32_t l_dstBaseLayer = l_region.mdstSubresource.mbaseArrayLayer;
uint32_t l_srcMipLevel = l_region.msrcSubresource.mmipLevel;
uint32_t l_dstMipLevel = l_region.mdstSubresource.mmipLevel;
uint64_t l_srcElementSize = (uint64_t)(l_srcElementAndTexelBlockSize.mElementSize);
uint64_t l_srcBlockWidth = (uint64_t)(l_srcElementAndTexelBlockSize.mTexelBlockSize.mWidth);
uint64_t l_srcBlockHeight = (uint64_t)(l_srcElementAndTexelBlockSize.mTexelBlockSize.mHeight);
uint64_t l_dstElementSize = (uint64_t)(l_dstElementAndTexelBlockSize.mElementSize);
uint64_t l_dstBlockWidth = (uint64_t)(l_dstElementAndTexelBlockSize.mTexelBlockSize.mWidth);
uint64_t l_dstBlockHeight = (uint64_t)(l_dstElementAndTexelBlockSize.mTexelBlockSize.mHeight);
uint64_t l_srcXStartInBlocks = ((uint64_t)(l_region.msrcOffset.mx)) / (l_srcBlockWidth);
uint64_t l_srcYStartInBlocks = ((uint64_t)(l_region.msrcOffset.my)) / (l_srcBlockHeight);
uint64_t l_srcZStart = (uint64_t)(l_region.msrcOffset.mz);
uint64_t l_dstXStartInBlocks = ((uint64_t)(l_region.mdstOffset.mx)) / (l_dstBlockWidth);
uint64_t l_dstYStartInBlocks = ((uint64_t)(l_region.mdstOffset.my)) / (l_dstBlockHeight);
uint64_t l_dstZStart = (uint64_t)(l_region.mdstOffset.mz);
uint32_t l_extentXInBlocks = subRoundUpTo(observer, call, l_region.mextent.mwidth, (uint32_t)(l_srcBlockWidth));
uint32_t l_extentYInBlocks = subRoundUpTo(observer, call, l_region.mextent.mheight, (uint32_t)(l_srcBlockHeight));
uint32_t l_extentZ = l_region.mextent.mdepth;
for (uint32_t l_l = 0UL; l_l < l_region.msrcSubresource.mlayerCount; ++l_l) {
std::shared_ptr<ImageLevel> l_srcImageLevel = findOrZero(checkNotNull(findOrZero(checkNotNull(l_srcImageObject).mLayers, (l_srcBaseLayer) + (l_l))).mLevels, l_srcMipLevel);
std::shared_ptr<ImageLevel> l_dstImageLevel = findOrZero(checkNotNull(findOrZero(checkNotNull(l_dstImageObject).mLayers, (l_dstBaseLayer) + (l_l))).mLevels, l_dstMipLevel);
uint32_t l__res_0 = subRoundUpTo(observer, call, checkNotNull(l_srcImageLevel).mWidth, (uint32_t)(l_srcBlockWidth));
uint64_t l_srcImageLevelWidthInBlocks = (uint64_t)(l__res_0);
uint32_t l__res_1 = subRoundUpTo(observer, call, checkNotNull(l_srcImageLevel).mHeight, (uint32_t)(l_srcBlockHeight));
uint64_t l_srcImageLevelHeightInBlocks = (uint64_t)(l__res_1);
uint32_t l__res_2 = subRoundUpTo(observer, call, checkNotNull(l_dstImageLevel).mWidth, (uint32_t)(l_dstBlockWidth));
uint64_t l_dstImageLevelWidthInBlocks = (uint64_t)(l__res_2);
uint32_t l__res_3 = subRoundUpTo(observer, call, checkNotNull(l_dstImageLevel).mHeight, (uint32_t)(l_dstBlockHeight));
uint64_t l_dstImageLevelHeightInBlocks = (uint64_t)(l__res_3);
Slice<uint8_t> l_srcData = checkNotNull(l_srcImageLevel).mData;
Slice<uint8_t> l_dstData = checkNotNull(l_dstImageLevel).mData;
for (uint32_t l_z = 0UL; l_z < l_extentZ; ++l_z) {
for (uint32_t l_y = 0UL; l_y < l_extentYInBlocks; ++l_y) {
uint64_t l_copySize = ((uint64_t)(l_extentXInBlocks)) * (l_srcElementSize);
uint64_t l_dstY = (l_dstYStartInBlocks) + ((uint64_t)(l_y));
uint64_t l_dstZ = (l_dstZStart) + ((uint64_t)(l_z));
uint64_t l_srcY = (l_srcYStartInBlocks) + ((uint64_t)(l_y));
uint64_t l_srcZ = (l_srcZStart) + ((uint64_t)(l_z));
uint64_t l_dstStart = (((((l_dstZ) * (l_dstImageLevelHeightInBlocks)) + (l_dstY)) * (l_dstImageLevelWidthInBlocks)) + (l_dstXStartInBlocks)) * (l_dstElementSize);
uint64_t l_srcStart = (((((l_srcZ) * (l_srcImageLevelHeightInBlocks)) + (l_srcY)) * (l_srcImageLevelWidthInBlocks)) + (l_srcXStartInBlocks)) * (l_srcElementSize);
subReadMemoryIfCoherent(observer, call, checkNotNull(l_srcImageObject).mBoundMemory, (VkDeviceSize)(l_srcStart), (VkDeviceSize)(l_copySize));
observer->copy(slice(l_dstData, l_dstStart, (l_dstStart) + (l_copySize)), slice(l_srcData, l_srcStart, (l_srcStart) + (l_copySize)));
}
}
}
}
return;
}
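// Maps a VkFormat to the byte size of one element (a texel, or a compressed block) and
// the texel-block dimensions; formats not listed yield a default ElementAndTexelBlockSize.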
ElementAndTexelBlockSize VulkanSpy::subGetElementAndTexelBlockSize(CallObserver* observer, const std::function<void()>& call, uint32_t format) {
return /* switch(format) */
/* case VkFormat::VK_FORMAT_R8G8B8_UNORM: */(((format) == (VkFormat::VK_FORMAT_R8G8B8_UNORM))) ? (ElementAndTexelBlockSize(3UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R8G8B8A8_UNORM: */(((format) == (VkFormat::VK_FORMAT_R8G8B8A8_UNORM))) ? (ElementAndTexelBlockSize(4UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_B8G8R8A8_UNORM: */(((format) == (VkFormat::VK_FORMAT_B8G8R8A8_UNORM))) ? (ElementAndTexelBlockSize(4UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_BC1_RGB_UNORM_BLOCK, VkFormat::VK_FORMAT_BC1_RGB_SRGB_BLOCK, VkFormat::VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VkFormat::VK_FORMAT_BC1_RGBA_SRGB_BLOCK: */(((format) == (VkFormat::VK_FORMAT_BC1_RGB_UNORM_BLOCK))|| ((format) == (VkFormat::VK_FORMAT_BC1_RGB_SRGB_BLOCK))|| ((format) == (VkFormat::VK_FORMAT_BC1_RGBA_UNORM_BLOCK))|| ((format) == (VkFormat::VK_FORMAT_BC1_RGBA_SRGB_BLOCK))) ? (ElementAndTexelBlockSize(8UL, TexelBlockSizePair(4UL, 4UL))) :
/* case VkFormat::VK_FORMAT_BC2_UNORM_BLOCK: */(((format) == (VkFormat::VK_FORMAT_BC2_UNORM_BLOCK))) ? (ElementAndTexelBlockSize(16UL, TexelBlockSizePair(4UL, 4UL))) :
/* case VkFormat::VK_FORMAT_BC3_UNORM_BLOCK: */(((format) == (VkFormat::VK_FORMAT_BC3_UNORM_BLOCK))) ? (ElementAndTexelBlockSize(16UL, TexelBlockSizePair(4UL, 4UL))) :
/* case VkFormat::VK_FORMAT_R16G16B16A16_SFLOAT: */(((format) == (VkFormat::VK_FORMAT_R16G16B16A16_SFLOAT))) ? (ElementAndTexelBlockSize(8UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R32G32B32A32_SFLOAT: */(((format) == (VkFormat::VK_FORMAT_R32G32B32A32_SFLOAT))) ? (ElementAndTexelBlockSize(16UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R8_UNORM: */(((format) == (VkFormat::VK_FORMAT_R8_UNORM))) ? (ElementAndTexelBlockSize(1UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R16_UNORM: */(((format) == (VkFormat::VK_FORMAT_R16_UNORM))) ? (ElementAndTexelBlockSize(2UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R16_SFLOAT: */(((format) == (VkFormat::VK_FORMAT_R16_SFLOAT))) ? (ElementAndTexelBlockSize(2UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_R32_SFLOAT: */(((format) == (VkFormat::VK_FORMAT_R32_SFLOAT))) ? (ElementAndTexelBlockSize(4UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT: */(((format) == (VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT))) ? (ElementAndTexelBlockSize(5UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_D32_SFLOAT: */(((format) == (VkFormat::VK_FORMAT_D32_SFLOAT))) ? (ElementAndTexelBlockSize(4UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_D16_UNORM: */(((format) == (VkFormat::VK_FORMAT_D16_UNORM))) ? (ElementAndTexelBlockSize(2UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_D24_UNORM_S8_UINT: */(((format) == (VkFormat::VK_FORMAT_D24_UNORM_S8_UINT))) ? (ElementAndTexelBlockSize(4UL, TexelBlockSizePair(1UL, 1UL))) :
/* case VkFormat::VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, VkFormat::VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: */(((format) == (VkFormat::VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK))|| ((format) == (VkFormat::VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK))) ? (ElementAndTexelBlockSize(8UL, TexelBlockSizePair(4UL, 4UL))) :
/* case VkFormat::VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, VkFormat::VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: */(((format) == (VkFormat::VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK))|| ((format) == (VkFormat::VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK))) ? (ElementAndTexelBlockSize(16UL, TexelBlockSizePair(4UL, 4UL))) :
/* default: */ ElementAndTexelBlockSize();
}
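// Resolves the effective buffer row length and image height for a VkBufferImageCopy:
// per the Vulkan specification, a value of 0 means tightly packed, i.e. use the copy's
// image extent instead.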
RowLengthAndImageHeight VulkanSpy::subGetRowLengthAndImageHeight(CallObserver* observer, const std::function<void()>& call, VkBufferImageCopy region) {
uint32_t l_rowLength = /* switch((region.mbufferRowLength) == (0UL)) */
/* case true: */((((region.mbufferRowLength) == (0UL)) == (true))) ? (region.mimageExtent.mwidth) :
/* case false: */((((region.mbufferRowLength) == (0UL)) == (false))) ? (region.mbufferRowLength) :
/* default: */ 0;
uint32_t l_imageHeight = /* switch((region.mbufferImageHeight) == (0UL)) */
/* case true: */((((region.mbufferImageHeight) == (0UL)) == (true))) ? (region.mimageExtent.mheight) :
/* case false: */((((region.mbufferImageHeight) == (0UL)) == (false))) ? (region.mbufferImageHeight) :
/* default: */ 0;
return RowLengthAndImageHeight(l_rowLength, l_imageHeight);
}
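// Records the framebuffer used by the render pass and marks every image attached to it
// as last bound on the current queue.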
void VulkanSpy::subDoCmdBeginRenderPass(CallObserver* observer, const std::function<void()>& call, CmdBeginRenderPass args) {
this->LastUsedFramebuffer = args.mFramebuffer;
std::shared_ptr<FramebufferObject> l_framebuffer = findOrZero(this->Framebuffers, args.mFramebuffer);
{
int32_t l__ = 0;
for (U32ToImageViewObject__R::iterator it = checkNotNull(l_framebuffer).mImageAttachments.begin(); it != checkNotNull(l_framebuffer).mImageAttachments.end(); ++it, ++l__) {
uint32_t l__ = it->first;
std::shared_ptr<ImageViewObject> l_v = it->second;
checkNotNull(checkNotNull(l_v).mImage).mLastBoundQueue = this->LastBoundQueue;
}
}
return;
}
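// Observes the VkApplicationInfo structure together with its application and engine
// name strings, and returns the requested apiVersion.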
uint32_t VulkanSpy::subReadVkApplicationInfo(CallObserver* observer, const std::function<void()>& call, VkApplicationInfo* applicationInfo) {
VkApplicationInfo l_info = observer->read(slice(applicationInfo, 0ULL, 1ULL), 0ULL);
(void)observer->string(l_info.mpApplicationName);
(void)observer->string(l_info.mpEngineName);
return l_info.mapiVersion;
}
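// Observes a VkDeviceCreateInfo and everything it points to: queue create infos, enabled
// layer and extension names, and enabled features, recording the enabled extensions on
// the returned DeviceObject.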
std::shared_ptr<DeviceObject> VulkanSpy::subCreateDeviceObject(CallObserver* observer, const std::function<void()>& call, VkDeviceCreateInfo* data) {
std::shared_ptr<DeviceObject> l_object = std::shared_ptr<DeviceObject>(new DeviceObject(0, U32ToString(), VkPhysicalDeviceFeatures(), 0));
VkDeviceCreateInfo l_info = observer->read(slice(data, 0ULL, 1ULL), 0ULL);
Slice<VkDeviceQueueCreateInfo> l_queueCreateInfos = slice(l_info.mpQueueCreateInfos, (uint64_t)(0UL), (uint64_t)(l_info.mqueueCreateInfoCount));
for (uint32_t l_i = 0UL; l_i < l_info.mqueueCreateInfoCount; ++l_i) {
subReadVkDeviceQueueCreateInfo(observer, call, observer->read(l_queueCreateInfos, (uint64_t)(l_i)));
}
Slice<char*> l_layerNames = slice(l_info.mppEnabledLayerNames, (uint64_t)(0UL), (uint64_t)(l_info.menabledLayerCount));
for (uint32_t l_i = 0UL; l_i < l_info.menabledLayerCount; ++l_i) {
(void)observer->string(observer->read(l_layerNames, (uint64_t)(l_i)));
}
Slice<char*> l_extensionNames = slice(l_info.mppEnabledExtensionNames, (uint64_t)(0UL), (uint64_t)(l_info.menabledExtensionCount));
for (uint32_t l_i = 0UL; l_i < l_info.menabledExtensionCount; ++l_i) {
checkNotNull(l_object).mEnabledExtensions[l_i] = observer->string(observer->read(l_extensionNames, (uint64_t)(l_i)));
}
if ((l_info.mpEnabledFeatures) != (nullptr)) {
checkNotNull(l_object).mEnabledFeatures = observer->read(slice(l_info.mpEnabledFeatures, 0ULL, 1ULL), 0ULL);
}
return l_object;
}
} // namespace gapii