Run clang-format on files in stream-servers/vulkan (except auto-generated files).
DO NOT ATTEMPT TO MERGE THIS COMMIT. Instead, simply follow the steps below to reproduce this change on your local repository:
cd stream-servers/vulkan
clang-format -i *.cpp *.h
# Revert auto-generated files:
grep -l "\(Autogenerated module\)\|\(DO NOT EDIT\)" *.cpp *.h | xargs git checkout
(Note: on Windows, be sure to use a Visual Studio command prompt)
Auto-generated files will be addressed separately by adding clang-format
as a final pass in the codegen script.
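A minimal sketch of what that final pass could look like, reusing the marker
pattern from the revert step above (an illustration only, not the actual
codegen script):
cd stream-servers/vulkan
# After the generator rewrites its output, reformat only the auto-generated files:
grep -l "\(Autogenerated module\)\|\(DO NOT EDIT\)" *.cpp *.h | xargs clang-format -i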
Test: compile
Change-Id: I0cc3c38d14465c3753f53af6e3887c66ea73103a
diff --git a/stream-servers/vulkan/CompositorVk.cpp b/stream-servers/vulkan/CompositorVk.cpp
index d29177a..3f73086 100644
--- a/stream-servers/vulkan/CompositorVk.cpp
+++ b/stream-servers/vulkan/CompositorVk.cpp
@@ -24,8 +24,8 @@
fflush(stderr); \
} while (0)
-static VkShaderModule createShaderModule(const goldfish_vk::VulkanDispatch &vk, VkDevice device,
- const std::vector<uint32_t> &code) {
+static VkShaderModule createShaderModule(const goldfish_vk::VulkanDispatch& vk, VkDevice device,
+ const std::vector<uint32_t>& code) {
VkShaderModuleCreateInfo shaderModuleCi = {
.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
.codeSize = static_cast<uint32_t>(code.size() * sizeof(uint32_t)),
@@ -36,13 +36,13 @@
}
ComposeLayerVk::ComposeLayerVk(VkSampler vkSampler, VkImageView vkImageView,
- const LayerTransform &layerTransform)
+ const LayerTransform& layerTransform)
: m_vkSampler(vkSampler),
m_vkImageView(vkImageView),
m_layerTransform({.pos = layerTransform.pos, .texcoord = layerTransform.texcoord}) {}
std::unique_ptr<ComposeLayerVk> ComposeLayerVk::createFromHwc2ComposeLayer(
- VkSampler vkSampler, VkImageView vkImageView, const ComposeLayer &composeLayer,
+ VkSampler vkSampler, VkImageView vkImageView, const ComposeLayer& composeLayer,
uint32_t cbWidth, uint32_t cbHeight, uint32_t dstWidth, uint32_t dstHeight) {
// Calculate the posTransform and the texcoordTransform needed in the
// uniform of the Compositor.vert shader. The posTransform should transform
@@ -51,8 +51,8 @@
// texcoordTransform should transform the unit square(top = 0, bottom = 1,
// left = 0, right = 1) to where we should sample the layer in the
// normalized uv space given the composeLayer.
- const hwc_rect_t &posRect = composeLayer.displayFrame;
- const hwc_frect_t &texcoordRect = composeLayer.crop;
+ const hwc_rect_t& posRect = composeLayer.displayFrame;
+ const hwc_frect_t& texcoordRect = composeLayer.crop;
int posWidth = posRect.right - posRect.left;
int posHeight = posRect.bottom - posRect.top;
@@ -127,7 +127,7 @@
const std::vector<uint16_t> CompositorVk::k_indices = {0, 1, 2, 2, 3, 0};
std::unique_ptr<CompositorVk> CompositorVk::create(
- const goldfish_vk::VulkanDispatch &vk, VkDevice vkDevice, VkPhysicalDevice vkPhysicalDevice,
+ const goldfish_vk::VulkanDispatch& vk, VkDevice vkDevice, VkPhysicalDevice vkPhysicalDevice,
VkQueue vkQueue, std::shared_ptr<android::base::Lock> queueLock, VkFormat format,
VkImageLayout initialLayout, VkImageLayout finalLayout, uint32_t maxFramesInFlight,
VkCommandPool commandPool, VkSampler sampler) {
@@ -146,7 +146,7 @@
return res;
}
-CompositorVk::CompositorVk(const goldfish_vk::VulkanDispatch &vk, VkDevice vkDevice,
+CompositorVk::CompositorVk(const goldfish_vk::VulkanDispatch& vk, VkDevice vkDevice,
VkPhysicalDevice vkPhysicalDevice, VkQueue vkQueue,
std::shared_ptr<android::base::Lock> queueLock,
VkCommandPool commandPool, uint32_t maxFramesInFlight)
@@ -390,12 +390,12 @@
}
std::tuple<VkBuffer, VkDeviceMemory> CompositorVk::createStagingBufferWithData(
- const void *srcData, VkDeviceSize size) const {
+ const void* srcData, VkDeviceSize size) const {
auto [stagingBuffer, stagingBufferMemory] =
createBuffer(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
.value();
- void *data;
+ void* data;
VK_CHECK(m_vk.vkMapMemory(m_vkDevice, stagingBufferMemory, 0, size, 0, &data));
memcpy(data, srcData, size);
m_vk.vkUnmapMemory(m_vkDevice, stagingBufferMemory);
@@ -403,7 +403,7 @@
}
void CompositorVk::copyBuffer(VkBuffer src, VkBuffer dst, VkDeviceSize size) const {
- runSingleTimeCommands(m_vkQueue, m_vkQueueLock, [&, this](const auto &cmdBuff) {
+ runSingleTimeCommands(m_vkQueue, m_vkQueueLock, [&, this](const auto& cmdBuff) {
VkBufferCopy copyRegion = {};
copyRegion.srcOffset = 0;
copyRegion.dstOffset = 0;
@@ -439,7 +439,7 @@
}
// We do see a composition requests with 12 layers. (b/222700096)
-// Inside hwc2, we will ask for surfaceflinger to 
+// Inside hwc2, we will ask for surfaceflinger to
// do the composition, if the layers more than 16.
// If we see rendering error or significant time spent on updating
// descriptors in setComposition, we should tune this number.
@@ -487,7 +487,7 @@
}
void CompositorVk::recordCommandBuffers(uint32_t renderTargetIndex, VkCommandBuffer cmdBuffer,
- const CompositorVkRenderTarget &renderTarget) {
+ const CompositorVkRenderTarget& renderTarget) {
VkClearValue clearColor = {.color = {.float32 = {0.0f, 0.0f, 0.0f, 1.0f}}};
VkRenderPassBeginInfo renderPassBeginInfo = {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
@@ -546,16 +546,16 @@
}
std::tie(m_uniformStorage.m_vkBuffer, m_uniformStorage.m_vkDeviceMemory) = buffer;
VK_CHECK(m_vk.vkMapMemory(m_vkDevice, m_uniformStorage.m_vkDeviceMemory, 0, size, 0,
- reinterpret_cast<void **>(&m_uniformStorage.m_data)));
+ reinterpret_cast<void**>(&m_uniformStorage.m_data)));
}
-bool CompositorVk::validateQueueFamilyProperties(const VkQueueFamilyProperties &properties) {
+bool CompositorVk::validateQueueFamilyProperties(const VkQueueFamilyProperties& properties) {
return properties.queueFlags & VK_QUEUE_GRAPHICS_BIT;
}
-void CompositorVk::setComposition(uint32_t rtIndex, std::unique_ptr<Composition> &&composition) {
+void CompositorVk::setComposition(uint32_t rtIndex, std::unique_ptr<Composition>&& composition) {
m_currentCompositions[rtIndex] = std::move(composition);
-    const auto &currentComposition = *m_currentCompositions[rtIndex];
+ const auto& currentComposition = *m_currentCompositions[rtIndex];
if (currentComposition.m_composeLayers.size() > kMaxLayersPerFrame) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
<< "CompositorVk can't compose more than " << kMaxLayersPerFrame
@@ -563,14 +563,14 @@
<< static_cast<uint32_t>(currentComposition.m_composeLayers.size());
}
- memset(reinterpret_cast<uint8_t *>(m_uniformStorage.m_data) +
+ memset(reinterpret_cast<uint8_t*>(m_uniformStorage.m_data) +
(rtIndex * kMaxLayersPerFrame + 0) * m_uniformStorage.m_stride,
0, sizeof(ComposeLayerVk::LayerTransform) * kMaxLayersPerFrame);
std::vector<VkDescriptorImageInfo> imageInfos(currentComposition.m_composeLayers.size());
std::vector<VkWriteDescriptorSet> descriptorWrites;
for (size_t i = 0; i < currentComposition.m_composeLayers.size(); ++i) {
- const auto &layer = currentComposition.m_composeLayers[i];
+ const auto& layer = currentComposition.m_composeLayers[i];
if (m_vkSampler != layer->m_vkSampler) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
<< "Unsupported sampler(" << reinterpret_cast<uintptr_t>(layer->m_vkSampler)
@@ -580,16 +580,16 @@
VkDescriptorImageInfo({.sampler = VK_NULL_HANDLE,
.imageView = layer->m_vkImageView,
.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL});
- const VkDescriptorImageInfo &imageInfo = imageInfos[i];
+ const VkDescriptorImageInfo& imageInfo = imageInfos[i];
descriptorWrites.emplace_back(
VkWriteDescriptorSet{.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
- .dstSet = m_vkDescriptorSets[rtIndex * kMaxLayersPerFrame + i],
- .dstBinding = 0,
- .dstArrayElement = 0,
- .descriptorCount = 1,
- .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
- .pImageInfo = &imageInfo});
- memcpy(reinterpret_cast<uint8_t *>(m_uniformStorage.m_data) +
+ .dstSet = m_vkDescriptorSets[rtIndex * kMaxLayersPerFrame + i],
+ .dstBinding = 0,
+ .dstArrayElement = 0,
+ .descriptorCount = 1,
+ .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+ .pImageInfo = &imageInfo});
+ memcpy(reinterpret_cast<uint8_t*>(m_uniformStorage.m_data) +
(rtIndex * kMaxLayersPerFrame + i) * m_uniformStorage.m_stride,
&layer->m_layerTransform, sizeof(ComposeLayerVk::LayerTransform));
}
@@ -620,7 +620,7 @@
.offset = offsetof(struct Vertex, texPos)}};
}
-CompositorVkRenderTarget::CompositorVkRenderTarget(const goldfish_vk::VulkanDispatch &vk,
+CompositorVkRenderTarget::CompositorVkRenderTarget(const goldfish_vk::VulkanDispatch& vk,
VkDevice vkDevice, VkImageView vkImageView,
uint32_t width, uint32_t height,
VkRenderPass vkRenderPass)
diff --git a/stream-servers/vulkan/CompositorVk.h b/stream-servers/vulkan/CompositorVk.h
index 5fdbee2..2216c61 100644
--- a/stream-servers/vulkan/CompositorVk.h
+++ b/stream-servers/vulkan/CompositorVk.h
@@ -24,12 +24,12 @@
} m_layerTransform;
static std::unique_ptr<ComposeLayerVk> createFromHwc2ComposeLayer(
- VkSampler, VkImageView, const ComposeLayer &, uint32_t cbWidth, uint32_t cbHeight,
+ VkSampler, VkImageView, const ComposeLayer&, uint32_t cbWidth, uint32_t cbHeight,
uint32_t dstWidth, uint32_t dstHeight);
private:
ComposeLayerVk() = delete;
- explicit ComposeLayerVk(VkSampler, VkImageView, const LayerTransform &);
+ explicit ComposeLayerVk(VkSampler, VkImageView, const LayerTransform&);
};
// If we want to apply transform to all layers to rotate/clip/position the
@@ -49,7 +49,7 @@
CompositorVkBase,
vk_util::FindMemoryType<CompositorVkBase,
vk_util::RecordImageLayoutTransformCommands<CompositorVkBase>>> {
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
const VkDevice m_vkDevice;
const VkPhysicalDevice m_vkPhysicalDevice;
const VkQueue m_vkQueue;
@@ -67,7 +67,7 @@
VkCommandPool m_vkCommandPool;
- explicit CompositorVkBase(const goldfish_vk::VulkanDispatch &vk, VkDevice device,
+ explicit CompositorVkBase(const goldfish_vk::VulkanDispatch& vk, VkDevice device,
VkPhysicalDevice physicalDevice, VkQueue queue,
std::shared_ptr<android::base::Lock> queueLock,
VkCommandPool commandPool)
@@ -92,20 +92,20 @@
class CompositorVk : protected CompositorVkBase {
public:
static std::unique_ptr<CompositorVk> create(
- const goldfish_vk::VulkanDispatch &vk, VkDevice, VkPhysicalDevice, VkQueue,
+ const goldfish_vk::VulkanDispatch& vk, VkDevice, VkPhysicalDevice, VkQueue,
std::shared_ptr<android::base::Lock> queueLock, VkFormat, VkImageLayout initialLayout,
VkImageLayout finalLayout, uint32_t maxFramesInFlight, VkCommandPool, VkSampler);
- static bool validateQueueFamilyProperties(const VkQueueFamilyProperties &properties);
+ static bool validateQueueFamilyProperties(const VkQueueFamilyProperties& properties);
~CompositorVk();
void recordCommandBuffers(uint32_t renderTargetIndex, VkCommandBuffer,
- const CompositorVkRenderTarget &);
- void setComposition(uint32_t i, std::unique_ptr<Composition> &&composition);
+ const CompositorVkRenderTarget&);
+ void setComposition(uint32_t i, std::unique_ptr<Composition>&& composition);
std::unique_ptr<CompositorVkRenderTarget> createRenderTarget(VkImageView, uint32_t width,
uint32_t height);
private:
- explicit CompositorVk(const goldfish_vk::VulkanDispatch &, VkDevice, VkPhysicalDevice, VkQueue,
+ explicit CompositorVk(const goldfish_vk::VulkanDispatch&, VkDevice, VkPhysicalDevice, VkQueue,
std::shared_ptr<android::base::Lock> queueLock, VkCommandPool,
uint32_t maxFramesInFlight);
void setUpGraphicsPipeline(VkFormat renderTargetFormat, VkImageLayout initialLayout,
@@ -118,7 +118,7 @@
std::optional<std::tuple<VkBuffer, VkDeviceMemory>> createBuffer(VkDeviceSize,
VkBufferUsageFlags,
VkMemoryPropertyFlags) const;
- std::tuple<VkBuffer, VkDeviceMemory> createStagingBufferWithData(const void *data,
+ std::tuple<VkBuffer, VkDeviceMemory> createStagingBufferWithData(const void* data,
VkDeviceSize size) const;
void copyBuffer(VkBuffer src, VkBuffer dst, VkDeviceSize) const;
@@ -145,7 +145,7 @@
struct UniformStorage {
VkBuffer m_vkBuffer;
VkDeviceMemory m_vkDeviceMemory;
- void *m_data;
+ void* m_data;
VkDeviceSize m_stride;
} m_uniformStorage;
};
@@ -155,12 +155,12 @@
~CompositorVkRenderTarget();
private:
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
VkDevice m_vkDevice;
VkFramebuffer m_vkFramebuffer;
uint32_t m_width;
uint32_t m_height;
- CompositorVkRenderTarget(const goldfish_vk::VulkanDispatch &, VkDevice, VkImageView,
+ CompositorVkRenderTarget(const goldfish_vk::VulkanDispatch&, VkDevice, VkImageView,
uint32_t width, uint32_t height, VkRenderPass);
friend class CompositorVk;
};
diff --git a/stream-servers/vulkan/DisplayVk.cpp b/stream-servers/vulkan/DisplayVk.cpp
index 9e85319..0550c08 100644
--- a/stream-servers/vulkan/DisplayVk.cpp
+++ b/stream-servers/vulkan/DisplayVk.cpp
@@ -382,8 +382,7 @@
continue;
}
auto composeLayerVk = ComposeLayerVk::createFromHwc2ComposeLayer(
- m_compositionVkSampler, composeBuffers[i]->m_vkImageView,
- composeLayers[i],
+ m_compositionVkSampler, composeBuffers[i]->m_vkImageView, composeLayers[i],
composeBuffers[i]->m_vkImageCreateInfo.extent.width,
composeBuffers[i]->m_vkImageCreateInfo.extent.height,
targetBuffer->m_vkImageCreateInfo.extent.width,
@@ -649,9 +648,8 @@
}
bool DisplayVk::compareAndSaveComposition(
- uint32_t renderTargetIndex,
- const std::vector<ComposeLayer>& composeLayers,
- const std::vector<std::shared_ptr<DisplayBufferInfo>>& composeBuffers) {
+ uint32_t renderTargetIndex, const std::vector<ComposeLayer>& composeLayers,
+ const std::vector<std::shared_ptr<DisplayBufferInfo>>& composeBuffers) {
if (!m_surfaceState) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
<< "Haven't bound to a surface, can't compare and save composition.";
@@ -769,7 +767,7 @@
}
std::shared_ptr<DisplayVk::PostResource> DisplayVk::PostResource::create(
- const goldfish_vk::VulkanDispatch &vk, VkDevice vkDevice, VkCommandPool vkCommandPool) {
+ const goldfish_vk::VulkanDispatch& vk, VkDevice vkDevice, VkCommandPool vkCommandPool) {
VkFenceCreateInfo fenceCi = {
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
};
@@ -816,7 +814,7 @@
m_vkCommandPool(vkCommandPool) {}
std::unique_ptr<DisplayVk::ComposeResource> DisplayVk::ComposeResource::create(
- const goldfish_vk::VulkanDispatch &vk, VkDevice vkDevice, VkCommandPool vkCommandPool) {
+ const goldfish_vk::VulkanDispatch& vk, VkDevice vkDevice, VkCommandPool vkCommandPool) {
VkFenceCreateInfo fenceCi = {
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
};
diff --git a/stream-servers/vulkan/DisplayVk.h b/stream-servers/vulkan/DisplayVk.h
index 2d1d750..cae840b 100644
--- a/stream-servers/vulkan/DisplayVk.h
+++ b/stream-servers/vulkan/DisplayVk.h
@@ -27,10 +27,10 @@
~DisplayBufferInfo();
private:
- DisplayBufferInfo(const goldfish_vk::VulkanDispatch &, VkDevice, const VkImageCreateInfo &,
+ DisplayBufferInfo(const goldfish_vk::VulkanDispatch&, VkDevice, const VkImageCreateInfo&,
VkImage);
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
VkDevice m_vkDevice;
VkImageCreateInfo m_vkImageCreateInfo;
@@ -44,7 +44,7 @@
friend class DisplayVk;
};
- DisplayVk(const goldfish_vk::VulkanDispatch &, VkPhysicalDevice,
+ DisplayVk(const goldfish_vk::VulkanDispatch&, VkPhysicalDevice,
uint32_t swapChainQueueFamilyIndex, uint32_t compositorQueueFamilyIndex, VkDevice,
VkQueue compositorVkQueue, std::shared_ptr<android::base::Lock> compositorVkQueueLock,
VkQueue swapChainVkQueue, std::shared_ptr<android::base::Lock> swapChainVkQueueLock);
@@ -52,7 +52,7 @@
void bindToSurface(VkSurfaceKHR, uint32_t width, uint32_t height);
// The caller is responsible to make sure the VkImage lives longer than the DisplayBufferInfo
// created here.
- std::shared_ptr<DisplayBufferInfo> createDisplayBuffer(VkImage, const VkImageCreateInfo &);
+ std::shared_ptr<DisplayBufferInfo> createDisplayBuffer(VkImage, const VkImageCreateInfo&);
// The first component of the returned tuple is false when the swapchain is no longer valid and
// bindToSurface() needs to be called again. When the first component is true, the second
// component of the returned tuple is a/ future that will complete when the GPU side of work
@@ -68,27 +68,26 @@
// true, the second component of the returned tuple is a future that will
// complete when the GPU side of work completes.
std::tuple<bool, std::shared_future<void>> compose(
- const std::vector<ComposeLayer> &composeLayers,
+ const std::vector<ComposeLayer>& composeLayers,
std::vector<std::shared_ptr<DisplayBufferInfo>> composeBuffers,
std::shared_ptr<DisplayBufferInfo> renderTarget);
private:
VkFormatFeatureFlags getFormatFeatures(VkFormat, VkImageTiling);
- bool canPost(const VkImageCreateInfo &);
+ bool canPost(const VkImageCreateInfo&);
// Check if the VkImage can be used as the compose layer to be sampled from.
- bool canCompositeFrom(const VkImageCreateInfo &);
+ bool canCompositeFrom(const VkImageCreateInfo&);
// Check if the VkImage can be used as the render target of the composition.
- bool canCompositeTo(const VkImageCreateInfo &);
+ bool canCompositeTo(const VkImageCreateInfo&);
// Returns if the composition specified by the parameter is different from
// the previous composition. If the composition is different, update the
// previous composition stored in m_surfaceState. Must be called after
// bindToSurface() is called.
bool compareAndSaveComposition(
- uint32_t renderTargetIndex,
- const std::vector<ComposeLayer> &composeLayers,
- const std::vector<std::shared_ptr<DisplayBufferInfo>> &composeBuffers);
+ uint32_t renderTargetIndex, const std::vector<ComposeLayer>& composeLayers,
+ const std::vector<std::shared_ptr<DisplayBufferInfo>>& composeBuffers);
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
VkPhysicalDevice m_vkPhysicalDevice;
uint32_t m_swapChainQueueFamilyIndex;
uint32_t m_compositorQueueFamilyIndex;
@@ -106,16 +105,16 @@
const VkSemaphore m_swapchainImageAcquireSemaphore;
const VkSemaphore m_swapchainImageReleaseSemaphore;
const VkCommandBuffer m_vkCommandBuffer;
- static std::shared_ptr<PostResource> create(const goldfish_vk::VulkanDispatch &, VkDevice,
+ static std::shared_ptr<PostResource> create(const goldfish_vk::VulkanDispatch&, VkDevice,
VkCommandPool);
~PostResource();
DISALLOW_COPY_ASSIGN_AND_MOVE(PostResource);
private:
- PostResource(const goldfish_vk::VulkanDispatch &, VkDevice, VkCommandPool,
+ PostResource(const goldfish_vk::VulkanDispatch&, VkDevice, VkCommandPool,
VkFence swapchainImageReleaseFence, VkSemaphore swapchainImageAcquireSemaphore,
VkSemaphore swapchainImageReleaseSemaphore, VkCommandBuffer);
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
const VkDevice m_vkDevice;
const VkCommandPool m_vkCommandPool;
};
@@ -127,15 +126,15 @@
public:
const VkFence m_composeCompleteFence;
const VkCommandBuffer m_vkCommandBuffer;
- static std::unique_ptr<ComposeResource> create(const goldfish_vk::VulkanDispatch &,
- VkDevice, VkCommandPool);
+ static std::unique_ptr<ComposeResource> create(const goldfish_vk::VulkanDispatch&, VkDevice,
+ VkCommandPool);
~ComposeResource();
DISALLOW_COPY_ASSIGN_AND_MOVE(ComposeResource);
private:
- ComposeResource(const goldfish_vk::VulkanDispatch &, VkDevice, VkCommandPool, VkFence,
+ ComposeResource(const goldfish_vk::VulkanDispatch&, VkDevice, VkCommandPool, VkFence,
VkCommandBuffer);
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
const VkDevice m_vkDevice;
const VkCommandPool m_vkCommandPool;
};
diff --git a/stream-servers/vulkan/GrallocDefs.h b/stream-servers/vulkan/GrallocDefs.h
index 574d6fc..2f8d96d 100644
--- a/stream-servers/vulkan/GrallocDefs.h
+++ b/stream-servers/vulkan/GrallocDefs.h
@@ -19,105 +19,105 @@
HAL_PIXEL_FORMAT_RGB_888 = 3,
HAL_PIXEL_FORMAT_RGB_565 = 4,
HAL_PIXEL_FORMAT_BGRA_8888 = 5,
- HAL_PIXEL_FORMAT_RGBA_1010102 = 43, // 0x2B
- HAL_PIXEL_FORMAT_RGBA_FP16 = 22, // 0x16
- HAL_PIXEL_FORMAT_YV12 = 842094169, // 0x32315659
- HAL_PIXEL_FORMAT_Y8 = 538982489, // 0x20203859
- HAL_PIXEL_FORMAT_Y16 = 540422489, // 0x20363159
- HAL_PIXEL_FORMAT_RAW16 = 32, // 0x20
- HAL_PIXEL_FORMAT_RAW10 = 37, // 0x25
- HAL_PIXEL_FORMAT_RAW12 = 38, // 0x26
- HAL_PIXEL_FORMAT_RAW_OPAQUE = 36, // 0x24
- HAL_PIXEL_FORMAT_BLOB = 33, // 0x21
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 34, // 0x22
- HAL_PIXEL_FORMAT_YCBCR_420_888 = 35, // 0x23
- HAL_PIXEL_FORMAT_YCBCR_422_888 = 39, // 0x27
- HAL_PIXEL_FORMAT_YCBCR_444_888 = 40, // 0x28
- HAL_PIXEL_FORMAT_FLEX_RGB_888 = 41, // 0x29
- HAL_PIXEL_FORMAT_FLEX_RGBA_8888 = 42, // 0x2A
- HAL_PIXEL_FORMAT_YCBCR_422_SP = 16, // 0x10
- HAL_PIXEL_FORMAT_YCRCB_420_SP = 17, // 0x11
- HAL_PIXEL_FORMAT_YCBCR_422_I = 20, // 0x14
- HAL_PIXEL_FORMAT_JPEG = 256, // 0x100
+ HAL_PIXEL_FORMAT_RGBA_1010102 = 43, // 0x2B
+ HAL_PIXEL_FORMAT_RGBA_FP16 = 22, // 0x16
+ HAL_PIXEL_FORMAT_YV12 = 842094169, // 0x32315659
+ HAL_PIXEL_FORMAT_Y8 = 538982489, // 0x20203859
+ HAL_PIXEL_FORMAT_Y16 = 540422489, // 0x20363159
+ HAL_PIXEL_FORMAT_RAW16 = 32, // 0x20
+ HAL_PIXEL_FORMAT_RAW10 = 37, // 0x25
+ HAL_PIXEL_FORMAT_RAW12 = 38, // 0x26
+ HAL_PIXEL_FORMAT_RAW_OPAQUE = 36, // 0x24
+ HAL_PIXEL_FORMAT_BLOB = 33, // 0x21
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 34, // 0x22
+ HAL_PIXEL_FORMAT_YCBCR_420_888 = 35, // 0x23
+ HAL_PIXEL_FORMAT_YCBCR_422_888 = 39, // 0x27
+ HAL_PIXEL_FORMAT_YCBCR_444_888 = 40, // 0x28
+ HAL_PIXEL_FORMAT_FLEX_RGB_888 = 41, // 0x29
+ HAL_PIXEL_FORMAT_FLEX_RGBA_8888 = 42, // 0x2A
+ HAL_PIXEL_FORMAT_YCBCR_422_SP = 16, // 0x10
+ HAL_PIXEL_FORMAT_YCRCB_420_SP = 17, // 0x11
+ HAL_PIXEL_FORMAT_YCBCR_422_I = 20, // 0x14
+ HAL_PIXEL_FORMAT_JPEG = 256, // 0x100
} android_pixel_format_t;
enum {
/* buffer is never read in software */
- GRALLOC_USAGE_SW_READ_NEVER = 0x00000000U,
+ GRALLOC_USAGE_SW_READ_NEVER = 0x00000000U,
/* buffer is rarely read in software */
- GRALLOC_USAGE_SW_READ_RARELY = 0x00000002U,
+ GRALLOC_USAGE_SW_READ_RARELY = 0x00000002U,
/* buffer is often read in software */
- GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003U,
+ GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003U,
/* mask for the software read values */
- GRALLOC_USAGE_SW_READ_MASK = 0x0000000FU,
+ GRALLOC_USAGE_SW_READ_MASK = 0x0000000FU,
/* buffer is never written in software */
- GRALLOC_USAGE_SW_WRITE_NEVER = 0x00000000U,
+ GRALLOC_USAGE_SW_WRITE_NEVER = 0x00000000U,
/* buffer is rarely written in software */
- GRALLOC_USAGE_SW_WRITE_RARELY = 0x00000020U,
+ GRALLOC_USAGE_SW_WRITE_RARELY = 0x00000020U,
/* buffer is often written in software */
- GRALLOC_USAGE_SW_WRITE_OFTEN = 0x00000030U,
+ GRALLOC_USAGE_SW_WRITE_OFTEN = 0x00000030U,
/* mask for the software write values */
- GRALLOC_USAGE_SW_WRITE_MASK = 0x000000F0U,
+ GRALLOC_USAGE_SW_WRITE_MASK = 0x000000F0U,
/* buffer will be used as an OpenGL ES texture */
- GRALLOC_USAGE_HW_TEXTURE = 0x00000100U,
+ GRALLOC_USAGE_HW_TEXTURE = 0x00000100U,
/* buffer will be used as an OpenGL ES render target */
- GRALLOC_USAGE_HW_RENDER = 0x00000200U,
+ GRALLOC_USAGE_HW_RENDER = 0x00000200U,
/* buffer will be used by the 2D hardware blitter */
- GRALLOC_USAGE_HW_2D = 0x00000400U,
+ GRALLOC_USAGE_HW_2D = 0x00000400U,
/* buffer will be used by the HWComposer HAL module */
- GRALLOC_USAGE_HW_COMPOSER = 0x00000800U,
+ GRALLOC_USAGE_HW_COMPOSER = 0x00000800U,
/* buffer will be used with the framebuffer device */
- GRALLOC_USAGE_HW_FB = 0x00001000U,
+ GRALLOC_USAGE_HW_FB = 0x00001000U,
/* buffer should be displayed full-screen on an external display when
* possible */
- GRALLOC_USAGE_EXTERNAL_DISP = 0x00002000U,
+ GRALLOC_USAGE_EXTERNAL_DISP = 0x00002000U,
/* Must have a hardware-protected path to external display sink for
* this buffer. If a hardware-protected path is not available, then
* either don't composite only this buffer (preferred) to the
* external sink, or (less desirable) do not route the entire
* composition to the external sink. */
- GRALLOC_USAGE_PROTECTED = 0x00004000U,
+ GRALLOC_USAGE_PROTECTED = 0x00004000U,
/* buffer may be used as a cursor */
- GRALLOC_USAGE_CURSOR = 0x00008000U,
+ GRALLOC_USAGE_CURSOR = 0x00008000U,
/* buffer will be used with the HW video encoder */
- GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000U,
+ GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000U,
/* buffer will be written by the HW camera pipeline */
- GRALLOC_USAGE_HW_CAMERA_WRITE = 0x00020000U,
+ GRALLOC_USAGE_HW_CAMERA_WRITE = 0x00020000U,
/* buffer will be read by the HW camera pipeline */
- GRALLOC_USAGE_HW_CAMERA_READ = 0x00040000U,
+ GRALLOC_USAGE_HW_CAMERA_READ = 0x00040000U,
/* buffer will be used as part of zero-shutter-lag queue */
- GRALLOC_USAGE_HW_CAMERA_ZSL = 0x00060000U,
+ GRALLOC_USAGE_HW_CAMERA_ZSL = 0x00060000U,
/* mask for the camera access values */
- GRALLOC_USAGE_HW_CAMERA_MASK = 0x00060000U,
+ GRALLOC_USAGE_HW_CAMERA_MASK = 0x00060000U,
/* mask for the software usage bit-mask */
- GRALLOC_USAGE_HW_MASK = 0x00071F00U,
+ GRALLOC_USAGE_HW_MASK = 0x00071F00U,
/* buffer will be used as a RenderScript Allocation */
- GRALLOC_USAGE_RENDERSCRIPT = 0x00100000U,
+ GRALLOC_USAGE_RENDERSCRIPT = 0x00100000U,
/* Set by the consumer to indicate to the producer that they may attach a
* buffer that they did not detach from the BufferQueue. Will be filtered
* out by GRALLOC_USAGE_ALLOC_MASK, so gralloc modules will not need to
* handle this flag. */
- GRALLOC_USAGE_FOREIGN_BUFFERS = 0x00200000U,
+ GRALLOC_USAGE_FOREIGN_BUFFERS = 0x00200000U,
/* Mask of all flags which could be passed to a gralloc module for buffer
* allocation. Any flags not in this mask do not need to be handled by
* gralloc modules. */
- GRALLOC_USAGE_ALLOC_MASK = ~(GRALLOC_USAGE_FOREIGN_BUFFERS),
+ GRALLOC_USAGE_ALLOC_MASK = ~(GRALLOC_USAGE_FOREIGN_BUFFERS),
/* implementation-specific private usage flags */
- GRALLOC_USAGE_PRIVATE_0 = 0x10000000U,
- GRALLOC_USAGE_PRIVATE_1 = 0x20000000U,
- GRALLOC_USAGE_PRIVATE_2 = 0x40000000U,
- GRALLOC_USAGE_PRIVATE_3 = 0x80000000U,
- GRALLOC_USAGE_PRIVATE_MASK = 0xF0000000U,
+ GRALLOC_USAGE_PRIVATE_0 = 0x10000000U,
+ GRALLOC_USAGE_PRIVATE_1 = 0x20000000U,
+ GRALLOC_USAGE_PRIVATE_2 = 0x40000000U,
+ GRALLOC_USAGE_PRIVATE_3 = 0x80000000U,
+ GRALLOC_USAGE_PRIVATE_MASK = 0xF0000000U,
};
typedef enum {
@@ -125,8 +125,7 @@
GRALLOC1_CONSUMER_USAGE_CPU_READ_NEVER = 0,
/* 1ULL << 0 */
GRALLOC1_CONSUMER_USAGE_CPU_READ = 1ULL << 1,
- GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN = 1ULL << 2 |
- GRALLOC1_CONSUMER_USAGE_CPU_READ,
+ GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_CONSUMER_USAGE_CPU_READ,
/* 1ULL << 3 */
/* 1ULL << 4 */
/* 1ULL << 5 */
@@ -205,13 +204,11 @@
GRALLOC1_PRODUCER_USAGE_CPU_WRITE_NEVER = 0,
/* 1ULL << 0 */
GRALLOC1_PRODUCER_USAGE_CPU_READ = 1ULL << 1,
- GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN = 1ULL << 2 |
- GRALLOC1_PRODUCER_USAGE_CPU_READ,
+ GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_PRODUCER_USAGE_CPU_READ,
/* 1ULL << 3 */
/* 1ULL << 4 */
GRALLOC1_PRODUCER_USAGE_CPU_WRITE = 1ULL << 5,
- GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 |
- GRALLOC1_PRODUCER_USAGE_CPU_WRITE,
+ GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 | GRALLOC1_PRODUCER_USAGE_CPU_WRITE,
/* 1ULL << 7 */
/* 1ULL << 8 */
GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET = 1ULL << 9,
diff --git a/stream-servers/vulkan/SwapChainStateVk.cpp b/stream-servers/vulkan/SwapChainStateVk.cpp
index 2f5e789..83e9749 100644
--- a/stream-servers/vulkan/SwapChainStateVk.cpp
+++ b/stream-servers/vulkan/SwapChainStateVk.cpp
@@ -72,8 +72,8 @@
}
}
-SwapChainStateVk::SwapChainStateVk(const goldfish_vk::VulkanDispatch &vk, VkDevice vkDevice,
- const VkSwapchainCreateInfoKHR &swapChainCi)
+SwapChainStateVk::SwapChainStateVk(const goldfish_vk::VulkanDispatch& vk, VkDevice vkDevice,
+ const VkSwapchainCreateInfoKHR& swapChainCi)
: m_vk(vk),
m_vkDevice(vkDevice),
m_vkSwapChain(VK_NULL_HANDLE),
@@ -113,7 +113,7 @@
m_vk.vkDestroySwapchainKHR(m_vkDevice, m_vkSwapChain, nullptr);
}
-std::vector<const char *> SwapChainStateVk::getRequiredInstanceExtensions() {
+std::vector<const char*> SwapChainStateVk::getRequiredInstanceExtensions() {
return {
VK_KHR_SURFACE_EXTENSION_NAME,
#ifdef _WIN32
@@ -128,13 +128,13 @@
};
}
-std::vector<const char *> SwapChainStateVk::getRequiredDeviceExtensions() {
+std::vector<const char*> SwapChainStateVk::getRequiredDeviceExtensions() {
return {
VK_KHR_SWAPCHAIN_EXTENSION_NAME,
};
}
-bool SwapChainStateVk::validateQueueFamilyProperties(const goldfish_vk::VulkanDispatch &vk,
+bool SwapChainStateVk::validateQueueFamilyProperties(const goldfish_vk::VulkanDispatch& vk,
VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
uint32_t queueFamilyIndex) {
@@ -175,7 +175,7 @@
VK_CHECK(res);
}
auto iSurfaceFormat =
- std::find_if(formats.begin(), formats.end(), [](const VkSurfaceFormatKHR &format) {
+ std::find_if(formats.begin(), formats.end(), [](const VkSurfaceFormatKHR& format) {
return format.format == k_vkFormat && format.colorSpace == k_vkColorSpace;
});
if (iSurfaceFormat == formats.end()) {
@@ -278,8 +278,8 @@
VkFormat SwapChainStateVk::getFormat() { return k_vkFormat; }
-const std::vector<VkImage> &SwapChainStateVk::getVkImages() const { return m_vkImages; }
+const std::vector<VkImage>& SwapChainStateVk::getVkImages() const { return m_vkImages; }
-const std::vector<VkImageView> &SwapChainStateVk::getVkImageViews() const { return m_vkImageViews; }
+const std::vector<VkImageView>& SwapChainStateVk::getVkImageViews() const { return m_vkImageViews; }
VkSwapchainKHR SwapChainStateVk::getSwapChain() const { return m_vkSwapChain; }
diff --git a/stream-servers/vulkan/SwapChainStateVk.h b/stream-servers/vulkan/SwapChainStateVk.h
index 71066d7..f9c900f 100644
--- a/stream-servers/vulkan/SwapChainStateVk.h
+++ b/stream-servers/vulkan/SwapChainStateVk.h
@@ -32,27 +32,27 @@
class SwapChainStateVk {
public:
- static std::vector<const char *> getRequiredInstanceExtensions();
- static std::vector<const char *> getRequiredDeviceExtensions();
- static bool validateQueueFamilyProperties(const goldfish_vk::VulkanDispatch &, VkPhysicalDevice,
+ static std::vector<const char*> getRequiredInstanceExtensions();
+ static std::vector<const char*> getRequiredDeviceExtensions();
+ static bool validateQueueFamilyProperties(const goldfish_vk::VulkanDispatch&, VkPhysicalDevice,
VkSurfaceKHR, uint32_t queueFamilyIndex);
static std::optional<SwapchainCreateInfoWrapper> createSwapChainCi(
const goldfish_vk::VulkanDispatch&, VkSurfaceKHR, VkPhysicalDevice, uint32_t width,
uint32_t height, const std::unordered_set<uint32_t>& queueFamilyIndices);
- explicit SwapChainStateVk(const goldfish_vk::VulkanDispatch &, VkDevice,
- const VkSwapchainCreateInfoKHR &);
+ explicit SwapChainStateVk(const goldfish_vk::VulkanDispatch&, VkDevice,
+ const VkSwapchainCreateInfoKHR&);
~SwapChainStateVk();
VkFormat getFormat();
- const std::vector<VkImage> &getVkImages() const;
- const std::vector<VkImageView> &getVkImageViews() const;
+ const std::vector<VkImage>& getVkImages() const;
+ const std::vector<VkImageView>& getVkImageViews() const;
VkSwapchainKHR getSwapChain() const;
private:
const static VkFormat k_vkFormat = VK_FORMAT_B8G8R8A8_UNORM;
const static VkColorSpaceKHR k_vkColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
- const goldfish_vk::VulkanDispatch &m_vk;
+ const goldfish_vk::VulkanDispatch& m_vk;
VkDevice m_vkDevice;
VkSwapchainKHR m_vkSwapChain;
std::vector<VkImage> m_vkImages;
diff --git a/stream-servers/vulkan/VkAndroidNativeBuffer.cpp b/stream-servers/vulkan/VkAndroidNativeBuffer.cpp
index 9680668..2cc9e54 100644
--- a/stream-servers/vulkan/VkAndroidNativeBuffer.cpp
+++ b/stream-servers/vulkan/VkAndroidNativeBuffer.cpp
@@ -26,16 +26,18 @@
#include "stream-servers/FrameBuffer.h"
#include "vulkan/vk_enum_string_helper.h"
-#define VK_ANB_ERR(fmt,...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VK_ANB_ERR(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#define ENABLE_VK_ANB_DEBUG 0
#if ENABLE_VK_ANB_DEBUG
-#define VK_ANB_DEBUG(fmt,...) fprintf(stderr, "vk-anb-debug: %s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
-#define VK_ANB_DEBUG_OBJ(obj, fmt,...) fprintf(stderr, "vk-anb-debug: %s:%d:%p " fmt "\n", __func__, __LINE__, obj, ##__VA_ARGS__);
+#define VK_ANB_DEBUG(fmt, ...) \
+ fprintf(stderr, "vk-anb-debug: %s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VK_ANB_DEBUG_OBJ(obj, fmt, ...) \
+ fprintf(stderr, "vk-anb-debug: %s:%d:%p " fmt "\n", __func__, __LINE__, obj, ##__VA_ARGS__);
#else
-#define VK_ANB_DEBUG(fmt,...)
-#define VK_ANB_DEBUG_OBJ(obj, fmt,...)
+#define VK_ANB_DEBUG(fmt, ...)
+#define VK_ANB_DEBUG_OBJ(obj, fmt, ...)
#endif
using android::base::AutoLock;
@@ -54,7 +56,9 @@
VkFence fence = VK_NULL_HANDLE;
if (mAvailableFences.empty()) {
VkFenceCreateInfo fenceCreateInfo = {
- VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ 0,
+ 0,
};
mVk->vkCreateFence(mDevice, &fenceCreateInfo, nullptr, &fence);
VK_ANB_DEBUG("no fences in pool, created %p", fence);
@@ -97,10 +101,8 @@
mAvailableFences.push_back(fence);
}
-bool parseAndroidNativeBufferInfo(
- const VkImageCreateInfo* pCreateInfo,
- AndroidNativeBufferInfo* info_out) {
-
+bool parseAndroidNativeBufferInfo(const VkImageCreateInfo* pCreateInfo,
+ AndroidNativeBufferInfo* info_out) {
// Look through the extension chain.
const void* curr_pNext = pCreateInfo->pNext;
if (!curr_pNext) return false;
@@ -110,15 +112,12 @@
return structType == VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID;
}
-VkResult prepareAndroidNativeBufferImage(
- VulkanDispatch* vk,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkNativeBufferANDROID* nativeBufferANDROID,
- const VkAllocationCallbacks* pAllocator,
- const VkPhysicalDeviceMemoryProperties* memProps,
- AndroidNativeBufferInfo* out) {
-
+VkResult prepareAndroidNativeBufferImage(VulkanDispatch* vk, VkDevice device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkNativeBufferANDROID* nativeBufferANDROID,
+ const VkAllocationCallbacks* pAllocator,
+ const VkPhysicalDeviceMemoryProperties* memProps,
+ AndroidNativeBufferInfo* out) {
out->vk = vk;
out->device = device;
out->vkFormat = pCreateInfo->format;
@@ -126,23 +125,20 @@
out->usage = pCreateInfo->usage;
for (uint32_t i = 0; i < pCreateInfo->queueFamilyIndexCount; ++i) {
- out->queueFamilyIndices.push_back(
- pCreateInfo->pQueueFamilyIndices[i]);
+ out->queueFamilyIndices.push_back(pCreateInfo->pQueueFamilyIndices[i]);
}
out->format = nativeBufferANDROID->format;
out->stride = nativeBufferANDROID->stride;
out->colorBufferHandle = *(nativeBufferANDROID->handle);
- bool colorBufferVulkanCompatible =
- isColorBufferVulkanCompatible(out->colorBufferHandle);
+ bool colorBufferVulkanCompatible = isColorBufferVulkanCompatible(out->colorBufferHandle);
bool externalMemoryCompatible = false;
auto emu = getGlobalVkEmulation();
if (emu && emu->live) {
- externalMemoryCompatible =
- emu->deviceInfo.supportsExternalMemory;
+ externalMemoryCompatible = emu->deviceInfo.supportsExternalMemory;
}
if (colorBufferVulkanCompatible && externalMemoryCompatible &&
@@ -161,15 +157,14 @@
if (out->externallyBacked) {
// Create the image with extension structure about external backing.
VkExternalMemoryImageCreateInfo extImageCi = {
- VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, 0,
+ VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
infoNoNative.pNext = &extImageCi;
- VkResult createResult =
- vk->vkCreateImage(
- device, &infoNoNative, pAllocator, &out->image);
+ VkResult createResult = vk->vkCreateImage(device, &infoNoNative, pAllocator, &out->image);
if (createResult != VK_SUCCESS) return createResult;
@@ -177,8 +172,7 @@
const auto& cbInfo = getColorBufferInfo(out->colorBufferHandle);
const auto& memInfo = cbInfo.memory;
- vk->vkGetImageMemoryRequirements(
- device, out->image, &out->memReqs);
+ vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);
if (out->memReqs.size < memInfo.size) {
out->memReqs.size = memInfo.size;
@@ -190,21 +184,17 @@
}
} else {
- VkResult createResult =
- vk->vkCreateImage(
- device, &infoNoNative, pAllocator, &out->image);
+ VkResult createResult = vk->vkCreateImage(device, &infoNoNative, pAllocator, &out->image);
if (createResult != VK_SUCCESS) return createResult;
- vk->vkGetImageMemoryRequirements(
- device, out->image, &out->memReqs);
+ vk->vkGetImageMemoryRequirements(device, out->image, &out->memReqs);
uint32_t imageMemoryTypeIndex = 0;
bool imageMemoryTypeIndexFound = false;
for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
- bool supported =
- out->memReqs.memoryTypeBits & (1 << i);
+ bool supported = out->memReqs.memoryTypeBits & (1 << i);
if (supported) {
imageMemoryTypeIndex = i;
imageMemoryTypeIndexFound = true;
@@ -213,8 +203,9 @@
}
if (!imageMemoryTypeIndexFound) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not obtain "
- "image memory type index");
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not obtain "
+ "image memory type index");
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
@@ -222,27 +213,26 @@
out->imageMemoryTypeIndex = imageMemoryTypeIndex;
VkMemoryAllocateInfo allocInfo = {
- VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
+ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+ 0,
out->memReqs.size,
out->imageMemoryTypeIndex,
};
- if (VK_SUCCESS !=
- vk->vkAllocateMemory(
- device, &allocInfo, nullptr,
- &out->imageMemory)) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not allocate "
- "image memory. requested size: %zu", (size_t)(out->memReqs.size));
+ if (VK_SUCCESS != vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->imageMemory)) {
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not allocate "
+ "image memory. requested size: %zu",
+ (size_t)(out->memReqs.size));
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
}
- if (VK_SUCCESS !=
- vk->vkBindImageMemory(
- device, out->image, out->imageMemory, 0)) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not bind "
- "image memory.");
+ if (VK_SUCCESS != vk->vkBindImageMemory(device, out->image, out->imageMemory, 0)) {
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not bind "
+ "image memory.");
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
@@ -252,8 +242,7 @@
// work on Windows with NVIDIA.
{
bool stagingIndexRes =
- getStagingMemoryTypeIndex(
- vk, device, memProps, &out->stagingMemoryTypeIndex);
+ getStagingMemoryTypeIndex(vk, device, memProps, &out->stagingMemoryTypeIndex);
if (!stagingIndexRes) {
VK_ANB_ERR(
@@ -264,13 +253,13 @@
}
VkMemoryAllocateInfo allocInfo = {
- VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, 0,
+ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+ 0,
out->memReqs.size,
out->stagingMemoryTypeIndex,
};
- VkResult res = vk->vkAllocateMemory(device, &allocInfo, nullptr,
- &out->stagingMemory);
+ VkResult res = vk->vkAllocateMemory(device, &allocInfo, nullptr, &out->stagingMemory);
if (VK_SUCCESS != res) {
VK_ANB_ERR(
"VK_ANDROID_native_buffer: could not allocate staging memory. "
@@ -294,36 +283,32 @@
stagingBufferCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
stagingBufferCreateInfo.queueFamilyIndexCount =
static_cast<uint32_t>(out->queueFamilyIndices.size());
- stagingBufferCreateInfo.pQueueFamilyIndices =
- out->queueFamilyIndices.data();
+ stagingBufferCreateInfo.pQueueFamilyIndices = out->queueFamilyIndices.data();
}
if (VK_SUCCESS !=
- vk->vkCreateBuffer(
- device, &stagingBufferCreateInfo, nullptr,
- &out->stagingBuffer)) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not create "
- "staging buffer.");
+ vk->vkCreateBuffer(device, &stagingBufferCreateInfo, nullptr, &out->stagingBuffer)) {
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not create "
+ "staging buffer.");
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
if (VK_SUCCESS !=
- vk->vkBindBufferMemory(
- device, out->stagingBuffer, out->stagingMemory, 0)) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not bind "
- "staging buffer to staging memory.");
+ vk->vkBindBufferMemory(device, out->stagingBuffer, out->stagingMemory, 0)) {
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not bind "
+ "staging buffer to staging memory.");
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
- if (VK_SUCCESS !=
- vk->vkMapMemory(
- device, out->stagingMemory, 0,
- out->memReqs.size, 0,
- (void**)&out->mappedStagingPtr)) {
- VK_ANB_ERR("VK_ANDROID_native_buffer: could not map "
- "staging buffer.");
+ if (VK_SUCCESS != vk->vkMapMemory(device, out->stagingMemory, 0, out->memReqs.size, 0,
+ (void**)&out->mappedStagingPtr)) {
+ VK_ANB_ERR(
+ "VK_ANDROID_native_buffer: could not map "
+ "staging buffer.");
teardownAndroidNativeBufferImage(vk, out);
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
@@ -335,8 +320,7 @@
return VK_SUCCESS;
}
-void teardownAndroidNativeBufferImage(
- VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo) {
+void teardownAndroidNativeBufferImage(VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo) {
auto device = anbInfo->device;
auto image = anbInfo->image;
@@ -371,55 +355,41 @@
anbInfo->qsriWaitFencePool = nullptr;
}
-void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage,
- int* usage_out) {
+void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage, int* usage_out) {
// Pick some default flexible values for gralloc usage for now.
(void)format;
(void)imageUsage;
- *usage_out =
- GRALLOC_USAGE_SW_READ_OFTEN |
- GRALLOC_USAGE_SW_WRITE_OFTEN |
- GRALLOC_USAGE_HW_RENDER |
- GRALLOC_USAGE_HW_TEXTURE;
+ *usage_out = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+ GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
}
// Taken from Android GrallocUsageConversion.h
void getGralloc1Usage(VkFormat format, VkImageUsageFlags imageUsage,
VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
- uint64_t* consumerUsage_out,
- uint64_t* producerUsage_out) {
+ uint64_t* consumerUsage_out, uint64_t* producerUsage_out) {
// Pick some default flexible values for gralloc usage for now.
(void)format;
(void)imageUsage;
(void)swapchainImageUsage;
- constexpr int usage =
- GRALLOC_USAGE_SW_READ_OFTEN |
- GRALLOC_USAGE_SW_WRITE_OFTEN |
- GRALLOC_USAGE_HW_RENDER |
- GRALLOC_USAGE_HW_TEXTURE;
+ constexpr int usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+ GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
constexpr uint64_t PRODUCER_MASK =
- GRALLOC1_PRODUCER_USAGE_CPU_READ |
- /* GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN | */
- GRALLOC1_PRODUCER_USAGE_CPU_WRITE |
- /* GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN | */
- GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET |
- GRALLOC1_PRODUCER_USAGE_PROTECTED |
- GRALLOC1_PRODUCER_USAGE_CAMERA |
- GRALLOC1_PRODUCER_USAGE_VIDEO_DECODER |
- GRALLOC1_PRODUCER_USAGE_SENSOR_DIRECT_DATA;
+ GRALLOC1_PRODUCER_USAGE_CPU_READ |
+ /* GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN | */
+ GRALLOC1_PRODUCER_USAGE_CPU_WRITE |
+ /* GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN | */
+ GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET | GRALLOC1_PRODUCER_USAGE_PROTECTED |
+ GRALLOC1_PRODUCER_USAGE_CAMERA | GRALLOC1_PRODUCER_USAGE_VIDEO_DECODER |
+ GRALLOC1_PRODUCER_USAGE_SENSOR_DIRECT_DATA;
constexpr uint64_t CONSUMER_MASK =
- GRALLOC1_CONSUMER_USAGE_CPU_READ |
- /* GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN | */
- GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE |
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER |
- GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET |
- GRALLOC1_CONSUMER_USAGE_CURSOR |
- GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER |
- GRALLOC1_CONSUMER_USAGE_CAMERA |
- GRALLOC1_CONSUMER_USAGE_RENDERSCRIPT |
- GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER;
+ GRALLOC1_CONSUMER_USAGE_CPU_READ |
+ /* GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN | */
+ GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE | GRALLOC1_CONSUMER_USAGE_HWCOMPOSER |
+ GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET | GRALLOC1_CONSUMER_USAGE_CURSOR |
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER | GRALLOC1_CONSUMER_USAGE_CAMERA |
+ GRALLOC1_CONSUMER_USAGE_RENDERSCRIPT | GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER;
*producerUsage_out = static_cast<uint64_t>(usage) & PRODUCER_MASK;
*consumerUsage_out = static_cast<uint64_t>(usage) & CONSUMER_MASK;
@@ -436,58 +406,40 @@
}
}
-void AndroidNativeBufferInfo::QueueState::setup(
- VulkanDispatch* vk,
- VkDevice device,
- VkQueue queueIn,
- uint32_t queueFamilyIndexIn,
- android::base::Lock* queueLockIn) {
-
+void AndroidNativeBufferInfo::QueueState::setup(VulkanDispatch* vk, VkDevice device,
+ VkQueue queueIn, uint32_t queueFamilyIndexIn,
+ android::base::Lock* queueLockIn) {
queue = queueIn;
queueFamilyIndex = queueFamilyIndexIn;
lock = queueLockIn;
VkCommandPoolCreateInfo poolCreateInfo = {
- VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0,
+ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+ 0,
VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
queueFamilyIndex,
};
- vk->vkCreateCommandPool(
- device,
- &poolCreateInfo,
- nullptr,
- &pool);
+ vk->vkCreateCommandPool(device, &poolCreateInfo, nullptr, &pool);
VkCommandBufferAllocateInfo cbAllocInfo = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
- pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0, pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
};
- vk->vkAllocateCommandBuffers(
- device,
- &cbAllocInfo,
- &cb);
+ vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb);
- vk->vkAllocateCommandBuffers(
- device,
- &cbAllocInfo,
- &cb2);
+ vk->vkAllocateCommandBuffers(device, &cbAllocInfo, &cb2);
VkFenceCreateInfo fenceCreateInfo = {
- VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ 0,
+ 0,
};
- vk->vkCreateFence(
- device,
- &fenceCreateInfo,
- nullptr,
- &fence);
+ vk->vkCreateFence(device, &fenceCreateInfo, nullptr, &fence);
}
-void AndroidNativeBufferInfo::QueueState::teardown(
- VulkanDispatch* vk, VkDevice device) {
-
+void AndroidNativeBufferInfo::QueueState::teardown(VulkanDispatch* vk, VkDevice device) {
if (queue) {
AutoLock qlock(*lock);
vk->vkQueueWaitIdle(queue);
@@ -504,38 +456,34 @@
queueFamilyIndex = 0;
}
-VkResult setAndroidNativeImageSemaphoreSignaled(
- VulkanDispatch* vk,
- VkDevice device,
- VkQueue defaultQueue,
- uint32_t defaultQueueFamilyIndex,
- Lock* defaultQueueLock,
- VkSemaphore semaphore,
- VkFence fence,
- AndroidNativeBufferInfo* anbInfo) {
-
+VkResult setAndroidNativeImageSemaphoreSignaled(VulkanDispatch* vk, VkDevice device,
+ VkQueue defaultQueue,
+ uint32_t defaultQueueFamilyIndex,
+ Lock* defaultQueueLock, VkSemaphore semaphore,
+ VkFence fence, AndroidNativeBufferInfo* anbInfo) {
auto fb = FrameBuffer::getFB();
- bool firstTimeSetup =
- !anbInfo->everSynced &&
- !anbInfo->everAcquired;
+ bool firstTimeSetup = !anbInfo->everSynced && !anbInfo->everAcquired;
anbInfo->everAcquired = true;
if (firstTimeSetup) {
VkSubmitInfo submitInfo = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
- 0, nullptr, nullptr,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ 0,
+ 0,
+ nullptr,
+ nullptr,
+ 0,
+ nullptr,
(uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
};
AutoLock qlock(*defaultQueueLock);
vk->vkQueueSubmit(defaultQueue, 1, &submitInfo, fence);
} else {
-
const AndroidNativeBufferInfo::QueueState& queueState =
- anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
+ anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
// If we used the Vulkan image without copying it back
// to the CPU, reset the layout to PRESENT.
@@ -590,12 +538,16 @@
// TODO(kaiyili): initiate ownership transfer from DisplayVk here
vk->vkQueueSubmit(queueState.queue, 1, &submitInfo, fence);
} else {
- const AndroidNativeBufferInfo::QueueState&
- queueState = anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
+ const AndroidNativeBufferInfo::QueueState& queueState =
+ anbInfo->queueStates[anbInfo->lastUsedQueueFamilyIndex];
VkSubmitInfo submitInfo = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
- 0, nullptr, nullptr,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ 0,
+ 0,
+ nullptr,
+ nullptr,
+ 0,
+ nullptr,
(uint32_t)(semaphore == VK_NULL_HANDLE ? 0 : 1),
semaphore == VK_NULL_HANDLE ? nullptr : &semaphore,
};
@@ -609,16 +561,10 @@
static constexpr uint64_t kTimeoutNs = 3ULL * 1000000000ULL;
-VkResult syncImageToColorBuffer(
- VulkanDispatch* vk,
- uint32_t queueFamilyIndex,
- VkQueue queue,
- Lock* queueLock,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- int* pNativeFenceFd,
- std::shared_ptr<AndroidNativeBufferInfo> anbInfo) {
-
+VkResult syncImageToColorBuffer(VulkanDispatch* vk, uint32_t queueFamilyIndex, VkQueue queue,
+ Lock* queueLock, uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, int* pNativeFenceFd,
+ std::shared_ptr<AndroidNativeBufferInfo> anbInfo) {
auto anbInfoPtr = anbInfo.get();
auto fb = FrameBuffer::getFB();
fb->lock();
@@ -637,13 +583,13 @@
auto& queueState = anbInfo->queueStates[queueFamilyIndex];
if (!queueState.queue) {
- queueState.setup(
- vk, anbInfo->device, queue, queueFamilyIndex, queueLock);
+ queueState.setup(vk, anbInfo->device, queue, queueFamilyIndex, queueLock);
}
// Record our synchronization commands.
VkCommandBufferBeginInfo beginInfo = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ 0,
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
nullptr /* no inheritance info */,
};
@@ -684,7 +630,8 @@
// We definitely need to transition the image to
// VK_TRANSFER_SRC_OPTIMAL and back.
VkImageMemoryBarrier presentToTransferSrc = {
- VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
+ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ 0,
0,
VK_ACCESS_HOST_READ_BIT,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
@@ -694,18 +641,16 @@
anbInfo->image,
{
VK_IMAGE_ASPECT_COLOR_BIT,
- 0, 1, 0, 1,
+ 0,
+ 1,
+ 0,
+ 1,
},
};
- vk->vkCmdPipelineBarrier(
- queueState.cb,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- 0,
- 0, nullptr,
- 0, nullptr,
- 1, &presentToTransferSrc);
+ vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &presentToTransferSrc);
VkBufferImageCopy region = {
0 /* buffer offset */,
@@ -713,22 +658,22 @@
anbInfo->extent.height,
{
VK_IMAGE_ASPECT_COLOR_BIT,
- 0, 0, 1,
+ 0,
+ 0,
+ 1,
},
- { 0, 0, 0 },
+ {0, 0, 0},
anbInfo->extent,
};
- vk->vkCmdCopyImageToBuffer(
- queueState.cb,
- anbInfo->image,
- VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- anbInfo->stagingBuffer,
-            1, &region);
+ vk->vkCmdCopyImageToBuffer(queueState.cb, anbInfo->image,
+ VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, anbInfo->stagingBuffer, 1,
+                                   &region);
// Transfer back to present src.
VkImageMemoryBarrier backToPresentSrc = {
- VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
+ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ 0,
VK_ACCESS_HOST_READ_BIT,
0,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
@@ -738,19 +683,16 @@
anbInfo->image,
{
VK_IMAGE_ASPECT_COLOR_BIT,
- 0, 1, 0, 1,
+ 0,
+ 1,
+ 0,
+ 1,
},
};
- vk->vkCmdPipelineBarrier(
- queueState.cb,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- 0,
- 0, nullptr,
- 0, nullptr,
- 1, &backToPresentSrc);
-
+ vk->vkCmdPipelineBarrier(queueState.cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &backToPresentSrc);
}
vk->vkEndCommandBuffer(queueState.cb);
@@ -759,11 +701,15 @@
pipelineStageFlags.resize(waitSemaphoreCount, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
VkSubmitInfo submitInfo = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
- waitSemaphoreCount, pWaitSemaphores,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ 0,
+ waitSemaphoreCount,
+ pWaitSemaphores,
pipelineStageFlags.data(),
- 1, &queueState.cb,
- 0, nullptr,
+ 1,
+ &queueState.cb,
+ 0,
+ nullptr,
};
// TODO(kaiyili): initiate ownership transfer to DisplayVk here.
@@ -806,13 +752,10 @@
waitForQsriFenceTask();
VkMappedMemoryRange toInvalidate = {
- VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
- anbInfo->stagingMemory,
- 0, VK_WHOLE_SIZE,
+ VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0, anbInfo->stagingMemory, 0, VK_WHOLE_SIZE,
};
- vk->vkInvalidateMappedMemoryRanges(
- anbInfo->device, 1, &toInvalidate);
+ vk->vkInvalidateMappedMemoryRanges(anbInfo->device, 1, &toInvalidate);
uint32_t colorBufferHandle = anbInfo->colorBufferHandle;
@@ -832,15 +775,13 @@
break;
}
- FrameBuffer::getFB()->
- replaceColorBufferContents(
- colorBufferHandle,
- anbInfo->mappedStagingPtr,
- bpp * anbInfo->extent.width * anbInfo->extent.height);
+ FrameBuffer::getFB()->replaceColorBufferContents(
+ colorBufferHandle, anbInfo->mappedStagingPtr,
+ bpp * anbInfo->extent.width * anbInfo->extent.height);
anbInfo->qsriTimeline->signalNextPresentAndPoll();
}
return VK_SUCCESS;
}
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VkAndroidNativeBuffer.h b/stream-servers/vulkan/VkAndroidNativeBuffer.h
index 7fb5399..844c8f2 100644
--- a/stream-servers/vulkan/VkAndroidNativeBuffer.h
+++ b/stream-servers/vulkan/VkAndroidNativeBuffer.h
@@ -39,9 +39,7 @@
// This is to be refactored to move to external memory only once we get that
// working.
-void teardownAndroidNativeBufferImage(
- VulkanDispatch* vk,
- AndroidNativeBufferInfo* anbInfo);
+void teardownAndroidNativeBufferImage(VulkanDispatch* vk, AndroidNativeBufferInfo* anbInfo);
struct AndroidNativeBufferInfo {
~AndroidNativeBufferInfo() {
@@ -95,12 +93,8 @@
VkFence fence = VK_NULL_HANDLE;
android::base::Lock* lock = nullptr;
uint32_t queueFamilyIndex = 0;
- void setup(
- VulkanDispatch* vk,
- VkDevice device,
- VkQueue queue,
- uint32_t queueFamilyIndex,
- android::base::Lock* queueLock);
+ void setup(VulkanDispatch* vk, VkDevice device, VkQueue queue, uint32_t queueFamilyIndex,
+ android::base::Lock* queueLock);
void teardown(VulkanDispatch* vk, VkDevice device);
};
// We keep one QueueState for each queue family index used by the guest
@@ -147,40 +141,28 @@
std::unique_ptr<VkQsriTimeline> qsriTimeline = nullptr;
};
-VkResult prepareAndroidNativeBufferImage(
- VulkanDispatch* vk,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkNativeBufferANDROID* nativeBufferANDROID,
- const VkAllocationCallbacks* pAllocator,
- const VkPhysicalDeviceMemoryProperties* memProps,
- AndroidNativeBufferInfo* out);
+VkResult prepareAndroidNativeBufferImage(VulkanDispatch* vk, VkDevice device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkNativeBufferANDROID* nativeBufferANDROID,
+ const VkAllocationCallbacks* pAllocator,
+ const VkPhysicalDeviceMemoryProperties* memProps,
+ AndroidNativeBufferInfo* out);
-void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage,
- int* usage_out);
+void getGralloc0Usage(VkFormat format, VkImageUsageFlags imageUsage, int* usage_out);
void getGralloc1Usage(VkFormat format, VkImageUsageFlags imageUsage,
VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
- uint64_t* consumerUsage_out,
- uint64_t* producerUsage_out);
+ uint64_t* consumerUsage_out, uint64_t* producerUsage_out);
-VkResult setAndroidNativeImageSemaphoreSignaled(
- VulkanDispatch* vk,
- VkDevice device,
- VkQueue defaultQueue,
- uint32_t defaultQueueFamilyIndex,
- android::base::Lock* defaultQueueLock,
- VkSemaphore semaphore,
- VkFence fence,
- AndroidNativeBufferInfo* anbInfo);
+VkResult setAndroidNativeImageSemaphoreSignaled(VulkanDispatch* vk, VkDevice device,
+ VkQueue defaultQueue,
+ uint32_t defaultQueueFamilyIndex,
+ android::base::Lock* defaultQueueLock,
+ VkSemaphore semaphore, VkFence fence,
+ AndroidNativeBufferInfo* anbInfo);
-VkResult syncImageToColorBuffer(
- VulkanDispatch* vk,
- uint32_t queueFamilyIndex,
- VkQueue queue,
- android::base::Lock* queueLock,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- int* pNativeFenceFd,
- std::shared_ptr<AndroidNativeBufferInfo> anbInfo);
+VkResult syncImageToColorBuffer(VulkanDispatch* vk, uint32_t queueFamilyIndex, VkQueue queue,
+ android::base::Lock* queueLock, uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, int* pNativeFenceFd,
+ std::shared_ptr<AndroidNativeBufferInfo> anbInfo);
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VkCommonOperations.cpp b/stream-servers/vulkan/VkCommonOperations.cpp
index af413f4..6f5808a 100644
--- a/stream-servers/vulkan/VkCommonOperations.cpp
+++ b/stream-servers/vulkan/VkCommonOperations.cpp
@@ -48,9 +48,13 @@
#include <CoreFoundation/CoreFoundation.h>
#endif
-#define VK_COMMON_ERROR(fmt,...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
-#define VK_COMMON_LOG(fmt,...) fprintf(stdout, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
-#define VK_COMMON_VERBOSE(fmt,...) if (android::base::isVerboseLogging()) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VK_COMMON_ERROR(fmt, ...) \
+ fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VK_COMMON_LOG(fmt, ...) \
+ fprintf(stdout, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VK_COMMON_VERBOSE(fmt, ...) \
+ if (android::base::isVerboseLogging()) \
+ fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
using android::base::AutoLock;
using android::base::Optional;
@@ -70,8 +74,7 @@
} // namespace
-static StaticMap<VkDevice, uint32_t>
-sKnownStagingTypeIndices;
+static StaticMap<VkDevice, uint32_t> sKnownStagingTypeIndices;
static android::base::StaticLock sVkEmulationLock;
@@ -79,24 +82,19 @@
#ifdef _WIN32
auto myProcessHandle = GetCurrentProcess();
VK_EXT_MEMORY_HANDLE res;
- DuplicateHandle(
- myProcessHandle, h, // source process and handle
- myProcessHandle, &res, // target process and pointer to handle
- 0 /* desired access (ignored) */,
- true /* inherit */,
- DUPLICATE_SAME_ACCESS /* same access option */);
+ DuplicateHandle(myProcessHandle, h, // source process and handle
+ myProcessHandle, &res, // target process and pointer to handle
+ 0 /* desired access (ignored) */, true /* inherit */,
+ DUPLICATE_SAME_ACCESS /* same access option */);
return res;
#else
return dup(h);
#endif
}
-bool getStagingMemoryTypeIndex(
- VulkanDispatch* vk,
- VkDevice device,
- const VkPhysicalDeviceMemoryProperties* memProps,
- uint32_t* typeIndex) {
-
+bool getStagingMemoryTypeIndex(VulkanDispatch* vk, VkDevice device,
+ const VkPhysicalDeviceMemoryProperties* memProps,
+ uint32_t* typeIndex) {
auto res = sKnownStagingTypeIndices.get(device);
if (res) {
@@ -105,16 +103,18 @@
}
VkBufferCreateInfo testCreateInfo = {
- VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+ 0,
+ 0,
4096,
// To be a staging buffer, it must support being
// both a transfer src and dst.
- VK_BUFFER_USAGE_TRANSFER_DST_BIT |
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
// TODO: See if buffers over shared queues need to be
// considered separately
VK_SHARING_MODE_EXCLUSIVE,
- 0, nullptr,
+ 0,
+ nullptr,
};
VkBuffer testBuffer;
@@ -140,10 +140,8 @@
for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
const auto& typeInfo = memProps->memoryTypes[i];
- bool hostVisible =
- typeInfo.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- bool hostCached =
- typeInfo.propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+ bool hostVisible = typeInfo.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ bool hostCached = typeInfo.propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
bool allowedInBuffer = (1 << i) & memReqs.memoryTypeBits;
if (hostVisible && hostCached && allowedInBuffer) {
foundSuitableStagingMemoryType = true;
@@ -156,18 +154,15 @@
if (!foundSuitableStagingMemoryType) {
std::stringstream ss;
- ss <<
- "Could not find suitable memory type index " <<
- "for staging buffer. Memory type bits: " <<
- std::hex << memReqs.memoryTypeBits << "\n" <<
- "Available host visible memory type indices:" << "\n";
+ ss << "Could not find suitable memory type index "
+ << "for staging buffer. Memory type bits: " << std::hex << memReqs.memoryTypeBits << "\n"
+ << "Available host visible memory type indices:"
+ << "\n";
for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
- if (memProps->memoryTypes[i].propertyFlags &
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
+ if (memProps->memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
ss << "Host visible memory type index: %u" << i << "\n";
}
- if (memProps->memoryTypes[i].propertyFlags &
- VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
+ if (memProps->memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
ss << "Host cached memory type index: %u" << i << "\n";
}
}
@@ -185,10 +180,8 @@
static VkEmulation* sVkEmulation = nullptr;
-static bool extensionsSupported(
- const std::vector<VkExtensionProperties>& currentProps,
- const std::vector<const char*>& wantedExtNames) {
-
+static bool extensionsSupported(const std::vector<VkExtensionProperties>& currentProps,
+ const std::vector<const char*>& wantedExtNames) {
std::vector<bool> foundExts(wantedExtNames.size(), false);
for (uint32_t i = 0; i < currentProps.size(); ++i) {
@@ -220,20 +213,17 @@
// not needed.
// Precondition: sVkEmulation instance has been created and ext memory caps known.
// Returns false if the query failed.
-static bool getImageFormatExternalMemorySupportInfo(
- VulkanDispatch* vk,
- VkPhysicalDevice physdev,
- VkEmulation::ImageSupportInfo* info) {
-
+static bool getImageFormatExternalMemorySupportInfo(VulkanDispatch* vk, VkPhysicalDevice physdev,
+ VkEmulation::ImageSupportInfo* info) {
// Currently there is nothing special we need to do about
// VkFormatProperties2, so just use the normal version
// and put it in the format2 struct.
VkFormatProperties outFormatProps;
- vk->vkGetPhysicalDeviceFormatProperties(
- physdev, info->format, &outFormatProps);
+ vk->vkGetPhysicalDeviceFormatProperties(physdev, info->format, &outFormatProps);
info->formatProps2 = {
- VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, 0,
+ VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+ 0,
outFormatProps,
};
@@ -243,8 +233,8 @@
VkImageFormatProperties outImageFormatProps;
VkResult res = vk->vkGetPhysicalDeviceImageFormatProperties(
- physdev, info->format, info->type, info->tiling,
- info->usageFlags, info->createFlags, &outImageFormatProps);
+ physdev, info->format, info->type, info->tiling, info->usageFlags, info->createFlags,
+ &outImageFormatProps);
if (res != VK_SUCCESS) {
if (res == VK_ERROR_FORMAT_NOT_SUPPORTED) {
@@ -255,8 +245,8 @@
"%s: vkGetPhysicalDeviceImageFormatProperties query "
"failed with %d "
"for format 0x%x type 0x%x usage 0x%x flags 0x%x\n",
- __func__, res, info->format, info->type,
- info->usageFlags, info->createFlags);
+ __func__, res, info->format, info->type, info->usageFlags,
+ info->createFlags);
return false;
}
}
@@ -264,7 +254,8 @@
info->supported = true;
info->imageFormatProps2 = {
- VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, 0,
+ VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+ 0,
outImageFormatProps,
};
@@ -279,14 +270,19 @@
}
VkPhysicalDeviceExternalImageFormatInfo extInfo = {
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, 0,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
VkPhysicalDeviceImageFormatInfo2 formatInfo2 = {
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, &extInfo,
- info->format, info->type, info->tiling,
- info->usageFlags, info->createFlags,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+ &extInfo,
+ info->format,
+ info->type,
+ info->tiling,
+ info->usageFlags,
+ info->createFlags,
};
VkExternalImageFormatProperties outExternalProps = {
@@ -299,19 +295,17 @@
},
};
- VkImageFormatProperties2 outProps2 = {
- VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, &outExternalProps,
- {
- { 0, 0, 0},
- 0, 0,
- 1, 0,
- }
- };
+ VkImageFormatProperties2 outProps2 = {VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+ &outExternalProps,
+ {
+ {0, 0, 0},
+ 0,
+ 0,
+ 1,
+ 0,
+ }};
- VkResult res = sVkEmulation->getImageFormatProperties2Func(
- physdev,
- &formatInfo2,
- &outProps2);
+ VkResult res = sVkEmulation->getImageFormatProperties2Func(physdev, &formatInfo2, &outProps2);
if (res != VK_SUCCESS) {
if (res == VK_ERROR_FORMAT_NOT_SUPPORTED) {
@@ -322,8 +316,7 @@
"%s: vkGetPhysicalDeviceImageFormatProperties2KHR query "
"failed "
"for format 0x%x type 0x%x usage 0x%x flags 0x%x\n",
- __func__, info->format, info->type, info->usageFlags,
- info->createFlags);
+ __func__, info->format, info->type, info->usageFlags, info->createFlags);
return false;
}
}
@@ -342,10 +335,9 @@
VkExternalMemoryHandleTypeFlags compatibleHandleTypes =
outExternalProps.externalMemoryProperties.compatibleHandleTypes;
- info->supportsExternalMemory =
- (VK_EXT_MEMORY_HANDLE_TYPE_BIT & compatibleHandleTypes) &&
- (VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT & featureFlags) &&
- (VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT & featureFlags);
+ info->supportsExternalMemory = (VK_EXT_MEMORY_HANDLE_TYPE_BIT & compatibleHandleTypes) &&
+ (VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT & featureFlags) &&
+ (VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT & featureFlags);
info->requiresDedicatedAllocation =
(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT & featureFlags);
@@ -444,10 +436,8 @@
};
std::vector<VkImageUsageFlags> usageFlags = {
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
- VK_IMAGE_USAGE_SAMPLED_BIT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+ VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
VK_IMAGE_USAGE_TRANSFER_DST_BIT,
};
@@ -524,13 +514,11 @@
bool externalMemoryCapabilitiesSupported =
extensionsSupported(exts, externalMemoryInstanceExtNames);
- bool moltenVKSupported = (vk->vkGetMTLTextureMVK != nullptr) &&
- (vk->vkSetMTLTextureMVK != nullptr);
+ bool moltenVKSupported =
+ (vk->vkGetMTLTextureMVK != nullptr) && (vk->vkSetMTLTextureMVK != nullptr);
VkInstanceCreateInfo instCi = {
- VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
- 0, 0, nullptr, 0, nullptr,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, 0, 0, nullptr, 0, nullptr, 0, nullptr,
};
std::unordered_set<const char*> enabledExtensions;
@@ -551,17 +539,12 @@
for (auto extension : SwapChainStateVk::getRequiredInstanceExtensions()) {
enabledExtensions.emplace(extension);
}
- std::vector<const char*> enabledExtensions_(enabledExtensions.begin(),
- enabledExtensions.end());
- instCi.enabledExtensionCount =
- static_cast<uint32_t>(enabledExtensions_.size());
+ std::vector<const char*> enabledExtensions_(enabledExtensions.begin(), enabledExtensions.end());
+ instCi.enabledExtensionCount = static_cast<uint32_t>(enabledExtensions_.size());
instCi.ppEnabledExtensionNames = enabledExtensions_.data();
VkApplicationInfo appInfo = {
- VK_STRUCTURE_TYPE_APPLICATION_INFO, 0,
- "AEMU", 1,
- "AEMU", 1,
- VK_MAKE_VERSION(1, 0, 0),
+ VK_STRUCTURE_TYPE_APPLICATION_INFO, 0, "AEMU", 1, "AEMU", 1, VK_MAKE_VERSION(1, 0, 0),
};
instCi.pApplicationInfo = &appInfo;
@@ -593,8 +576,7 @@
// Create instance level dispatch.
sVkEmulation->ivk = new VulkanDispatch;
- init_vulkan_dispatch_from_instance(
- vk, sVkEmulation->instance, sVkEmulation->ivk);
+ init_vulkan_dispatch_from_instance(vk, sVkEmulation->instance, sVkEmulation->ivk);
auto ivk = sVkEmulation->ivk;
@@ -605,10 +587,10 @@
if (ivk->vkEnumerateInstanceVersion) {
uint32_t instanceVersion;
VkResult enumInstanceRes = ivk->vkEnumerateInstanceVersion(&instanceVersion);
- if ((VK_SUCCESS == enumInstanceRes) &&
- instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
+ if ((VK_SUCCESS == enumInstanceRes) && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
if (!vulkan_dispatch_check_instance_VK_VERSION_1_1(ivk)) {
- fprintf(stderr, "%s: Warning: Vulkan 1.1 APIs missing from instance (1st try)\n", __func__);
+ fprintf(stderr, "%s: Warning: Vulkan 1.1 APIs missing from instance (1st try)\n",
+ __func__);
}
}
@@ -626,21 +608,20 @@
string_VkResult(res));
}
- init_vulkan_dispatch_from_instance(
- vk, sVkEmulation->instance, sVkEmulation->ivk);
+ init_vulkan_dispatch_from_instance(vk, sVkEmulation->instance, sVkEmulation->ivk);
// LOG(VERBOSE) << "Created Vulkan 1.1 instance on second try.";
if (!vulkan_dispatch_check_instance_VK_VERSION_1_1(ivk)) {
- fprintf(stderr, "%s: Warning: Vulkan 1.1 APIs missing from instance (2nd try)\n", __func__);
+ fprintf(stderr, "%s: Warning: Vulkan 1.1 APIs missing from instance (2nd try)\n",
+ __func__);
}
}
}
sVkEmulation->vulkanInstanceVersion = appInfo.apiVersion;
- sVkEmulation->instanceSupportsExternalMemoryCapabilities =
- externalMemoryCapabilitiesSupported;
+ sVkEmulation->instanceSupportsExternalMemoryCapabilities = externalMemoryCapabilitiesSupported;
sVkEmulation->instanceSupportsMoltenVK = moltenVKSupported;
if (sVkEmulation->instanceSupportsExternalMemoryCapabilities) {
@@ -657,15 +638,13 @@
if (sVkEmulation->instanceSupportsMoltenVK) {
sVkEmulation->setMTLTextureFunc = reinterpret_cast<PFN_vkSetMTLTextureMVK>(
- vk->vkGetInstanceProcAddr(
- sVkEmulation->instance, "vkSetMTLTextureMVK"));
+ vk->vkGetInstanceProcAddr(sVkEmulation->instance, "vkSetMTLTextureMVK"));
if (!sVkEmulation->setMTLTextureFunc) {
VK_EMU_INIT_RETURN_ON_ERROR("Cannot find vkSetMTLTextureMVK.");
}
- sVkEmulation->getMTLTextureFunc = reinterpret_cast<PFN_vkGetMTLTextureMVK>(
- vk->vkGetInstanceProcAddr(
- sVkEmulation->instance, "vkGetMTLTextureMVK"));
+ sVkEmulation->getMTLTextureFunc = reinterpret_cast<PFN_vkGetMTLTextureMVK>(
+ vk->vkGetInstanceProcAddr(sVkEmulation->instance, "vkGetMTLTextureMVK"));
if (!sVkEmulation->getMTLTextureFunc) {
VK_EMU_INIT_RETURN_ON_ERROR("Cannot find vkGetMTLTextureMVK.");
}
@@ -673,11 +652,9 @@
}
uint32_t physdevCount = 0;
- ivk->vkEnumeratePhysicalDevices(sVkEmulation->instance, &physdevCount,
- nullptr);
+ ivk->vkEnumeratePhysicalDevices(sVkEmulation->instance, &physdevCount, nullptr);
std::vector<VkPhysicalDevice> physdevs(physdevCount);
- ivk->vkEnumeratePhysicalDevices(sVkEmulation->instance, &physdevCount,
- physdevs.data());
+ ivk->vkEnumeratePhysicalDevices(sVkEmulation->instance, &physdevCount, physdevs.data());
// LOG(VERBOSE) << "Found " << physdevCount << " Vulkan physical devices.";
@@ -688,27 +665,25 @@
std::vector<VkEmulation::DeviceSupportInfo> deviceInfos(physdevCount);
for (int i = 0; i < physdevCount; ++i) {
- ivk->vkGetPhysicalDeviceProperties(physdevs[i],
- &deviceInfos[i].physdevProps);
+ ivk->vkGetPhysicalDeviceProperties(physdevs[i], &deviceInfos[i].physdevProps);
// LOG(VERBOSE) << "Considering Vulkan physical device " << i << ": "
// << deviceInfos[i].physdevProps.deviceName;
// It's easier to figure out the staging buffer along with
// external memories if we have the memory properties on hand.
- ivk->vkGetPhysicalDeviceMemoryProperties(physdevs[i],
- &deviceInfos[i].memProps);
+ ivk->vkGetPhysicalDeviceMemoryProperties(physdevs[i], &deviceInfos[i].memProps);
uint32_t deviceExtensionCount = 0;
- ivk->vkEnumerateDeviceExtensionProperties(
- physdevs[i], nullptr, &deviceExtensionCount, nullptr);
+ ivk->vkEnumerateDeviceExtensionProperties(physdevs[i], nullptr, &deviceExtensionCount,
+ nullptr);
std::vector<VkExtensionProperties>& deviceExts = deviceInfos[i].extensions;
deviceExts.resize(deviceExtensionCount);
- ivk->vkEnumerateDeviceExtensionProperties(
- physdevs[i], nullptr, &deviceExtensionCount, deviceExts.data());
+ ivk->vkEnumerateDeviceExtensionProperties(physdevs[i], nullptr, &deviceExtensionCount,
+ deviceExts.data());
deviceInfos[i].supportsExternalMemory = false;
- deviceInfos[i].glInteropSupported = 0; // set later
+ deviceInfos[i].glInteropSupported = 0; // set later
if (sVkEmulation->instanceSupportsExternalMemoryCapabilities) {
deviceInfos[i].supportsExternalMemory =
@@ -720,8 +695,10 @@
(deviceInfos[i].physdevProps.apiVersion >= VK_API_VERSION_1_2);
if (!sVkEmulation->getPhysicalDeviceProperties2Func) {
- fprintf(stderr, "%s: warning: device claims to support ID properties "
- "but vkGetPhysicalDeviceProperties2 could not be found\n", __func__);
+ fprintf(stderr,
+ "%s: warning: device claims to support ID properties "
+ "but vkGetPhysicalDeviceProperties2 could not be found\n",
+ __func__);
}
}
@@ -746,9 +723,7 @@
vk_append_struct(&devicePropsChain, &driverProps);
}
- sVkEmulation->getPhysicalDeviceProperties2Func(
- physdevs[i],
- &deviceProps);
+ sVkEmulation->getPhysicalDeviceProperties2Func(physdevs[i], &deviceProps);
deviceInfos[i].idProps = vk_make_orphan_copy(idProps);
@@ -757,8 +732,7 @@
<< deviceInfos[i].physdevProps.vendorID;
std::string decodedDriverVersion = decodeDriverVersion(
- deviceInfos[i].physdevProps.vendorID,
- deviceInfos[i].physdevProps.driverVersion);
+ deviceInfos[i].physdevProps.vendorID, deviceInfos[i].physdevProps.driverVersion);
std::stringstream driverVersionBuilder;
driverVersionBuilder << "Driver Version " << std::hex << std::setfill('0')
@@ -795,28 +769,23 @@
}
uint32_t queueFamilyCount = 0;
- ivk->vkGetPhysicalDeviceQueueFamilyProperties(
- physdevs[i], &queueFamilyCount, nullptr);
+ ivk->vkGetPhysicalDeviceQueueFamilyProperties(physdevs[i], &queueFamilyCount, nullptr);
std::vector<VkQueueFamilyProperties> queueFamilyProps(queueFamilyCount);
- ivk->vkGetPhysicalDeviceQueueFamilyProperties(
- physdevs[i], &queueFamilyCount, queueFamilyProps.data());
+ ivk->vkGetPhysicalDeviceQueueFamilyProperties(physdevs[i], &queueFamilyCount,
+ queueFamilyProps.data());
for (uint32_t j = 0; j < queueFamilyCount; ++j) {
auto count = queueFamilyProps[j].queueCount;
auto flags = queueFamilyProps[j].queueFlags;
- bool hasGraphicsQueueFamily =
- (count > 0 && (flags & VK_QUEUE_GRAPHICS_BIT));
- bool hasComputeQueueFamily =
- (count > 0 && (flags & VK_QUEUE_COMPUTE_BIT));
+ bool hasGraphicsQueueFamily = (count > 0 && (flags & VK_QUEUE_GRAPHICS_BIT));
+ bool hasComputeQueueFamily = (count > 0 && (flags & VK_QUEUE_COMPUTE_BIT));
deviceInfos[i].hasGraphicsQueueFamily =
- deviceInfos[i].hasGraphicsQueueFamily ||
- hasGraphicsQueueFamily;
+ deviceInfos[i].hasGraphicsQueueFamily || hasGraphicsQueueFamily;
deviceInfos[i].hasComputeQueueFamily =
- deviceInfos[i].hasComputeQueueFamily ||
- hasComputeQueueFamily;
+ deviceInfos[i].hasComputeQueueFamily || hasComputeQueueFamily;
if (hasGraphicsQueueFamily) {
deviceInfos[i].graphicsQueueFamilyIndices.push_back(j);
@@ -874,8 +843,10 @@
// If we don't support physical device ID properties,
// just pick the first physical device.
if (!sVkEmulation->instanceSupportsExternalMemoryCapabilities) {
- fprintf(stderr, "%s: warning: instance doesn't support "
- "external memory capabilities, picking first physical device\n", __func__);
+ fprintf(stderr,
+ "%s: warning: instance doesn't support "
+ "external memory capabilities, picking first physical device\n",
+ __func__);
maxScoringIndex = 0;
} else {
for (uint32_t i = 0; i < physdevCount; ++i) {
@@ -896,8 +867,8 @@
// support)
sVkEmulation->imageSupportInfo = getBasicImageSupportList();
for (size_t i = 0; i < sVkEmulation->imageSupportInfo.size(); ++i) {
- getImageFormatExternalMemorySupportInfo(
- ivk, sVkEmulation->physdev, &sVkEmulation->imageSupportInfo[i]);
+ getImageFormatExternalMemorySupportInfo(ivk, sVkEmulation->physdev,
+ &sVkEmulation->imageSupportInfo[i]);
}
if (!sVkEmulation->deviceInfo.hasGraphicsQueueFamily) {
@@ -908,7 +879,8 @@
VK_COMMON_LOG("Selecting Vulkan device: %s", sVkEmulation->deviceInfo.physdevProps.deviceName);
// LOG(VERBOSE) << "Version: "
- // << VK_VERSION_MAJOR(deviceVersion) << "." << VK_VERSION_MINOR(deviceVersion) << "." << VK_VERSION_PATCH(deviceVersion);
+ // << VK_VERSION_MAJOR(deviceVersion) << "." << VK_VERSION_MINOR(deviceVersion) <<
+ // "." << VK_VERSION_PATCH(deviceVersion);
// LOG(VERBOSE) << "Has graphics queue? "
// << sVkEmulation->deviceInfo.hasGraphicsQueueFamily;
// LOG(VERBOSE) << "Has external memory support? "
@@ -918,9 +890,12 @@
float priority = 1.0f;
VkDeviceQueueCreateInfo dqCi = {
- VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+ 0,
+ 0,
sVkEmulation->deviceInfo.graphicsQueueFamilyIndices[0],
- 1, &priority,
+ 1,
+ &priority,
};
std::unordered_set<const char*> selectedDeviceExtensionNames_;
@@ -936,16 +911,14 @@
if (sVkEmulation->deviceInfo.hasSamplerYcbcrConversionExtension) {
selectedDeviceExtensionNames_.emplace(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
}
- std::vector<const char*> selectedDeviceExtensionNames(
- selectedDeviceExtensionNames_.begin(),
- selectedDeviceExtensionNames_.end());
+ std::vector<const char*> selectedDeviceExtensionNames(selectedDeviceExtensionNames_.begin(),
+ selectedDeviceExtensionNames_.end());
VkDeviceCreateInfo dCi = {};
dCi.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
dCi.queueCreateInfoCount = 1;
dCi.pQueueCreateInfos = &dqCi;
- dCi.enabledExtensionCount =
- static_cast<uint32_t>(selectedDeviceExtensionNames.size());
+ dCi.enabledExtensionCount = static_cast<uint32_t>(selectedDeviceExtensionNames.size());
dCi.ppEnabledExtensionNames = selectedDeviceExtensionNames.data();
// Setting up VkDeviceCreateInfo::pNext
@@ -966,8 +939,7 @@
vk_append_struct(&deviceCiChain, samplerYcbcrConversionFeatures.get());
}
- ivk->vkCreateDevice(sVkEmulation->physdev, &dCi, nullptr,
- &sVkEmulation->device);
+ ivk->vkCreateDevice(sVkEmulation->physdev, &dCi, nullptr, &sVkEmulation->device);
if (res != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to create Vulkan device. Error %s.",
@@ -976,8 +948,7 @@
// device created; populate dispatch table
sVkEmulation->dvk = new VulkanDispatch;
- init_vulkan_dispatch_from_device(
- ivk, sVkEmulation->device, sVkEmulation->dvk);
+ init_vulkan_dispatch_from_device(ivk, sVkEmulation->device, sVkEmulation->dvk);
auto dvk = sVkEmulation->dvk;
@@ -994,28 +965,23 @@
if (sVkEmulation->deviceInfo.supportsExternalMemory) {
sVkEmulation->deviceInfo.getImageMemoryRequirements2Func =
reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
- dvk->vkGetDeviceProcAddr(
- sVkEmulation->device, "vkGetImageMemoryRequirements2KHR"));
+ dvk->vkGetDeviceProcAddr(sVkEmulation->device, "vkGetImageMemoryRequirements2KHR"));
if (!sVkEmulation->deviceInfo.getImageMemoryRequirements2Func) {
VK_EMU_INIT_RETURN_ON_ERROR("Cannot find vkGetImageMemoryRequirements2KHR.");
}
sVkEmulation->deviceInfo.getBufferMemoryRequirements2Func =
- reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
- dvk->vkGetDeviceProcAddr(
- sVkEmulation->device, "vkGetBufferMemoryRequirements2KHR"));
+ reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(dvk->vkGetDeviceProcAddr(
+ sVkEmulation->device, "vkGetBufferMemoryRequirements2KHR"));
if (!sVkEmulation->deviceInfo.getBufferMemoryRequirements2Func) {
VK_EMU_INIT_RETURN_ON_ERROR("Cannot find vkGetBufferMemoryRequirements2KHR");
}
#ifdef _WIN32
sVkEmulation->deviceInfo.getMemoryHandleFunc =
- reinterpret_cast<PFN_vkGetMemoryWin32HandleKHR>(
- dvk->vkGetDeviceProcAddr(sVkEmulation->device,
- "vkGetMemoryWin32HandleKHR"));
+ reinterpret_cast<PFN_vkGetMemoryWin32HandleKHR>(
+ dvk->vkGetDeviceProcAddr(sVkEmulation->device, "vkGetMemoryWin32HandleKHR"));
#else
- sVkEmulation->deviceInfo.getMemoryHandleFunc =
- reinterpret_cast<PFN_vkGetMemoryFdKHR>(
- dvk->vkGetDeviceProcAddr(sVkEmulation->device,
- "vkGetMemoryFdKHR"));
+ sVkEmulation->deviceInfo.getMemoryHandleFunc = reinterpret_cast<PFN_vkGetMemoryFdKHR>(
+ dvk->vkGetDeviceProcAddr(sVkEmulation->device, "vkGetMemoryFdKHR"));
#endif
if (!sVkEmulation->deviceInfo.getMemoryHandleFunc) {
VK_EMU_INIT_RETURN_ON_ERROR("Cannot find vkGetMemory(Fd|Win32Handle)KHR");
@@ -1027,25 +993,24 @@
sVkEmulation->queueLock = std::make_shared<android::base::Lock>();
{
android::base::AutoLock lock(*sVkEmulation->queueLock);
- dvk->vkGetDeviceQueue(
- sVkEmulation->device,
- sVkEmulation->deviceInfo.graphicsQueueFamilyIndices[0], 0,
- &sVkEmulation->queue);
+ dvk->vkGetDeviceQueue(sVkEmulation->device,
+ sVkEmulation->deviceInfo.graphicsQueueFamilyIndices[0], 0,
+ &sVkEmulation->queue);
}
- sVkEmulation->queueFamilyIndex =
- sVkEmulation->deviceInfo.graphicsQueueFamilyIndices[0];
+ sVkEmulation->queueFamilyIndex = sVkEmulation->deviceInfo.graphicsQueueFamilyIndices[0];
// LOG(VERBOSE) << "Vulkan device queue obtained.";
VkCommandPoolCreateInfo poolCi = {
- VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, 0,
+ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+ 0,
VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
sVkEmulation->queueFamilyIndex,
};
- VkResult poolCreateRes = dvk->vkCreateCommandPool(
- sVkEmulation->device, &poolCi, nullptr, &sVkEmulation->commandPool);
+ VkResult poolCreateRes = dvk->vkCreateCommandPool(sVkEmulation->device, &poolCi, nullptr,
+ &sVkEmulation->commandPool);
if (poolCreateRes != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to create command pool. Error: %s.",
@@ -1053,12 +1018,15 @@
}
VkCommandBufferAllocateInfo cbAi = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, 0,
- sVkEmulation->commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+ 0,
+ sVkEmulation->commandPool,
+ VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+ 1,
};
- VkResult cbAllocRes = dvk->vkAllocateCommandBuffers(
- sVkEmulation->device, &cbAi, &sVkEmulation->commandBuffer);
+ VkResult cbAllocRes =
+ dvk->vkAllocateCommandBuffers(sVkEmulation->device, &cbAi, &sVkEmulation->commandBuffer);
if (cbAllocRes != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to allocate command buffer. Error: %s.",
@@ -1066,12 +1034,13 @@
}
VkFenceCreateInfo fenceCi = {
- VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ 0,
+ 0,
};
- VkResult fenceCreateRes = dvk->vkCreateFence(
- sVkEmulation->device, &fenceCi, nullptr,
- &sVkEmulation->commandBufferFence);
+ VkResult fenceCreateRes = dvk->vkCreateFence(sVkEmulation->device, &fenceCi, nullptr,
+ &sVkEmulation->commandBufferFence);
if (fenceCreateRes != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to create fence for command buffer. Error: %s.",
@@ -1085,17 +1054,18 @@
// non-external-memory fallback.
VkBufferCreateInfo bufCi = {
- VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, 0, 0,
+ VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+ 0,
+ 0,
sVkEmulation->staging.size,
- VK_BUFFER_USAGE_TRANSFER_DST_BIT |
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_SHARING_MODE_EXCLUSIVE,
- 0, nullptr,
+ 0,
+ nullptr,
};
VkResult bufCreateRes =
- dvk->vkCreateBuffer(sVkEmulation->device, &bufCi, nullptr,
- &sVkEmulation->staging.buffer);
+ dvk->vkCreateBuffer(sVkEmulation->device, &bufCi, nullptr, &sVkEmulation->staging.buffer);
if (bufCreateRes != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to create staging buffer index. Error: %s.",
@@ -1103,35 +1073,31 @@
}
VkMemoryRequirements memReqs;
- dvk->vkGetBufferMemoryRequirements(sVkEmulation->device,
- sVkEmulation->staging.buffer, &memReqs);
+ dvk->vkGetBufferMemoryRequirements(sVkEmulation->device, sVkEmulation->staging.buffer,
+ &memReqs);
sVkEmulation->staging.memory.size = memReqs.size;
- bool gotStagingTypeIndex = getStagingMemoryTypeIndex(
- dvk, sVkEmulation->device, &sVkEmulation->deviceInfo.memProps,
- &sVkEmulation->staging.memory.typeIndex);
+ bool gotStagingTypeIndex =
+ getStagingMemoryTypeIndex(dvk, sVkEmulation->device, &sVkEmulation->deviceInfo.memProps,
+ &sVkEmulation->staging.memory.typeIndex);
if (!gotStagingTypeIndex) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to determine staging memory type index.");
}
- if (!((1 << sVkEmulation->staging.memory.typeIndex) &
- memReqs.memoryTypeBits)) {
+ if (!((1 << sVkEmulation->staging.memory.typeIndex) & memReqs.memoryTypeBits)) {
VK_EMU_INIT_RETURN_ON_ERROR(
"Failed: Inconsistent determination of memory type index for staging buffer");
}
- if (!allocExternalMemory(dvk, &sVkEmulation->staging.memory,
- false /* not external */,
+ if (!allocExternalMemory(dvk, &sVkEmulation->staging.memory, false /* not external */,
kNullopt /* deviceAlignment */)) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to allocate memory for staging buffer.");
}
VkResult stagingBufferBindRes = dvk->vkBindBufferMemory(
- sVkEmulation->device,
- sVkEmulation->staging.buffer,
- sVkEmulation->staging.memory.memory, 0);
+ sVkEmulation->device, sVkEmulation->staging.buffer, sVkEmulation->staging.memory.memory, 0);
if (stagingBufferBindRes != VK_SUCCESS) {
VK_EMU_INIT_RETURN_ON_ERROR("Failed to bind memory for staging buffer.");
@@ -1199,28 +1165,26 @@
}
// Precondition: sVkEmulation has valid device support info
-bool allocExternalMemory(VulkanDispatch* vk,
- VkEmulation::ExternalMemoryInfo* info,
- bool actuallyExternal,
- Optional<uint64_t> deviceAlignment) {
+bool allocExternalMemory(VulkanDispatch* vk, VkEmulation::ExternalMemoryInfo* info,
+ bool actuallyExternal, Optional<uint64_t> deviceAlignment) {
VkExportMemoryAllocateInfo exportAi = {
- VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, 0,
+ VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
VkExportMemoryAllocateInfo* exportAiPtr = nullptr;
- if (sVkEmulation->deviceInfo.supportsExternalMemory &&
- actuallyExternal) {
+ if (sVkEmulation->deviceInfo.supportsExternalMemory && actuallyExternal) {
exportAiPtr = &exportAi;
}
info->actualSize = (info->size + 2 * kPageSize - 1) / kPageSize * kPageSize;
VkMemoryAllocateInfo allocInfo = {
- VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
- exportAiPtr,
- info->actualSize,
- info->typeIndex,
+ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+ exportAiPtr,
+ info->actualSize,
+ info->typeIndex,
};
bool memoryAllocated = false;
@@ -1228,8 +1192,8 @@
constexpr size_t kMaxAllocationAttempts = 20u;
while (!memoryAllocated) {
- VkResult allocRes = vk->vkAllocateMemory(
- sVkEmulation->device, &allocInfo, nullptr, &info->memory);
+ VkResult allocRes =
+ vk->vkAllocateMemory(sVkEmulation->device, &allocInfo, nullptr, &info->memory);
if (allocRes != VK_SUCCESS) {
// LOG(VERBOSE) << "allocExternalMemory: failed in vkAllocateMemory: "
@@ -1237,12 +1201,10 @@
break;
}
- if (sVkEmulation->deviceInfo.memProps.memoryTypes[info->typeIndex]
- .propertyFlags &
+ if (sVkEmulation->deviceInfo.memProps.memoryTypes[info->typeIndex].propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
- VkResult mapRes =
- vk->vkMapMemory(sVkEmulation->device, info->memory, 0,
- info->actualSize, 0, &info->mappedPtr);
+ VkResult mapRes = vk->vkMapMemory(sVkEmulation->device, info->memory, 0,
+ info->actualSize, 0, &info->mappedPtr);
if (mapRes != VK_SUCCESS) {
// LOG(VERBOSE) << "allocExternalMemory: failed in vkMapMemory: "
// << mapRes;
@@ -1250,22 +1212,21 @@
}
}
- uint64_t mappedPtrPageOffset =
- reinterpret_cast<uint64_t>(info->mappedPtr) % kPageSize;
+ uint64_t mappedPtrPageOffset = reinterpret_cast<uint64_t>(info->mappedPtr) % kPageSize;
if ( // don't care about alignment (e.g. device-local memory)
- !deviceAlignment.hasValue() ||
- // If device has an alignment requirement larger than current
- // host pointer alignment (i.e. the lowest 1 bit of mappedPtr),
- // the only possible way to make mappedPtr valid is to ensure
- // that it is already aligned to page.
- mappedPtrPageOffset == 0u ||
- // If device has an alignment requirement smaller or equals to
- // current host pointer alignment, clients can set a offset
- // |kPageSize - mappedPtrPageOffset| in vkBindImageMemory to
- // make it aligned to page and compatible with device
- // requirements.
- (kPageSize - mappedPtrPageOffset) % deviceAlignment.value() == 0) {
+ !deviceAlignment.hasValue() ||
+ // If device has an alignment requirement larger than current
+ // host pointer alignment (i.e. the lowest 1 bit of mappedPtr),
+ // the only possible way to make mappedPtr valid is to ensure
+ // that it is already aligned to page.
+ mappedPtrPageOffset == 0u ||
+ // If device has an alignment requirement smaller or equals to
+ // current host pointer alignment, clients can set a offset
+ // |kPageSize - mappedPtrPageOffset| in vkBindImageMemory to
+ // make it aligned to page and compatible with device
+ // requirements.
+ (kPageSize - mappedPtrPageOffset) % deviceAlignment.value() == 0) {
// allocation success.
memoryAllocated = true;
} else {
@@ -1293,29 +1254,28 @@
return false;
}
- if (!sVkEmulation->deviceInfo.supportsExternalMemory ||
- !actuallyExternal) {
+ if (!sVkEmulation->deviceInfo.supportsExternalMemory || !actuallyExternal) {
return true;
}
#ifdef _WIN32
VkMemoryGetWin32HandleInfoKHR getWin32HandleInfo = {
- VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, 0,
- info->memory, VK_EXT_MEMORY_HANDLE_TYPE_BIT,
+ VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+ 0,
+ info->memory,
+ VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
- VkResult exportRes =
- sVkEmulation->deviceInfo.getMemoryHandleFunc(
- sVkEmulation->device, &getWin32HandleInfo,
- &info->exportedHandle);
+ VkResult exportRes = sVkEmulation->deviceInfo.getMemoryHandleFunc(
+ sVkEmulation->device, &getWin32HandleInfo, &info->exportedHandle);
#else
VkMemoryGetFdInfoKHR getFdInfo = {
- VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, 0,
- info->memory, VK_EXT_MEMORY_HANDLE_TYPE_BIT,
+ VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+ 0,
+ info->memory,
+ VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
- VkResult exportRes =
- sVkEmulation->deviceInfo.getMemoryHandleFunc(
- sVkEmulation->device, &getFdInfo,
- &info->exportedHandle);
+ VkResult exportRes = sVkEmulation->deviceInfo.getMemoryHandleFunc(
+ sVkEmulation->device, &getFdInfo, &info->exportedHandle);
#endif
if (exportRes != VK_SUCCESS) {
@@ -1330,19 +1290,14 @@
return true;
}
-void freeExternalMemoryLocked(VulkanDispatch* vk,
- VkEmulation::ExternalMemoryInfo* info) {
- if (!info->memory)
- return;
+void freeExternalMemoryLocked(VulkanDispatch* vk, VkEmulation::ExternalMemoryInfo* info) {
+ if (!info->memory) return;
- if (sVkEmulation->deviceInfo.memProps.memoryTypes[info->typeIndex]
- .propertyFlags &
+ if (sVkEmulation->deviceInfo.memProps.memoryTypes[info->typeIndex].propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
- if (sVkEmulation->occupiedGpas.find(info->gpa) !=
- sVkEmulation->occupiedGpas.end()) {
+ if (sVkEmulation->occupiedGpas.find(info->gpa) != sVkEmulation->occupiedGpas.end()) {
sVkEmulation->occupiedGpas.erase(info->gpa);
- get_emugl_vm_operations().unmapUserBackedRam(info->gpa,
- info->sizeToPage);
+ get_emugl_vm_operations().unmapUserBackedRam(info->gpa, info->sizeToPage);
info->gpa = 0u;
}
@@ -1365,20 +1320,20 @@
}
}
-bool importExternalMemory(VulkanDispatch* vk,
- VkDevice targetDevice,
- const VkEmulation::ExternalMemoryInfo* info,
- VkDeviceMemory* out) {
+bool importExternalMemory(VulkanDispatch* vk, VkDevice targetDevice,
+ const VkEmulation::ExternalMemoryInfo* info, VkDeviceMemory* out) {
#ifdef _WIN32
VkImportMemoryWin32HandleInfoKHR importInfo = {
- VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, 0,
+ VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
info->exportedHandle,
0,
};
#else
VkImportMemoryFdInfoKHR importInfo = {
- VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, 0,
+ VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
dupExternalMemory(info->exportedHandle),
};
@@ -1400,15 +1355,12 @@
return true;
}
-bool importExternalMemoryDedicatedImage(
- VulkanDispatch* vk,
- VkDevice targetDevice,
- const VkEmulation::ExternalMemoryInfo* info,
- VkImage image,
- VkDeviceMemory* out) {
-
+bool importExternalMemoryDedicatedImage(VulkanDispatch* vk, VkDevice targetDevice,
+ const VkEmulation::ExternalMemoryInfo* info, VkImage image,
+ VkDeviceMemory* out) {
VkMemoryDedicatedAllocateInfo dedicatedInfo = {
- VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, 0,
+ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ 0,
image,
VK_NULL_HANDLE,
};
@@ -1473,7 +1425,8 @@
return VK_FORMAT_R16G16B16A16_SFLOAT;
case GL_BGRA_EXT:
case GL_BGRA8_EXT:
- return VK_FORMAT_B8G8R8A8_UNORM;;
+ return VK_FORMAT_B8G8R8A8_UNORM;
+ ;
default:
return VK_FORMAT_R8G8B8A8_UNORM;
}
@@ -1486,8 +1439,7 @@
int height;
GLint internalformat;
- if (!fb->getColorBufferInfo(colorBufferHandle, &width, &height,
- &internalformat)) {
+ if (!fb->getColorBufferInfo(colorBufferHandle, &width, &height, &internalformat)) {
return false;
}
@@ -1511,14 +1463,12 @@
return 0;
}
-static uint32_t lastGoodTypeIndexWithMemoryProperties(
- uint32_t indices,
- VkMemoryPropertyFlags memoryProperty) {
+static uint32_t lastGoodTypeIndexWithMemoryProperties(uint32_t indices,
+ VkMemoryPropertyFlags memoryProperty) {
for (int32_t i = 31; i >= 0; --i) {
if ((indices & (1u << i)) &&
(!memoryProperty ||
- (sVkEmulation->deviceInfo.memProps.memoryTypes[i].propertyFlags &
- memoryProperty))) {
+ (sVkEmulation->deviceInfo.memProps.memoryTypes[i].propertyFlags & memoryProperty))) {
return i;
}
}
@@ -1544,15 +1494,11 @@
constexpr std::pair<VkFormatFeatureFlags, VkImageUsageFlags> formatUsagePairs[] = {
{VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT},
- {VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
- VK_IMAGE_USAGE_SAMPLED_BIT},
- {VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT},
- {VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
- VK_IMAGE_USAGE_TRANSFER_DST_BIT},
- {VK_FORMAT_FEATURE_BLIT_SRC_BIT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT},
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT},
+ {VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, VK_IMAGE_USAGE_SAMPLED_BIT},
+ {VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT},
+ {VK_FORMAT_FEATURE_TRANSFER_DST_BIT, VK_IMAGE_USAGE_TRANSFER_DST_BIT},
+ {VK_FORMAT_FEATURE_BLIT_SRC_BIT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT},
};
VkFormatFeatureFlags tilingFeatures = (tiling == VK_IMAGE_TILING_OPTIMAL)
? formatProperties.optimalTilingFeatures
@@ -1618,12 +1564,8 @@
// We should make it so the guest can only allocate external images/
// buffers of one type index for image and one type index for buffer
// to begin with, via filtering from the host.
-bool setupVkColorBuffer(uint32_t colorBufferHandle,
- bool vulkanOnly,
- uint32_t memoryProperty,
- bool* exported,
- VkDeviceSize* allocSize,
- uint32_t* typeIndex,
+bool setupVkColorBuffer(uint32_t colorBufferHandle, bool vulkanOnly, uint32_t memoryProperty,
+ bool* exported, VkDeviceSize* allocSize, uint32_t* typeIndex,
void** mappedPtr) {
if (!isColorBufferVulkanCompatible(colorBufferHandle)) return false;
@@ -1636,8 +1578,8 @@
GLint internalformat;
FrameworkFormat frameworkFormat;
- if (!fb->getColorBufferInfo(colorBufferHandle, &width, &height,
- &internalformat, &frameworkFormat)) {
+ if (!fb->getColorBufferInfo(colorBufferHandle, &width, &height, &internalformat,
+ &frameworkFormat)) {
return false;
}
@@ -1658,8 +1600,7 @@
if (typeIndex) *typeIndex = infoPtr->memory.typeIndex;
// Update the mappedPtr to what the host driver wanted, otherwise we
// may map the same memory twice.
- if (mappedPtr)
- *mappedPtr = infoPtr->memory.mappedPtr;
+ if (mappedPtr) *mappedPtr = infoPtr->memory.mappedPtr;
return true;
}
@@ -1703,7 +1644,8 @@
// Create the image. If external memory is supported, make it external.
VkExternalMemoryImageCreateInfo extImageCi = {
- VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, 0,
+ VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+ 0,
VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
@@ -1725,21 +1667,20 @@
res.imageCreateInfoShallow = vk_make_orphan_copy(*imageCi);
- vk->vkGetImageMemoryRequirements(sVkEmulation->device, res.image,
- &res.memReqs);
+ vk->vkGetImageMemoryRequirements(sVkEmulation->device, res.image, &res.memReqs);
// Currently we only care about two memory properties: DEVICE_LOCAL
// and HOST_VISIBLE; other memory properties specified in
// rcSetColorBufferVulkanMode2() call will be ignored for now.
- memoryProperty = memoryProperty & (VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ memoryProperty = memoryProperty &
+ (VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
res.memory.size = res.memReqs.size;
// Determine memory type.
if (memoryProperty) {
- res.memory.typeIndex = lastGoodTypeIndexWithMemoryProperties(
- res.memReqs.memoryTypeBits, memoryProperty);
+ res.memory.typeIndex =
+ lastGoodTypeIndexWithMemoryProperties(res.memReqs.memoryTypeBits, memoryProperty);
} else {
res.memory.typeIndex = lastGoodTypeIndex(res.memReqs.memoryTypeBits);
}
@@ -1755,27 +1696,23 @@
bool isHostVisible = memoryProperty & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
Optional<uint64_t> deviceAlignment =
- isHostVisible ? Optional<uint64_t>(res.memReqs.alignment) : kNullopt;
- bool allocRes = allocExternalMemory(
- vk, &res.memory, true /*actuallyExternal*/, deviceAlignment);
+ isHostVisible ? Optional<uint64_t>(res.memReqs.alignment) : kNullopt;
+ bool allocRes =
+ allocExternalMemory(vk, &res.memory, true /*actuallyExternal*/, deviceAlignment);
if (!allocRes) {
// LOG(VERBOSE) << "Failed to allocate ColorBuffer with Vulkan backing.";
return false;
}
- res.memory.pageOffset =
- reinterpret_cast<uint64_t>(res.memory.mappedPtr) % kPageSize;
- res.memory.bindOffset =
- res.memory.pageOffset ? kPageSize - res.memory.pageOffset : 0u;
+ res.memory.pageOffset = reinterpret_cast<uint64_t>(res.memory.mappedPtr) % kPageSize;
+ res.memory.bindOffset = res.memory.pageOffset ? kPageSize - res.memory.pageOffset : 0u;
- VkResult bindImageMemoryRes =
- vk->vkBindImageMemory(sVkEmulation->device, res.image,
- res.memory.memory, res.memory.bindOffset);
+ VkResult bindImageMemoryRes = vk->vkBindImageMemory(sVkEmulation->device, res.image,
+ res.memory.memory, res.memory.bindOffset);
if (bindImageMemoryRes != VK_SUCCESS) {
- fprintf(stderr, "%s: Failed to bind image memory. %d\n", __func__,
- bindImageMemoryRes);
+ fprintf(stderr, "%s: Failed to bind image memory. %d\n", __func__, bindImageMemoryRes);
return false;
}
@@ -1791,8 +1728,7 @@
}
if (sVkEmulation->deviceInfo.supportsExternalMemory &&
- sVkEmulation->deviceInfo.glInteropSupported &&
- glCompatible &&
+ sVkEmulation->deviceInfo.glInteropSupported && glCompatible &&
FrameBuffer::getFB()->importMemoryToColorBuffer(
dupExternalMemory(res.memory.exportedHandle), res.memory.size, false /* dedicated */,
vulkanOnly, colorBufferHandle, res.image, *imageCi)) {
@@ -1802,8 +1738,7 @@
if (exported) *exported = res.glExported;
if (allocSize) *allocSize = res.memory.size;
if (typeIndex) *typeIndex = res.memory.typeIndex;
- if (mappedPtr)
- *mappedPtr = res.memory.mappedPtr;
+ if (mappedPtr) *mappedPtr = res.memory.mappedPtr;
res.ownedByHost = std::make_shared<std::atomic_bool>(true);
@@ -1869,12 +1804,12 @@
}
if (!infoPtr->image) {
- fprintf(stderr, "%s: error: ColorBuffer 0x%x has no VkImage\n", __func__, colorBufferHandle);
+ fprintf(stderr, "%s: error: ColorBuffer 0x%x has no VkImage\n", __func__,
+ colorBufferHandle);
return false;
}
- if (infoPtr->glExported ||
- (infoPtr->vulkanMode == VkEmulation::VulkanMode::VulkanOnly) ||
+ if (infoPtr->glExported || (infoPtr->vulkanMode == VkEmulation::VulkanMode::VulkanOnly) ||
infoPtr->frameworkFormat != FrameworkFormat::FRAMEWORK_FORMAT_GL_COMPATIBLE) {
// No sync needed if exported to GL or in Vulkan-only mode
return true;
@@ -1882,14 +1817,13 @@
// Record our synchronization commands.
VkCommandBufferBeginInfo beginInfo = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ 0,
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
nullptr /* no inheritance info */,
};
- vk->vkBeginCommandBuffer(
- sVkEmulation->commandBuffer,
- &beginInfo);
+ vk->vkBeginCommandBuffer(sVkEmulation->commandBuffer, &beginInfo);
// From the spec: If an application does not need the contents of a resource
// to remain valid when transferring from one queue family to another, then
@@ -1899,7 +1833,8 @@
// VK_TRANSFER_SRC_OPTIMAL and back.
VkImageMemoryBarrier presentToTransferSrc = {
- VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
+ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ 0,
0,
VK_ACCESS_HOST_READ_BIT,
infoPtr->currentLayout,
@@ -1909,18 +1844,16 @@
infoPtr->image,
{
VK_IMAGE_ASPECT_COLOR_BIT,
- 0, 1, 0, 1,
+ 0,
+ 1,
+ 0,
+ 1,
},
};
- vk->vkCmdPipelineBarrier(
- sVkEmulation->commandBuffer,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- 0,
- 0, nullptr,
- 0, nullptr,
- 1, &presentToTransferSrc);
+ vk->vkCmdPipelineBarrier(sVkEmulation->commandBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &presentToTransferSrc);
infoPtr->currentLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
@@ -1952,46 +1885,37 @@
infoPtr->imageCreateInfoShallow.extent,
};
- vk->vkCmdCopyImageToBuffer(
- sVkEmulation->commandBuffer,
- infoPtr->image,
- VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- sVkEmulation->staging.buffer,
-        1, &region);
+ vk->vkCmdCopyImageToBuffer(sVkEmulation->commandBuffer, infoPtr->image,
+ VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, sVkEmulation->staging.buffer,
+                               1, &region);
vk->vkEndCommandBuffer(sVkEmulation->commandBuffer);
VkSubmitInfo submitInfo = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
- 0, nullptr,
- nullptr,
- 1, &sVkEmulation->commandBuffer,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO, 0, 0, nullptr, nullptr, 1,
+ &sVkEmulation->commandBuffer, 0, nullptr,
};
{
android::base::AutoLock lock(*sVkEmulation->queueLock);
- vk->vkQueueSubmit(sVkEmulation->queue, 1, &submitInfo,
- sVkEmulation->commandBufferFence);
+ vk->vkQueueSubmit(sVkEmulation->queue, 1, &submitInfo, sVkEmulation->commandBufferFence);
}
- static constexpr uint64_t ANB_MAX_WAIT_NS =
- 5ULL * 1000ULL * 1000ULL * 1000ULL;
+ static constexpr uint64_t ANB_MAX_WAIT_NS = 5ULL * 1000ULL * 1000ULL * 1000ULL;
- vk->vkWaitForFences(
- sVkEmulation->device, 1, &sVkEmulation->commandBufferFence,
- VK_TRUE, ANB_MAX_WAIT_NS);
- vk->vkResetFences(
- sVkEmulation->device, 1, &sVkEmulation->commandBufferFence);
+ vk->vkWaitForFences(sVkEmulation->device, 1, &sVkEmulation->commandBufferFence, VK_TRUE,
+ ANB_MAX_WAIT_NS);
+ vk->vkResetFences(sVkEmulation->device, 1, &sVkEmulation->commandBufferFence);
VkMappedMemoryRange toInvalidate = {
- VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
+ VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+ 0,
sVkEmulation->staging.memory.memory,
- 0, VK_WHOLE_SIZE,
+ 0,
+ VK_WHOLE_SIZE,
};
- vk->vkInvalidateMappedMemoryRanges(
- sVkEmulation->device, 1, &toInvalidate);
+ vk->vkInvalidateMappedMemoryRanges(sVkEmulation->device, 1, &toInvalidate);
const std::size_t copiedSize = infoPtr->imageCreateInfoShallow.extent.width *
infoPtr->imageCreateInfoShallow.extent.height * bpp;
@@ -2016,53 +1940,47 @@
return false;
}
- if (infoPtr->frameworkFormat == FrameworkFormat::FRAMEWORK_FORMAT_GL_COMPATIBLE && (
- infoPtr->glExported ||
- infoPtr->vulkanMode == VkEmulation::VulkanMode::VulkanOnly)) {
+ if (infoPtr->frameworkFormat == FrameworkFormat::FRAMEWORK_FORMAT_GL_COMPATIBLE &&
+ (infoPtr->glExported || infoPtr->vulkanMode == VkEmulation::VulkanMode::VulkanOnly)) {
// No sync needed if exported to GL or in Vulkan-only mode
return true;
}
size_t cbNumBytes = 0;
- bool readRes = FrameBuffer::getFB()->
- readColorBufferContents(
- colorBufferHandle, &cbNumBytes, nullptr);
+ bool readRes =
+ FrameBuffer::getFB()->readColorBufferContents(colorBufferHandle, &cbNumBytes, nullptr);
if (!readRes) {
- fprintf(stderr, "%s: Failed to read color buffer 0x%x\n",
- __func__, colorBufferHandle);
+ fprintf(stderr, "%s: Failed to read color buffer 0x%x\n", __func__, colorBufferHandle);
return false;
}
if (cbNumBytes > sVkEmulation->staging.memory.size) {
fprintf(stderr,
- "%s: Not enough space to read to staging buffer. "
- "Wanted: 0x%llx Have: 0x%llx\n", __func__,
- (unsigned long long)cbNumBytes,
- (unsigned long long)(sVkEmulation->staging.memory.size));
+ "%s: Not enough space to read to staging buffer. "
+ "Wanted: 0x%llx Have: 0x%llx\n",
+ __func__, (unsigned long long)cbNumBytes,
+ (unsigned long long)(sVkEmulation->staging.memory.size));
return false;
}
- readRes = FrameBuffer::getFB()->
- readColorBufferContents(
- colorBufferHandle, &cbNumBytes,
- sVkEmulation->staging.memory.mappedPtr);
+ readRes = FrameBuffer::getFB()->readColorBufferContents(colorBufferHandle, &cbNumBytes,
+ sVkEmulation->staging.memory.mappedPtr);
if (!readRes) {
- fprintf(stderr, "%s: Failed to read color buffer 0x%x (at glReadPixels)\n",
- __func__, colorBufferHandle);
+ fprintf(stderr, "%s: Failed to read color buffer 0x%x (at glReadPixels)\n", __func__,
+ colorBufferHandle);
return false;
}
// Record our synchronization commands.
VkCommandBufferBeginInfo beginInfo = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, 0,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ 0,
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
nullptr /* no inheritance info */,
};
- vk->vkBeginCommandBuffer(
- sVkEmulation->commandBuffer,
- &beginInfo);
+ vk->vkBeginCommandBuffer(sVkEmulation->commandBuffer, &beginInfo);
// From the spec: If an application does not need the contents of a resource
// to remain valid when transferring from one queue family to another, then
@@ -2072,7 +1990,8 @@
// VK_TRANSFER_SRC_OPTIMAL and back.
VkImageMemoryBarrier presentToTransferSrc = {
- VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0,
+ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ 0,
0,
VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
infoPtr->currentLayout,
@@ -2082,21 +2001,18 @@
infoPtr->image,
{
VK_IMAGE_ASPECT_COLOR_BIT,
- 0, 1, 0, 1,
+ 0,
+ 1,
+ 0,
+ 1,
},
};
- infoPtr->currentLayout =
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ infoPtr->currentLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- vk->vkCmdPipelineBarrier(
- sVkEmulation->commandBuffer,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- 0,
- 0, nullptr,
- 0, nullptr,
- 1, &presentToTransferSrc);
+ vk->vkCmdPipelineBarrier(sVkEmulation->commandBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &presentToTransferSrc);
// Copy to staging buffer
std::vector<VkBufferImageCopy> regions;
@@ -2147,8 +2063,8 @@
{0, 0, 0},
subplaneExtent,
});
- if (infoPtr->frameworkFormat == FRAMEWORK_FORMAT_YUV_420_888
- || infoPtr->frameworkFormat == FRAMEWORK_FORMAT_YV12) {
+ if (infoPtr->frameworkFormat == FRAMEWORK_FORMAT_YUV_420_888 ||
+ infoPtr->frameworkFormat == FRAMEWORK_FORMAT_YV12) {
regions.push_back({
infoPtr->imageCreateInfoShallow.extent.width *
infoPtr->imageCreateInfoShallow.extent.height +
@@ -2168,46 +2084,37 @@
}
}
- vk->vkCmdCopyBufferToImage(
- sVkEmulation->commandBuffer,
- sVkEmulation->staging.buffer,
- infoPtr->image,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
- regions.size(), regions.data());
+ vk->vkCmdCopyBufferToImage(sVkEmulation->commandBuffer, sVkEmulation->staging.buffer,
+ infoPtr->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, regions.size(),
+ regions.data());
vk->vkEndCommandBuffer(sVkEmulation->commandBuffer);
VkSubmitInfo submitInfo = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, 0,
- 0, nullptr,
- nullptr,
- 1, &sVkEmulation->commandBuffer,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO, 0, 0, nullptr, nullptr, 1,
+ &sVkEmulation->commandBuffer, 0, nullptr,
};
{
android::base::AutoLock lock(*sVkEmulation->queueLock);
- vk->vkQueueSubmit(sVkEmulation->queue, 1, &submitInfo,
- sVkEmulation->commandBufferFence);
+ vk->vkQueueSubmit(sVkEmulation->queue, 1, &submitInfo, sVkEmulation->commandBufferFence);
}
- static constexpr uint64_t ANB_MAX_WAIT_NS =
- 5ULL * 1000ULL * 1000ULL * 1000ULL;
+ static constexpr uint64_t ANB_MAX_WAIT_NS = 5ULL * 1000ULL * 1000ULL * 1000ULL;
- vk->vkWaitForFences(
- sVkEmulation->device, 1, &sVkEmulation->commandBufferFence,
- VK_TRUE, ANB_MAX_WAIT_NS);
- vk->vkResetFences(
- sVkEmulation->device, 1, &sVkEmulation->commandBufferFence);
+ vk->vkWaitForFences(sVkEmulation->device, 1, &sVkEmulation->commandBufferFence, VK_TRUE,
+ ANB_MAX_WAIT_NS);
+ vk->vkResetFences(sVkEmulation->device, 1, &sVkEmulation->commandBufferFence);
VkMappedMemoryRange toInvalidate = {
- VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, 0,
+ VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+ 0,
sVkEmulation->staging.memory.memory,
- 0, VK_WHOLE_SIZE,
+ 0,
+ VK_WHOLE_SIZE,
};
- vk->vkInvalidateMappedMemoryRanges(
- sVkEmulation->device, 1, &toInvalidate);
+ vk->vkInvalidateMappedMemoryRanges(sVkEmulation->device, 1, &toInvalidate);
return true;
}
@@ -2260,23 +2167,18 @@
return infoPtr->mtlTexture;
}
-int32_t mapGpaToBufferHandle(uint32_t bufferHandle,
- uint64_t gpa,
- uint64_t size) {
- if (!sVkEmulation || !sVkEmulation->live)
- return VK_ERROR_DEVICE_LOST;
+int32_t mapGpaToBufferHandle(uint32_t bufferHandle, uint64_t gpa, uint64_t size) {
+ if (!sVkEmulation || !sVkEmulation->live) return VK_ERROR_DEVICE_LOST;
AutoLock lock(sVkEmulationLock);
VkEmulation::ExternalMemoryInfo* memoryInfoPtr = nullptr;
- auto colorBufferInfoPtr =
- android::base::find(sVkEmulation->colorBuffers, bufferHandle);
+ auto colorBufferInfoPtr = android::base::find(sVkEmulation->colorBuffers, bufferHandle);
if (colorBufferInfoPtr) {
memoryInfoPtr = &colorBufferInfoPtr->memory;
}
- auto bufferInfoPtr =
- android::base::find(sVkEmulation->buffers, bufferHandle);
+ auto bufferInfoPtr = android::base::find(sVkEmulation->buffers, bufferHandle);
if (bufferInfoPtr) {
memoryInfoPtr = &bufferInfoPtr->memory;
}
@@ -2292,42 +2194,35 @@
memoryInfoPtr->gpa = gpa;
memoryInfoPtr->pageAlignedHva =
- reinterpret_cast<uint8_t*>(memoryInfoPtr->mappedPtr) +
- memoryInfoPtr->bindOffset;
+ reinterpret_cast<uint8_t*>(memoryInfoPtr->mappedPtr) + memoryInfoPtr->bindOffset;
size_t rawSize = memoryInfoPtr->size + memoryInfoPtr->pageOffset;
if (size && size < rawSize) {
rawSize = size;
}
- memoryInfoPtr->sizeToPage = ((rawSize + kPageSize - 1) >> kPageBits)
- << kPageBits;
+ memoryInfoPtr->sizeToPage = ((rawSize + kPageSize - 1) >> kPageBits) << kPageBits;
// LOG(VERBOSE) << "mapGpaToColorBuffer: hva = " << memoryInfoPtr->mappedPtr
// << ", pageAlignedHva = " << memoryInfoPtr->pageAlignedHva
// << " -> [ " << memoryInfoPtr->gpa << ", "
// << memoryInfoPtr->gpa + memoryInfoPtr->sizeToPage << " ]";
- if (sVkEmulation->occupiedGpas.find(gpa) !=
- sVkEmulation->occupiedGpas.end()) {
+ if (sVkEmulation->occupiedGpas.find(gpa) != sVkEmulation->occupiedGpas.end()) {
// emugl::emugl_crash_reporter("FATAL: already mapped gpa 0x%lx! ", gpa);
return VK_ERROR_MEMORY_MAP_FAILED;
}
- get_emugl_vm_operations().mapUserBackedRam(
- gpa, memoryInfoPtr->pageAlignedHva, memoryInfoPtr->sizeToPage);
+ get_emugl_vm_operations().mapUserBackedRam(gpa, memoryInfoPtr->pageAlignedHva,
+ memoryInfoPtr->sizeToPage);
sVkEmulation->occupiedGpas.insert(gpa);
return memoryInfoPtr->pageOffset;
}
-bool setupVkBuffer(uint32_t bufferHandle,
- bool vulkanOnly,
- uint32_t memoryProperty,
- bool* exported,
- VkDeviceSize* allocSize,
- uint32_t* typeIndex) {
+bool setupVkBuffer(uint32_t bufferHandle, bool vulkanOnly, uint32_t memoryProperty, bool* exported,
+ VkDeviceSize* allocSize, uint32_t* typeIndex) {
if (vulkanOnly == false) {
fprintf(stderr, "Data buffers should be vulkanOnly. Setup failed.\n");
return false;
@@ -2349,12 +2244,10 @@
if (infoPtr) {
// Update the allocation size to what the host driver wanted, or we
// might get VK_ERROR_OUT_OF_DEVICE_MEMORY and a host crash
- if (allocSize)
- *allocSize = infoPtr->memory.size;
+ if (allocSize) *allocSize = infoPtr->memory.size;
// Update the type index to what the host driver wanted, or we might
// get VK_ERROR_DEVICE_LOST
- if (typeIndex)
- *typeIndex = infoPtr->memory.typeIndex;
+ if (typeIndex) *typeIndex = infoPtr->memory.typeIndex;
return true;
}
@@ -2363,21 +2256,18 @@
res.handle = bufferHandle;
res.size = size;
- res.usageFlags = VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
- VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
- VK_BUFFER_USAGE_STORAGE_BUFFER_BIT |
- VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
- VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ res.usageFlags = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
+ VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
+ VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
res.createFlags = 0;
res.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
// Create the image. If external memory is supported, make it external.
VkExternalMemoryBufferCreateInfo extBufferCi = {
- VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
- 0,
- VK_EXT_MEMORY_HANDLE_TYPE_BIT,
+ VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+ 0,
+ VK_EXT_MEMORY_HANDLE_TYPE_BIT,
};
VkExternalMemoryBufferCreateInfo* extBufferCiPtr = nullptr;
@@ -2386,40 +2276,38 @@
}
VkBufferCreateInfo bufferCi = {
- VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- extBufferCiPtr,
- res.createFlags,
- res.size,
- res.usageFlags,
- res.sharingMode,
- /* queueFamilyIndexCount */ 0,
- /* pQueueFamilyIndices */ nullptr,
+ VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ extBufferCiPtr,
+ res.createFlags,
+ res.size,
+ res.usageFlags,
+ res.sharingMode,
+ /* queueFamilyIndexCount */ 0,
+ /* pQueueFamilyIndices */ nullptr,
};
- VkResult createRes = vk->vkCreateBuffer(sVkEmulation->device, &bufferCi,
- nullptr, &res.buffer);
+ VkResult createRes = vk->vkCreateBuffer(sVkEmulation->device, &bufferCi, nullptr, &res.buffer);
if (createRes != VK_SUCCESS) {
// LOG(VERBOSE) << "Failed to create Vulkan Buffer for Buffer "
- // << bufferHandle;
+ // << bufferHandle;
return false;
}
- vk->vkGetBufferMemoryRequirements(sVkEmulation->device, res.buffer,
- &res.memReqs);
+ vk->vkGetBufferMemoryRequirements(sVkEmulation->device, res.buffer, &res.memReqs);
// Currently we only care about two memory properties: DEVICE_LOCAL
// and HOST_VISIBLE; other memory properties specified in
// rcSetColorBufferVulkanMode2() call will be ignored for now.
- memoryProperty = memoryProperty & (VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ memoryProperty = memoryProperty &
+ (VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
res.memory.size = res.memReqs.size;
// Determine memory type.
if (memoryProperty) {
- res.memory.typeIndex = lastGoodTypeIndexWithMemoryProperties(
- res.memReqs.memoryTypeBits, memoryProperty);
+ res.memory.typeIndex =
+ lastGoodTypeIndexWithMemoryProperties(res.memReqs.memoryTypeBits, memoryProperty);
} else {
res.memory.typeIndex = lastGoodTypeIndex(res.memReqs.memoryTypeBits);
}
@@ -2435,65 +2323,54 @@
bool isHostVisible = memoryProperty & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
Optional<uint64_t> deviceAlignment =
- isHostVisible ? Optional<uint64_t>(res.memReqs.alignment) : kNullopt;
- bool allocRes = allocExternalMemory(
- vk, &res.memory, true /* actuallyExternal */, deviceAlignment);
+ isHostVisible ? Optional<uint64_t>(res.memReqs.alignment) : kNullopt;
+ bool allocRes =
+ allocExternalMemory(vk, &res.memory, true /* actuallyExternal */, deviceAlignment);
if (!allocRes) {
// LOG(VERBOSE) << "Failed to allocate ColorBuffer with Vulkan backing.";
}
- res.memory.pageOffset =
- reinterpret_cast<uint64_t>(res.memory.mappedPtr) % kPageSize;
- res.memory.bindOffset =
- res.memory.pageOffset ? kPageSize - res.memory.pageOffset : 0u;
+ res.memory.pageOffset = reinterpret_cast<uint64_t>(res.memory.mappedPtr) % kPageSize;
+ res.memory.bindOffset = res.memory.pageOffset ? kPageSize - res.memory.pageOffset : 0u;
- VkResult bindBufferMemoryRes = vk->vkBindBufferMemory(
- sVkEmulation->device, res.buffer, res.memory.memory, 0);
+ VkResult bindBufferMemoryRes =
+ vk->vkBindBufferMemory(sVkEmulation->device, res.buffer, res.memory.memory, 0);
if (bindBufferMemoryRes != VK_SUCCESS) {
- fprintf(stderr, "%s: Failed to bind buffer memory. %d\n", __func__,
- bindBufferMemoryRes);
+ fprintf(stderr, "%s: Failed to bind buffer memory. %d\n", __func__, bindBufferMemoryRes);
return false;
}
- bool isHostVisibleMemory =
- memoryProperty & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ bool isHostVisibleMemory = memoryProperty & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
if (isHostVisibleMemory) {
- VkResult mapMemoryRes =
- vk->vkMapMemory(sVkEmulation->device, res.memory.memory, 0,
- res.memory.size, {}, &res.memory.mappedPtr);
+ VkResult mapMemoryRes = vk->vkMapMemory(sVkEmulation->device, res.memory.memory, 0,
+ res.memory.size, {}, &res.memory.mappedPtr);
if (mapMemoryRes != VK_SUCCESS) {
-            fprintf(stderr, "%s: Failed to map buffer memory. %d\n", __func__,
-                    mapMemoryRes);
+            fprintf(stderr, "%s: Failed to map buffer memory. %d\n", __func__, mapMemoryRes);
return false;
}
}
res.glExported = false;
- if (exported)
- *exported = res.glExported;
- if (allocSize)
- *allocSize = res.memory.size;
- if (typeIndex)
- *typeIndex = res.memory.typeIndex;
+ if (exported) *exported = res.glExported;
+ if (allocSize) *allocSize = res.memory.size;
+ if (typeIndex) *typeIndex = res.memory.typeIndex;
sVkEmulation->buffers[bufferHandle] = res;
return allocRes;
}
bool teardownVkBuffer(uint32_t bufferHandle) {
- if (!sVkEmulation || !sVkEmulation->live)
- return false;
+ if (!sVkEmulation || !sVkEmulation->live) return false;
auto vk = sVkEmulation->dvk;
AutoLock lock(sVkEmulationLock);
auto infoPtr = android::base::find(sVkEmulation->buffers, bufferHandle);
- if (!infoPtr)
- return false;
+ if (!infoPtr) return false;
{
android::base::AutoLock lock(*sVkEmulation->queueLock);
VK_CHECK(vk->vkQueueWaitIdle(sVkEmulation->queue));
@@ -2508,8 +2385,7 @@
}
VK_EXT_MEMORY_HANDLE getBufferExtMemoryHandle(uint32_t bufferHandle) {
- if (!sVkEmulation || !sVkEmulation->live)
- return VK_EXT_MEMORY_HANDLE_INVALID;
+ if (!sVkEmulation || !sVkEmulation->live) return VK_EXT_MEMORY_HANDLE_INVALID;
AutoLock lock(sVkEmulationLock);
@@ -2522,10 +2398,8 @@
return infoPtr->memory.exportedHandle;
}
-VkExternalMemoryHandleTypeFlags
-transformExternalMemoryHandleTypeFlags_tohost(
+VkExternalMemoryHandleTypeFlags transformExternalMemoryHandleTypeFlags_tohost(
VkExternalMemoryHandleTypeFlags bits) {
-
VkExternalMemoryHandleTypeFlags res = bits;
// Transform Android/Fuchsia/Linux bits to host bits.
@@ -2555,11 +2429,9 @@
return res;
}
-VkExternalMemoryHandleTypeFlags
-transformExternalMemoryHandleTypeFlags_fromhost(
+VkExternalMemoryHandleTypeFlags transformExternalMemoryHandleTypeFlags_fromhost(
VkExternalMemoryHandleTypeFlags hostBits,
VkExternalMemoryHandleTypeFlags wantedGuestHandleType) {
-
VkExternalMemoryHandleTypeFlags res = hostBits;
if (res & VK_EXT_MEMORY_HANDLE_TYPE_BIT) {
@@ -2575,32 +2447,23 @@
return res;
}
-VkExternalMemoryProperties
-transformExternalMemoryProperties_tohost(
+VkExternalMemoryProperties transformExternalMemoryProperties_tohost(
VkExternalMemoryProperties props) {
VkExternalMemoryProperties res = props;
res.exportFromImportedHandleTypes =
- transformExternalMemoryHandleTypeFlags_tohost(
- props.exportFromImportedHandleTypes);
+ transformExternalMemoryHandleTypeFlags_tohost(props.exportFromImportedHandleTypes);
res.compatibleHandleTypes =
- transformExternalMemoryHandleTypeFlags_tohost(
- props.compatibleHandleTypes);
+ transformExternalMemoryHandleTypeFlags_tohost(props.compatibleHandleTypes);
return res;
}
-VkExternalMemoryProperties
-transformExternalMemoryProperties_fromhost(
- VkExternalMemoryProperties props,
- VkExternalMemoryHandleTypeFlags wantedGuestHandleType) {
+VkExternalMemoryProperties transformExternalMemoryProperties_fromhost(
+ VkExternalMemoryProperties props, VkExternalMemoryHandleTypeFlags wantedGuestHandleType) {
VkExternalMemoryProperties res = props;
- res.exportFromImportedHandleTypes =
- transformExternalMemoryHandleTypeFlags_fromhost(
- props.exportFromImportedHandleTypes,
- wantedGuestHandleType);
- res.compatibleHandleTypes =
- transformExternalMemoryHandleTypeFlags_fromhost(
- props.compatibleHandleTypes,
- wantedGuestHandleType);
+ res.exportFromImportedHandleTypes = transformExternalMemoryHandleTypeFlags_fromhost(
+ props.exportFromImportedHandleTypes, wantedGuestHandleType);
+ res.compatibleHandleTypes = transformExternalMemoryHandleTypeFlags_fromhost(
+ props.compatibleHandleTypes, wantedGuestHandleType);
return res;
}
@@ -2620,9 +2483,8 @@
auto res = vk->vkGetFenceStatus(sVkEmulation->device, fence);
if (res == VK_SUCCESS) {
VK_CHECK(vk->vkResetFences(sVkEmulation->device, 1, &fence));
- VK_CHECK(vk->vkResetCommandBuffer(
- commandBuffer,
- VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT));
+ VK_CHECK(vk->vkResetCommandBuffer(commandBuffer,
+ VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT));
return std::make_tuple(commandBuffer, fence);
}
if (res == VK_NOT_READY) {
@@ -2640,16 +2502,14 @@
.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
.commandBufferCount = 1,
};
- VK_CHECK(vk->vkAllocateCommandBuffers(sVkEmulation->device, &allocateInfo,
- &commandBuffer));
+ VK_CHECK(vk->vkAllocateCommandBuffers(sVkEmulation->device, &allocateInfo, &commandBuffer));
VkFence fence;
VkFenceCreateInfo fenceCi = {
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
.pNext = nullptr,
.flags = 0,
};
- VK_CHECK(
- vk->vkCreateFence(sVkEmulation->device, &fenceCi, nullptr, &fence));
+ VK_CHECK(vk->vkCreateFence(sVkEmulation->device, &fenceCi, nullptr, &fence));
sVkEmulation->transferQueueCommandBufferPool.emplace_back(commandBuffer, fence);
@@ -2680,11 +2540,10 @@
std::vector<std::tuple<VkEmulation::ColorBufferInfo*, VkImageLayout>>
colorBufferInfosAndLayouts;
for (auto [colorBufferHandle, newLayout] : colorBuffersAndLayouts) {
- VkEmulation::ColorBufferInfo *infoPtr =
+ VkEmulation::ColorBufferInfo* infoPtr =
android::base::find(sVkEmulation->colorBuffers, colorBufferHandle);
if (!infoPtr) {
- VK_COMMON_ERROR("Invalid ColorBuffer handle %d.",
- static_cast<int>(colorBufferHandle));
+ VK_COMMON_ERROR("Invalid ColorBuffer handle %d.", static_cast<int>(colorBufferHandle));
continue;
}
colorBufferInfosAndLayouts.emplace_back(infoPtr, newLayout);
@@ -2816,11 +2675,9 @@
std::vector<VkImageMemoryBarrier> layoutTransitionBarriers;
std::vector<VkImageMemoryBarrier> queueTransferBarriers;
for (uint32_t colorBufferHandle : colorBufferHandles) {
- auto infoPtr =
- android::base::find(sVkEmulation->colorBuffers, colorBufferHandle);
+ auto infoPtr = android::base::find(sVkEmulation->colorBuffers, colorBufferHandle);
if (!infoPtr) {
- VK_COMMON_ERROR("Invalid ColorBuffer handle %d.",
- static_cast<int>(colorBufferHandle));
+ VK_COMMON_ERROR("Invalid ColorBuffer handle %d.", static_cast<int>(colorBufferHandle));
continue;
}
if (!infoPtr->ownedByHost->load()) {
@@ -2934,10 +2791,8 @@
AutoLock lock(sVkEmulationLock);
auto vk = sVkEmulation->dvk;
- static constexpr uint64_t ANB_MAX_WAIT_NS =
- 5ULL * 1000ULL * 1000ULL * 1000ULL;
- VK_CHECK(vk->vkWaitForFences(sVkEmulation->device, 1, &fence, VK_TRUE,
- ANB_MAX_WAIT_NS));
+ static constexpr uint64_t ANB_MAX_WAIT_NS = 5ULL * 1000ULL * 1000ULL * 1000ULL;
+ VK_CHECK(vk->vkWaitForFences(sVkEmulation->device, 1, &fence, VK_TRUE, ANB_MAX_WAIT_NS));
}
void setColorBufferCurrentLayout(uint32_t colorBufferHandle, VkImageLayout layout) {
@@ -2951,4 +2806,4 @@
infoPtr->currentLayout = layout;
}
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VkCommonOperations.h b/stream-servers/vulkan/VkCommonOperations.h
index 67b613b..e987df2 100644
--- a/stream-servers/vulkan/VkCommonOperations.h
+++ b/stream-servers/vulkan/VkCommonOperations.h
@@ -36,11 +36,9 @@
// memory. This is not the simplest thing in the world because even if a memory
// type index is host visible, that doesn't mean a VkBuffer is allowed to be
// associated with it.
-bool getStagingMemoryTypeIndex(
- VulkanDispatch* vk,
- VkDevice device,
- const VkPhysicalDeviceMemoryProperties* memProps,
- uint32_t* typeIndex);
+bool getStagingMemoryTypeIndex(VulkanDispatch* vk, VkDevice device,
+ const VkPhysicalDeviceMemoryProperties* memProps,
+ uint32_t* typeIndex);
#ifdef _WIN32
typedef void* HANDLE;
@@ -90,10 +88,8 @@
VulkanDispatch* dvk = nullptr;
bool instanceSupportsExternalMemoryCapabilities = false;
- PFN_vkGetPhysicalDeviceImageFormatProperties2KHR
- getImageFormatProperties2Func = nullptr;
- PFN_vkGetPhysicalDeviceProperties2KHR
- getPhysicalDeviceProperties2Func = nullptr;
+ PFN_vkGetPhysicalDeviceImageFormatProperties2KHR getImageFormatProperties2Func = nullptr;
+ PFN_vkGetPhysicalDeviceProperties2KHR getPhysicalDeviceProperties2Func = nullptr;
PFN_vkGetPhysicalDeviceFeatures2 getPhysicalDeviceFeatures2Func = nullptr;
bool instanceSupportsMoltenVK = false;
@@ -191,15 +187,13 @@
// guest physical address.
uintptr_t gpa = 0u;
- VK_EXT_MEMORY_HANDLE exportedHandle =
- VK_EXT_MEMORY_HANDLE_INVALID;
+ VK_EXT_MEMORY_HANDLE exportedHandle = VK_EXT_MEMORY_HANDLE_INVALID;
bool actuallyExternal = false;
};
// 128 mb staging buffer (really, just a few 4K frames or one 4k HDR frame)
// ought to be big enough for anybody!
- static constexpr VkDeviceSize kDefaultStagingBufferSize =
- 128ULL * 1048576ULL;
+ static constexpr VkDeviceSize kDefaultStagingBufferSize = 128ULL * 1048576ULL;
struct StagingBufferInfo {
// TODO: Don't actually use this as external memory until host visible
@@ -361,24 +355,16 @@
VkEmulation* getGlobalVkEmulation();
void teardownGlobalVkEmulation();
-bool allocExternalMemory(VulkanDispatch* vk,
- VkEmulation::ExternalMemoryInfo* info,
- bool actuallyExternal = true,
- android::base::Optional<uint64_t> deviceAlignment =
- android::base::kNullopt);
-void freeExternalMemoryLocked(VulkanDispatch* vk,
- VkEmulation::ExternalMemoryInfo* info);
+bool allocExternalMemory(
+ VulkanDispatch* vk, VkEmulation::ExternalMemoryInfo* info, bool actuallyExternal = true,
+ android::base::Optional<uint64_t> deviceAlignment = android::base::kNullopt);
+void freeExternalMemoryLocked(VulkanDispatch* vk, VkEmulation::ExternalMemoryInfo* info);
-bool importExternalMemory(VulkanDispatch* vk,
- VkDevice targetDevice,
- const VkEmulation::ExternalMemoryInfo* info,
- VkDeviceMemory* out);
-bool importExternalMemoryDedicatedImage(
- VulkanDispatch* vk,
- VkDevice targetDevice,
- const VkEmulation::ExternalMemoryInfo* info,
- VkImage image,
- VkDeviceMemory* out);
+bool importExternalMemory(VulkanDispatch* vk, VkDevice targetDevice,
+ const VkEmulation::ExternalMemoryInfo* info, VkDeviceMemory* out);
+bool importExternalMemoryDedicatedImage(VulkanDispatch* vk, VkDevice targetDevice,
+ const VkEmulation::ExternalMemoryInfo* info, VkImage image,
+ VkDeviceMemory* out);
// ColorBuffer operations
@@ -389,12 +375,9 @@
uint32_t height,
VkImageTiling tiling);
-bool setupVkColorBuffer(uint32_t colorBufferHandle,
- bool vulkanOnly = false,
- uint32_t memoryProperty = 0,
- bool* exported = nullptr,
- VkDeviceSize* allocSize = nullptr,
- uint32_t* typeIndex = nullptr,
+bool setupVkColorBuffer(uint32_t colorBufferHandle, bool vulkanOnly = false,
+ uint32_t memoryProperty = 0, bool* exported = nullptr,
+ VkDeviceSize* allocSize = nullptr, uint32_t* typeIndex = nullptr,
void** mappedPtr = nullptr);
bool teardownVkColorBuffer(uint32_t colorBufferHandle);
VkEmulation::ColorBufferInfo getColorBufferInfo(uint32_t colorBufferHandle);
@@ -403,38 +386,28 @@
VK_EXT_MEMORY_HANDLE getColorBufferExtMemoryHandle(uint32_t colorBufferHandle);
MTLTextureRef getColorBufferMTLTexture(uint32_t colorBufferHandle);
bool setColorBufferVulkanMode(uint32_t colorBufferHandle, uint32_t vulkanMode);
-int32_t mapGpaToBufferHandle(uint32_t bufferHandle,
- uint64_t gpa,
- uint64_t size = 0);
+int32_t mapGpaToBufferHandle(uint32_t bufferHandle, uint64_t gpa, uint64_t size = 0);
// Data buffer operations
-bool setupVkBuffer(uint32_t bufferHandle,
- bool vulkanOnly = false,
- uint32_t memoryProperty = 0,
- bool* exported = nullptr,
- VkDeviceSize* allocSize = nullptr,
+bool setupVkBuffer(uint32_t bufferHandle, bool vulkanOnly = false, uint32_t memoryProperty = 0,
+ bool* exported = nullptr, VkDeviceSize* allocSize = nullptr,
uint32_t* typeIndex = nullptr);
bool teardownVkBuffer(uint32_t bufferHandle);
VK_EXT_MEMORY_HANDLE getBufferExtMemoryHandle(uint32_t bufferHandle);
-VkExternalMemoryHandleTypeFlags
-transformExternalMemoryHandleTypeFlags_tohost(
+VkExternalMemoryHandleTypeFlags transformExternalMemoryHandleTypeFlags_tohost(
VkExternalMemoryHandleTypeFlags bits);
-VkExternalMemoryHandleTypeFlags
-transformExternalMemoryHandleTypeFlags_fromhost(
+VkExternalMemoryHandleTypeFlags transformExternalMemoryHandleTypeFlags_fromhost(
VkExternalMemoryHandleTypeFlags hostBits,
VkExternalMemoryHandleTypeFlags wantedGuestHandleType);
-VkExternalMemoryProperties
-transformExternalMemoryProperties_tohost(
+VkExternalMemoryProperties transformExternalMemoryProperties_tohost(
VkExternalMemoryProperties props);
-VkExternalMemoryProperties
-transformExternalMemoryProperties_fromhost(
- VkExternalMemoryProperties props,
- VkExternalMemoryHandleTypeFlags wantedGuestHandleType);
+VkExternalMemoryProperties transformExternalMemoryProperties_fromhost(
+ VkExternalMemoryProperties props, VkExternalMemoryHandleTypeFlags wantedGuestHandleType);
void acquireColorBuffersForHostComposing(const std::vector<uint32_t>& layerColorBuffers,
uint32_t renderTargetColorBuffer);
@@ -445,4 +418,4 @@
void setColorBufferCurrentLayout(uint32_t colorBufferHandle, VkImageLayout);
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VkDecoderGlobalState.cpp b/stream-servers/vulkan/VkDecoderGlobalState.cpp
index 6d1b2ef..d2d44c8 100644
--- a/stream-servers/vulkan/VkDecoderGlobalState.cpp
+++ b/stream-servers/vulkan/VkDecoderGlobalState.cpp
@@ -75,17 +75,16 @@
#define VKDGS_DEBUG 0
#if VKDGS_DEBUG
-#define VKDGS_LOG(fmt,...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define VKDGS_LOG(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#else
-#define VKDGS_LOG(fmt,...)
+#define VKDGS_LOG(fmt, ...)
#endif
namespace goldfish_vk {
// A list of extensions that should not be passed to the host driver.
// These will mainly include Vulkan features that we emulate ourselves.
-static constexpr const char* const
-kEmulatedExtensions[] = {
+static constexpr const char* const kEmulatedExtensions[] = {
"VK_ANDROID_external_memory_android_hardware_buffer",
"VK_ANDROID_native_buffer",
"VK_FUCHSIA_buffer_collection",
@@ -105,8 +104,7 @@
static constexpr uint32_t kMaxSafeVersion = VK_MAKE_VERSION(1, 1, 0);
static constexpr uint32_t kMinVersion = VK_MAKE_VERSION(1, 0, 0);
-#define DEFINE_BOXED_HANDLE_TYPE_TAG(type) \
- Tag_##type, \
+#define DEFINE_BOXED_HANDLE_TYPE_TAG(type) Tag_##type,
enum BoxedHandleTypeTag {
Tag_Invalid = 0,
@@ -165,7 +163,7 @@
void removeDelayed(uint64_t h, VkDevice device, std::function<void()> callback) {
AutoLock l(lock);
- delayedRemoves[device].push_back({ h, callback });
+ delayedRemoves[device].push_back({h, callback});
}
void processDelayedRemovesGlobalStateLocked(VkDevice device) {
@@ -184,9 +182,7 @@
delayedRemoves.erase(it);
}
- T* get(uint64_t h) {
- return (T*)store.get_const(h);
- }
+ T* get(uint64_t h) { return (T*)store.get_const(h); }
uint64_t getBoxedFromUnboxedLocked(uint64_t unboxed) {
auto res = android::base::find(reverseMap, unboxed);
@@ -202,13 +198,9 @@
uint32_t refcount = 1;
- void incRef() {
- __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST);
- }
+ void incRef() { __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
- bool decRef() {
- return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST);
- }
+ bool decRef() { return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
};
static void acquireOrderMaintInfo(OrderMaintenanceInfo* ord) {
@@ -234,27 +226,27 @@
static BoxedHandleManager<DispatchableHandleInfo<uint64_t>> sBoxedHandleManager;
struct ReadStreamRegistry {
- Lock mLock;
+ Lock mLock;
- std::vector<VulkanMemReadingStream*> freeStreams;
+ std::vector<VulkanMemReadingStream*> freeStreams;
- ReadStreamRegistry() { freeStreams.reserve(100); };
+ ReadStreamRegistry() { freeStreams.reserve(100); };
- VulkanMemReadingStream* pop() {
- AutoLock lock(mLock);
- if (freeStreams.empty()) {
- return new VulkanMemReadingStream(0);
- } else {
+ VulkanMemReadingStream* pop() {
+ AutoLock lock(mLock);
+ if (freeStreams.empty()) {
+ return new VulkanMemReadingStream(0);
+ } else {
VulkanMemReadingStream* res = freeStreams.back();
freeStreams.pop_back();
return res;
- }
- }
+ }
+ }
- void push(VulkanMemReadingStream* stream) {
- AutoLock lock(mLock);
- freeStreams.push_back(stream);
- }
+ void push(VulkanMemReadingStream* stream) {
+ AutoLock lock(mLock);
+ freeStreams.push_back(stream);
+ }
};
static ReadStreamRegistry sReadStreamRegistry;
@@ -266,7 +258,8 @@
m_emu(getGlobalVkEmulation()),
mRenderDocWithMultipleVkInstances(m_emu->guestRenderDoc.get()) {
mSnapshotsEnabled = feature_is_enabled(kFeature_VulkanSnapshots);
- mVkCleanupEnabled = android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
+ mVkCleanupEnabled =
+ android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
mLogging = android::base::getEnvironmentVariable("ANDROID_EMU_VK_LOG_CALLS") == "1";
mVerbosePrints = android::base::getEnvironmentVariable("ANDROID_EMUGL_VERBOSE") == "1";
if (get_emugl_address_space_device_control_ops().control_get_hw_funcs &&
@@ -312,17 +305,11 @@
sBoxedHandleManager.clear();
}
- bool snapshotsEnabled() const {
- return mSnapshotsEnabled;
- }
+ bool snapshotsEnabled() const { return mSnapshotsEnabled; }
- bool vkCleanupEnabled() const {
- return mVkCleanupEnabled;
- }
+ bool vkCleanupEnabled() const { return mVkCleanupEnabled; }
- void save(android::base::Stream* stream) {
- snapshot()->save(stream);
- }
+ void save(android::base::Stream* stream) { snapshot()->save(stream); }
void load(android::base::Stream* stream) {
// assume that we already destroyed all instances
@@ -335,13 +322,9 @@
snapshot()->load(stream);
}
- void lock() {
- mLock.lock();
- }
+ void lock() { mLock.lock(); }
- void unlock() {
- mLock.unlock();
- }
+ void unlock() { mLock.unlock(); }
size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
size_t consumed = 0;
@@ -374,9 +357,7 @@
mCreatedHandlesForSnapshotLoadIndex = 0;
}
- VkResult on_vkEnumerateInstanceVersion(
- android::base::BumpPool* pool,
- uint32_t* pApiVersion) {
+ VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion) {
if (m_vk->vkEnumerateInstanceVersion) {
VkResult res = m_vk->vkEnumerateInstanceVersion(pApiVersion);
@@ -390,16 +371,11 @@
return VK_SUCCESS;
}
- VkResult on_vkCreateInstance(
- android::base::BumpPool* pool,
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance) {
-
- std::vector<const char*> finalExts =
- filteredExtensionNames(
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ VkResult on_vkCreateInstance(android::base::BumpPool* pool,
+ const VkInstanceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
+ std::vector<const char*> finalExts = filteredExtensionNames(
+ pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
// Create higher version instance whenever it is possible.
uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
@@ -408,10 +384,8 @@
}
if (m_vk->vkEnumerateInstanceVersion) {
uint32_t instanceVersion;
- VkResult result =
- m_vk->vkEnumerateInstanceVersion(&instanceVersion);
- if (result == VK_SUCCESS &&
- instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
+ VkResult result = m_vk->vkEnumerateInstanceVersion(&instanceVersion);
+ if (result == VK_SUCCESS && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
apiVersion = instanceVersion;
}
}
@@ -441,27 +415,22 @@
// TODO: bug 129484301
get_emugl_vm_operations().setSkipSnapshotSave(
- !feature_is_enabled(kFeature_VulkanSnapshots));
+ !feature_is_enabled(kFeature_VulkanSnapshots));
InstanceInfo info;
info.apiVersion = apiVersion;
- for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount;
- ++i) {
- info.enabledExtensionNames.push_back(
- createInfoFiltered.ppEnabledExtensionNames[i]);
+ for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
+ info.enabledExtensionNames.push_back(createInfoFiltered.ppEnabledExtensionNames[i]);
}
// Box it up
VkInstance boxed = new_boxed_VkInstance(*pInstance, nullptr, true /* own dispatch */);
- init_vulkan_dispatch_from_instance(
- m_vk, *pInstance,
- dispatch_VkInstance(boxed));
+ init_vulkan_dispatch_from_instance(m_vk, *pInstance, dispatch_VkInstance(boxed));
info.boxed = boxed;
if (m_emu->instanceSupportsMoltenVK) {
if (!m_vk->vkSetMTLTextureMVK) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "Cannot find vkSetMTLTextureMVK";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find vkSetMTLTextureMVK";
}
}
@@ -473,22 +442,17 @@
if (!fb) return res;
if (vkCleanupEnabled()) {
- fb->registerProcessCleanupCallback(
- unbox_VkInstance(boxed),
- [this, boxed] {
-
- vkDestroyInstanceImpl(
- unbox_VkInstance(boxed),
- nullptr);
- });
+ fb->registerProcessCleanupCallback(unbox_VkInstance(boxed), [this, boxed] {
+ vkDestroyInstanceImpl(unbox_VkInstance(boxed), nullptr);
+ });
}
return res;
}
void vkDestroyInstanceImpl(VkInstance instance, const VkAllocationCallbacks* pAllocator) {
-
- // Do delayed removes out of the lock, but get the list of devices to destroy inside the lock.
+ // Do delayed removes out of the lock, but get the list of devices to destroy inside the
+ // lock.
{
AutoLock lock(mLock);
std::vector<VkDevice> devicesToDestroy;
@@ -501,13 +465,11 @@
}
}
- for (auto device: devicesToDestroy) {
- sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(
- device);
+ for (auto device : devicesToDestroy) {
+ sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
}
}
-
AutoLock lock(mLock);
teardownInstanceLocked(instance);
@@ -532,11 +494,8 @@
mInstanceInfo.erase(instance);
}
- void on_vkDestroyInstance(
- android::base::BumpPool* pool,
- VkInstance boxed_instance,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance boxed_instance,
+ const VkAllocationCallbacks* pAllocator) {
auto instance = unbox_VkInstance(boxed_instance);
vkDestroyInstanceImpl(instance, pAllocator);
@@ -547,8 +506,7 @@
fb->unregisterProcessCleanupCallback(instance);
}
- VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
- VkInstance boxed_instance,
+ VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance boxed_instance,
uint32_t* physicalDeviceCount,
VkPhysicalDevice* physicalDevices) {
auto instance = unbox_VkInstance(boxed_instance);
@@ -560,15 +518,13 @@
}
uint32_t actualPhysicalDeviceCount;
- auto res = vk->vkEnumeratePhysicalDevices(
- instance, &actualPhysicalDeviceCount, nullptr);
+ auto res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount, nullptr);
if (res != VK_SUCCESS) {
return res;
}
- std::vector<VkPhysicalDevice> validPhysicalDevices(
- actualPhysicalDeviceCount);
- res = vk->vkEnumeratePhysicalDevices(
- instance, &actualPhysicalDeviceCount, validPhysicalDevices.data());
+ std::vector<VkPhysicalDevice> validPhysicalDevices(actualPhysicalDeviceCount);
+ res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount,
+ validPhysicalDevices.data());
if (res != VK_SUCCESS) return res;
AutoLock lock(mLock);
@@ -584,26 +540,26 @@
instance);
if (getPhysdevProps2Func) {
- validPhysicalDevices.erase(std::remove_if(
- validPhysicalDevices.begin(), validPhysicalDevices.end(),
- [getPhysdevProps2Func,
- this](VkPhysicalDevice physicalDevice) {
- // We can get the device UUID.
- VkPhysicalDeviceIDPropertiesKHR idProps = {
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
- nullptr,
- };
- VkPhysicalDeviceProperties2KHR propsWithId = {
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
- &idProps,
- };
- getPhysdevProps2Func(physicalDevice, &propsWithId);
+ validPhysicalDevices.erase(
+ std::remove_if(validPhysicalDevices.begin(), validPhysicalDevices.end(),
+ [getPhysdevProps2Func, this](VkPhysicalDevice physicalDevice) {
+ // We can get the device UUID.
+ VkPhysicalDeviceIDPropertiesKHR idProps = {
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+ nullptr,
+ };
+ VkPhysicalDeviceProperties2KHR propsWithId = {
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+ &idProps,
+ };
+ getPhysdevProps2Func(physicalDevice, &propsWithId);
- // Remove those devices whose UUIDs don't match the one
- // in VkCommonOperations.
- return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
- idProps.deviceUUID, VK_UUID_SIZE) != 0;
- }), validPhysicalDevices.end());
+ // Remove those devices whose UUIDs don't match the one
+ // in VkCommonOperations.
+ return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
+ idProps.deviceUUID, VK_UUID_SIZE) != 0;
+ }),
+ validPhysicalDevices.end());
} else {
fprintf(stderr,
"%s: warning: failed to "
@@ -629,33 +585,29 @@
if (physicalDeviceCount && physicalDevices) {
// Box them up
- for (uint32_t i = 0;
- i < std::min(*physicalDeviceCount, physicalDevicesSize); ++i) {
+ for (uint32_t i = 0; i < std::min(*physicalDeviceCount, physicalDevicesSize); ++i) {
mPhysicalDeviceToInstance[validPhysicalDevices[i]] = instance;
auto& physdevInfo = mPhysdevInfo[validPhysicalDevices[i]];
- physdevInfo.boxed = new_boxed_VkPhysicalDevice(
- validPhysicalDevices[i], vk,
- false /* does not own dispatch */);
+ physdevInfo.boxed = new_boxed_VkPhysicalDevice(validPhysicalDevices[i], vk,
+ false /* does not own dispatch */);
- vk->vkGetPhysicalDeviceProperties(validPhysicalDevices[i],
- &physdevInfo.props);
+ vk->vkGetPhysicalDeviceProperties(validPhysicalDevices[i], &physdevInfo.props);
if (physdevInfo.props.apiVersion > kMaxSafeVersion) {
physdevInfo.props.apiVersion = kMaxSafeVersion;
}
- vk->vkGetPhysicalDeviceMemoryProperties(
- validPhysicalDevices[i], &physdevInfo.memoryProperties);
+ vk->vkGetPhysicalDeviceMemoryProperties(validPhysicalDevices[i],
+ &physdevInfo.memoryProperties);
uint32_t queueFamilyPropCount = 0;
- vk->vkGetPhysicalDeviceQueueFamilyProperties(
- validPhysicalDevices[i], &queueFamilyPropCount, nullptr);
+ vk->vkGetPhysicalDeviceQueueFamilyProperties(validPhysicalDevices[i],
+ &queueFamilyPropCount, nullptr);
- physdevInfo.queueFamilyProperties.resize(
- (size_t)queueFamilyPropCount);
+ physdevInfo.queueFamilyProperties.resize((size_t)queueFamilyPropCount);
vk->vkGetPhysicalDeviceQueueFamilyProperties(
validPhysicalDevices[i], &queueFamilyPropCount,
@@ -671,11 +623,9 @@
return res;
}
- void on_vkGetPhysicalDeviceFeatures(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures) {
-
+ void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceFeatures* pFeatures) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
@@ -684,18 +634,15 @@
pFeatures->textureCompressionASTC_LDR |= kEmulateAstc;
}
- void on_vkGetPhysicalDeviceFeatures2(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
-
+ void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceFeatures2* pFeatures) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
AutoLock lock(mLock);
- auto physdevInfo =
- android::base::find(mPhysdevInfo, physicalDevice);
+ auto physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
if (!physdevInfo) return;
auto instance = mPhysicalDeviceToInstance[physicalDevice];
@@ -717,7 +664,10 @@
"the extension!!!!11111\n",
__func__);
}
- *pFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, 0, };
+ *pFeatures = {
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+ 0,
+ };
vk->vkGetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
}
@@ -726,25 +676,19 @@
}
VkResult on_vkGetPhysicalDeviceImageFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties) {
-
+ android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice, VkFormat format,
+ VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+ VkImageFormatProperties* pImageFormatProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
bool emulatedEtc2 = needEmulatedEtc2(physicalDevice, vk);
bool emulatedAstc = needEmulatedAstc(physicalDevice, vk);
if (emulatedEtc2 || emulatedAstc) {
CompressedImageInfo cmpInfo = createCompressedImageInfo(format);
- if (cmpInfo.isCompressed && ((emulatedEtc2 && cmpInfo.isEtc2) ||
- (emulatedAstc && cmpInfo.isAstc))) {
- if (!supportEmulatedCompressedImageFormatProperty(
- format, type, tiling, usage, flags)) {
+ if (cmpInfo.isCompressed &&
+ ((emulatedEtc2 && cmpInfo.isEtc2) || (emulatedAstc && cmpInfo.isAstc))) {
+ if (!supportEmulatedCompressedImageFormatProperty(format, type, tiling, usage,
+ flags)) {
memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
@@ -754,32 +698,28 @@
format = cmpInfo.sizeCompFormat;
}
}
- return vk->vkGetPhysicalDeviceImageFormatProperties(
- physicalDevice, format, type, tiling, usage, flags,
- pImageFormatProperties);
+ return vk->vkGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling,
+ usage, flags, pImageFormatProperties);
}
VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
-
+ android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
+ const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
+ VkImageFormatProperties2* pImageFormatProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
bool emulatedEtc2 = needEmulatedEtc2(physicalDevice, vk);
bool emulatedAstc = needEmulatedAstc(physicalDevice, vk);
if (emulatedEtc2 || emulatedAstc) {
- CompressedImageInfo cmpInfo =
- createCompressedImageInfo(pImageFormatInfo->format);
- if (cmpInfo.isCompressed && ((emulatedEtc2 && cmpInfo.isEtc2) ||
- (emulatedAstc && cmpInfo.isAstc))) {
+ CompressedImageInfo cmpInfo = createCompressedImageInfo(pImageFormatInfo->format);
+ if (cmpInfo.isCompressed &&
+ ((emulatedEtc2 && cmpInfo.isEtc2) || (emulatedAstc && cmpInfo.isAstc))) {
if (!supportEmulatedCompressedImageFormatProperty(
- pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
- pImageFormatInfo->usage, pImageFormatInfo->flags)) {
+ pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
+ pImageFormatInfo->usage, pImageFormatInfo->flags)) {
memset(&pImageFormatProperties->imageFormatProperties, 0,
- sizeof(VkImageFormatProperties));
+ sizeof(VkImageFormatProperties));
return VK_ERROR_FORMAT_NOT_SUPPORTED;
}
imageFormatInfo = *pImageFormatInfo;
@@ -807,13 +747,12 @@
if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
- res = vk->vkGetPhysicalDeviceImageFormatProperties2(
- physicalDevice, pImageFormatInfo, pImageFormatProperties);
- } else if (hasInstanceExtension(
- instance,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(
- physicalDevice, pImageFormatInfo, pImageFormatProperties);
+ res = vk->vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo,
+ pImageFormatProperties);
+ } else if (hasInstanceExtension(instance,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo,
+ pImageFormatProperties);
} else {
// No instance extension, fake it!!!!
if (pImageFormatProperties->pNext) {
@@ -828,9 +767,8 @@
0,
};
res = vk->vkGetPhysicalDeviceImageFormatProperties(
- physicalDevice, pImageFormatInfo->format,
- pImageFormatInfo->type, pImageFormatInfo->tiling,
- pImageFormatInfo->usage, pImageFormatInfo->flags,
+ physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type,
+ pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags,
&pImageFormatProperties->imageFormatProperties);
}
@@ -848,37 +786,31 @@
return res;
}
- void on_vkGetPhysicalDeviceFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties) {
-
+ void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkFormat format,
+ VkFormatProperties* pFormatProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
- [vk](VkPhysicalDevice physicalDevice, VkFormat format,
- VkFormatProperties* pFormatProperties) {
- vk->vkGetPhysicalDeviceFormatProperties(
- physicalDevice, format, pFormatProperties);
- },
- vk, physicalDevice, format, pFormatProperties);
+ [vk](VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties* pFormatProperties) {
+ vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+ },
+ vk, physicalDevice, format, pFormatProperties);
}
- void on_vkGetPhysicalDeviceFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
-
+ void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkFormat format,
+ VkFormatProperties2* pFormatProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
AutoLock lock(mLock);
auto physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
- if (!physdevInfo)
- return;
+ if (!physdevInfo) return;
auto instance = mPhysicalDeviceToInstance[physicalDevice];
auto instanceInfo = android::base::find(mInstanceInfo, instance);
@@ -887,22 +819,21 @@
if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
- [vk](VkPhysicalDevice physicalDevice, VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- vk->vkGetPhysicalDeviceFormatProperties2(
- physicalDevice, format, pFormatProperties);
- },
- vk, physicalDevice, format, pFormatProperties);
- } else if (hasInstanceExtension(
- instance,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ [vk](VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties2* pFormatProperties) {
+ vk->vkGetPhysicalDeviceFormatProperties2(physicalDevice, format,
+ pFormatProperties);
+ },
+ vk, physicalDevice, format, pFormatProperties);
+ } else if (hasInstanceExtension(instance,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
- [vk](VkPhysicalDevice physicalDevice, VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- vk->vkGetPhysicalDeviceFormatProperties2KHR(
- physicalDevice, format, pFormatProperties);
- },
- vk, physicalDevice, format, pFormatProperties);
+ [vk](VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties2* pFormatProperties) {
+ vk->vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format,
+ pFormatProperties);
+ },
+ vk, physicalDevice, format, pFormatProperties);
} else {
// No instance extension, fake it!!!!
if (pFormatProperties->pNext) {
@@ -914,44 +845,37 @@
}
pFormatProperties->sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
- [vk](VkPhysicalDevice physicalDevice, VkFormat format,
- VkFormatProperties* pFormatProperties) {
- vk->vkGetPhysicalDeviceFormatProperties(
- physicalDevice, format, pFormatProperties);
- },
- vk, physicalDevice, format,
- &pFormatProperties->formatProperties);
+ [vk](VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties* pFormatProperties) {
+ vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format,
+ pFormatProperties);
+ },
+ vk, physicalDevice, format, &pFormatProperties->formatProperties);
}
}
- void on_vkGetPhysicalDeviceProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceProperties* pProperties) {
-
+ void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceProperties* pProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
- vk->vkGetPhysicalDeviceProperties(
- physicalDevice, pProperties);
+ vk->vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
if (pProperties->apiVersion > kMaxSafeVersion) {
pProperties->apiVersion = kMaxSafeVersion;
}
}
- void on_vkGetPhysicalDeviceProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
-
+ void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceProperties2* pProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
AutoLock lock(mLock);
- auto physdevInfo =
- android::base::find(mPhysdevInfo, physicalDevice);
+ auto physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
if (!physdevInfo) return;
auto instance = mPhysicalDeviceToInstance[physicalDevice];
@@ -973,7 +897,10 @@
"the extension!!!!11111\n",
__func__);
}
- *pProperties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, 0, };
+ *pProperties = {
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+ 0,
+ };
vk->vkGetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
}
@@ -983,21 +910,17 @@
}
void on_vkGetPhysicalDeviceMemoryProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
-
+ android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
- vk->vkGetPhysicalDeviceMemoryProperties(
- physicalDevice, pMemoryProperties);
+ vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
// Pick a max heap size that will work around
// drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
// plus won't break the bank on 32-bit userspace.
- static constexpr VkDeviceSize kMaxSafeHeapSize =
- 2ULL * 1024ULL * 1024ULL * 1024ULL;
+ static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
for (uint32_t i = 0; i < pMemoryProperties->memoryTypeCount; ++i) {
uint32_t heapIndex = pMemoryProperties->memoryTypes[i].heapIndex;
@@ -1017,15 +940,12 @@
}
void on_vkGetPhysicalDeviceMemoryProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
-
+ android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
+ VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
- auto physdevInfo =
- android::base::find(mPhysdevInfo, physicalDevice);
+ auto physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
if (!physdevInfo) return;
auto instance = mPhysicalDeviceToInstance[physicalDevice];
@@ -1047,15 +967,18 @@
"the extension!!!!11111\n",
__func__);
}
- *pMemoryProperties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, 0, };
- vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
+ *pMemoryProperties = {
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+ 0,
+ };
+ vk->vkGetPhysicalDeviceMemoryProperties(physicalDevice,
+ &pMemoryProperties->memoryProperties);
}
// Pick a max heap size that will work around
// drivers that give bad suggestions (such as 0xFFFFFFFFFFFFFFFF for the heap size)
// plus won't break the bank on 32-bit userspace.
- static constexpr VkDeviceSize kMaxSafeHeapSize =
- 2ULL * 1024ULL * 1024ULL * 1024ULL;
+ static constexpr VkDeviceSize kMaxSafeHeapSize = 2ULL * 1024ULL * 1024ULL * 1024ULL;
for (uint32_t i = 0; i < pMemoryProperties->memoryProperties.memoryTypeCount; ++i) {
uint32_t heapIndex = pMemoryProperties->memoryProperties.memoryTypes[i].heapIndex;
@@ -1074,19 +997,17 @@
}
}
- VkResult on_vkEnumerateDeviceExtensionProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties) {
-
+ VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice boxed_physicalDevice,
+ const char* pLayerName,
+ uint32_t* pPropertyCount,
+ VkExtensionProperties* pProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
if (!m_emu->instanceSupportsMoltenVK) {
- return vk->vkEnumerateDeviceExtensionProperties(
- physicalDevice, pLayerName, pPropertyCount, pProperties);
+ return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
+ pPropertyCount, pProperties);
}
// If MoltenVK is supported on host, we need to ensure that we include
@@ -1115,13 +1036,9 @@
return VK_SUCCESS;
}
- VkResult on_vkCreateDevice(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice) {
-
+ VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
+ const VkDeviceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
if (mLogging) {
fprintf(stderr, "%s: begin\n", __func__);
}
@@ -1129,10 +1046,8 @@
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
- std::vector<const char*> finalExts =
- filteredExtensionNames(
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ std::vector<const char*> finalExts = filteredExtensionNames(
+ pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
#ifdef _WIN32
// Always request VK_KHR_external_semaphore_win32 if it's supported. This fixes a crash
@@ -1170,14 +1085,12 @@
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
if (needEmulatedEtc2(physicalDevice, vk)) {
emulateTextureEtc2 = true;
- VkPhysicalDeviceFeatures2* features2 =
- (VkPhysicalDeviceFeatures2*)ext;
+ VkPhysicalDeviceFeatures2* features2 = (VkPhysicalDeviceFeatures2*)ext;
features2->features.textureCompressionETC2 = false;
}
if (needEmulatedAstc(physicalDevice, vk)) {
emulateTextureAstc = true;
- VkPhysicalDeviceFeatures2* features2 =
- (VkPhysicalDeviceFeatures2*)ext;
+ VkPhysicalDeviceFeatures2* features2 = (VkPhysicalDeviceFeatures2*)ext;
features2->features.textureCompressionASTC_LDR = false;
}
break;
@@ -1201,8 +1114,7 @@
}
VkResult result =
- vk->vkCreateDevice(
- physicalDevice, &createInfoFiltered, pAllocator, pDevice);
+ vk->vkCreateDevice(physicalDevice, &createInfoFiltered, pAllocator, pDevice);
if (mLogging) {
fprintf(stderr, "%s: host returned. result: %d\n", __func__, result);
@@ -1228,8 +1140,7 @@
deviceInfo.emulateTextureEtc2 = emulateTextureEtc2;
deviceInfo.emulateTextureAstc = emulateTextureAstc;
- for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount;
- ++i) {
+ for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
deviceInfo.enabledExtensionNames.push_back(
createInfoFiltered.ppEnabledExtensionNames[i]);
}
@@ -1241,9 +1152,7 @@
fprintf(stderr, "%s: init vulkan dispatch from device\n", __func__);
}
- init_vulkan_dispatch_from_device(
- vk, *pDevice,
- dispatch_VkDevice(boxed));
+ init_vulkan_dispatch_from_device(vk, *pDevice, dispatch_VkDevice(boxed));
if (mLogging) {
fprintf(stderr, "%s: init vulkan dispatch from device (end)\n", __func__);
@@ -1254,8 +1163,7 @@
// Next, get information about the queue families used by this device.
std::unordered_map<uint32_t, uint32_t> queueFamilyIndexCounts;
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
- const auto& queueCreateInfo =
- pCreateInfo->pQueueCreateInfos[i];
+ const auto& queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
// Check only queues created with flags = 0 in VkDeviceQueueCreateInfo.
auto flags = queueCreateInfo.flags;
if (flags) continue;
@@ -1277,8 +1185,7 @@
fprintf(stderr, "%s: get device queue (begin)\n", __func__);
}
- vk->vkGetDeviceQueue(
- *pDevice, index, i, &queueOut);
+ vk->vkGetDeviceQueue(*pDevice, index, i, &queueOut);
if (mLogging) {
fprintf(stderr, "%s: get device queue (end)\n", __func__);
@@ -1287,7 +1194,8 @@
mQueueInfo[queueOut].device = *pDevice;
mQueueInfo[queueOut].queueFamilyIndex = index;
- auto boxed = new_boxed_VkQueue(queueOut, dispatch_VkDevice(deviceInfo.boxed), false /* does not own dispatch */);
+ auto boxed = new_boxed_VkQueue(queueOut, dispatch_VkDevice(deviceInfo.boxed),
+ false /* does not own dispatch */);
mQueueInfo[queueOut].boxed = boxed;
mQueueInfo[queueOut].lock = new Lock;
}
@@ -1303,13 +1211,8 @@
return VK_SUCCESS;
}
- void on_vkGetDeviceQueue(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
-
+ void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice boxed_device,
+ uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {
auto device = unbox_VkDevice(boxed_device);
AutoLock lock(mLock);
@@ -1319,11 +1222,9 @@
auto deviceInfo = android::base::find(mDeviceInfo, device);
if (!deviceInfo) return;
- const auto& queues =
- deviceInfo->queues;
+ const auto& queues = deviceInfo->queues;
- const auto queueList =
- android::base::find(queues, queueFamilyIndex);
+ const auto queueList = android::base::find(queues, queueFamilyIndex);
if (!queueList) return;
if (queueIndex >= queueList->size()) return;
@@ -1342,7 +1243,7 @@
if (it == mDeviceInfo.end()) return;
auto eraseIt = mQueueInfo.begin();
- for(; eraseIt != mQueueInfo.end();) {
+ for (; eraseIt != mQueueInfo.end();) {
if (eraseIt->second.device == device) {
delete eraseIt->second.lock;
delete_VkQueue(eraseIt->second.boxed);
@@ -1368,11 +1269,8 @@
delete_VkDevice(it->second.boxed);
}
- void on_vkDestroyDevice(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
AutoLock lock(mLock);
@@ -1384,18 +1282,13 @@
mDeviceToPhysicalDevice.erase(device);
}
- VkResult on_vkCreateBuffer(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
-
+ VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+ VkResult result = vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
if (result == VK_SUCCESS) {
AutoLock lock(mLock);
@@ -1409,11 +1302,8 @@
return result;
}
- void on_vkDestroyBuffer(
- android::base::BumpPool* pool,
- VkDevice boxed_device, VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice boxed_device, VkBuffer buffer,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1423,8 +1313,8 @@
mBufferInfo.erase(buffer);
}
- void setBufferMemoryBindInfoLocked(
- VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
+ void setBufferMemoryBindInfoLocked(VkBuffer buffer, VkDeviceMemory memory,
+ VkDeviceSize memoryOffset) {
auto it = mBufferInfo.find(buffer);
if (it == mBufferInfo.end()) {
return;
@@ -1433,18 +1323,13 @@
it->second.memoryOffset = memoryOffset;
}
- VkResult on_vkBindBufferMemory(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
-
+ VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkBuffer buffer, VkDeviceMemory memory,
+ VkDeviceSize memoryOffset) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
+ VkResult result = vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
if (result == VK_SUCCESS) {
AutoLock lock(mLock);
@@ -1453,64 +1338,47 @@
return result;
}
- VkResult on_vkBindBufferMemory2(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
-
+ VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
+ uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
+ VkResult result = vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
if (result == VK_SUCCESS) {
AutoLock lock(mLock);
for (uint32_t i = 0; i < bindInfoCount; ++i) {
- setBufferMemoryBindInfoLocked(
- pBindInfos[i].buffer,
- pBindInfos[i].memory,
- pBindInfos[i].memoryOffset);
+ setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
+ pBindInfos[i].memoryOffset);
}
}
return result;
}
- VkResult on_vkBindBufferMemory2KHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
-
+ VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice boxed_device,
+ uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+ VkResult result = vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
if (result == VK_SUCCESS) {
AutoLock lock(mLock);
for (uint32_t i = 0; i < bindInfoCount; ++i) {
- setBufferMemoryBindInfoLocked(
- pBindInfos[i].buffer,
- pBindInfos[i].memory,
- pBindInfos[i].memoryOffset);
+ setBufferMemoryBindInfoLocked(pBindInfos[i].buffer, pBindInfos[i].memory,
+ pBindInfos[i].memoryOffset);
}
}
return result;
-
}
- VkResult on_vkCreateImage(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
-
+ VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImage* pImage) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1524,8 +1392,7 @@
CompressedImageInfo cmpInfo = {};
VkImageCreateInfo& sizeCompInfo = cmpInfo.sizeCompImgCreateInfo;
VkImageCreateInfo decompInfo;
- if (deviceInfoIt->second.needEmulatedDecompression(
- pCreateInfo->format)) {
+ if (deviceInfoIt->second.needEmulatedDecompression(pCreateInfo->format)) {
cmpInfo = createCompressedImageInfo(pCreateInfo->format);
cmpInfo.imageType = pCreateInfo->imageType;
cmpInfo.extent = pCreateInfo->extent;
@@ -1534,29 +1401,25 @@
sizeCompInfo = *pCreateInfo;
sizeCompInfo.format = cmpInfo.sizeCompFormat;
sizeCompInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
- sizeCompInfo.flags &=
- ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
+ sizeCompInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
sizeCompInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
// Each block is 4x4 in ETC2 compressed texture
- sizeCompInfo.extent.width = (sizeCompInfo.extent.width +
- cmpInfo.compressedBlockWidth - 1) /
- cmpInfo.compressedBlockWidth;
- sizeCompInfo.extent.height = (sizeCompInfo.extent.height +
- cmpInfo.compressedBlockHeight - 1) /
- cmpInfo.compressedBlockHeight;
+ sizeCompInfo.extent.width =
+ (sizeCompInfo.extent.width + cmpInfo.compressedBlockWidth - 1) /
+ cmpInfo.compressedBlockWidth;
+ sizeCompInfo.extent.height =
+ (sizeCompInfo.extent.height + cmpInfo.compressedBlockHeight - 1) /
+ cmpInfo.compressedBlockHeight;
sizeCompInfo.mipLevels = 1;
if (pCreateInfo->queueFamilyIndexCount) {
cmpInfo.sizeCompImgQueueFamilyIndices.assign(
- pCreateInfo->pQueueFamilyIndices,
- pCreateInfo->pQueueFamilyIndices +
- pCreateInfo->queueFamilyIndexCount);
- sizeCompInfo.pQueueFamilyIndices =
- cmpInfo.sizeCompImgQueueFamilyIndices.data();
+ pCreateInfo->pQueueFamilyIndices,
+ pCreateInfo->pQueueFamilyIndices + pCreateInfo->queueFamilyIndexCount);
+ sizeCompInfo.pQueueFamilyIndices = cmpInfo.sizeCompImgQueueFamilyIndices.data();
}
decompInfo = *pCreateInfo;
decompInfo.format = cmpInfo.decompFormat;
- decompInfo.flags &=
- ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
+ decompInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR;
decompInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
decompInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
pCreateInfo = &decompInfo;
@@ -1570,19 +1433,15 @@
VkResult createRes = VK_SUCCESS;
if (nativeBufferANDROID) {
-
auto memProps = memPropsOfDeviceLocked(device);
- createRes =
- prepareAndroidNativeBufferImage(
- vk, device, pCreateInfo, nativeBufferANDROID, pAllocator,
- memProps, anbInfo);
+ createRes = prepareAndroidNativeBufferImage(
+ vk, device, pCreateInfo, nativeBufferANDROID, pAllocator, memProps, anbInfo);
if (createRes == VK_SUCCESS) {
*pImage = anbInfo->image;
}
} else {
- createRes =
- vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
+ createRes = vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
}
if (createRes != VK_SUCCESS) return createRes;
@@ -1603,12 +1462,8 @@
return createRes;
}
- void on_vkDestroyImage(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice boxed_device, VkImage image,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1628,14 +1483,11 @@
for (const auto& image : cmpInfo.sizeCompImgs) {
vk->vkDestroyImage(device, image, nullptr);
}
- vk->vkDestroyDescriptorSetLayout(
- device, cmpInfo.decompDescriptorSetLayout, nullptr);
- vk->vkDestroyDescriptorPool(
- device, cmpInfo.decompDescriptorPool, nullptr);
- vk->vkDestroyShaderModule(device, cmpInfo.decompShader,
- nullptr);
- vk->vkDestroyPipelineLayout(
- device, cmpInfo.decompPipelineLayout, nullptr);
+ vk->vkDestroyDescriptorSetLayout(device, cmpInfo.decompDescriptorSetLayout,
+ nullptr);
+ vk->vkDestroyDescriptorPool(device, cmpInfo.decompDescriptorPool, nullptr);
+ vk->vkDestroyShaderModule(device, cmpInfo.decompShader, nullptr);
+ vk->vkDestroyPipelineLayout(device, cmpInfo.decompPipelineLayout, nullptr);
vk->vkDestroyPipeline(device, cmpInfo.decompPipeline, nullptr);
for (const auto& imageView : cmpInfo.sizeCompImageViews) {
vk->vkDestroyImageView(device, imageView, nullptr);
@@ -1649,15 +1501,11 @@
mImageInfo.erase(image);
}
- VkResult on_vkBindImageMemory(android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
+ VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkBindImageMemory(device, image, memory, memoryOffset);
+ VkResult result = vk->vkBindImageMemory(device, image, memory, memoryOffset);
if (VK_SUCCESS != result) {
return result;
}
@@ -1677,8 +1525,7 @@
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
}
- if (!deviceInfoIt->second.emulateTextureEtc2 &&
- !deviceInfoIt->second.emulateTextureAstc) {
+ if (!deviceInfoIt->second.emulateTextureEtc2 && !deviceInfoIt->second.emulateTextureAstc) {
return VK_SUCCESS;
}
auto imageInfoIt = mImageInfo.find(image);
@@ -1691,19 +1538,15 @@
}
for (size_t i = 0; i < cmp.sizeCompImgs.size(); i++) {
result = vk->vkBindImageMemory(device, cmp.sizeCompImgs[i], memory,
- memoryOffset + cmp.memoryOffsets[i]);
+ memoryOffset + cmp.memoryOffsets[i]);
}
return VK_SUCCESS;
}
- VkResult on_vkCreateImageView(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
-
+ VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkImageViewCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImageView* pView) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1722,11 +1565,8 @@
}
VkImageViewCreateInfo createInfo;
bool needEmulatedAlpha = false;
- if (deviceInfoIt->second.emulateTextureEtc2 ||
- deviceInfoIt->second.emulateTextureAstc) {
- CompressedImageInfo cmpInfo = createCompressedImageInfo(
- pCreateInfo->format
- );
+ if (deviceInfoIt->second.emulateTextureEtc2 || deviceInfoIt->second.emulateTextureAstc) {
+ CompressedImageInfo cmpInfo = createCompressedImageInfo(pCreateInfo->format);
if (deviceInfoIt->second.needEmulatedDecompression(cmpInfo)) {
if (imageInfoIt->second.cmpInfo.decompImg) {
createInfo = *pCreateInfo;
@@ -1736,27 +1576,24 @@
pCreateInfo = &createInfo;
}
} else if (deviceInfoIt->second.needEmulatedDecompression(
- imageInfoIt->second.cmpInfo)) {
+ imageInfoIt->second.cmpInfo)) {
// Size compatible image view
createInfo = *pCreateInfo;
createInfo.format = cmpInfo.sizeCompFormat;
needEmulatedAlpha = false;
- createInfo.image =
- imageInfoIt->second.cmpInfo.sizeCompImgs
- [pCreateInfo->subresourceRange.baseMipLevel];
+ createInfo.image = imageInfoIt->second.cmpInfo
+ .sizeCompImgs[pCreateInfo->subresourceRange.baseMipLevel];
createInfo.subresourceRange.baseMipLevel = 0;
pCreateInfo = &createInfo;
}
}
- if (imageInfoIt->second.anbInfo &&
- imageInfoIt->second.anbInfo->externallyBacked) {
+ if (imageInfoIt->second.anbInfo && imageInfoIt->second.anbInfo->externallyBacked) {
createInfo = *pCreateInfo;
createInfo.format = imageInfoIt->second.anbInfo->vkFormat;
pCreateInfo = &createInfo;
}
- VkResult result =
- vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
+ VkResult result = vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
if (result != VK_SUCCESS) {
return result;
}
@@ -1769,12 +1606,8 @@
return result;
}
- void on_vkDestroyImageView(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1783,17 +1616,12 @@
mImageViewInfo.erase(imageView);
}
- VkResult on_vkCreateSampler(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
-
+ VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkSamplerCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
+ VkResult result = vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
if (result != VK_SUCCESS) {
return result;
}
@@ -1803,28 +1631,19 @@
// We emulate RGB with RGBA for some compressed textures, which does not
// handle transparent borders correctly.
samplerInfo.needEmulatedAlpha =
- (pCreateInfo->addressModeU ==
- VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
- pCreateInfo->addressModeV ==
- VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
- pCreateInfo->addressModeW ==
- VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
- (pCreateInfo->borderColor ==
- VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
- pCreateInfo->borderColor ==
- VK_BORDER_COLOR_INT_TRANSPARENT_BLACK);
+ (pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
+ pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
+ pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
+ (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
+ pCreateInfo->borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK);
*pSampler = new_boxed_non_dispatchable_VkSampler(*pSampler);
return result;
}
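// Illustrative sketch of the emulated-alpha predicate above, written as a free function;
// samplerNeedsEmulatedAlpha is a made-up name used only for this example.
static bool samplerNeedsEmulatedAlpha(const VkSamplerCreateInfo& ci) {
    const bool clampsToBorder = ci.addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
                                ci.addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
                                ci.addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
    const bool transparentBorder = ci.borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
                                   ci.borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    return clampsToBorder && transparentBorder;
}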
- void on_vkDestroySampler(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
vk->vkDestroySampler(device, sampler, pAllocator);
@@ -1832,21 +1651,16 @@
const auto& samplerInfoIt = mSamplerInfo.find(sampler);
if (samplerInfoIt != mSamplerInfo.end()) {
if (samplerInfoIt->second.emulatedborderSampler != VK_NULL_HANDLE) {
- vk->vkDestroySampler(
- device, samplerInfoIt->second.emulatedborderSampler,
- nullptr);
+ vk->vkDestroySampler(device, samplerInfoIt->second.emulatedborderSampler, nullptr);
}
mSamplerInfo.erase(samplerInfoIt);
}
}
- VkResult on_vkCreateSemaphore(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
-
+ VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkSemaphoreCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSemaphore* pSemaphore) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1855,10 +1669,9 @@
VkSemaphoreTypeCreateInfoKHR localSemaphoreTypeCreateInfo;
if (const VkSemaphoreTypeCreateInfoKHR* semaphoreTypeCiPtr =
- vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
+ vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
semaphoreTypeCiPtr) {
- localSemaphoreTypeCreateInfo =
- vk_make_orphan_copy(*semaphoreTypeCiPtr);
+ localSemaphoreTypeCreateInfo = vk_make_orphan_copy(*semaphoreTypeCiPtr);
vk_append_struct(&structChainIter, &localSemaphoreTypeCreateInfo);
}
@@ -1888,11 +1701,9 @@
return res;
}
- VkResult on_vkCreateFence(android::base::BumpPool* pool,
- VkDevice boxed_device,
+ VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice boxed_device,
const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
+ const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -1949,21 +1760,19 @@
return VK_NULL_HANDLE;
}
- auto it = std::find_if(
- deviceFences->second.begin(), deviceFences->second.end(),
- [this, device](const VkFence& fence) {
- VkResult status = m_vk->vkGetFenceStatus(device, fence);
- if (status != VK_SUCCESS) {
- if (status != VK_NOT_READY) {
- VK_CHECK(status);
- }
+ auto it = std::find_if(deviceFences->second.begin(), deviceFences->second.end(),
+ [this, device](const VkFence& fence) {
+ VkResult status = m_vk->vkGetFenceStatus(device, fence);
+ if (status != VK_SUCCESS) {
+ if (status != VK_NOT_READY) {
+ VK_CHECK(status);
+ }
- // Status is valid, but fence is not yet signaled
- return false;
- }
- return true;
- }
- );
+ // Status is valid, but fence is not yet signaled
+ return false;
+ }
+ return true;
+ });
if (it == deviceFences->second.end()) {
return VK_NULL_HANDLE;
}
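// Illustrative sketch of the reuse check performed by the find_if above: a pooled fence is
// reusable once vkGetFenceStatus reports VK_SUCCESS, while VK_NOT_READY means still pending.
// fenceIsReusable is a made-up name and the error handling is simplified relative to VK_CHECK.
static bool fenceIsReusable(VulkanDispatch* vk, VkDevice device, VkFence fence) {
    VkResult status = vk->vkGetFenceStatus(device, fence);
    if (status == VK_SUCCESS) return true;
    if (status != VK_NOT_READY) fprintf(stderr, "unexpected fence status %d\n", status);
    return false;
}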
@@ -1978,22 +1787,19 @@
return fence;
}
- VkResult on_vkResetFences(android::base::BumpPool* pool,
- VkDevice boxed_device,
- uint32_t fenceCount,
- const VkFence* pFences) {
+ VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice boxed_device,
+ uint32_t fenceCount, const VkFence* pFences) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
std::vector<VkFence> cleanedFences;
for (uint32_t i = 0; i < fenceCount; ++i) {
- if (pFences[i] != VK_NULL_HANDLE)
- cleanedFences.push_back(pFences[i]);
+ if (pFences[i] != VK_NULL_HANDLE) cleanedFences.push_back(pFences[i]);
}
- VkResult res = vk->vkResetFences(
- device, (uint32_t)cleanedFences.size(), cleanedFences.data());
+ VkResult res =
+ vk->vkResetFences(device, (uint32_t)cleanedFences.size(), cleanedFences.data());
// Reset all fences' states to kNotWaitable.
{
@@ -2006,37 +1812,34 @@
return VK_SUCCESS;
}
- VkResult on_vkImportSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
-
+ VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
#ifdef _WIN32
AutoLock lock(mLock);
- auto infoPtr = android::base::find(
- mSemaphoreInfo, mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);
+ auto infoPtr = android::base::find(mSemaphoreInfo,
+ mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);
if (!infoPtr) {
return VK_ERROR_INVALID_EXTERNAL_HANDLE;
}
- VK_EXT_MEMORY_HANDLE handle =
- dupExternalMemory(infoPtr->externalHandle);
+ VK_EXT_MEMORY_HANDLE handle = dupExternalMemory(infoPtr->externalHandle);
VkImportSemaphoreWin32HandleInfoKHR win32ImportInfo = {
- VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, 0,
+ VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+ 0,
pImportSemaphoreFdInfo->semaphore,
pImportSemaphoreFdInfo->flags,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
- handle, L"",
+ handle,
+ L"",
};
- return vk->vkImportSemaphoreWin32HandleKHR(
- device, &win32ImportInfo);
+ return vk->vkImportSemaphoreWin32HandleKHR(device, &win32ImportInfo);
#else
VkImportSemaphoreFdInfoKHR importInfo = *pImportSemaphoreFdInfo;
importInfo.fd = dup(pImportSemaphoreFdInfo->fd);
@@ -2044,17 +1847,14 @@
#endif
}
- VkResult on_vkGetSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
-
+ VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
#ifdef _WIN32
VkSemaphoreGetWin32HandleInfoKHR getWin32 = {
- VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, 0,
+ VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+ 0,
pGetFdInfo->semaphore,
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
};
@@ -2082,12 +1882,8 @@
return result;
}
- void on_vkDestroySemaphore(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2095,16 +1891,14 @@
AutoLock lock(mLock);
const auto& ite = mSemaphoreInfo.find(semaphore);
if (ite != mSemaphoreInfo.end() &&
- (ite->second.externalHandle != VK_EXT_MEMORY_HANDLE_INVALID)) {
+ (ite->second.externalHandle != VK_EXT_MEMORY_HANDLE_INVALID)) {
close(ite->second.externalHandle);
}
#endif
vk->vkDestroySemaphore(device, semaphore, pAllocator);
}
- void on_vkDestroyFence(android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkFence fence,
+ void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice boxed_device, VkFence fence,
const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2128,18 +1922,14 @@
vk->vkDestroyFence(device, fence, pAllocator);
}
- VkResult on_vkCreateDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout) {
-
+ VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorSetLayout* pSetLayout) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- auto res =
- vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+ auto res = vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
if (res == VK_SUCCESS) {
AutoLock lock(mLock);
@@ -2157,12 +1947,9 @@
return res;
}
- void on_vkDestroyDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorSetLayout descriptorSetLayout,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2172,18 +1959,14 @@
mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
}
- VkResult on_vkCreateDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
-
+ VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkDescriptorPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorPool* pDescriptorPool) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- auto res =
- vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+ auto res = vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
if (res == VK_SUCCESS) {
AutoLock lock(mLock);
@@ -2205,7 +1988,8 @@
if (feature_is_enabled(kFeature_VulkanBatchedDescriptorSetUpdate)) {
for (uint32_t i = 0; i < pCreateInfo->maxSets; ++i) {
- info.poolIds.push_back((uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
+ info.poolIds.push_back(
+ (uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
}
}
}
@@ -2213,7 +1997,8 @@
return res;
}
- void cleanupDescriptorPoolAllocedSetsLocked(VkDescriptorPool descriptorPool, bool isDestroy = false) {
+ void cleanupDescriptorPoolAllocedSetsLocked(VkDescriptorPool descriptorPool,
+ bool isDestroy = false) {
auto info = android::base::find(mDescriptorPoolInfo, descriptorPool);
if (!info) return;
@@ -2235,7 +2020,8 @@
} else {
for (auto poolId : info->poolIds) {
auto handleInfo = sBoxedHandleManager.get(poolId);
- if (handleInfo) handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
+ if (handleInfo)
+ handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
}
}
}
@@ -2248,12 +2034,9 @@
}
}
- void on_vkDestroyDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2264,12 +2047,9 @@
mDescriptorPoolInfo.erase(descriptorPool);
}
- VkResult on_vkResetDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags) {
-
+ VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2283,16 +2063,11 @@
return res;
}
- void initDescriptorSetInfoLocked(
- VkDescriptorPool pool,
- VkDescriptorSetLayout setLayout,
- uint64_t boxedDescriptorSet,
- VkDescriptorSet descriptorSet) {
-
+ void initDescriptorSetInfoLocked(VkDescriptorPool pool, VkDescriptorSetLayout setLayout,
+ uint64_t boxedDescriptorSet, VkDescriptorSet descriptorSet) {
auto poolInfo = android::base::find(mDescriptorPoolInfo, pool);
- auto setLayoutInfo =
- android::base::find(mDescriptorSetLayoutInfo, setLayout);
+ auto setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, setLayout);
auto& setInfo = mDescriptorSetInfo[descriptorSet];
@@ -2303,12 +2078,9 @@
applyDescriptorSetAllocationLocked(*poolInfo, setInfo.bindings);
}
- VkResult on_vkAllocateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets) {
-
+ VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkDescriptorSetAllocateInfo* pAllocateInfo,
+ VkDescriptorSet* pDescriptorSets) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -2320,58 +2092,46 @@
auto res = vk->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
if (res == VK_SUCCESS) {
-
auto poolInfo = android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
if (!poolInfo) return res;
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
auto unboxed = pDescriptorSets[i];
pDescriptorSets[i] = new_boxed_non_dispatchable_VkDescriptorSet(pDescriptorSets[i]);
- initDescriptorSetInfoLocked(
- pAllocateInfo->descriptorPool,
- pAllocateInfo->pSetLayouts[i],
- (uint64_t)(pDescriptorSets[i]),
- unboxed);
+ initDescriptorSetInfoLocked(pAllocateInfo->descriptorPool,
+ pAllocateInfo->pSetLayouts[i],
+ (uint64_t)(pDescriptorSets[i]), unboxed);
}
}
return res;
}
- VkResult on_vkFreeDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets) {
-
+ VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+ const VkDescriptorSet* pDescriptorSets) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- auto res = vk->vkFreeDescriptorSets(
- device, descriptorPool,
- descriptorSetCount, pDescriptorSets);
+ auto res =
+ vk->vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
if (res == VK_SUCCESS) {
AutoLock lock(mLock);
for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- auto setInfo = android::base::find(
- mDescriptorSetInfo, pDescriptorSets[i]);
+ auto setInfo = android::base::find(mDescriptorSetInfo, pDescriptorSets[i]);
if (!setInfo) continue;
- auto poolInfo =
- android::base::find(
- mDescriptorPoolInfo, setInfo->pool);
+ auto poolInfo = android::base::find(mDescriptorPoolInfo, setInfo->pool);
if (!poolInfo) continue;
removeDescriptorSetAllocationLocked(*poolInfo, setInfo->bindings);
auto descSetAllocedEntry =
- android::base::find(
- poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
+ android::base::find(poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
if (!descSetAllocedEntry) continue;
@@ -2393,44 +2153,35 @@
return res;
}
- void on_vkUpdateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
-
+ void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
AutoLock lock(mLock);
- on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+ on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
}
- void on_vkUpdateDescriptorSetsImpl(
- android::base::BumpPool* pool,
- VulkanDispatch* vk,
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
-
+ void on_vkUpdateDescriptorSetsImpl(android::base::BumpPool* pool, VulkanDispatch* vk,
+ VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies) {
bool needEmulateWriteDescriptor = false;
// C++ seems to allow 0-size array allocation
- std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(
- new bool[descriptorWriteCount]);
+ std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(new bool[descriptorWriteCount]);
for (uint32_t i = 0; i < descriptorWriteCount; i++) {
const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[i];
descriptorWritesNeedDeepCopy[i] = false;
- if (descriptorWrite.descriptorType !=
- VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
+ if (descriptorWrite.descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
continue;
}
for (uint32_t j = 0; j < descriptorWrite.descriptorCount; j++) {
- const VkDescriptorImageInfo& imageInfo =
- descriptorWrite.pImageInfo[j];
+ const VkDescriptorImageInfo& imageInfo = descriptorWrite.pImageInfo[j];
const auto& viewIt = mImageViewInfo.find(imageInfo.imageView);
if (viewIt == mImageViewInfo.end()) {
continue;
@@ -2439,8 +2190,7 @@
if (samplerIt == mSamplerInfo.end()) {
continue;
}
- if (viewIt->second.needEmulatedAlpha &&
- samplerIt->second.needEmulatedAlpha) {
+ if (viewIt->second.needEmulatedAlpha && samplerIt->second.needEmulatedAlpha) {
needEmulateWriteDescriptor = true;
descriptorWritesNeedDeepCopy[i] = true;
break;
@@ -2448,17 +2198,15 @@
}
}
if (!needEmulateWriteDescriptor) {
- vk->vkUpdateDescriptorSets(device, descriptorWriteCount,
- pDescriptorWrites, descriptorCopyCount,
- pDescriptorCopies);
+ vk->vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
return;
}
std::list<std::unique_ptr<VkDescriptorImageInfo[]>> imageInfoPool;
std::unique_ptr<VkWriteDescriptorSet[]> descriptorWrites(
- new VkWriteDescriptorSet[descriptorWriteCount]);
+ new VkWriteDescriptorSet[descriptorWriteCount]);
for (uint32_t i = 0; i < descriptorWriteCount; i++) {
- const VkWriteDescriptorSet& srcDescriptorWrite =
- pDescriptorWrites[i];
+ const VkWriteDescriptorSet& srcDescriptorWrite = pDescriptorWrites[i];
VkWriteDescriptorSet& dstDescriptorWrite = descriptorWrites[i];
// Shallow copy first
dstDescriptorWrite = srcDescriptorWrite;
@@ -2466,15 +2214,12 @@
continue;
}
// Deep copy
- assert(dstDescriptorWrite.descriptorType ==
- VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+ assert(dstDescriptorWrite.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
imageInfoPool.emplace_back(
- new VkDescriptorImageInfo[dstDescriptorWrite
- .descriptorCount]);
+ new VkDescriptorImageInfo[dstDescriptorWrite.descriptorCount]);
VkDescriptorImageInfo* imageInfos = imageInfoPool.back().get();
memcpy(imageInfos, srcDescriptorWrite.pImageInfo,
- dstDescriptorWrite.descriptorCount *
- sizeof(VkDescriptorImageInfo));
+ dstDescriptorWrite.descriptorCount * sizeof(VkDescriptorImageInfo));
dstDescriptorWrite.pImageInfo = imageInfos;
for (uint32_t j = 0; j < dstDescriptorWrite.descriptorCount; j++) {
VkDescriptorImageInfo& imageInfo = imageInfos[j];
@@ -2486,43 +2231,35 @@
if (samplerIt == mSamplerInfo.end()) {
continue;
}
- if (viewIt->second.needEmulatedAlpha &&
- samplerIt->second.needEmulatedAlpha) {
+ if (viewIt->second.needEmulatedAlpha && samplerIt->second.needEmulatedAlpha) {
SamplerInfo& samplerInfo = samplerIt->second;
if (samplerInfo.emulatedborderSampler == VK_NULL_HANDLE) {
// create the emulated sampler
VkSamplerCreateInfo createInfo = samplerInfo.createInfo;
switch (createInfo.borderColor) {
case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
- createInfo.borderColor =
- VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
+ createInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
break;
case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
- createInfo.borderColor =
- VK_BORDER_COLOR_INT_OPAQUE_BLACK;
+ createInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
break;
default:
break;
}
vk->vkCreateSampler(device, &createInfo, nullptr,
- &samplerInfo.emulatedborderSampler);
+ &samplerInfo.emulatedborderSampler);
}
imageInfo.sampler = samplerInfo.emulatedborderSampler;
}
}
}
- vk->vkUpdateDescriptorSets(device, descriptorWriteCount,
- descriptorWrites.get(), descriptorCopyCount,
- pDescriptorCopies);
+ vk->vkUpdateDescriptorSets(device, descriptorWriteCount, descriptorWrites.get(),
+ descriptorCopyCount, pDescriptorCopies);
}
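// Illustrative sketch of the border-color substitution used above when cloning a sampler for
// emulated alpha; opaqueBorderFor is a made-up name used only for this example.
static VkBorderColor opaqueBorderFor(VkBorderColor color) {
    switch (color) {
        case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
            return VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
        case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
            return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
        default:
            return color;
    }
}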
- void on_vkCmdCopyImage(android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
+ void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
+ VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy* pRegions) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
@@ -2541,13 +2278,13 @@
if (deviceInfoIt == mDeviceInfo.end()) {
return;
}
- bool needEmulatedSrc = deviceInfoIt->second.needEmulatedDecompression(
- srcIt->second.cmpInfo);
- bool needEmulatedDst = deviceInfoIt->second.needEmulatedDecompression(
- dstIt->second.cmpInfo);
+ bool needEmulatedSrc =
+ deviceInfoIt->second.needEmulatedDecompression(srcIt->second.cmpInfo);
+ bool needEmulatedDst =
+ deviceInfoIt->second.needEmulatedDecompression(dstIt->second.cmpInfo);
if (!needEmulatedSrc && !needEmulatedDst) {
- vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout,
- dstImage, dstImageLayout, regionCount, pRegions);
+ vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
return;
}
VkImage srcImageMip = srcImage;
@@ -2556,51 +2293,40 @@
VkImageCopy region = pRegions[r];
if (needEmulatedSrc) {
uint32_t mipLevel = region.srcSubresource.mipLevel;
- uint32_t compressedBlockWidth =
- srcIt->second.cmpInfo.compressedBlockWidth;
- uint32_t compressedBlockHeight =
- srcIt->second.cmpInfo.compressedBlockHeight;
+ uint32_t compressedBlockWidth = srcIt->second.cmpInfo.compressedBlockWidth;
+ uint32_t compressedBlockHeight = srcIt->second.cmpInfo.compressedBlockHeight;
srcImageMip = srcIt->second.cmpInfo.sizeCompImgs[mipLevel];
region.srcSubresource.mipLevel = 0;
region.srcOffset.x /= compressedBlockWidth;
region.srcOffset.y /= compressedBlockHeight;
- uint32_t width =
- srcIt->second.cmpInfo.sizeCompMipmapWidth(mipLevel);
- uint32_t height =
- srcIt->second.cmpInfo.sizeCompMipmapHeight(mipLevel);
+ uint32_t width = srcIt->second.cmpInfo.sizeCompMipmapWidth(mipLevel);
+ uint32_t height = srcIt->second.cmpInfo.sizeCompMipmapHeight(mipLevel);
// region.extent uses pixel size for source image
region.extent.width =
- (region.extent.width + compressedBlockWidth - 1) /
- compressedBlockWidth;
+ (region.extent.width + compressedBlockWidth - 1) / compressedBlockWidth;
region.extent.height =
- (region.extent.height + compressedBlockHeight - 1) /
- compressedBlockHeight;
+ (region.extent.height + compressedBlockHeight - 1) / compressedBlockHeight;
region.extent.width = std::min(region.extent.width, width);
region.extent.height = std::min(region.extent.height, height);
}
if (needEmulatedDst) {
- uint32_t compressedBlockWidth =
- dstIt->second.cmpInfo.compressedBlockWidth;
- uint32_t compressedBlockHeight =
- dstIt->second.cmpInfo.compressedBlockHeight;
+ uint32_t compressedBlockWidth = dstIt->second.cmpInfo.compressedBlockWidth;
+ uint32_t compressedBlockHeight = dstIt->second.cmpInfo.compressedBlockHeight;
uint32_t mipLevel = region.dstSubresource.mipLevel;
dstImageMip = dstIt->second.cmpInfo.sizeCompImgs[mipLevel];
region.dstSubresource.mipLevel = 0;
region.dstOffset.x /= compressedBlockWidth;
region.dstOffset.y /= compressedBlockHeight;
}
- vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout,
- dstImageMip, dstImageLayout, 1, &region);
+ vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout, dstImageMip,
+ dstImageLayout, 1, &region);
}
}
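// Illustrative, worked example of the texel-to-block conversion above, assuming 4x4 ETC2
// blocks and a 64x32 texel source mip (a 16x8 size-compatible image); the region values are
// made up, and <algorithm>/<cstdint> are assumed to be included.
const uint32_t blockW = 4, blockH = 4;
const uint32_t mipBlocksW = 64 / blockW, mipBlocksH = 32 / blockH;     // 16 x 8 blocks
uint32_t offsetX = 8 / blockW, offsetY = 4 / blockH;                   // (2, 1)
uint32_t extentW = std::min((48u + blockW - 1) / blockW, mipBlocksW);  // 12
uint32_t extentH = std::min((20u + blockH - 1) / blockH, mipBlocksH);  // 5 (clamped to <= 8)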
void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
+ VkCommandBuffer boxed_commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount, const VkBufferImageCopy* pRegions) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
@@ -2618,10 +2344,9 @@
if (deviceInfoIt == mDeviceInfo.end()) {
return;
}
- if (!deviceInfoIt->second.needEmulatedDecompression(
- it->second.cmpInfo)) {
- vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout,
- dstBuffer, regionCount, pRegions);
+ if (!deviceInfoIt->second.needEmulatedDecompression(it->second.cmpInfo)) {
+ vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
+ regionCount, pRegions);
return;
}
CompressedImageInfo& cmp = it->second.cmpInfo;
@@ -2636,27 +2361,20 @@
region.imageOffset.y /= cmp.compressedBlockHeight;
uint32_t width = cmp.sizeCompMipmapWidth(mipLevel);
uint32_t height = cmp.sizeCompMipmapHeight(mipLevel);
- region.imageExtent.width =
- (region.imageExtent.width + cmp.compressedBlockWidth - 1) /
- cmp.compressedBlockWidth;
- region.imageExtent.height = (region.imageExtent.height +
- cmp.compressedBlockHeight - 1) /
- cmp.compressedBlockHeight;
- region.imageExtent.width =
- std::min(region.imageExtent.width, width);
+ region.imageExtent.width = (region.imageExtent.width + cmp.compressedBlockWidth - 1) /
+ cmp.compressedBlockWidth;
region.imageExtent.height =
- std::min(region.imageExtent.height, height);
- vk->vkCmdCopyImageToBuffer(commandBuffer,
- cmp.sizeCompImgs[mipLevel],
- srcImageLayout, dstBuffer, 1, &region);
+ (region.imageExtent.height + cmp.compressedBlockHeight - 1) /
+ cmp.compressedBlockHeight;
+ region.imageExtent.width = std::min(region.imageExtent.width, width);
+ region.imageExtent.height = std::min(region.imageExtent.height, height);
+ vk->vkCmdCopyImageToBuffer(commandBuffer, cmp.sizeCompImgs[mipLevel], srcImageLayout,
+ dstBuffer, 1, &region);
}
}
- void on_vkGetImageMemoryRequirements(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
+ void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkImage image, VkMemoryRequirements* pMemoryRequirements) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
vk->vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
@@ -2664,11 +2382,9 @@
updateImageMemorySizeLocked(device, image, pMemoryRequirements);
}
- void on_vkGetImageMemoryRequirements2(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
+ void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
AutoLock lock(mLock);
@@ -2696,23 +2412,16 @@
__func__);
}
- vk->vkGetImageMemoryRequirements(
- device, pInfo->image,
- &pMemoryRequirements->memoryRequirements);
+ vk->vkGetImageMemoryRequirements(device, pInfo->image,
+ &pMemoryRequirements->memoryRequirements);
}
- updateImageMemorySizeLocked(device, pInfo->image,
- &pMemoryRequirements->memoryRequirements);
+ updateImageMemorySizeLocked(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
}
- void on_vkCmdCopyBufferToImage(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
-
+ void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount, const VkBufferImageCopy* pRegions) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
@@ -2728,10 +2437,9 @@
if (deviceInfoIt == mDeviceInfo.end()) {
return;
}
- if (!deviceInfoIt->second.needEmulatedDecompression(
- it->second.cmpInfo)) {
- vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage,
- dstImageLayout, regionCount, pRegions);
+ if (!deviceInfoIt->second.needEmulatedDecompression(it->second.cmpInfo)) {
+ vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
+ regionCount, pRegions);
return;
}
auto cmdBufferInfoIt = mCmdBufferInfo.find(commandBuffer);
@@ -2750,19 +2458,16 @@
dstRegion.imageOffset.y /= cmp.compressedBlockHeight;
uint32_t width = cmp.sizeCompMipmapWidth(mipLevel);
uint32_t height = cmp.sizeCompMipmapHeight(mipLevel);
- dstRegion.imageExtent.width = (dstRegion.imageExtent.width +
- cmp.compressedBlockWidth - 1) /
- cmp.compressedBlockWidth;
- dstRegion.imageExtent.height = (dstRegion.imageExtent.height +
- cmp.compressedBlockHeight - 1) /
- cmp.compressedBlockHeight;
dstRegion.imageExtent.width =
- std::min(dstRegion.imageExtent.width, width);
+ (dstRegion.imageExtent.width + cmp.compressedBlockWidth - 1) /
+ cmp.compressedBlockWidth;
dstRegion.imageExtent.height =
- std::min(dstRegion.imageExtent.height, height);
- vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer,
- cmp.sizeCompImgs[mipLevel],
- dstImageLayout, 1, &dstRegion);
+ (dstRegion.imageExtent.height + cmp.compressedBlockHeight - 1) /
+ cmp.compressedBlockHeight;
+ dstRegion.imageExtent.width = std::min(dstRegion.imageExtent.width, width);
+ dstRegion.imageExtent.height = std::min(dstRegion.imageExtent.height, height);
+ vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, cmp.sizeCompImgs[mipLevel],
+ dstImageLayout, 1, &dstRegion);
}
}
@@ -2772,29 +2477,27 @@
}
}
- inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(VkBufferMemoryBarrier* barrier) {
+ inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
+ VkBufferMemoryBarrier* barrier) {
convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
}
- inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(VkImageMemoryBarrier* barrier) {
+ inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
+ VkImageMemoryBarrier* barrier) {
convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
}
- void on_vkCmdPipelineBarrier(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
-
+ void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
+ VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask,
+ VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
+ const VkMemoryBarrier* pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier* pImageMemoryBarriers) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
@@ -2809,11 +2512,10 @@
}
if (imageMemoryBarrierCount == 0) {
- vk->vkCmdPipelineBarrier(
- commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers,
- imageMemoryBarrierCount, pImageMemoryBarriers);
+ vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
return;
}
AutoLock lock(mLock);
@@ -2825,13 +2527,11 @@
if (deviceInfoIt == mDeviceInfo.end()) {
return;
}
- if (!deviceInfoIt->second.emulateTextureEtc2 &&
- !deviceInfoIt->second.emulateTextureAstc) {
- vk->vkCmdPipelineBarrier(
- commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers,
- imageMemoryBarrierCount, pImageMemoryBarriers);
+ if (!deviceInfoIt->second.emulateTextureEtc2 && !deviceInfoIt->second.emulateTextureAstc) {
+ vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
return;
}
// Add barrier for decompressed image
@@ -2842,8 +2542,7 @@
auto image = srcBarrier.image;
auto it = mImageInfo.find(image);
if (it == mImageInfo.end() ||
- !deviceInfoIt->second.needEmulatedDecompression(
- it->second.cmpInfo)) {
+ !deviceInfoIt->second.needEmulatedDecompression(it->second.cmpInfo)) {
persistentImageBarriers.push_back(srcBarrier);
continue;
}
@@ -2855,36 +2554,31 @@
sizeCompBarrierTemplate.subresourceRange.baseMipLevel = 0;
sizeCompBarrierTemplate.subresourceRange.levelCount = 1;
std::vector<VkImageMemoryBarrier> sizeCompBarriers(
- srcBarrier.subresourceRange.levelCount,
- sizeCompBarrierTemplate);
+ srcBarrier.subresourceRange.levelCount, sizeCompBarrierTemplate);
for (uint32_t j = 0; j < levelCount; j++) {
- sizeCompBarriers[j].image =
- it->second.cmpInfo.sizeCompImgs[baseMipLevel + j];
+ sizeCompBarriers[j].image = it->second.cmpInfo.sizeCompImgs[baseMipLevel + j];
}
// TODO: should we use image layout or access bit?
if (srcBarrier.oldLayout == 0 ||
- (srcBarrier.newLayout != VK_IMAGE_LAYOUT_GENERAL &&
- srcBarrier.newLayout !=
- VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)) {
+ (srcBarrier.newLayout != VK_IMAGE_LAYOUT_GENERAL &&
+ srcBarrier.newLayout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)) {
// TODO: might only need to push one of them?
persistentImageBarriers.push_back(decompBarrier);
persistentImageBarriers.insert(persistentImageBarriers.end(),
- sizeCompBarriers.begin(),
- sizeCompBarriers.end());
+ sizeCompBarriers.begin(), sizeCompBarriers.end());
continue;
}
- if (srcBarrier.newLayout !=
- VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
- srcBarrier.newLayout != VK_IMAGE_LAYOUT_GENERAL) {
+ if (srcBarrier.newLayout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
+ srcBarrier.newLayout != VK_IMAGE_LAYOUT_GENERAL) {
fprintf(stderr,
"WARNING: unexpected usage to transfer "
"compressed image layout from %d to %d\n",
srcBarrier.oldLayout, srcBarrier.newLayout);
}
- VkResult result = it->second.cmpInfo.initDecomp(
- vk, cmdBufferInfoIt->second.device, image);
+ VkResult result =
+ it->second.cmpInfo.initDecomp(vk, cmdBufferInfoIt->second.device, image);
if (result != VK_SUCCESS) {
fprintf(stderr, "WARNING: texture decompression failed\n");
continue;
@@ -2892,9 +2586,8 @@
std::vector<VkImageMemoryBarrier> currImageBarriers;
currImageBarriers.reserve(sizeCompBarriers.size() + 1);
- currImageBarriers.insert(currImageBarriers.end(),
- sizeCompBarriers.begin(),
- sizeCompBarriers.end());
+ currImageBarriers.insert(currImageBarriers.end(), sizeCompBarriers.begin(),
+ sizeCompBarriers.end());
for (auto& barrier : currImageBarriers) {
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
@@ -2908,15 +2601,12 @@
barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
}
vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask,
- VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0,
- nullptr, 0, nullptr,
- currImageBarriers.size(),
- currImageBarriers.data());
+ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0,
+ nullptr, currImageBarriers.size(), currImageBarriers.data());
it->second.cmpInfo.cmdDecompress(
- vk, commandBuffer, dstStageMask, decompBarrier.newLayout,
- decompBarrier.dstAccessMask, baseMipLevel, levelCount,
- srcBarrier.subresourceRange.baseArrayLayer,
- srcBarrier.subresourceRange.layerCount);
+ vk, commandBuffer, dstStageMask, decompBarrier.newLayout,
+ decompBarrier.dstAccessMask, baseMipLevel, levelCount,
+ srcBarrier.subresourceRange.baseArrayLayer, srcBarrier.subresourceRange.layerCount);
needRebind = true;
for (uint32_t j = 0; j < currImageBarriers.size(); j++) {
@@ -2934,55 +2624,45 @@
barrier.newLayout = srcBarrier.newLayout;
}
- vk->vkCmdPipelineBarrier(
- commandBuffer,
- VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, // srcStageMask
- dstStageMask, // dstStageMask
- 0, // dependencyFlags
- 0, // memoryBarrierCount
- nullptr, // pMemoryBarriers
- 0, // bufferMemoryBarrierCount
- nullptr, // pBufferMemoryBarriers
- currImageBarriers.size(), // imageMemoryBarrierCount
- currImageBarriers.data() // pImageMemoryBarriers
- );
+ vk->vkCmdPipelineBarrier(commandBuffer,
+ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, // srcStageMask
+ dstStageMask, // dstStageMask
+ 0, // dependencyFlags
+ 0, // memoryBarrierCount
+ nullptr, // pMemoryBarriers
+ 0, // bufferMemoryBarrierCount
+ nullptr, // pBufferMemoryBarriers
+ currImageBarriers.size(), // imageMemoryBarrierCount
+ currImageBarriers.data() // pImageMemoryBarriers
+ );
}
if (needRebind && cmdBufferInfoIt->second.computePipeline) {
// Recover pipeline bindings
- vk->vkCmdBindPipeline(commandBuffer,
- VK_PIPELINE_BIND_POINT_COMPUTE,
- cmdBufferInfoIt->second.computePipeline);
+ vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
+ cmdBufferInfoIt->second.computePipeline);
if (cmdBufferInfoIt->second.descriptorSets.size() > 0) {
vk->vkCmdBindDescriptorSets(
- commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
- cmdBufferInfoIt->second.descriptorLayout,
- cmdBufferInfoIt->second.firstSet,
- cmdBufferInfoIt->second.descriptorSets.size(),
- cmdBufferInfoIt->second.descriptorSets.data(), 0,
- nullptr);
+ commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
+ cmdBufferInfoIt->second.descriptorLayout, cmdBufferInfoIt->second.firstSet,
+ cmdBufferInfoIt->second.descriptorSets.size(),
+ cmdBufferInfoIt->second.descriptorSets.data(), 0, nullptr);
}
}
- if (memoryBarrierCount || bufferMemoryBarrierCount ||
- !persistentImageBarriers.empty()) {
- vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask,
- dependencyFlags, memoryBarrierCount,
- pMemoryBarriers, bufferMemoryBarrierCount,
- pBufferMemoryBarriers,
- persistentImageBarriers.size(),
- persistentImageBarriers.data());
+ if (memoryBarrierCount || bufferMemoryBarrierCount || !persistentImageBarriers.empty()) {
+ vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, persistentImageBarriers.size(),
+ persistentImageBarriers.data());
}
}
- bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(
- VulkanDispatch* vk,
- VkDevice device,
- VkDeviceMemory memory,
- uint64_t physAddr) {
-
+ bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(VulkanDispatch* vk, VkDevice device,
+ VkDeviceMemory memory,
+ uint64_t physAddr) {
if (!feature_is_enabled(kFeature_GLDirectMem) &&
!feature_is_enabled(kFeature_VirtioGpuNext)) {
// fprintf(stderr, "%s: Tried to use direct mapping "
- // "while GLDirectMem is not enabled!\n");
+ // "while GLDirectMem is not enabled!\n");
}
auto info = android::base::find(mMapInfo, memory);
@@ -2998,17 +2678,12 @@
uintptr_t addr = reinterpret_cast<uintptr_t>(info->ptr);
uintptr_t pageOffset = addr & kPageOffsetMask;
- info->pageAlignedHva =
- reinterpret_cast<void*>(addr - pageOffset);
- info->sizeToPage =
- ((info->size + pageOffset + kPageSize - 1) >>
- kPageBits) << kPageBits;
+ info->pageAlignedHva = reinterpret_cast<void*>(addr - pageOffset);
+ info->sizeToPage = ((info->size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;
if (mLogging) {
- fprintf(stderr, "%s: map: %p, %p -> [0x%llx 0x%llx]\n", __func__,
- info->ptr,
- info->pageAlignedHva,
- (unsigned long long)info->guestPhysAddr,
+ fprintf(stderr, "%s: map: %p, %p -> [0x%llx 0x%llx]\n", __func__, info->ptr,
+ info->pageAlignedHva, (unsigned long long)info->guestPhysAddr,
(unsigned long long)info->guestPhysAddr + info->sizeToPage);
}
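// Illustrative, worked example of the page rounding above, assuming 4 KiB pages
// (kPageBits = 12, kPageSize = 0x1000, kPageOffsetMask = 0xfff); the pointer and size are made up.
const uint64_t kPageBits = 12, kPageSize = 1ULL << kPageBits, kPageOffsetMask = kPageSize - 1;
uint64_t addr = 0x7f1234567abcULL, size = 0x2100;
uint64_t pageOffset = addr & kPageOffsetMask;  // 0xabc
uint64_t pageAlignedHva = addr - pageOffset;   // 0x7f1234567000
uint64_t sizeToPage = ((size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;  // 0x3000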
@@ -3019,24 +2694,19 @@
AutoLock occupiedGpasLock(mOccupiedGpasLock);
- auto existingMemoryInfo =
- android::base::find(mOccupiedGpas, gpa);
+ auto existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
if (existingMemoryInfo) {
-
- fprintf(stderr, "%s: WARNING: already mapped gpa 0x%llx, replacing",
- __func__,
+ fprintf(stderr, "%s: WARNING: already mapped gpa 0x%llx, replacing", __func__,
(unsigned long long)gpa);
- get_emugl_vm_operations().unmapUserBackedRam(
- existingMemoryInfo->gpa,
- existingMemoryInfo->sizeToPage);
+ get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
+ existingMemoryInfo->sizeToPage);
mOccupiedGpas.erase(gpa);
}
- get_emugl_vm_operations().mapUserBackedRam(
- gpa, hva, sizeToPage);
+ get_emugl_vm_operations().mapUserBackedRam(gpa, hva, sizeToPage);
if (mVerbosePrints) {
fprintf(stderr, "VERBOSE:%s: registering gpa 0x%llx to mOccupiedGpas\n", __func__,
@@ -3044,19 +2714,15 @@
}
mOccupiedGpas[gpa] = {
- vk,
- device,
- memory,
- gpa,
- sizeToPage,
+ vk, device, memory, gpa, sizeToPage,
};
if (!mUseOldMemoryCleanupPath) {
get_emugl_address_space_device_control_ops().register_deallocation_callback(
this, gpa, [](void* thisPtr, uint64_t gpa) {
- Impl* implPtr = (Impl*)thisPtr;
- implPtr->unmapMemoryAtGpaIfExists(gpa);
- });
+ Impl* implPtr = (Impl*)thisPtr;
+ implPtr->unmapMemoryAtGpaIfExists(gpa);
+ });
}
return true;
@@ -3073,25 +2739,19 @@
(unsigned long long)gpa);
}
- auto existingMemoryInfo =
- android::base::find(mOccupiedGpas, gpa);
+ auto existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
if (!existingMemoryInfo) return;
- get_emugl_vm_operations().unmapUserBackedRam(
- existingMemoryInfo->gpa,
- existingMemoryInfo->sizeToPage);
+ get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
+ existingMemoryInfo->sizeToPage);
mOccupiedGpas.erase(gpa);
}
- VkResult on_vkAllocateMemory(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
-
+ VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkMemoryAllocateInfo* pAllocateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -3105,8 +2765,8 @@
vk_find_struct<VkExportMemoryAllocateInfo>(pAllocateInfo);
if (exportAllocInfoPtr) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "Export allocs are to be handled on the guest side / VkCommonOperations.";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
+ << "Export allocs are to be handled on the guest side / VkCommonOperations.";
}
const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
@@ -3132,19 +2792,22 @@
const VkImportColorBufferGOOGLE* importCbInfoPtr =
vk_find_struct<VkImportColorBufferGOOGLE>(pAllocateInfo);
const VkImportBufferGOOGLE* importBufferInfoPtr =
- vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
+ vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
#ifdef _WIN32
- VkImportMemoryWin32HandleInfoKHR importInfo {
- VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, 0,
- VK_EXT_MEMORY_HANDLE_TYPE_BIT,
- VK_EXT_MEMORY_HANDLE_INVALID, L"",
+ VkImportMemoryWin32HandleInfoKHR importInfo{
+ VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+ 0,
+ VK_EXT_MEMORY_HANDLE_TYPE_BIT,
+ VK_EXT_MEMORY_HANDLE_INVALID,
+ L"",
};
#else
- VkImportMemoryFdInfoKHR importInfo {
- VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, 0,
- VK_EXT_MEMORY_HANDLE_TYPE_BIT,
- VK_EXT_MEMORY_HANDLE_INVALID,
+ VkImportMemoryFdInfoKHR importInfo{
+ VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+ 0,
+ VK_EXT_MEMORY_HANDLE_TYPE_BIT,
+ VK_EXT_MEMORY_HANDLE_INVALID,
};
#endif
@@ -3172,16 +2835,13 @@
// thing.
// First, check validity of the user's type index.
- if (localAllocInfo.memoryTypeIndex >=
- physdevInfo->memoryProperties.memoryTypeCount) {
+ if (localAllocInfo.memoryTypeIndex >= physdevInfo->memoryProperties.memoryTypeCount) {
// Continue allowing invalid behavior.
return VK_ERROR_INCOMPATIBLE_DRIVER;
}
VkMemoryPropertyFlags memoryPropertyFlags =
- physdevInfo->memoryProperties
- .memoryTypes[localAllocInfo.memoryTypeIndex]
- .propertyFlags;
+ physdevInfo->memoryProperties.memoryTypes[localAllocInfo.memoryTypeIndex].propertyFlags;
lock.unlock();
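// Illustrative sketch of the usual way a memory type index with the wanted property flags is
// chosen from VkPhysicalDeviceMemoryProperties; findMemoryType is a made-up helper name, not
// part of the decoder.
static int32_t findMemoryType(const VkPhysicalDeviceMemoryProperties& props, uint32_t typeBits,
                              VkMemoryPropertyFlags wanted) {
    for (uint32_t i = 0; i < props.memoryTypeCount; ++i) {
        const bool allowed = (typeBits & (1u << i)) != 0;
        const bool hasAll = (props.memoryTypes[i].propertyFlags & wanted) == wanted;
        if (allowed && hasAll) return static_cast<int32_t>(i);
    }
    return -1;  // no suitable memory type
}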
@@ -3190,13 +2850,11 @@
bool vulkanOnly = mGuestUsesAngle;
// Ensure color buffer has Vulkan backing.
- setupVkColorBuffer(importCbInfoPtr->colorBuffer,
- vulkanOnly,
- memoryPropertyFlags, nullptr,
- // Modify the allocation size and type index
- // to suit the resulting image memory size.
- &localAllocInfo.allocationSize,
- &localAllocInfo.memoryTypeIndex, &mappedPtr);
+ setupVkColorBuffer(
+ importCbInfoPtr->colorBuffer, vulkanOnly, memoryPropertyFlags, nullptr,
+ // Modify the allocation size and type index
+ // to suit the resulting image memory size.
+ &localAllocInfo.allocationSize, &localAllocInfo.memoryTypeIndex, &mappedPtr);
if (!vulkanOnly) {
updateVkImageFromColorBuffer(importCbInfoPtr->colorBuffer);
@@ -3209,8 +2867,8 @@
if (cbExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
fprintf(stderr,
"%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
- "colorBuffer 0x%x does not have Vulkan external memory backing\n", __func__,
- importCbInfoPtr->colorBuffer);
+ "colorBuffer 0x%x does not have Vulkan external memory backing\n",
+ __func__, importCbInfoPtr->colorBuffer);
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
@@ -3227,17 +2885,15 @@
if (importBufferInfoPtr) {
// Ensure buffer has Vulkan backing.
- setupVkBuffer(importBufferInfoPtr->buffer,
- true /* Buffers are Vulkan only */,
+ setupVkBuffer(importBufferInfoPtr->buffer, true /* Buffers are Vulkan only */,
memoryPropertyFlags, nullptr,
// Modify the allocation size and type index
// to suit the resulting image memory size.
- &localAllocInfo.allocationSize,
- &localAllocInfo.memoryTypeIndex);
+ &localAllocInfo.allocationSize, &localAllocInfo.memoryTypeIndex);
if (m_emu->instanceSupportsExternalMemoryCapabilities) {
VK_EXT_MEMORY_HANDLE bufferExtMemoryHandle =
- getBufferExtMemoryHandle(importBufferInfoPtr->buffer);
+ getBufferExtMemoryHandle(importBufferInfoPtr->buffer);
if (bufferExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
fprintf(stderr,
@@ -3248,8 +2904,7 @@
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
- bufferExtMemoryHandle =
- dupExternalMemory(bufferExtMemoryHandle);
+ bufferExtMemoryHandle = dupExternalMemory(bufferExtMemoryHandle);
#ifdef _WIN32
importInfo.handle = bufferExtMemoryHandle;
@@ -3260,8 +2915,7 @@
}
}
- VkResult result =
- vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
+ VkResult result = vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
if (result != VK_SUCCESS) {
return result;
@@ -3277,8 +2931,7 @@
mapInfo.mtlTexture = getColorBufferMTLTexture(importCbInfoPtr->colorBuffer);
}
- bool hostVisible =
- memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ bool hostVisible = memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
if (!hostVisible) {
*pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
@@ -3298,8 +2951,8 @@
mapInfo.ptr = mappedPtr;
} else {
mapInfo.needUnmap = true;
- VkResult mapResult = vk->vkMapMemory(device, *pMemory, 0,
- mapInfo.size, 0, &mapInfo.ptr);
+ VkResult mapResult =
+ vk->vkMapMemory(device, *pMemory, 0, mapInfo.size, 0, &mapInfo.ptr);
if (mapResult != VK_SUCCESS) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
@@ -3310,12 +2963,8 @@
return result;
}
- void freeMemoryLocked(
- VulkanDispatch* vk,
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
-
+ void freeMemoryLocked(VulkanDispatch* vk, VkDevice device, VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator) {
auto info = android::base::find(mMapInfo, memory);
if (!info) {
@@ -3331,7 +2980,6 @@
#endif
if (info->directMapped) {
-
// if direct mapped, we leave it up to the guest address space driver
// to control the unmapping of kvm slot on the host side
// in order to avoid situations where
@@ -3348,8 +2996,7 @@
if (info->virtioGpuMapped) {
if (mLogging) {
- fprintf(stderr, "%s: unmap hostmem %p id 0x%llx\n", __func__,
- info->ptr,
+ fprintf(stderr, "%s: unmap hostmem %p id 0x%llx\n", __func__, info->ptr,
(unsigned long long)info->hostmemId);
}
@@ -3365,12 +3012,8 @@
mMapInfo.erase(memory);
}
- void on_vkFreeMemory(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -3379,24 +3022,14 @@
freeMemoryLocked(vk, device, memory, pAllocator);
}
- VkResult on_vkMapMemory(
- android::base::BumpPool* pool,
- VkDevice,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
-
+ VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory memory,
+ VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
+ void** ppData) {
AutoLock lock(mLock);
return on_vkMapMemoryLocked(0, memory, offset, size, flags, ppData);
}
- VkResult on_vkMapMemoryLocked(VkDevice,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
+ VkResult on_vkMapMemoryLocked(VkDevice, VkDeviceMemory memory, VkDeviceSize offset,
+ VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
auto info = android::base::find(mMapInfo, memory);
if (!info) {
@@ -3413,9 +3046,7 @@
return VK_SUCCESS;
}
- void on_vkUnmapMemory(
- android::base::BumpPool* pool,
- VkDevice, VkDeviceMemory) {
+ void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory) {
// no-op; user-level mapping does not correspond
// to any operation here.
}
@@ -3464,14 +3095,10 @@
const auto& props = emu->deviceInfo.physdevProps;
- res.supportsVulkan1_1 =
- props.apiVersion >= VK_API_VERSION_1_1;
- res.supportsExternalMemory =
- emu->deviceInfo.supportsExternalMemory;
- res.useDeferredCommands =
- emu->useDeferredCommands;
- res.useCreateResourcesWithRequirements =
- emu->useCreateResourcesWithRequirements;
+ res.supportsVulkan1_1 = props.apiVersion >= VK_API_VERSION_1_1;
+ res.supportsExternalMemory = emu->deviceInfo.supportsExternalMemory;
+ res.useDeferredCommands = emu->useDeferredCommands;
+ res.useCreateResourcesWithRequirements = emu->useCreateResourcesWithRequirements;
res.apiVersion = props.apiVersion;
res.driverVersion = props.driverVersion;
@@ -3538,38 +3165,25 @@
#endif
// VK_ANDROID_native_buffer
- VkResult on_vkGetSwapchainGrallocUsageANDROID(
- android::base::BumpPool* pool,
- VkDevice,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- int* grallocUsage) {
+ VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice,
+ VkFormat format, VkImageUsageFlags imageUsage,
+ int* grallocUsage) {
getGralloc0Usage(format, imageUsage, grallocUsage);
return VK_SUCCESS;
}
VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
- android::base::BumpPool* pool,
- VkDevice,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
- uint64_t* grallocConsumerUsage,
- uint64_t* grallocProducerUsage) {
- getGralloc1Usage(format, imageUsage, swapchainImageUsage,
- grallocConsumerUsage,
- grallocProducerUsage);
+ android::base::BumpPool* pool, VkDevice, VkFormat format, VkImageUsageFlags imageUsage,
+ VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
+ uint64_t* grallocProducerUsage) {
+ getGralloc1Usage(format, imageUsage, swapchainImageUsage, grallocConsumerUsage,
+ grallocProducerUsage);
return VK_SUCCESS;
}
- VkResult on_vkAcquireImageANDROID(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkImage image,
- int nativeFenceFd,
- VkSemaphore semaphore,
- VkFence fence) {
-
+ VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkImage image, int nativeFenceFd, VkSemaphore semaphore,
+ VkFence fence) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -3591,21 +3205,15 @@
AndroidNativeBufferInfo* anbInfo = imageInfo->anbInfo.get();
- return
- setAndroidNativeImageSemaphoreSignaled(
- vk, device,
- defaultQueue, defaultQueueFamilyIndex, defaultQueueLock,
- semaphore, fence, anbInfo);
+ return setAndroidNativeImageSemaphoreSignaled(vk, device, defaultQueue,
+ defaultQueueFamilyIndex, defaultQueueLock,
+ semaphore, fence, anbInfo);
}
- VkResult on_vkQueueSignalReleaseImageANDROID(
- android::base::BumpPool* pool,
- VkQueue boxed_queue,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- VkImage image,
- int* pNativeFenceFd) {
-
+ VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue boxed_queue,
+ uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, VkImage image,
+ int* pNativeFenceFd) {
auto queue = unbox_VkQueue(boxed_queue);
auto vk = dispatch_VkQueue(boxed_queue);
@@ -3626,21 +3234,19 @@
auto imageInfo = android::base::find(mImageInfo, image);
auto anbInfo = imageInfo->anbInfo;
- return
- syncImageToColorBuffer(
- vk, queueInfo->queueFamilyIndex, queue, queueInfo->lock, waitSemaphoreCount,
- pWaitSemaphores, pNativeFenceFd, anbInfo);
+ return syncImageToColorBuffer(vk, queueInfo->queueFamilyIndex, queue, queueInfo->lock,
+ waitSemaphoreCount, pWaitSemaphores, pNativeFenceFd, anbInfo);
}
- VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device, VkDeviceMemory memory, uint64_t* pAddress) {
-
+ VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
+ VkDevice boxed_device, VkDeviceMemory memory,
+ uint64_t* pAddress) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
if (!feature_is_enabled(kFeature_GLDirectMem)) {
- fprintf(stderr, "FATAL: Tried to use direct mapping "
+ fprintf(stderr,
+ "FATAL: Tried to use direct mapping "
"while GLDirectMem is not enabled!\n");
}
@@ -3650,12 +3256,10 @@
if (mLogging) {
fprintf(stderr, "%s: deviceMemory: 0x%llx pAddress: 0x%llx\n", __func__,
- (unsigned long long)memory,
- (unsigned long long)(*pAddress));
+ (unsigned long long)memory, (unsigned long long)(*pAddress));
}
- if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(
- vk, device, memory, *pAddress)) {
+ if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(vk, device, memory, *pAddress)) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
@@ -3666,12 +3270,12 @@
return VK_SUCCESS;
}
- VkResult on_vkGetMemoryHostAddressInfoGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device, VkDeviceMemory memory,
- uint64_t* pAddress, uint64_t* pSize, uint64_t* pHostmemId) {
+ VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool,
+ VkDevice boxed_device, VkDeviceMemory memory,
+ uint64_t* pAddress, uint64_t* pSize,
+ uint64_t* pHostmemId) {
AutoLock lock(mLock);
- struct MemEntry entry = { 0 };
+ struct MemEntry entry = {0};
auto info = android::base::find(mMapInfo, memory);
@@ -3685,18 +3289,15 @@
constexpr size_t kPageOffsetMask = kPageSize - 1;
uint64_t pageOffset = hva & kPageOffsetMask;
- uint64_t sizeToPage =
- ((size + pageOffset + kPageSize - 1) >>
- kPageBits) << kPageBits;
+ uint64_t sizeToPage = ((size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;
entry.hva = (uint64_t)(uintptr_t)(info->ptr);
entry.size = (uint64_t)(uintptr_t)(info->size);
entry.caching = info->caching;
- auto id =
- get_emugl_vm_operations().hostmemRegister(&entry);
+ auto id = get_emugl_vm_operations().hostmemRegister(&entry);
- *pAddress = hva & (0xfff); // Don't expose exact hva to guest
+ *pAddress = hva & (0xfff); // Don't expose exact hva to guest
*pSize = sizeToPage;
*pHostmemId = id;
@@ -3704,27 +3305,21 @@
info->hostmemId = id;
fprintf(stderr, "%s: hva, size, sizeToPage: %p 0x%llx 0x%llx id 0x%llx\n", __func__,
- info->ptr, (unsigned long long)(info->size),
- (unsigned long long)(sizeToPage),
+ info->ptr, (unsigned long long)(info->size), (unsigned long long)(sizeToPage),
(unsigned long long)(*pHostmemId));
return VK_SUCCESS;
}
- VkResult on_vkFreeMemorySyncGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device, VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
-
+ VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator) {
on_vkFreeMemory(pool, boxed_device, memory, pAllocator);
return VK_SUCCESS;
}
-
- VkResult on_vkRegisterImageColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkImage image, uint32_t colorBuffer) {
-
+ VkResult on_vkRegisterImageColorBufferGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkImage image, uint32_t colorBuffer) {
(void)image;
bool success = setupVkColorBuffer(colorBuffer);
@@ -3732,10 +3327,8 @@
return success ? VK_SUCCESS : VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
- VkResult on_vkRegisterBufferColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkBuffer buffer, uint32_t colorBuffer) {
-
+ VkResult on_vkRegisterBufferColorBufferGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkBuffer buffer, uint32_t colorBuffer) {
(void)buffer;
bool success = setupVkColorBuffer(colorBuffer);
@@ -3743,17 +3336,13 @@
return success ? VK_SUCCESS : VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
- VkResult on_vkAllocateCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers) {
-
+ VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkCommandBufferAllocateInfo* pAllocateInfo,
+ VkCommandBuffer* pCommandBuffers) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result = vk->vkAllocateCommandBuffers(
- device, pAllocateInfo, pCommandBuffers);
+ VkResult result = vk->vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
if (result != VK_SUCCESS) {
return result;
@@ -3763,27 +3352,23 @@
for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
mCmdBufferInfo[pCommandBuffers[i]] = CommandBufferInfo();
mCmdBufferInfo[pCommandBuffers[i]].device = device;
- mCmdBufferInfo[pCommandBuffers[i]].cmdPool =
- pAllocateInfo->commandPool;
- auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk, false /* does not own dispatch */);
+ mCmdBufferInfo[pCommandBuffers[i]].cmdPool = pAllocateInfo->commandPool;
+ auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk,
+ false /* does not own dispatch */);
mCmdBufferInfo[pCommandBuffers[i]].boxed = boxed;
pCommandBuffers[i] = (VkCommandBuffer)boxed;
}
return result;
}
- VkResult on_vkCreateCommandPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
-
+ VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkCommandPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkCommandPool* pCommandPool) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result = vk->vkCreateCommandPool(device, pCreateInfo,
- pAllocator, pCommandPool);
+ VkResult result = vk->vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
if (result != VK_SUCCESS) {
return result;
}
@@ -3798,12 +3383,9 @@
return result;
}
- void on_vkDestroyCommandPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator) {
-
+ void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkCommandPool commandPool,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
@@ -3816,47 +3398,33 @@
}
}
- VkResult on_vkResetCommandPool(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags) {
-
+ VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- VkResult result =
- vk->vkResetCommandPool(device, commandPool, flags);
+ VkResult result = vk->vkResetCommandPool(device, commandPool, flags);
if (result != VK_SUCCESS) {
return result;
}
return result;
}
- void on_vkCmdExecuteCommands(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
-
+ void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
- vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount,
- pCommandBuffers);
+ vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
AutoLock lock(mLock);
CommandBufferInfo& cmdBuffer = mCmdBufferInfo[commandBuffer];
- cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(),
- pCommandBuffers, pCommandBuffers + commandBufferCount);
+ cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(), pCommandBuffers,
+ pCommandBuffers + commandBufferCount);
}
- VkResult on_vkQueueSubmit(
- android::base::BumpPool* pool,
- VkQueue boxed_queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
-
+ VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue boxed_queue,
+ uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
auto queue = unbox_VkQueue(boxed_queue);
auto vk = dispatch_VkQueue(boxed_queue);
@@ -3865,8 +3433,7 @@
{
auto queueInfo = android::base::find(mQueueInfo, queue);
if (queueInfo) {
- sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(
- queueInfo->device);
+ sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(queueInfo->device);
}
}
@@ -3901,10 +3468,7 @@
return result;
}
- VkResult on_vkQueueWaitIdle(
- android::base::BumpPool* pool,
- VkQueue boxed_queue) {
-
+ VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue boxed_queue) {
auto queue = unbox_VkQueue(boxed_queue);
auto vk = dispatch_VkQueue(boxed_queue);
@@ -3920,11 +3484,9 @@
return vk->vkQueueWaitIdle(queue);
}
- VkResult on_vkResetCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkCommandBufferResetFlags flags) {
-
+ VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer,
+ VkCommandBufferResetFlags flags) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
@@ -3941,26 +3503,19 @@
return result;
}
- void on_vkFreeCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
-
+ void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkCommandPool commandPool, uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
if (!device) return;
- vk->vkFreeCommandBuffers(device, commandPool, commandBufferCount,
- pCommandBuffers);
+ vk->vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
AutoLock lock(mLock);
for (uint32_t i = 0; i < commandBufferCount; i++) {
- const auto& cmdBufferInfoIt =
- mCmdBufferInfo.find(pCommandBuffers[i]);
+ const auto& cmdBufferInfoIt = mCmdBufferInfo.find(pCommandBuffers[i]);
if (cmdBufferInfoIt != mCmdBufferInfo.end()) {
- const auto& cmdPoolInfoIt =
- mCmdPoolInfo.find(cmdBufferInfoIt->second.cmdPool);
+ const auto& cmdPoolInfoIt = mCmdPoolInfo.find(cmdBufferInfoIt->second.cmdPool);
if (cmdPoolInfoIt != mCmdPoolInfo.end()) {
cmdPoolInfoIt->second.cmdBuffers.erase(pCommandBuffers[i]);
}
@@ -3972,11 +3527,9 @@
}
void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice boxed_physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
-
+ android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
+ const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+ VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
if (!physicalDevice) {
@@ -4012,138 +3565,109 @@
}
VkResult on_vkCreateDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
-
+ android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- auto descriptorUpdateTemplateInfo =
- calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
+ auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
- VkResult res = vk->vkCreateDescriptorUpdateTemplate(
- device, &descriptorUpdateTemplateInfo.createInfo,
- pAllocator, pDescriptorUpdateTemplate);
+ VkResult res =
+ vk->vkCreateDescriptorUpdateTemplate(device, &descriptorUpdateTemplateInfo.createInfo,
+ pAllocator, pDescriptorUpdateTemplate);
if (res == VK_SUCCESS) {
- registerDescriptorUpdateTemplate(
- *pDescriptorUpdateTemplate,
- descriptorUpdateTemplateInfo);
- *pDescriptorUpdateTemplate = new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
+ registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
+ descriptorUpdateTemplateInfo);
+ *pDescriptorUpdateTemplate =
+ new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
}
return res;
}
VkResult on_vkCreateDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
-
+ android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- auto descriptorUpdateTemplateInfo =
- calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
+ auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
VkResult res = vk->vkCreateDescriptorUpdateTemplateKHR(
- device, &descriptorUpdateTemplateInfo.createInfo,
- pAllocator, pDescriptorUpdateTemplate);
+ device, &descriptorUpdateTemplateInfo.createInfo, pAllocator,
+ pDescriptorUpdateTemplate);
if (res == VK_SUCCESS) {
- registerDescriptorUpdateTemplate(
- *pDescriptorUpdateTemplate,
- descriptorUpdateTemplateInfo);
- *pDescriptorUpdateTemplate = new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
+ registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
+ descriptorUpdateTemplateInfo);
+ *pDescriptorUpdateTemplate =
+ new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
}
return res;
}
- void on_vkDestroyDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
+ void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- vk->vkDestroyDescriptorUpdateTemplate(
- device, descriptorUpdateTemplate, pAllocator);
+ vk->vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
}
void on_vkDestroyDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
+ android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VkAllocationCallbacks* pAllocator) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- vk->vkDestroyDescriptorUpdateTemplateKHR(
- device, descriptorUpdateTemplate, pAllocator);
+ vk->vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
}
void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- uint32_t imageInfoCount,
- uint32_t bufferInfoCount,
- uint32_t bufferViewCount,
- const uint32_t* pImageInfoEntryIndices,
- const uint32_t* pBufferInfoEntryIndices,
- const uint32_t* pBufferViewEntryIndices,
- const VkDescriptorImageInfo* pImageInfos,
- const VkDescriptorBufferInfo* pBufferInfos,
- const VkBufferView* pBufferViews) {
-
+ android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
+ uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
+ const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
+ const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
+ const VkBufferView* pBufferViews) {
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
AutoLock lock(mLock);
- auto info = android::base::find(
- mDescriptorUpdateTemplateInfo,
- descriptorUpdateTemplate);
+ auto info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
if (!info) return;
- memcpy(info->data.data() + info->imageInfoStart,
- pImageInfos,
- imageInfoCount * sizeof(VkDescriptorImageInfo));
- memcpy(info->data.data() + info->bufferInfoStart,
- pBufferInfos,
- bufferInfoCount * sizeof(VkDescriptorBufferInfo));
- memcpy(info->data.data() + info->bufferViewStart,
- pBufferViews,
- bufferViewCount * sizeof(VkBufferView));
+ memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
+ imageInfoCount * sizeof(VkDescriptorImageInfo));
+ memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
+ bufferInfoCount * sizeof(VkDescriptorBufferInfo));
+ memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
+ bufferViewCount * sizeof(VkBufferView));
- vk->vkUpdateDescriptorSetWithTemplate(
- device, descriptorSet, descriptorUpdateTemplate,
- info->data.data());
+ vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
+ info->data.data());
}
- void hostSyncCommandBuffer(
- const char* tag,
- VkCommandBuffer boxed_commandBuffer,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
-
+ void hostSyncCommandBuffer(const char* tag, VkCommandBuffer boxed_commandBuffer,
+ uint32_t needHostSync, uint32_t sequenceNumber) {
auto nextDeadline = []() {
- return android::base::getUnixTimeUs() + 10000; // 10 ms
+ return android::base::getUnixTimeUs() + 10000; // 10 ms
};
- auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000; // 5 s
+ auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000; // 5 s
OrderMaintenanceInfo* order = ordmaint_VkCommandBuffer(boxed_commandBuffer);
if (!order) return;
@@ -4151,10 +3675,10 @@
AutoLock lock(order->lock);
if (needHostSync) {
- while ((sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
+ while (
+ (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
auto waitUntilUs = nextDeadline();
- order->cv.timedWait(
- &order->lock, waitUntilUs);
+ order->cv.timedWait(&order->lock, waitUntilUs);
if (timeoutDeadline < android::base::getUnixTimeUs()) {
break;
@@ -4167,25 +3691,19 @@
releaseOrderMaintInfo(order);
}
- void on_vkCommandBufferHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
+ void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer, uint32_t needHostSync,
+ uint32_t sequenceNumber) {
this->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
}
- void hostSyncQueue(
- const char* tag,
- VkQueue boxed_queue,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
-
+ void hostSyncQueue(const char* tag, VkQueue boxed_queue, uint32_t needHostSync,
+ uint32_t sequenceNumber) {
auto nextDeadline = []() {
- return android::base::getUnixTimeUs() + 10000; // 10 ms
+ return android::base::getUnixTimeUs() + 10000; // 10 ms
};
- auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000; // 5 s
+ auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000; // 5 s
OrderMaintenanceInfo* order = ordmaint_VkQueue(boxed_queue);
if (!order) return;
@@ -4193,10 +3711,10 @@
AutoLock lock(order->lock);
if (needHostSync) {
- while ((sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
+ while (
+ (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
auto waitUntilUs = nextDeadline();
- order->cv.timedWait(
- &order->lock, waitUntilUs);
+ order->cv.timedWait(&order->lock, waitUntilUs);
if (timeoutDeadline < android::base::getUnixTimeUs()) {
break;
@@ -4209,23 +3727,17 @@
releaseOrderMaintInfo(order);
}
- void on_vkQueueHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
+ void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t needHostSync, uint32_t sequenceNumber) {
this->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
}
-
- VkResult on_vkCreateImageWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkMemoryRequirements* pMemoryRequirements) {
-
+ VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool,
+ VkDevice boxed_device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkImage* pImage,
+ VkMemoryRequirements* pMemoryRequirements) {
if (pMemoryRequirements) {
memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
}
@@ -4237,22 +3749,18 @@
return imageCreateRes;
}
- on_vkGetImageMemoryRequirements(
- pool, boxed_device,
- unbox_VkImage(*pImage),
- pMemoryRequirements);
+ on_vkGetImageMemoryRequirements(pool, boxed_device, unbox_VkImage(*pImage),
+ pMemoryRequirements);
return imageCreateRes;
}
- VkResult on_vkCreateBufferWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkMemoryRequirements* pMemoryRequirements) {
-
+ VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool,
+ VkDevice boxed_device,
+ const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkBuffer* pBuffer,
+ VkMemoryRequirements* pMemoryRequirements) {
if (pMemoryRequirements) {
memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
}
@@ -4267,17 +3775,14 @@
auto device = unbox_VkDevice(boxed_device);
auto vk = dispatch_VkDevice(boxed_device);
- vk->vkGetBufferMemoryRequirements(
- device, unbox_VkBuffer(*pBuffer), pMemoryRequirements);
+ vk->vkGetBufferMemoryRequirements(device, unbox_VkBuffer(*pBuffer), pMemoryRequirements);
return bufferCreateRes;
}
-
- VkResult on_vkBeginCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
+ VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer,
+ const VkCommandBufferBeginInfo* pBeginInfo) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
VkResult result = vk->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
@@ -4292,40 +3797,33 @@
return VK_SUCCESS;
}
- VkResult on_vkBeginCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
- return this->on_vkBeginCommandBuffer(
- pool, boxed_commandBuffer, pBeginInfo);
+ VkResult on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer,
+ const VkCommandBufferBeginInfo* pBeginInfo) {
+ return this->on_vkBeginCommandBuffer(pool, boxed_commandBuffer, pBeginInfo);
}
- VkResult on_vkEndCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer) {
+ VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
return vk->vkEndCommandBuffer(commandBuffer);
}
- void on_vkEndCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer) {
+ void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer) {
on_vkEndCommandBuffer(pool, boxed_commandBuffer);
}
- void on_vkResetCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkCommandBufferResetFlags flags) {
+ void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer boxed_commandBuffer,
+ VkCommandBufferResetFlags flags) {
on_vkResetCommandBuffer(pool, boxed_commandBuffer, flags);
}
- void on_vkCmdBindPipeline(android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
+ void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
vk->vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
@@ -4341,20 +3839,16 @@
}
void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
- VkCommandBuffer boxed_commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
+ VkCommandBuffer boxed_commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+ uint32_t firstSet, uint32_t descriptorSetCount,
+ const VkDescriptorSet* pDescriptorSets,
+ uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
- vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout,
- firstSet, descriptorSetCount,
- pDescriptorSets, dynamicOffsetCount,
- pDynamicOffsets);
+ vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet,
+ descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
+ pDynamicOffsets);
if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
AutoLock lock(mLock);
auto cmdBufferInfoIt = mCmdBufferInfo.find(commandBuffer);
@@ -4363,15 +3857,13 @@
if (descriptorSetCount) {
cmdBufferInfoIt->second.firstSet = firstSet;
cmdBufferInfoIt->second.descriptorSets.assign(
- pDescriptorSets,
- pDescriptorSets + descriptorSetCount);
+ pDescriptorSets, pDescriptorSets + descriptorSetCount);
}
}
}
}
- VkResult on_vkCreateRenderPass(android::base::BumpPool* pool,
- VkDevice boxed_device,
+ VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
const VkRenderPassCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass) {
@@ -4385,11 +3877,10 @@
if (deviceInfoIt == mDeviceInfo.end()) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
- if (deviceInfoIt->second.emulateTextureEtc2 ||
- deviceInfoIt->second.emulateTextureAstc) {
+ if (deviceInfoIt->second.emulateTextureEtc2 || deviceInfoIt->second.emulateTextureAstc) {
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
if (deviceInfoIt->second.needEmulatedDecompression(
- pCreateInfo->pAttachments[i].format)) {
+ pCreateInfo->pAttachments[i].format)) {
needReformat = true;
break;
}
@@ -4398,17 +3889,15 @@
std::vector<VkAttachmentDescription> attachments;
if (needReformat) {
createInfo = *pCreateInfo;
- attachments.assign(
- pCreateInfo->pAttachments,
- pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
+ attachments.assign(pCreateInfo->pAttachments,
+ pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
createInfo.pAttachments = attachments.data();
for (auto& attachment : attachments) {
attachment.format = getDecompFormat(attachment.format);
}
pCreateInfo = &createInfo;
}
- VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator,
- pRenderPass);
+ VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
if (res != VK_SUCCESS) {
return res;
}
@@ -4418,12 +3907,9 @@
return res;
}
- VkResult on_vkQueueBindSparse(
- android::base::BumpPool* pool,
- VkQueue boxed_queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo, VkFence fence) {
-
+ VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue boxed_queue,
+ uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+ VkFence fence) {
// If pBindInfo contains VkTimelineSemaphoreSubmitInfo, then it's
// possible the host driver isn't equipped to deal with them yet. To
// work around this, send empty vkQueueSubmits before and after the
@@ -4455,17 +3941,19 @@
} else {
std::vector<VkPipelineStageFlags> waitDstStageMasks;
VkTimelineSemaphoreSubmitInfoKHR currTsSi = {
- VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0,
- 0, nullptr,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0, 0, nullptr, 0, nullptr,
};
VkSubmitInfo currSi = {
- VK_STRUCTURE_TYPE_SUBMIT_INFO, &currTsSi,
- 0, nullptr,
+ VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ &currTsSi,
+ 0,
nullptr,
- 0, nullptr, // No commands
- 0, nullptr,
+ nullptr,
+ 0,
+ nullptr, // No commands
+ 0,
+ nullptr,
};
VkBindSparseInfo currBi;
@@ -4488,7 +3976,8 @@
currSi.waitSemaphoreCount = pBindInfo[i].waitSemaphoreCount;
currSi.pWaitSemaphores = pBindInfo[i].pWaitSemaphores;
- waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+ waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount,
+ VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
currSi.pWaitDstStageMask = waitDstStageMasks.data();
currSi.signalSemaphoreCount = 0;
@@ -4519,7 +4008,8 @@
currSi.signalSemaphoreCount = pBindInfo[i].signalSemaphoreCount;
currSi.pSignalSemaphores = pBindInfo[i].pSignalSemaphores;
- res = vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
+ res =
+ vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
if (VK_SUCCESS != res) return res;
}
@@ -4527,12 +4017,9 @@
}
}
- void on_vkGetLinearImageLayoutGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkFormat format,
- VkDeviceSize* pOffset,
- VkDeviceSize* pRowPitchAlignment) {
+ void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkFormat format, VkDeviceSize* pOffset,
+ VkDeviceSize* pRowPitchAlignment) {
if (mPerFormatLinearImageProperties.find(format) == mPerFormatLinearImageProperties.end()) {
VkDeviceSize offset = 0u;
VkDeviceSize rowPitchAlignment = UINT_MAX;
@@ -4625,12 +4112,9 @@
#include "VkSubDecoder.cpp"
- void on_vkQueueFlushCommandsGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- VkCommandBuffer boxed_commandBuffer,
- VkDeviceSize dataSize,
- const void* pData) {
+ void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ VkCommandBuffer boxed_commandBuffer, VkDeviceSize dataSize,
+ const void* pData) {
(void)queue;
VkCommandBuffer commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
@@ -4639,15 +4123,11 @@
subDecode(readStream, vk, boxed_commandBuffer, commandBuffer, dataSize, pData);
}
- VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(
- VulkanDispatch* vk,
- VkDevice device,
- VkDescriptorPool pool,
- VkDescriptorSetLayout setLayout,
- uint64_t poolId,
- uint32_t pendingAlloc,
- bool* didAlloc) {
-
+ VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch* vk, VkDevice device,
+ VkDescriptorPool pool,
+ VkDescriptorSetLayout setLayout,
+ uint64_t poolId, uint32_t pendingAlloc,
+ bool* didAlloc) {
auto poolInfo = android::base::find(mDescriptorPoolInfo, pool);
if (!poolInfo) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
@@ -4659,12 +4139,10 @@
if (setHandleInfo->underlying) {
if (pendingAlloc) {
VkDescriptorSet allocedSet;
- vk->vkFreeDescriptorSets(device, pool, 1, (VkDescriptorSet*)(&setHandleInfo->underlying));
+ vk->vkFreeDescriptorSets(device, pool, 1,
+ (VkDescriptorSet*)(&setHandleInfo->underlying));
VkDescriptorSetAllocateInfo dsAi = {
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0,
- pool,
- 1,
- &setLayout,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
};
vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
setHandleInfo->underlying = (uint64_t)allocedSet;
@@ -4679,10 +4157,7 @@
if (pendingAlloc) {
VkDescriptorSet allocedSet;
VkDescriptorSetAllocateInfo dsAi = {
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0,
- pool,
- 1,
- &setLayout,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
};
vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
setHandleInfo->underlying = (uint64_t)allocedSet;
@@ -4691,27 +4166,20 @@
return allocedSet;
} else {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
- << "descriptor pool " << pool << " wanted to get set with id 0x" <<
- std::hex << poolId;
+ << "descriptor pool " << pool << " wanted to get set with id 0x" << std::hex
+ << poolId;
return nullptr;
}
}
}
void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
- android::base::BumpPool* pool,
- VkQueue boxed_queue,
- uint32_t descriptorPoolCount,
- const VkDescriptorPool* pDescriptorPools,
- uint32_t descriptorSetCount,
- const VkDescriptorSetLayout* pDescriptorSetLayouts,
- const uint64_t* pDescriptorSetPoolIds,
- const uint32_t* pDescriptorSetWhichPool,
- const uint32_t* pDescriptorSetPendingAllocation,
- const uint32_t* pDescriptorWriteStartingIndices,
- uint32_t pendingDescriptorWriteCount,
+ android::base::BumpPool* pool, VkQueue boxed_queue, uint32_t descriptorPoolCount,
+ const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
+ const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
+ const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
+ const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
const VkWriteDescriptorSet* pPendingDescriptorWrites) {
-
AutoLock lock(mLock);
VkDevice device;
@@ -4737,20 +4205,17 @@
uint32_t pendingAlloc = pDescriptorSetPendingAllocation[i];
bool didAllocThisTime;
setsToUpdate[i] = getOrAllocateDescriptorSetFromPoolAndId(
- vk, device,
- pDescriptorPools[whichPool],
- pDescriptorSetLayouts[i],
- poolId,
- pendingAlloc,
- &didAllocThisTime);
+ vk, device, pDescriptorPools[whichPool], pDescriptorSetLayouts[i], poolId,
+ pendingAlloc, &didAllocThisTime);
if (didAllocThisTime) didAlloc = true;
}
if (didAlloc) {
-
- std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(pendingDescriptorWriteCount);
- memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites, pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));
+ std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(
+ pendingDescriptorWriteCount);
+ memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites,
+ pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));
for (uint32_t i = 0; i < descriptorSetCount; ++i) {
uint32_t writeStartIndex = pDescriptorWriteStartingIndices[i];
@@ -4766,25 +4231,17 @@
}
}
this->on_vkUpdateDescriptorSetsImpl(
- pool, vk, device,
- (uint32_t)writeDescriptorSetsForHostDriver.size(),
+ pool, vk, device, (uint32_t)writeDescriptorSetsForHostDriver.size(),
writeDescriptorSetsForHostDriver.data(), 0, nullptr);
} else {
- this->on_vkUpdateDescriptorSetsImpl(
- pool, vk, device,
- pendingDescriptorWriteCount,
- pPendingDescriptorWrites,
- 0, nullptr);
+ this->on_vkUpdateDescriptorSetsImpl(pool, vk, device, pendingDescriptorWriteCount,
+ pPendingDescriptorWrites, 0, nullptr);
}
}
- void on_vkCollectDescriptorPoolIdsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t* pPoolIdCount,
- uint64_t* pPoolIds) {
-
+ void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorPool descriptorPool,
+ uint32_t* pPoolIdCount, uint64_t* pPoolIds) {
AutoLock lock(mLock);
auto& info = mDescriptorPoolInfo[descriptorPool];
*pPoolIdCount = (uint32_t)info.poolIds.size();
@@ -4836,8 +4293,8 @@
lock.lock();
if (mFenceInfo.find(fence) == mFenceInfo.end()) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "Fence was destroyed before vkWaitForFences call.";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
+ << "Fence was destroyed before vkWaitForFences call.";
}
lock.unlock();
@@ -4868,8 +4325,8 @@
VkImage image = unbox_VkImage(boxed_image);
if (mLogging) {
- fprintf(stderr, "%s: for boxed image 0x%llx image %p\n",
- __func__, (unsigned long long)boxed_image, image);
+ fprintf(stderr, "%s: for boxed image 0x%llx image %p\n", __func__,
+ (unsigned long long)boxed_image, image);
}
if (image == VK_NULL_HANDLE || mImageInfo.find(image) == mImageInfo.end()) {
@@ -4877,7 +4334,7 @@
return VK_SUCCESS;
}
- auto anbInfo = mImageInfo[image].anbInfo; // shared ptr, take ref
+ auto anbInfo = mImageInfo[image].anbInfo; // shared ptr, take ref
lock.unlock();
if (!anbInfo) {
@@ -4885,12 +4342,14 @@
return VK_SUCCESS;
}
if (!anbInfo->vk) {
- fprintf(stderr, "%s:%p warning: image %p anb info not initialized\n", __func__, anbInfo.get(), image);
+ fprintf(stderr, "%s:%p warning: image %p anb info not initialized\n", __func__,
+ anbInfo.get(), image);
return VK_SUCCESS;
}
// Could be null or mismatched image, check later
if (image != anbInfo->image) {
- fprintf(stderr, "%s:%p warning: image %p anb info has wrong image: %p\n", __func__, anbInfo.get(), image, anbInfo->image);
+ fprintf(stderr, "%s:%p warning: image %p anb info has wrong image: %p\n", __func__,
+ anbInfo.get(), image, anbInfo->image);
return VK_SUCCESS;
}
@@ -4908,20 +4367,19 @@
// Transforms
- void transformImpl_VkExternalMemoryProperties_tohost(
- const VkExternalMemoryProperties* props, uint32_t count) {
- VkExternalMemoryProperties* mut =
- (VkExternalMemoryProperties*)props;
+ void transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties* props,
+ uint32_t count) {
+ VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
for (uint32_t i = 0; i < count; ++i) {
mut[i] = transformExternalMemoryProperties_tohost(mut[i]);
}
}
- void transformImpl_VkExternalMemoryProperties_fromhost(
- const VkExternalMemoryProperties* props, uint32_t count) {
- VkExternalMemoryProperties* mut =
- (VkExternalMemoryProperties*)props;
+ void transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties* props,
+ uint32_t count) {
+ VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
for (uint32_t i = 0; i < count; ++i) {
- mut[i] = transformExternalMemoryProperties_fromhost(mut[i], GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
+ mut[i] = transformExternalMemoryProperties_fromhost(mut[i],
+ GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
}
}
@@ -4995,51 +4453,52 @@
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Not yet implemented.";
}
-#define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field) \
- void transformImpl_##type##_tohost(const type* props, uint32_t count) { \
- type* mut = (type*)props; \
- for (uint32_t i = 0; i < count; ++i) { \
- mut[i].field = (VkExternalMemoryHandleTypeFlagBits) \
- transformExternalMemoryHandleTypeFlags_tohost( \
- mut[i].field); \
- } \
- } \
- void transformImpl_##type##_fromhost(const type* props, uint32_t count) { \
- type* mut = (type*)props; \
- for (uint32_t i = 0; i < count; ++i) { \
- mut[i].field = (VkExternalMemoryHandleTypeFlagBits) \
- transformExternalMemoryHandleTypeFlags_fromhost( \
- mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES); \
- } \
- } \
+#define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field) \
+ void transformImpl_##type##_tohost(const type* props, uint32_t count) { \
+ type* mut = (type*)props; \
+ for (uint32_t i = 0; i < count; ++i) { \
+ mut[i].field = \
+ (VkExternalMemoryHandleTypeFlagBits)transformExternalMemoryHandleTypeFlags_tohost( \
+ mut[i].field); \
+ } \
+ } \
+ void transformImpl_##type##_fromhost(const type* props, uint32_t count) { \
+ type* mut = (type*)props; \
+ for (uint32_t i = 0; i < count; ++i) { \
+ mut[i].field = (VkExternalMemoryHandleTypeFlagBits) \
+ transformExternalMemoryHandleTypeFlags_fromhost( \
+ mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES); \
+ } \
+ }
-#define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type) \
- void transformImpl_##type##_tohost(const type* props, uint32_t count) { \
- type* mut = (type*)props; \
- for (uint32_t i = 0; i < count; ++i) { \
- mut[i].externalMemoryProperties = transformExternalMemoryProperties_tohost( \
- mut[i].externalMemoryProperties); \
- } \
- } \
- void transformImpl_##type##_fromhost(const type* props, uint32_t count) { \
- type* mut = (type*)props; \
- for (uint32_t i = 0; i < count; ++i) { \
- mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost( \
- mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES); \
- } \
- } \
+#define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type) \
+ void transformImpl_##type##_tohost(const type* props, uint32_t count) { \
+ type* mut = (type*)props; \
+ for (uint32_t i = 0; i < count; ++i) { \
+ mut[i].externalMemoryProperties = \
+ transformExternalMemoryProperties_tohost(mut[i].externalMemoryProperties); \
+ } \
+ } \
+ void transformImpl_##type##_fromhost(const type* props, uint32_t count) { \
+ type* mut = (type*)props; \
+ for (uint32_t i = 0; i < count; ++i) { \
+ mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost( \
+ mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES); \
+ } \
+ }
DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalImageFormatInfo, handleType)
- DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
- DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
- DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
- DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
- DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
- DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
+ DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
+ DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
+ DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
+ DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
+ DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
+ DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
- uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item, BoxedHandleTypeTag typeTag) {
+ uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item,
+ BoxedHandleTypeTag typeTag) {
if (!mCreatedHandlesForSnapshotLoad.empty() &&
- (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
+ (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
auto handle = mCreatedHandlesForSnapshotLoad[mCreatedHandlesForSnapshotLoadIndex];
VKDGS_LOG("use handle: %p", handle);
++mCreatedHandlesForSnapshotLoadIndex;
@@ -5050,160 +4509,151 @@
}
}
-#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type) \
- type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) { \
- DispatchableHandleInfo<uint64_t> item; \
- item.underlying = (uint64_t)underlying; \
- item.dispatch = dispatch ? dispatch : new VulkanDispatch; \
- item.ownDispatch = ownDispatch; \
- item.ordMaintInfo = new OrderMaintenanceInfo; \
- item.readStream = nullptr; \
- auto res = (type)newGlobalHandle(item, Tag_##type); \
- return res; \
- } \
- void delete_##type(type boxed) { \
- if (!boxed) return; \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return; \
- releaseOrderMaintInfo(elt->ordMaintInfo); \
- if (elt->readStream) { \
- sReadStreamRegistry.push(elt->readStream); \
- elt->readStream = nullptr; \
- } \
- sBoxedHandleManager.remove((uint64_t)boxed); \
- } \
- type unbox_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return VK_NULL_HANDLE; \
- return (type)elt->underlying; \
- } \
- OrderMaintenanceInfo* ordmaint_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return 0; \
- auto info = elt->ordMaintInfo; \
- if (!info) return 0; \
- acquireOrderMaintInfo(info); return info; \
- } \
- VulkanMemReadingStream* readstream_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return 0; \
- auto stream = elt->readStream; \
- if (!stream) { \
- stream = sReadStreamRegistry.pop(); \
- elt->readStream = stream; \
- } \
- return stream; \
- } \
- type unboxed_to_boxed_##type(type unboxed) { \
- AutoLock lock(sBoxedHandleManager.lock); \
- return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked( \
- (uint64_t)(uintptr_t)unboxed); \
- } \
- VulkanDispatch* dispatch_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) { fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed); return nullptr; } \
- return elt->dispatch; \
- } \
-
-#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type) \
- type new_boxed_non_dispatchable_##type(type underlying) { \
- DispatchableHandleInfo<uint64_t> item; \
- item.underlying = (uint64_t)underlying; \
- auto res = (type)newGlobalHandle(item, Tag_##type); \
- return res; \
- } \
- void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) { \
- sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback); \
- } \
- void delete_##type(type boxed) { \
- sBoxedHandleManager.remove((uint64_t)boxed); \
- } \
- type unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
- AutoLock lock(sBoxedHandleManager.lock); \
- return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked( \
- (uint64_t)(uintptr_t)unboxed); \
- } \
- type unbox_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) { fprintf(stderr, "%s: unbox %p failed, not found\n", __func__, boxed); abort(); return VK_NULL_HANDLE; } \
- return (type)elt->underlying; \
- } \
-
- GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
- GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
-
- VkDecoderSnapshot* snapshot() { return &mSnapshot; }
-
- private:
- static const bool kEmulateAstc = true;
- bool isEmulatedExtension(const char* name) const {
- for (auto emulatedExt : kEmulatedExtensions) {
- if (!strcmp(emulatedExt, name)) return true;
- }
- return false;
- }
-
- bool supportEmulatedCompressedImageFormatProperty(
- VkFormat compressedFormat,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags
- ) {
- // BUG: 139193497
- return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
- && !(type == VK_IMAGE_TYPE_1D);
+#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type) \
+ type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) { \
+ DispatchableHandleInfo<uint64_t> item; \
+ item.underlying = (uint64_t)underlying; \
+ item.dispatch = dispatch ? dispatch : new VulkanDispatch; \
+ item.ownDispatch = ownDispatch; \
+ item.ordMaintInfo = new OrderMaintenanceInfo; \
+ item.readStream = nullptr; \
+ auto res = (type)newGlobalHandle(item, Tag_##type); \
+ return res; \
+ } \
+ void delete_##type(type boxed) { \
+ if (!boxed) return; \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return; \
+ releaseOrderMaintInfo(elt->ordMaintInfo); \
+ if (elt->readStream) { \
+ sReadStreamRegistry.push(elt->readStream); \
+ elt->readStream = nullptr; \
+ } \
+ sBoxedHandleManager.remove((uint64_t)boxed); \
+ } \
+ type unbox_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return VK_NULL_HANDLE; \
+ return (type)elt->underlying; \
+ } \
+ OrderMaintenanceInfo* ordmaint_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return 0; \
+ auto info = elt->ordMaintInfo; \
+ if (!info) return 0; \
+ acquireOrderMaintInfo(info); \
+ return info; \
+ } \
+ VulkanMemReadingStream* readstream_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return 0; \
+ auto stream = elt->readStream; \
+ if (!stream) { \
+ stream = sReadStreamRegistry.pop(); \
+ elt->readStream = stream; \
+ } \
+ return stream; \
+ } \
+ type unboxed_to_boxed_##type(type unboxed) { \
+ AutoLock lock(sBoxedHandleManager.lock); \
+ return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
+ } \
+ VulkanDispatch* dispatch_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) { \
+ fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed); \
+ return nullptr; \
+ } \
+ return elt->dispatch; \
}
- std::vector<const char*>
- filteredExtensionNames(
- uint32_t count, const char* const* extNames) {
- std::vector<const char*> res;
- for (uint32_t i = 0; i < count; ++i) {
- auto extName = extNames[i];
- if (!isEmulatedExtension(extName)) {
- res.push_back(extName);
- }
- if (m_emu->instanceSupportsMoltenVK) {
- continue;
- }
- if (!strcmp(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, extName)) {
- res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
- }
- if (!strcmp(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, extName)) {
- res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- }
- if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, extName)) {
- res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
- }
- if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, extName)) {
- res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- }
- if (!strcmp("VK_ANDROID_external_memory_android_hardware_buffer", extName) ||
- !strcmp("VK_FUCHSIA_external_memory", extName)) {
-#ifdef _WIN32
- res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
-#else
- res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
-#endif
- }
- // External semaphore - non-Windows case is handled here
- // Windows case is handled in on_vkCreateDevice
- if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, extName)) {
-#ifndef _WIN32
- res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
-#endif
- }
- }
+#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type) \
+ type new_boxed_non_dispatchable_##type(type underlying) { \
+ DispatchableHandleInfo<uint64_t> item; \
+ item.underlying = (uint64_t)underlying; \
+ auto res = (type)newGlobalHandle(item, Tag_##type); \
+ return res; \
+ } \
+ void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) { \
+ sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback); \
+ } \
+ void delete_##type(type boxed) { sBoxedHandleManager.remove((uint64_t)boxed); } \
+ type unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
+ AutoLock lock(sBoxedHandleManager.lock); \
+ return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
+ } \
+ type unbox_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) { \
+ fprintf(stderr, "%s: unbox %p failed, not found\n", __func__, boxed); \
+ abort(); \
+ return VK_NULL_HANDLE; \
+ } \
+ return (type)elt->underlying; \
+ }
- return res;
+ GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
+ GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
+
+ VkDecoderSnapshot* snapshot() { return &mSnapshot; }
+
+ private:
+ static const bool kEmulateAstc = true;
+ bool isEmulatedExtension(const char* name) const {
+ for (auto emulatedExt : kEmulatedExtensions) {
+ if (!strcmp(emulatedExt, name)) return true;
}
+ return false;
+ }
+
+ bool supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat, VkImageType type,
+ VkImageTiling tiling, VkImageUsageFlags usage,
+ VkImageCreateFlags flags) {
+ // BUG: 139193497
+ return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(type == VK_IMAGE_TYPE_1D);
+ }
+
+ std::vector<const char*> filteredExtensionNames(uint32_t count, const char* const* extNames) {
+ std::vector<const char*> res;
+ for (uint32_t i = 0; i < count; ++i) {
+ auto extName = extNames[i];
+ if (!isEmulatedExtension(extName)) {
+ res.push_back(extName);
+ }
+ if (m_emu->instanceSupportsMoltenVK) {
+ continue;
+ }
+ if (!strcmp(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, extName)) {
+ res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
+ }
+ if (!strcmp(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, extName)) {
+ res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ }
+ if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, extName)) {
+ res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+ }
+ if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, extName)) {
+ res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+ }
+ if (!strcmp("VK_ANDROID_external_memory_android_hardware_buffer", extName) ||
+ !strcmp("VK_FUCHSIA_external_memory", extName)) {
+#ifdef _WIN32
+ res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
+#else
+ res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
+#endif
+ }
+ // External semaphore - non-Windows case is handled here
+ // Windows case is handled in on_vkCreateDevice
+ if (!strcmp(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, extName)) {
+#ifndef _WIN32
+ res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
+#endif
+ }
+ }
+
+ return res;
+ }
VkPhysicalDeviceMemoryProperties* memPropsOfDeviceLocked(VkDevice device) {
auto physdev = android::base::find(mDeviceToPhysicalDevice, device);
@@ -5215,15 +4665,13 @@
return &physdevInfo->memoryProperties;
}
- bool getDefaultQueueForDeviceLocked(
- VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex, Lock** queueLock) {
-
+ bool getDefaultQueueForDeviceLocked(VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex,
+ Lock** queueLock) {
auto deviceInfo = android::base::find(mDeviceInfo, device);
if (!deviceInfo) return false;
auto zeroIt = deviceInfo->queues.find(0);
- if (zeroIt == deviceInfo->queues.end() ||
- zeroIt->second.size() == 0) {
+ if (zeroIt == deviceInfo->queues.end() || zeroIt->second.size() == 0) {
// Get the first queue / queueFamilyIndex
// that does show up.
for (auto it : deviceInfo->queues) {
@@ -5258,10 +4706,11 @@
}
SpvFileEntry invalid = {
- filename, nullptr, 0,
+ filename,
+ nullptr,
+ 0,
};
- fprintf(stderr, "WARNING: shader source open failed! %s\n",
- filename);
+ fprintf(stderr, "WARNING: shader source open failed! %s\n", filename);
return invalid;
}
@@ -5277,10 +4726,9 @@
VkFormat sizeCompFormat; // Size compatible format
VkDeviceSize alignment = 0;
std::vector<VkDeviceSize> memoryOffsets = {};
- std::vector<VkImage> sizeCompImgs; // Size compatible images
- VkFormat decompFormat =
- VK_FORMAT_R8G8B8A8_UNORM; // Decompressed format
- VkImage decompImg = 0; // Decompressed image
+ std::vector<VkImage> sizeCompImgs; // Size compatible images
+ VkFormat decompFormat = VK_FORMAT_R8G8B8A8_UNORM; // Decompressed format
+ VkImage decompImg = 0; // Decompressed image
VkExtent3D extent;
uint32_t compressedBlockWidth = 1;
uint32_t compressedBlockHeight = 1;
@@ -5296,23 +4744,17 @@
return std::max<uint32_t>(extent.depth >> level, 1);
}
uint32_t sizeCompMipmapWidth(uint32_t level) {
- return (mipmapWidth(level) + compressedBlockWidth - 1) /
- compressedBlockWidth;
+ return (mipmapWidth(level) + compressedBlockWidth - 1) / compressedBlockWidth;
}
uint32_t sizeCompMipmapHeight(uint32_t level) {
if (imageType != VK_IMAGE_TYPE_1D) {
- return (mipmapHeight(level) + compressedBlockHeight - 1) /
- compressedBlockHeight;
+ return (mipmapHeight(level) + compressedBlockHeight - 1) / compressedBlockHeight;
} else {
return 1;
}
}
- uint32_t sizeCompMipmapDepth(uint32_t level) {
- return mipmapDepth(level);
- }
- VkDeviceSize decompPixelSize() {
- return getLinearFormatPixelSize(decompFormat);
- }
+ uint32_t sizeCompMipmapDepth(uint32_t level) { return mipmapDepth(level); }
+ VkDeviceSize decompPixelSize() { return getLinearFormatPixelSize(decompFormat); }
bool needEmulatedAlpha() {
if (!isCompressed) {
return false;
@@ -5347,14 +4789,10 @@
std::vector<VkImageView> sizeCompImageViews = {};
std::vector<VkImageView> decompImageViews = {};
- static VkImageView createDefaultImageView(
- goldfish_vk::VulkanDispatch* vk,
- VkDevice device,
- VkImage image,
- VkFormat format,
- VkImageType imageType,
- uint32_t mipLevel,
- uint32_t layerCount) {
+ static VkImageView createDefaultImageView(goldfish_vk::VulkanDispatch* vk, VkDevice device,
+ VkImage image, VkFormat format,
+ VkImageType imageType, uint32_t mipLevel,
+ uint32_t layerCount) {
VkImageViewCreateInfo imageViewInfo = {};
imageViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewInfo.image = image;
@@ -5377,39 +4815,34 @@
imageViewInfo.components.g = VK_COMPONENT_SWIZZLE_G;
imageViewInfo.components.b = VK_COMPONENT_SWIZZLE_B;
imageViewInfo.components.a = VK_COMPONENT_SWIZZLE_A;
- imageViewInfo.subresourceRange.aspectMask =
- VK_IMAGE_ASPECT_COLOR_BIT;
+ imageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageViewInfo.subresourceRange.baseMipLevel = mipLevel;
imageViewInfo.subresourceRange.levelCount = 1;
imageViewInfo.subresourceRange.baseArrayLayer = 0;
imageViewInfo.subresourceRange.layerCount = layerCount;
VkImageView imageView;
- if (VK_SUCCESS != vk->vkCreateImageView(device, &imageViewInfo,
- nullptr, &imageView)) {
- fprintf(stderr, "Warning: %s %s:%d failure\n", __func__,
- __FILE__, __LINE__);
+ if (VK_SUCCESS != vk->vkCreateImageView(device, &imageViewInfo, nullptr, &imageView)) {
+ fprintf(stderr, "Warning: %s %s:%d failure\n", __func__, __FILE__, __LINE__);
return 0;
}
return imageView;
}
- VkResult initDecomp(goldfish_vk::VulkanDispatch* vk,
- VkDevice device,
- VkImage image) {
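+        // One-time setup of the decompression compute pass: loads the SPIR-V
+        // decompression shader and creates the descriptor sets, pipeline layout,
+        // pipeline, and per-mip image views it dispatches against.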
+ VkResult initDecomp(goldfish_vk::VulkanDispatch* vk, VkDevice device, VkImage image) {
if (decompPipeline != 0) {
return VK_SUCCESS;
}
// TODO: release resources on failure
-#define _RETURN_ON_FAILURE(cmd) \
- { \
- VkResult result = cmd; \
- if (VK_SUCCESS != result) { \
- fprintf(stderr, "Warning: %s %s:%d vulkan failure %d\n", __func__, \
- __FILE__, __LINE__, result); \
- return (result); \
- } \
- }
+#define _RETURN_ON_FAILURE(cmd) \
+ { \
+ VkResult result = cmd; \
+ if (VK_SUCCESS != result) { \
+ fprintf(stderr, "Warning: %s %s:%d vulkan failure %d\n", __func__, __FILE__, __LINE__, \
+ result); \
+ return (result); \
+ } \
+ }
std::string shaderSrcFileName;
switch (compFormat) {
@@ -5478,9 +4911,9 @@
}
SpvFileEntry shaderSource = VkDecoderGlobalState::Impl::loadDecompressionShaderSource(
- shaderSrcFileName.c_str());
+ shaderSrcFileName.c_str());
- if (!shaderSource.size) {
+ if (!shaderSource.size) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
@@ -5489,51 +4922,46 @@
shaderInfo.codeSize = shaderSource.size;
// DecompressionShaders.h declares everything as aligned to 4 bytes,
// so it is safe to cast
- shaderInfo.pCode =
- reinterpret_cast<const uint32_t*>(shaderSource.base);
- _RETURN_ON_FAILURE(vk->vkCreateShaderModule(
- device, &shaderInfo, nullptr, &decompShader));
+ shaderInfo.pCode = reinterpret_cast<const uint32_t*>(shaderSource.base);
+ _RETURN_ON_FAILURE(
+ vk->vkCreateShaderModule(device, &shaderInfo, nullptr, &decompShader));
VkDescriptorSetLayoutBinding dsLayoutBindings[] = {
{
0, // bindings
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, // descriptorType
- 1, // descriptorCount
- VK_SHADER_STAGE_COMPUTE_BIT, // stageFlags
- 0, // pImmutableSamplers
+ 1, // descriptorCount
+ VK_SHADER_STAGE_COMPUTE_BIT, // stageFlags
+ 0, // pImmutableSamplers
},
{
1, // bindings
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, // descriptorType
- 1, // descriptorCount
- VK_SHADER_STAGE_COMPUTE_BIT, // stageFlags
- 0, // pImmutableSamplers
+ 1, // descriptorCount
+ VK_SHADER_STAGE_COMPUTE_BIT, // stageFlags
+ 0, // pImmutableSamplers
},
};
VkDescriptorSetLayoutCreateInfo dsLayoutInfo = {};
- dsLayoutInfo.sType =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- dsLayoutInfo.bindingCount = sizeof(dsLayoutBindings) /
- sizeof(VkDescriptorSetLayoutBinding);
+ dsLayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ dsLayoutInfo.bindingCount =
+ sizeof(dsLayoutBindings) / sizeof(VkDescriptorSetLayoutBinding);
dsLayoutInfo.pBindings = dsLayoutBindings;
- _RETURN_ON_FAILURE(vk->vkCreateDescriptorSetLayout(
- device, &dsLayoutInfo, nullptr,
- &decompDescriptorSetLayout));
+ _RETURN_ON_FAILURE(vk->vkCreateDescriptorSetLayout(device, &dsLayoutInfo, nullptr,
+ &decompDescriptorSetLayout));
VkDescriptorPoolSize poolSize[1] = {
{VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 2 * mipLevels},
};
VkDescriptorPoolCreateInfo dsPoolInfo = {};
dsPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- dsPoolInfo.flags =
- VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ dsPoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
dsPoolInfo.maxSets = mipLevels;
dsPoolInfo.poolSizeCount = 1;
dsPoolInfo.pPoolSizes = poolSize;
- _RETURN_ON_FAILURE(vk->vkCreateDescriptorPool(
- device, &dsPoolInfo, nullptr, &decompDescriptorPool));
- std::vector<VkDescriptorSetLayout> layouts(
- mipLevels, decompDescriptorSetLayout);
+ _RETURN_ON_FAILURE(
+ vk->vkCreateDescriptorPool(device, &dsPoolInfo, nullptr, &decompDescriptorPool));
+ std::vector<VkDescriptorSetLayout> layouts(mipLevels, decompDescriptorSetLayout);
VkDescriptorSetAllocateInfo dsInfo = {};
dsInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
@@ -5541,8 +4969,8 @@
dsInfo.descriptorSetCount = mipLevels;
dsInfo.pSetLayouts = layouts.data();
decompDescriptorSets.resize(mipLevels);
- _RETURN_ON_FAILURE(vk->vkAllocateDescriptorSets(
- device, &dsInfo, decompDescriptorSets.data()));
+ _RETURN_ON_FAILURE(
+ vk->vkAllocateDescriptorSets(device, &dsInfo, decompDescriptorSets.data()));
VkPushConstantRange pushConstant = {};
pushConstant.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
@@ -5554,28 +4982,23 @@
}
VkPipelineLayoutCreateInfo pipelineLayoutInfo = {};
- pipelineLayoutInfo.sType =
- VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutInfo.setLayoutCount = 1;
pipelineLayoutInfo.pSetLayouts = &decompDescriptorSetLayout;
pipelineLayoutInfo.pushConstantRangeCount = 1;
pipelineLayoutInfo.pPushConstantRanges = &pushConstant;
- _RETURN_ON_FAILURE(
- vk->vkCreatePipelineLayout(device, &pipelineLayoutInfo,
- nullptr, &decompPipelineLayout));
+ _RETURN_ON_FAILURE(vk->vkCreatePipelineLayout(device, &pipelineLayoutInfo, nullptr,
+ &decompPipelineLayout));
VkComputePipelineCreateInfo computePipelineInfo = {};
- computePipelineInfo.sType =
- VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
- computePipelineInfo.stage.sType =
- VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ computePipelineInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+ computePipelineInfo.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
computePipelineInfo.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
computePipelineInfo.stage.module = decompShader;
computePipelineInfo.stage.pName = "main";
computePipelineInfo.layout = decompPipelineLayout;
- _RETURN_ON_FAILURE(vk->vkCreateComputePipelines(
- device, 0, 1, &computePipelineInfo, nullptr,
- &decompPipeline));
+ _RETURN_ON_FAILURE(vk->vkCreateComputePipelines(device, 0, 1, &computePipelineInfo,
+ nullptr, &decompPipeline));
VkFormat intermediateFormat = decompFormat;
switch (compFormat) {
@@ -5632,59 +5055,44 @@
decompImageViews.resize(mipLevels);
VkDescriptorImageInfo sizeCompDescriptorImageInfo[1] = {{}};
sizeCompDescriptorImageInfo[0].sampler = 0;
- sizeCompDescriptorImageInfo[0].imageLayout =
- VK_IMAGE_LAYOUT_GENERAL;
+ sizeCompDescriptorImageInfo[0].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
VkDescriptorImageInfo decompDescriptorImageInfo[1] = {{}};
decompDescriptorImageInfo[0].sampler = 0;
decompDescriptorImageInfo[0].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
VkWriteDescriptorSet writeDescriptorSets[2] = {{}, {}};
- writeDescriptorSets[0].sType =
- VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ writeDescriptorSets[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSets[0].dstBinding = 0;
writeDescriptorSets[0].descriptorCount = 1;
- writeDescriptorSets[0].descriptorType =
- VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ writeDescriptorSets[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
writeDescriptorSets[0].pImageInfo = sizeCompDescriptorImageInfo;
- writeDescriptorSets[1].sType =
- VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ writeDescriptorSets[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSets[1].dstBinding = 1;
writeDescriptorSets[1].descriptorCount = 1;
- writeDescriptorSets[1].descriptorType =
- VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ writeDescriptorSets[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
writeDescriptorSets[1].pImageInfo = decompDescriptorImageInfo;
for (uint32_t i = 0; i < mipLevels; i++) {
sizeCompImageViews[i] = createDefaultImageView(
- vk, device, sizeCompImgs[i], sizeCompFormat, imageType,
- 0, layerCount);
+ vk, device, sizeCompImgs[i], sizeCompFormat, imageType, 0, layerCount);
decompImageViews[i] = createDefaultImageView(
- vk, device, decompImg, intermediateFormat, imageType, i,
- layerCount);
- sizeCompDescriptorImageInfo[0].imageView =
- sizeCompImageViews[i];
+ vk, device, decompImg, intermediateFormat, imageType, i, layerCount);
+ sizeCompDescriptorImageInfo[0].imageView = sizeCompImageViews[i];
decompDescriptorImageInfo[0].imageView = decompImageViews[i];
writeDescriptorSets[0].dstSet = decompDescriptorSets[i];
writeDescriptorSets[1].dstSet = decompDescriptorSets[i];
- vk->vkUpdateDescriptorSets(device, 2, writeDescriptorSets, 0,
- nullptr);
+ vk->vkUpdateDescriptorSets(device, 2, writeDescriptorSets, 0, nullptr);
}
return VK_SUCCESS;
}
- void cmdDecompress(goldfish_vk::VulkanDispatch* vk,
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags dstStageMask,
- VkImageLayout newLayout,
- VkAccessFlags dstAccessMask,
- uint32_t baseMipLevel,
- uint32_t levelCount,
- uint32_t baseLayer,
- uint32_t _layerCount) {
- vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
- decompPipeline);
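+        // Records the compute dispatches that decompress mip levels
+        // [baseMipLevel, baseMipLevel + levelCount) of the size-compatible images
+        // into the decompressed image, using the pipeline built by initDecomp.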
+ void cmdDecompress(goldfish_vk::VulkanDispatch* vk, VkCommandBuffer commandBuffer,
+ VkPipelineStageFlags dstStageMask, VkImageLayout newLayout,
+ VkAccessFlags dstAccessMask, uint32_t baseMipLevel, uint32_t levelCount,
+ uint32_t baseLayer, uint32_t _layerCount) {
+ vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, decompPipeline);
int dispatchZ = _layerCount;
if (isEtc2) {
@@ -5695,8 +5103,8 @@
dispatchZ = extent.depth;
}
vk->vkCmdPushConstants(commandBuffer, decompPipelineLayout,
- VK_SHADER_STAGE_COMPUTE_BIT, 0,
- sizeof(pushConstant), &pushConstant);
+ VK_SHADER_STAGE_COMPUTE_BIT, 0, sizeof(pushConstant),
+ &pushConstant);
} else if (isAstc) {
uint32_t srgb = false;
uint32_t smallBlock = false;
@@ -5735,11 +5143,11 @@
break;
}
AstcPushConstant pushConstant = {
- {compressedBlockWidth, compressedBlockHeight},
- (uint32_t)compFormat,
- baseLayer,
- srgb,
- smallBlock,
+ {compressedBlockWidth, compressedBlockHeight},
+ (uint32_t)compFormat,
+ baseLayer,
+ srgb,
+ smallBlock,
};
if (extent.depth > 1) {
// 3D texture
@@ -5747,24 +5155,21 @@
dispatchZ = extent.depth;
}
vk->vkCmdPushConstants(commandBuffer, decompPipelineLayout,
- VK_SHADER_STAGE_COMPUTE_BIT, 0,
- sizeof(pushConstant), &pushConstant);
+ VK_SHADER_STAGE_COMPUTE_BIT, 0, sizeof(pushConstant),
+ &pushConstant);
}
- for (uint32_t i = baseMipLevel; i < baseMipLevel + levelCount;
- i++) {
- vk->vkCmdBindDescriptorSets(
- commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
- decompPipelineLayout, 0, 1,
- decompDescriptorSets.data() + i, 0, nullptr);
+ for (uint32_t i = baseMipLevel; i < baseMipLevel + levelCount; i++) {
+ vk->vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
+ decompPipelineLayout, 0, 1,
+ decompDescriptorSets.data() + i, 0, nullptr);
- vk->vkCmdDispatch(commandBuffer, sizeCompMipmapWidth(i),
- sizeCompMipmapHeight(i), dispatchZ);
+ vk->vkCmdDispatch(commandBuffer, sizeCompMipmapWidth(i), sizeCompMipmapHeight(i),
+ dispatchZ);
}
}
};
- void createSizeCompImages(goldfish_vk::VulkanDispatch* vk,
- CompressedImageInfo* cmpInfo) {
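+    // Lazily creates one "size compatible" image per mip level of a compressed
+    // image and records the aligned memory offsets for those images and the
+    // decompressed image.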
+ void createSizeCompImages(goldfish_vk::VulkanDispatch* vk, CompressedImageInfo* cmpInfo) {
if (cmpInfo->sizeCompImgs.size() > 0) {
return;
}
@@ -5778,8 +5183,7 @@
imageInfo.extent.height = cmpInfo->sizeCompMipmapHeight(i);
imageInfo.extent.depth = cmpInfo->sizeCompMipmapDepth(i);
VkDevice device = cmpInfo->device;
- vk->vkCreateImage(device, &imageInfo, nullptr,
- cmpInfo->sizeCompImgs.data() + i);
+ vk->vkCreateImage(device, &imageInfo, nullptr, cmpInfo->sizeCompImgs.data() + i);
}
VkPhysicalDevice physicalDevice = mDeviceInfo[device].physicalDevice;
@@ -5789,11 +5193,9 @@
VkDeviceSize decompImageSize = 0;
{
VkMemoryRequirements memRequirements;
- vk->vkGetImageMemoryRequirements(device, cmpInfo->decompImg,
- &memRequirements);
- memIdx = findProperties(physicalDevice,
- memRequirements.memoryTypeBits,
- VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ vk->vkGetImageMemoryRequirements(device, cmpInfo->decompImg, &memRequirements);
+ memIdx = findProperties(physicalDevice, memRequirements.memoryTypeBits,
+ VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
if (memIdx < 0) {
fprintf(stderr, "Error: cannot find memory property!\n");
return;
@@ -5803,8 +5205,7 @@
}
for (size_t i = 0; i < mipLevels; i++) {
VkMemoryRequirements memRequirements;
- vk->vkGetImageMemoryRequirements(device, cmpInfo->sizeCompImgs[i],
- &memRequirements);
+ vk->vkGetImageMemoryRequirements(device, cmpInfo->sizeCompImgs[i], &memRequirements);
alignment = std::max(alignment, memRequirements.alignment);
memSizes[i] = memRequirements.size;
}
@@ -5813,28 +5214,23 @@
{
VkDeviceSize alignedSize = decompImageSize;
if (alignment != 0) {
- alignedSize =
- (alignedSize + alignment - 1) / alignment * alignment;
+ alignedSize = (alignedSize + alignment - 1) / alignment * alignment;
}
memoryOffsets[0] = alignedSize;
}
for (size_t i = 0; i < cmpInfo->sizeCompImgs.size(); i++) {
VkDeviceSize alignedSize = memSizes[i];
if (alignment != 0) {
- alignedSize =
- (alignedSize + alignment - 1) / alignment * alignment;
+ alignedSize = (alignedSize + alignment - 1) / alignment * alignment;
}
memoryOffsets[i + 1] = memoryOffsets[i] + alignedSize;
}
}
- void updateImageMemorySizeLocked(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
+ void updateImageMemorySizeLocked(VkDevice device, VkImage image,
+ VkMemoryRequirements* pMemoryRequirements) {
auto deviceInfoIt = mDeviceInfo.find(device);
- if (!deviceInfoIt->second.emulateTextureEtc2 &&
- !deviceInfoIt->second.emulateTextureAstc) {
+ if (!deviceInfoIt->second.emulateTextureEtc2 && !deviceInfoIt->second.emulateTextureAstc) {
return;
}
auto it = mImageInfo.find(image);
@@ -5850,15 +5246,13 @@
pMemoryRequirements->size += cmpInfo.memoryOffsets[cmpInfo.mipLevels];
}
- static bool needEmulatedEtc2(VkPhysicalDevice physicalDevice,
- goldfish_vk::VulkanDispatch* vk) {
+ static bool needEmulatedEtc2(VkPhysicalDevice physicalDevice, goldfish_vk::VulkanDispatch* vk) {
VkPhysicalDeviceFeatures feature;
vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
return !feature.textureCompressionETC2;
}
- static bool needEmulatedAstc(VkPhysicalDevice physicalDevice,
- goldfish_vk::VulkanDispatch* vk) {
+ static bool needEmulatedAstc(VkPhysicalDevice physicalDevice, goldfish_vk::VulkanDispatch* vk) {
if (!kEmulateAstc) {
return false;
}
@@ -5887,9 +5281,7 @@
case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
return EtcSignedRG11;
default:
- fprintf(stderr,
- "TODO: unsupported compressed texture format %d\n",
- fmt);
+ fprintf(stderr, "TODO: unsupported compressed texture format %d\n", fmt);
return EtcRGB8;
}
}
@@ -5999,15 +5391,11 @@
bool isEtc2Compatible(VkFormat compFmt1, VkFormat compFmt2) {
const VkFormat kCmpSets[][2] = {
- {VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
- VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK},
- {VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
- VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK},
- {VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
- VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK},
+ {VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK},
+ {VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK},
+ {VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK},
{VK_FORMAT_EAC_R11_UNORM_BLOCK, VK_FORMAT_EAC_R11_SNORM_BLOCK},
- {VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
- VK_FORMAT_EAC_R11G11_SNORM_BLOCK},
+ {VK_FORMAT_EAC_R11G11_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK},
};
if (compFmt1 == compFmt2) {
return true;
@@ -6135,109 +5523,95 @@
}
static const VkFormatFeatureFlags kEmulatedEtc2BufferFeatureMask =
- VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
- VK_FORMAT_FEATURE_BLIT_SRC_BIT |
+ VK_FORMAT_FEATURE_TRANSFER_DST_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT |
VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
- void maskFormatPropertiesForEmulatedEtc2(
- VkFormatProperties* pFormatProperties) {
+ void maskFormatPropertiesForEmulatedEtc2(VkFormatProperties* pFormatProperties) {
pFormatProperties->bufferFeatures &= kEmulatedEtc2BufferFeatureMask;
pFormatProperties->optimalTilingFeatures &= kEmulatedEtc2BufferFeatureMask;
}
- void maskFormatPropertiesForEmulatedEtc2(
- VkFormatProperties2* pFormatProperties) {
- pFormatProperties->formatProperties.bufferFeatures &=
- kEmulatedEtc2BufferFeatureMask;
- pFormatProperties->formatProperties.optimalTilingFeatures &=
- kEmulatedEtc2BufferFeatureMask;
+ void maskFormatPropertiesForEmulatedEtc2(VkFormatProperties2* pFormatProperties) {
+ pFormatProperties->formatProperties.bufferFeatures &= kEmulatedEtc2BufferFeatureMask;
+ pFormatProperties->formatProperties.optimalTilingFeatures &= kEmulatedEtc2BufferFeatureMask;
}
template <class VkFormatProperties1or2>
- void getPhysicalDeviceFormatPropertiesCore(
- std::function<
- void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
- getPhysicalDeviceFormatPropertiesFunc,
- goldfish_vk::VulkanDispatch* vk,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties1or2* pFormatProperties) {
- switch (format) {
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: {
- if (!needEmulatedEtc2(physicalDevice, vk)) {
- // Hardware supported ETC2
- getPhysicalDeviceFormatPropertiesFunc(
- physicalDevice, format, pFormatProperties);
- return;
- }
- // Emulate ETC formats
- CompressedImageInfo cmpInfo =
- createCompressedImageInfo(format);
- getPhysicalDeviceFormatPropertiesFunc(physicalDevice,
- cmpInfo.decompFormat,
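+    // Shared ETC2/ASTC emulation path for vkGetPhysicalDeviceFormatProperties{,2}:
+    // if the device lacks native support, report the decompressed format's
+    // properties, masked down to the feature bits the emulation can provide.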
+ void getPhysicalDeviceFormatPropertiesCore(
+ std::function<void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
+ getPhysicalDeviceFormatPropertiesFunc,
+ goldfish_vk::VulkanDispatch* vk, VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties1or2* pFormatProperties) {
+ switch (format) {
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: {
+ if (!needEmulatedEtc2(physicalDevice, vk)) {
+ // Hardware supported ETC2
+ getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format,
pFormatProperties);
- maskFormatPropertiesForEmulatedEtc2(pFormatProperties);
- break;
+ return;
}
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: {
- if (!needEmulatedAstc(physicalDevice, vk)) {
- // Hardware supported ETC2
- getPhysicalDeviceFormatPropertiesFunc(
- physicalDevice, format, pFormatProperties);
- return;
- }
- // Emulate ETC formats
- CompressedImageInfo cmpInfo =
- createCompressedImageInfo(format);
- getPhysicalDeviceFormatPropertiesFunc(physicalDevice,
- cmpInfo.decompFormat,
- pFormatProperties);
- maskFormatPropertiesForEmulatedEtc2(pFormatProperties);
- break;
- }
- default:
- getPhysicalDeviceFormatPropertiesFunc(
- physicalDevice, format, pFormatProperties);
- break;
+ // Emulate ETC formats
+ CompressedImageInfo cmpInfo = createCompressedImageInfo(format);
+ getPhysicalDeviceFormatPropertiesFunc(physicalDevice, cmpInfo.decompFormat,
+ pFormatProperties);
+ maskFormatPropertiesForEmulatedEtc2(pFormatProperties);
+ break;
}
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: {
+ if (!needEmulatedAstc(physicalDevice, vk)) {
+                    // Hardware supported ASTC
+ getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format,
+ pFormatProperties);
+ return;
+ }
+                // Emulate ASTC formats
+ CompressedImageInfo cmpInfo = createCompressedImageInfo(format);
+ getPhysicalDeviceFormatPropertiesFunc(physicalDevice, cmpInfo.decompFormat,
+ pFormatProperties);
+ maskFormatPropertiesForEmulatedEtc2(pFormatProperties);
+ break;
+ }
+ default:
+ getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format, pFormatProperties);
+ break;
}
-
+ }
void executePreprocessRecursive(int level, VkCommandBuffer cmdBuffer) {
auto cmdBufferIt = mCmdBufferInfo.find(cmdBuffer);
@@ -6254,7 +5628,6 @@
}
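+    // Tears down every device created from this instance: frees its device memory,
+    // command buffers/pools, descriptor pools, and descriptor set layouts, and
+    // drops the corresponding tracking entries.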
void teardownInstanceLocked(VkInstance instance) {
-
std::vector<VkDevice> devicesToDestroy;
std::vector<VulkanDispatch*> devicesToDestroyDispatches;
@@ -6265,8 +5638,7 @@
if (instance == *otherInstance) {
devicesToDestroy.push_back(it.first);
devicesToDestroyDispatches.push_back(
- dispatch_VkDevice(
- mDeviceInfo[it.first].boxed));
+ dispatch_VkDevice(mDeviceInfo[it.first].boxed));
}
}
@@ -6284,10 +5656,9 @@
++it;
}
- for (auto mem: toDestroy) {
- freeMemoryLocked(devicesToDestroyDispatches[i],
- devicesToDestroy[i],
- mem, nullptr);
+ for (auto mem : toDestroy) {
+ freeMemoryLocked(devicesToDestroyDispatches[i], devicesToDestroy[i], mem,
+ nullptr);
}
}
@@ -6305,7 +5676,8 @@
}
for (int j = 0; j < toDestroy.size(); ++j) {
- devicesToDestroyDispatches[i]->vkFreeCommandBuffers(devicesToDestroy[i], toDestroyPools[j], 1, &toDestroy[j]);
+ devicesToDestroyDispatches[i]->vkFreeCommandBuffers(
+ devicesToDestroy[i], toDestroyPools[j], 1, &toDestroy[j]);
VkCommandBuffer boxed = unboxed_to_boxed_VkCommandBuffer(toDestroy[j]);
delete_VkCommandBuffer(boxed);
mCmdBufferInfo.erase(toDestroy[j]);
@@ -6325,7 +5697,8 @@
}
for (int j = 0; j < toDestroy.size(); ++j) {
- devicesToDestroyDispatches[i]->vkDestroyCommandPool(devicesToDestroy[i], toDestroy[j], 0);
+ devicesToDestroyDispatches[i]->vkDestroyCommandPool(devicesToDestroy[i],
+ toDestroy[j], 0);
delete_VkCommandPool(toDestroyBoxed[j]);
mCmdPoolInfo.erase(toDestroy[j]);
}
@@ -6344,7 +5717,8 @@
}
for (int j = 0; j < toDestroy.size(); ++j) {
- devicesToDestroyDispatches[i]->vkDestroyDescriptorPool(devicesToDestroy[i], toDestroy[j], 0);
+ devicesToDestroyDispatches[i]->vkDestroyDescriptorPool(devicesToDestroy[i],
+ toDestroy[j], 0);
delete_VkDescriptorPool(toDestroyBoxed[j]);
mDescriptorPoolInfo.erase(toDestroy[j]);
}
@@ -6363,7 +5737,8 @@
}
for (int j = 0; j < toDestroy.size(); ++j) {
- devicesToDestroyDispatches[i]->vkDestroyDescriptorSetLayout(devicesToDestroy[i], toDestroy[j], 0);
+ devicesToDestroyDispatches[i]->vkDestroyDescriptorSetLayout(devicesToDestroy[i],
+ toDestroy[j], 0);
delete_VkDescriptorSetLayout(toDestroyBoxed[j]);
mDescriptorSetLayoutInfo.erase(toDestroy[j]);
}
@@ -6397,8 +5772,7 @@
std::unordered_set<VkCommandBuffer> cmdBuffers = {};
};
- void removeCommandBufferInfo(
- const std::unordered_set<VkCommandBuffer>& cmdBuffers) {
+ void removeCommandBufferInfo(const std::unordered_set<VkCommandBuffer>& cmdBuffers) {
for (const auto& cmdBuffer : cmdBuffers) {
mCmdBufferInfo.erase(cmdBuffer);
}
@@ -6406,28 +5780,27 @@
bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
- (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
- (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
- (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
- (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
+ (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
+ (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
+ (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
+ (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
}
bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
- (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
- (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
- (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
+ (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+ (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
+ (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
}
bool isDescriptorTypeBufferView(VkDescriptorType descType) {
return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
- (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
+ (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
}
struct DescriptorUpdateTemplateInfo {
VkDescriptorUpdateTemplateCreateInfo createInfo;
- std::vector<VkDescriptorUpdateTemplateEntry>
- linearizedTemplateEntries;
+ std::vector<VkDescriptorUpdateTemplateEntry> linearizedTemplateEntries;
// Preallocated pData
std::vector<uint8_t> data;
size_t imageInfoStart;
@@ -6436,8 +5809,7 @@
};
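+    // Rewrites a descriptor update template so that every entry indexes into one
+    // preallocated, contiguous buffer of image infos, buffer infos, and buffer views.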
DescriptorUpdateTemplateInfo calcLinearizedDescriptorUpdateTemplateInfo(
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
-
+ const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
DescriptorUpdateTemplateInfo res;
res.createInfo = *pCreateInfo;
@@ -6475,7 +5847,6 @@
size_t bufferViewCount = 0;
for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
-
const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
VkDescriptorUpdateTemplateEntry entryForHost = entry;
@@ -6483,20 +5854,16 @@
if (isDescriptorTypeImageInfo(type)) {
entryForHost.offset =
- res.imageInfoStart +
- imageInfoCount * sizeof(VkDescriptorImageInfo);
+ res.imageInfoStart + imageInfoCount * sizeof(VkDescriptorImageInfo);
entryForHost.stride = sizeof(VkDescriptorImageInfo);
++imageInfoCount;
} else if (isDescriptorTypeBufferInfo(type)) {
entryForHost.offset =
- res.bufferInfoStart +
- bufferInfoCount * sizeof(VkDescriptorBufferInfo);
+ res.bufferInfoStart + bufferInfoCount * sizeof(VkDescriptorBufferInfo);
entryForHost.stride = sizeof(VkDescriptorBufferInfo);
++bufferInfoCount;
} else if (isDescriptorTypeBufferView(type)) {
- entryForHost.offset =
- res.bufferViewStart +
- bufferViewCount * sizeof(VkBufferView);
+ entryForHost.offset = res.bufferViewStart + bufferViewCount * sizeof(VkBufferView);
entryForHost.stride = sizeof(VkBufferView);
++bufferViewCount;
} else {
@@ -6507,42 +5874,35 @@
res.linearizedTemplateEntries.push_back(entryForHost);
}
- res.createInfo.pDescriptorUpdateEntries =
- res.linearizedTemplateEntries.data();
+ res.createInfo.pDescriptorUpdateEntries = res.linearizedTemplateEntries.data();
return res;
}
- void registerDescriptorUpdateTemplate(
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const DescriptorUpdateTemplateInfo& info) {
+ void registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const DescriptorUpdateTemplateInfo& info) {
AutoLock lock(mLock);
mDescriptorUpdateTemplateInfo[descriptorUpdateTemplate] = info;
}
- void unregisterDescriptorUpdateTemplate(
- VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
+ void unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
AutoLock lock(mLock);
mDescriptorUpdateTemplateInfo.erase(descriptorUpdateTemplate);
}
    // Returns the memory property index on success; returns -1 on failure.
- int32_t findProperties(VkPhysicalDevice physicalDevice,
- uint32_t memoryTypeBitsRequirement,
- VkMemoryPropertyFlags requiredProperties) {
+ int32_t findProperties(VkPhysicalDevice physicalDevice, uint32_t memoryTypeBitsRequirement,
+ VkMemoryPropertyFlags requiredProperties) {
VkPhysicalDeviceMemoryProperties memProperties;
- auto ivk = dispatch_VkInstance(
- mInstanceInfo[mPhysicalDeviceToInstance[physicalDevice]].boxed);
+ auto ivk =
+ dispatch_VkInstance(mInstanceInfo[mPhysicalDeviceToInstance[physicalDevice]].boxed);
- ivk->vkGetPhysicalDeviceMemoryProperties(physicalDevice,
- &memProperties);
+ ivk->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProperties);
const uint32_t memoryCount = memProperties.memoryTypeCount;
- for (uint32_t memoryIndex = 0; memoryIndex < memoryCount;
- ++memoryIndex) {
+ for (uint32_t memoryIndex = 0; memoryIndex < memoryCount; ++memoryIndex) {
const uint32_t memoryTypeBits = (1 << memoryIndex);
- const bool isRequiredMemoryType =
- memoryTypeBitsRequirement & memoryTypeBits;
+ const bool isRequiredMemoryType = memoryTypeBitsRequirement & memoryTypeBits;
const VkMemoryPropertyFlags properties =
memProperties.memoryTypes[memoryIndex].propertyFlags;
@@ -6614,9 +5974,8 @@
VkPhysicalDevice physicalDevice;
VkDevice boxed = nullptr;
bool needEmulatedDecompression(const CompressedImageInfo& imageInfo) {
- return imageInfo.isCompressed &&
- ((imageInfo.isEtc2 && emulateTextureEtc2) ||
- (imageInfo.isAstc && emulateTextureAstc));
+ return imageInfo.isCompressed && ((imageInfo.isEtc2 && emulateTextureEtc2) ||
+ (imageInfo.isAstc && emulateTextureAstc));
}
bool needEmulatedDecompression(VkFormat format) {
switch (format) {
@@ -6718,11 +6077,10 @@
struct SemaphoreInfo {
int externalHandleId = 0;
- VK_EXT_MEMORY_HANDLE externalHandle =
- VK_EXT_MEMORY_HANDLE_INVALID;
+ VK_EXT_MEMORY_HANDLE externalHandle = VK_EXT_MEMORY_HANDLE_INVALID;
};
- struct DescriptorSetLayoutInfo {
+ struct DescriptorSetLayoutInfo {
VkDevice device = 0;
VkDescriptorSetLayout boxed = 0;
VkDescriptorSetLayoutCreateInfo createInfo;
@@ -6752,13 +6110,13 @@
std::vector<VkDescriptorSetLayoutBinding> bindings;
};
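+    // Descriptor pool accounting helpers: each PoolState tracks how many
+    // descriptors of its type are in use, so descriptor set allocations can be
+    // checked against pool capacity before reaching the driver.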
- bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState, const VkDescriptorSetLayoutBinding& binding) {
+ bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState,
+ const VkDescriptorSetLayoutBinding& binding) {
if (binding.descriptorCount && (poolState.type != binding.descriptorType)) {
return false;
}
- uint32_t availDescriptorCount =
- poolState.descriptorCount - poolState.used;
+ uint32_t availDescriptorCount = poolState.descriptorCount - poolState.used;
if (availDescriptorCount < binding.descriptorCount) {
return false;
@@ -6767,21 +6125,20 @@
return true;
}
- bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState, const VkDescriptorSetLayoutBinding& binding) {
+ bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState,
+ const VkDescriptorSetLayoutBinding& binding) {
if (poolState.type != binding.descriptorType) return false;
if (poolState.used < binding.descriptorCount) return false;
return true;
}
- void allocBindingFeasible(
- const VkDescriptorSetLayoutBinding& binding,
- DescriptorPoolInfo::PoolState& poolState) {
+ void allocBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
+ DescriptorPoolInfo::PoolState& poolState) {
poolState.used += binding.descriptorCount;
}
- void freeBindingFeasible(
- const VkDescriptorSetLayoutBinding& binding,
- DescriptorPoolInfo::PoolState& poolState) {
+ void freeBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
+ DescriptorPoolInfo::PoolState& poolState) {
poolState.used -= binding.descriptorCount;
}
@@ -6798,11 +6155,11 @@
// Perform simulated allocation and error out with
// VK_ERROR_OUT_OF_POOL_MEMORY if it fails.
- std::vector<DescriptorPoolInfo::PoolState> poolCopy =
- poolInfo->pools;
+ std::vector<DescriptorPoolInfo::PoolState> poolCopy = poolInfo->pools;
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
- auto setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
+ auto setLayoutInfo =
+ android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
if (!setLayoutInfo) return VK_ERROR_INITIALIZATION_FAILED;
for (const auto& binding : setLayoutInfo->bindings) {
@@ -6823,7 +6180,8 @@
return VK_SUCCESS;
}
- void applyDescriptorSetAllocationLocked(DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
+ void applyDescriptorSetAllocationLocked(
+ DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
++poolInfo.usedSets;
for (const auto& binding : bindings) {
for (auto& pool : poolInfo.pools) {
@@ -6834,7 +6192,8 @@
}
}
- void removeDescriptorSetAllocationLocked(DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
+ void removeDescriptorSetAllocationLocked(
+ DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
--poolInfo.usedSets;
for (const auto& binding : bindings) {
for (auto& pool : poolInfo.pools) {
@@ -6851,14 +6210,10 @@
T underlying;
};
- std::unordered_map<VkInstance, InstanceInfo>
- mInstanceInfo;
- std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo>
- mPhysdevInfo;
- std::unordered_map<VkDevice, DeviceInfo>
- mDeviceInfo;
- std::unordered_map<VkImage, ImageInfo>
- mImageInfo;
+ std::unordered_map<VkInstance, InstanceInfo> mInstanceInfo;
+ std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo> mPhysdevInfo;
+ std::unordered_map<VkDevice, DeviceInfo> mDeviceInfo;
+ std::unordered_map<VkImage, ImageInfo> mImageInfo;
std::unordered_map<VkImageView, ImageViewInfo> mImageViewInfo;
std::unordered_map<VkSampler, SamplerInfo> mSamplerInfo;
std::unordered_map<VkCommandBuffer, CommandBufferInfo> mCmdBufferInfo;
@@ -6895,7 +6250,8 @@
}
std::unordered_map<int, VkSemaphore> mExternalSemaphoresById;
#endif
- std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo> mDescriptorUpdateTemplateInfo;
+ std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo>
+ mDescriptorUpdateTemplateInfo;
VkDecoderSnapshot mSnapshot;
@@ -6971,8 +6327,7 @@
mLinearImageProperties;
};
-VkDecoderGlobalState::VkDecoderGlobalState()
- : mImpl(new VkDecoderGlobalState::Impl()) {}
+VkDecoderGlobalState::VkDecoderGlobalState() : mImpl(new VkDecoderGlobalState::Impl()) {}
VkDecoderGlobalState::~VkDecoderGlobalState() = default;
@@ -6986,29 +6341,17 @@
}
// Snapshots
-bool VkDecoderGlobalState::snapshotsEnabled() const {
- return mImpl->snapshotsEnabled();
-}
+bool VkDecoderGlobalState::snapshotsEnabled() const { return mImpl->snapshotsEnabled(); }
-bool VkDecoderGlobalState::vkCleanupEnabled() const {
- return mImpl->vkCleanupEnabled();
-}
+bool VkDecoderGlobalState::vkCleanupEnabled() const { return mImpl->vkCleanupEnabled(); }
-void VkDecoderGlobalState::save(android::base::Stream* stream) {
- mImpl->save(stream);
-}
+void VkDecoderGlobalState::save(android::base::Stream* stream) { mImpl->save(stream); }
-void VkDecoderGlobalState::load(android::base::Stream* stream) {
- mImpl->load(stream);
-}
+void VkDecoderGlobalState::load(android::base::Stream* stream) { mImpl->load(stream); }
-void VkDecoderGlobalState::lock() {
- mImpl->lock();
-}
+void VkDecoderGlobalState::lock() { mImpl->lock(); }
-void VkDecoderGlobalState::unlock() {
- mImpl->unlock();
-}
+void VkDecoderGlobalState::unlock() { mImpl->unlock(); }
size_t VkDecoderGlobalState::setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
return mImpl->setCreatedHandlesForSnapshotLoad(buffer);
@@ -7018,541 +6361,407 @@
mImpl->clearCreatedHandlesForSnapshotLoad();
}
-VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(
- android::base::BumpPool* pool,
- uint32_t* pApiVersion) {
+VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(android::base::BumpPool* pool,
+ uint32_t* pApiVersion) {
return mImpl->on_vkEnumerateInstanceVersion(pool, pApiVersion);
}
-VkResult VkDecoderGlobalState::on_vkCreateInstance(
- android::base::BumpPool* pool,
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance) {
+VkResult VkDecoderGlobalState::on_vkCreateInstance(android::base::BumpPool* pool,
+ const VkInstanceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkInstance* pInstance) {
return mImpl->on_vkCreateInstance(pool, pCreateInfo, pAllocator, pInstance);
}
-void VkDecoderGlobalState::on_vkDestroyInstance(
- android::base::BumpPool* pool,
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyInstance(pool, instance, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(
- android::base::BumpPool* pool,
- VkInstance instance,
- uint32_t* physicalDeviceCount,
- VkPhysicalDevice* physicalDevices) {
- return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount, physicalDevices);
+VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
+ VkInstance instance,
+ uint32_t* physicalDeviceCount,
+ VkPhysicalDevice* physicalDevices) {
+ return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount,
+ physicalDevices);
}
-void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures) {
+void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures* pFeatures) {
mImpl->on_vkGetPhysicalDeviceFeatures(pool, physicalDevice, pFeatures);
}
-void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
+void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures2* pFeatures) {
mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceFeatures2KHR* pFeatures) {
mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
+ VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
VkImageFormatProperties* pImageFormatProperties) {
return mImpl->on_vkGetPhysicalDeviceImageFormatProperties(
- pool, physicalDevice, format, type, tiling, usage, flags,
- pImageFormatProperties);
+ pool, physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties) {
return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
- pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
+ pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties) {
return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
- pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
+ pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
VkFormatProperties* pFormatProperties) {
- mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format,
- pFormatProperties);
+ mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format, pFormatProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
VkFormatProperties2* pFormatProperties) {
- mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format,
- pFormatProperties);
+ mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
VkFormatProperties2* pFormatProperties) {
- mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format,
- pFormatProperties);
+ mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties* pProperties) {
mImpl->on_vkGetPhysicalDeviceProperties(pool, physicalDevice, pProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties2* pProperties) {
mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties2* pProperties) {
mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
- mImpl->on_vkGetPhysicalDeviceMemoryProperties(
- pool, physicalDevice, pMemoryProperties);
+ mImpl->on_vkGetPhysicalDeviceMemoryProperties(pool, physicalDevice, pMemoryProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
- pool, physicalDevice, pMemoryProperties);
+ mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
- pool, physicalDevice, pMemoryProperties);
+ mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}
VkResult VkDecoderGlobalState::on_vkEnumerateDeviceExtensionProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties) {
- return mImpl->on_vkEnumerateDeviceExtensionProperties(
- pool, physicalDevice, pLayerName, pPropertyCount, pProperties);
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, const char* pLayerName,
+ uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {
+ return mImpl->on_vkEnumerateDeviceExtensionProperties(pool, physicalDevice, pLayerName,
+ pPropertyCount, pProperties);
}
-VkResult VkDecoderGlobalState::on_vkCreateDevice(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice) {
+VkResult VkDecoderGlobalState::on_vkCreateDevice(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDevice* pDevice) {
return mImpl->on_vkCreateDevice(pool, physicalDevice, pCreateInfo, pAllocator, pDevice);
}
-void VkDecoderGlobalState::on_vkGetDeviceQueue(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
+void VkDecoderGlobalState::on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
+ uint32_t queueFamilyIndex, uint32_t queueIndex,
+ VkQueue* pQueue) {
mImpl->on_vkGetDeviceQueue(pool, device, queueFamilyIndex, queueIndex, pQueue);
}
-void VkDecoderGlobalState::on_vkDestroyDevice(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyDevice(pool, device, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkCreateBuffer(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
+VkResult VkDecoderGlobalState::on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
+ const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkBuffer* pBuffer) {
return mImpl->on_vkCreateBuffer(pool, device, pCreateInfo, pAllocator, pBuffer);
}
-void VkDecoderGlobalState::on_vkDestroyBuffer(
- android::base::BumpPool* pool,
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device,
+ VkBuffer buffer,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyBuffer(pool, device, buffer, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkBindBufferMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
+VkResult VkDecoderGlobalState::on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device,
+ VkBuffer buffer, VkDeviceMemory memory,
+ VkDeviceSize memoryOffset) {
return mImpl->on_vkBindBufferMemory(pool, device, buffer, memory, memoryOffset);
}
-VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
+VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(android::base::BumpPool* pool,
+ VkDevice device, uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos) {
return mImpl->on_vkBindBufferMemory2(pool, device, bindInfoCount, pBindInfos);
}
-VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
+VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(android::base::BumpPool* pool,
+ VkDevice device, uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos) {
return mImpl->on_vkBindBufferMemory2KHR(pool, device, bindInfoCount, pBindInfos);
}
-VkResult VkDecoderGlobalState::on_vkCreateImage(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
+VkResult VkDecoderGlobalState::on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkImage* pImage) {
return mImpl->on_vkCreateImage(pool, device, pCreateInfo, pAllocator, pImage);
}
-void VkDecoderGlobalState::on_vkDestroyImage(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device,
+ VkImage image,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyImage(pool, device, image, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
+VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device,
+ VkImage image, VkDeviceMemory memory,
VkDeviceSize memoryOffset) {
- return mImpl->on_vkBindImageMemory(pool, device, image, memory,
- memoryOffset);
+ return mImpl->on_vkBindImageMemory(pool, device, image, memory, memoryOffset);
}
-VkResult VkDecoderGlobalState::on_vkCreateImageView(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
+VkResult VkDecoderGlobalState::on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
+ const VkImageViewCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkImageView* pView) {
return mImpl->on_vkCreateImageView(pool, device, pCreateInfo, pAllocator, pView);
}
-void VkDecoderGlobalState::on_vkDestroyImageView(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
+ VkImageView imageView,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyImageView(pool, device, imageView, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkCreateSampler(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
+VkResult VkDecoderGlobalState::on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
+ const VkSamplerCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSampler* pSampler) {
return mImpl->on_vkCreateSampler(pool, device, pCreateInfo, pAllocator, pSampler);
}
-void VkDecoderGlobalState::on_vkDestroySampler(
- android::base::BumpPool* pool,
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device,
+ VkSampler sampler,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroySampler(pool, device, sampler, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkCreateSemaphore(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
+VkResult VkDecoderGlobalState::on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
+ const VkSemaphoreCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSemaphore* pSemaphore) {
return mImpl->on_vkCreateSemaphore(pool, device, pCreateInfo, pAllocator, pSemaphore);
}
VkResult VkDecoderGlobalState::on_vkImportSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice device,
+ android::base::BumpPool* pool, VkDevice device,
const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
return mImpl->on_vkImportSemaphoreFdKHR(pool, device, pImportSemaphoreFdInfo);
}
-VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
+VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool,
+ VkDevice device,
+ const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
+ int* pFd) {
return mImpl->on_vkGetSemaphoreFdKHR(pool, device, pGetFdInfo, pFd);
}
-void VkDecoderGlobalState::on_vkDestroySemaphore(
- android::base::BumpPool* pool,
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice device,
+ VkSemaphore semaphore,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroySemaphore(pool, device, semaphore, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkCreateFence(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator,
- pFence);
+VkResult VkDecoderGlobalState::on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
+ const VkFenceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkFence* pFence) {
+ return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator, pFence);
}
-VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool,
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences) {
+VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool, VkDevice device,
+ uint32_t fenceCount, const VkFence* pFences) {
return mImpl->on_vkResetFences(pool, device, fenceCount, pFences);
}
-void VkDecoderGlobalState::on_vkDestroyFence(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device,
+ VkFence fence,
+ const VkAllocationCallbacks* pAllocator) {
return mImpl->on_vkDestroyFence(pool, device, fence, pAllocator);
}
VkResult VkDecoderGlobalState::on_vkCreateDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
+ android::base::BumpPool* pool, VkDevice device,
+ const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
VkDescriptorSetLayout* pSetLayout) {
return mImpl->on_vkCreateDescriptorSetLayout(pool, device, pCreateInfo, pAllocator, pSetLayout);
}
void VkDecoderGlobalState::on_vkDestroyDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
+ android::base::BumpPool* pool, VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyDescriptorSetLayout(pool, device, descriptorSetLayout, pAllocator);
}
VkResult VkDecoderGlobalState::on_vkCreateDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
+ android::base::BumpPool* pool, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {
return mImpl->on_vkCreateDescriptorPool(pool, device, pCreateInfo, pAllocator, pDescriptorPool);
}
-void VkDecoderGlobalState::on_vkDestroyDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
- mImpl->on_vkDestroyDescriptorPool(
- pool, device, descriptorPool, pAllocator);
+void VkDecoderGlobalState::on_vkDestroyDescriptorPool(android::base::BumpPool* pool,
+ VkDevice device,
+ VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks* pAllocator) {
+ mImpl->on_vkDestroyDescriptorPool(pool, device, descriptorPool, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags) {
- return mImpl->on_vkResetDescriptorPool(
- pool, device, descriptorPool, flags);
+VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(android::base::BumpPool* pool,
+ VkDevice device,
+ VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags) {
+ return mImpl->on_vkResetDescriptorPool(pool, device, descriptorPool, flags);
}
VkResult VkDecoderGlobalState::on_vkAllocateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets) {
- return mImpl->on_vkAllocateDescriptorSets(
- pool, device, pAllocateInfo, pDescriptorSets);
+ android::base::BumpPool* pool, VkDevice device,
+ const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
+ return mImpl->on_vkAllocateDescriptorSets(pool, device, pAllocateInfo, pDescriptorSets);
}
-VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets) {
- return mImpl->on_vkFreeDescriptorSets(
- pool, device, descriptorPool, descriptorSetCount, pDescriptorSets);
+VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(android::base::BumpPool* pool,
+ VkDevice device,
+ VkDescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VkDescriptorSet* pDescriptorSets) {
+ return mImpl->on_vkFreeDescriptorSets(pool, device, descriptorPool, descriptorSetCount,
+ pDescriptorSets);
}
-void VkDecoderGlobalState::on_vkUpdateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
- mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount,
- pDescriptorWrites, descriptorCopyCount,
- pDescriptorCopies);
+void VkDecoderGlobalState::on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies) {
+ mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
}
-void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage,
- dstImageLayout, regionCount, pRegions);
+void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkBuffer srcBuffer, VkImage dstImage,
+ VkImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VkBufferImageCopy* pRegions) {
+ mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
void VkDecoderGlobalState::on_vkCmdCopyImage(android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
+ VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy* pRegions) {
- mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout,
- dstImage, dstImageLayout, regionCount, pRegions);
+ mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount, pRegions);
}
-void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage,
- srcImageLayout, dstBuffer, regionCount,
- pRegions);
+void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkImage srcImage, VkImageLayout srcImageLayout,
+ VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferImageCopy* pRegions) {
+ mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage, srcImageLayout, dstBuffer,
+ regionCount, pRegions);
}
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- mImpl->on_vkGetImageMemoryRequirements(pool, device, image,
- pMemoryRequirements);
+ android::base::BumpPool* pool, VkDevice device, VkImage image,
+ VkMemoryRequirements* pMemoryRequirements) {
+ mImpl->on_vkGetImageMemoryRequirements(pool, device, image, pMemoryRequirements);
}
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo,
- pMemoryRequirements);
+ android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements) {
+ mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2KHR(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo,
- pMemoryRequirements);
+ android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements) {
+ mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
void VkDecoderGlobalState::on_vkCmdPipelineBarrier(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- mImpl->on_vkCmdPipelineBarrier(
- pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
- pBufferMemoryBarriers, imageMemoryBarrierCount,
- pImageMemoryBarriers);
+ android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
+ mImpl->on_vkCmdPipelineBarrier(pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
}
-VkResult VkDecoderGlobalState::on_vkAllocateMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
+VkResult VkDecoderGlobalState::on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
+ const VkMemoryAllocateInfo* pAllocateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDeviceMemory* pMemory) {
return mImpl->on_vkAllocateMemory(pool, device, pAllocateInfo, pAllocator, pMemory);
}
-void VkDecoderGlobalState::on_vkFreeMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkFreeMemory(pool, device, memory, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkMapMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
+VkResult VkDecoderGlobalState::on_vkMapMemory(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory, VkDeviceSize offset,
+ VkDeviceSize size, VkMemoryMapFlags flags,
+ void** ppData) {
return mImpl->on_vkMapMemory(pool, device, memory, offset, size, flags, ppData);
}
-void VkDecoderGlobalState::on_vkUnmapMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory) {
+void VkDecoderGlobalState::on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory) {
mImpl->on_vkUnmapMemory(pool, device, memory);
}
@@ -7564,406 +6773,304 @@
return mImpl->getDeviceMemorySize(memory);
}
-bool VkDecoderGlobalState::usingDirectMapping() const {
- return mImpl->usingDirectMapping();
-}
+bool VkDecoderGlobalState::usingDirectMapping() const { return mImpl->usingDirectMapping(); }
-VkDecoderGlobalState::HostFeatureSupport
-VkDecoderGlobalState::getHostFeatureSupport() const {
+VkDecoderGlobalState::HostFeatureSupport VkDecoderGlobalState::getHostFeatureSupport() const {
return mImpl->getHostFeatureSupport();
}
// VK_ANDROID_native_buffer
-VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- int* grallocUsage) {
- return mImpl->on_vkGetSwapchainGrallocUsageANDROID(
- pool, device, format, imageUsage, grallocUsage);
+VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool,
+ VkDevice device,
+ VkFormat format,
+ VkImageUsageFlags imageUsage,
+ int* grallocUsage) {
+ return mImpl->on_vkGetSwapchainGrallocUsageANDROID(pool, device, format, imageUsage,
+ grallocUsage);
}
VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsage2ANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
- uint64_t* grallocConsumerUsage,
+ android::base::BumpPool* pool, VkDevice device, VkFormat format, VkImageUsageFlags imageUsage,
+ VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
uint64_t* grallocProducerUsage) {
- return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(
- pool, device, format, imageUsage,
- swapchainImageUsage,
- grallocConsumerUsage,
- grallocProducerUsage);
+ return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(pool, device, format, imageUsage,
+ swapchainImageUsage, grallocConsumerUsage,
+ grallocProducerUsage);
}
-VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- int nativeFenceFd,
- VkSemaphore semaphore,
- VkFence fence) {
- return mImpl->on_vkAcquireImageANDROID(
- pool, device, image, nativeFenceFd, semaphore, fence);
+VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(android::base::BumpPool* pool,
+ VkDevice device, VkImage image,
+ int nativeFenceFd, VkSemaphore semaphore,
+ VkFence fence) {
+ return mImpl->on_vkAcquireImageANDROID(pool, device, image, nativeFenceFd, semaphore, fence);
}
VkResult VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROID(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- VkImage image,
- int* pNativeFenceFd) {
- return mImpl->on_vkQueueSignalReleaseImageANDROID(
- pool, queue, waitSemaphoreCount, pWaitSemaphores,
- image, pNativeFenceFd);
+ android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) {
+ return mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount,
+ pWaitSemaphores, image, pNativeFenceFd);
}
// VK_GOOGLE_gfxstream
-VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkDeviceMemory memory, uint64_t* pAddress) {
- return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(
- pool, device, memory, pAddress);
+VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
+ VkDevice device,
+ VkDeviceMemory memory,
+ uint64_t* pAddress) {
+ return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(pool, device, memory, pAddress);
}
VkResult VkDecoderGlobalState::on_vkGetMemoryHostAddressInfoGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkDeviceMemory memory,
- uint64_t* pAddress, uint64_t* pSize, uint64_t* pHostmemId) {
- return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(
- pool, device, memory, pAddress, pSize, pHostmemId);
+ android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory, uint64_t* pAddress,
+ uint64_t* pSize, uint64_t* pHostmemId) {
+ return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(pool, device, memory, pAddress, pSize,
+ pHostmemId);
}
// VK_GOOGLE_gfxstream
-VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
+VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool,
+ VkDevice device, VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator) {
return mImpl->on_vkFreeMemorySyncGOOGLE(pool, device, memory, pAllocator);
}
-
// VK_GOOGLE_color_buffer
-VkResult VkDecoderGlobalState::on_vkRegisterImageColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkImage image, uint32_t colorBuffer) {
- return mImpl->on_vkRegisterImageColorBufferGOOGLE(
- pool, device, image, colorBuffer);
+VkResult VkDecoderGlobalState::on_vkRegisterImageColorBufferGOOGLE(android::base::BumpPool* pool,
+ VkDevice device, VkImage image,
+ uint32_t colorBuffer) {
+ return mImpl->on_vkRegisterImageColorBufferGOOGLE(pool, device, image, colorBuffer);
}
-VkResult VkDecoderGlobalState::on_vkRegisterBufferColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkBuffer buffer, uint32_t colorBuffer) {
- return mImpl->on_vkRegisterBufferColorBufferGOOGLE(
- pool, device, buffer, colorBuffer);
+VkResult VkDecoderGlobalState::on_vkRegisterBufferColorBufferGOOGLE(android::base::BumpPool* pool,
+ VkDevice device,
+ VkBuffer buffer,
+ uint32_t colorBuffer) {
+ return mImpl->on_vkRegisterBufferColorBufferGOOGLE(pool, device, buffer, colorBuffer);
}
VkResult VkDecoderGlobalState::on_vkAllocateCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers) {
- return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo,
- pCommandBuffers);
+ android::base::BumpPool* pool, VkDevice device,
+ const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
+ return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo, pCommandBuffers);
}
-VkResult VkDecoderGlobalState::on_vkCreateCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
- return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator,
- pCommandPool);
+VkResult VkDecoderGlobalState::on_vkCreateCommandPool(android::base::BumpPool* pool,
+ VkDevice device,
+ const VkCommandPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkCommandPool* pCommandPool) {
+ return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator, pCommandPool);
}
-void VkDecoderGlobalState::on_vkDestroyCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator) {
+void VkDecoderGlobalState::on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool,
+ const VkAllocationCallbacks* pAllocator) {
mImpl->on_vkDestroyCommandPool(pool, device, commandPool, pAllocator);
}
-VkResult VkDecoderGlobalState::on_vkResetCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags) {
+VkResult VkDecoderGlobalState::on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags) {
return mImpl->on_vkResetCommandPool(pool, device, commandPool, flags);
}
-void VkDecoderGlobalState::on_vkCmdExecuteCommands(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount,
- pCommandBuffers);
+void VkDecoderGlobalState::on_vkCmdExecuteCommands(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers) {
+ return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount, pCommandBuffers);
}
-VkResult VkDecoderGlobalState::on_vkQueueSubmit(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
+VkResult VkDecoderGlobalState::on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t submitCount, const VkSubmitInfo* pSubmits,
+ VkFence fence) {
return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}
-VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(
- android::base::BumpPool* pool,
- VkQueue queue) {
+VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue) {
return mImpl->on_vkQueueWaitIdle(pool, queue);
}
-VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
+VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags) {
return mImpl->on_vkResetCommandBuffer(pool, commandBuffer, flags);
}
-void VkDecoderGlobalState::on_vkFreeCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool,
- commandBufferCount, pCommandBuffers);
+void VkDecoderGlobalState::on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers) {
+ return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool, commandBufferCount,
+ pCommandBuffers);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphoreProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
- pool, physicalDevice, pExternalSemaphoreInfo,
- pExternalSemaphoreProperties);
+ pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}
void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
- pool, physicalDevice, pExternalSemaphoreInfo,
- pExternalSemaphoreProperties);
+ pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}
// Descriptor update templates
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
+ android::base::BumpPool* pool, VkDevice boxed_device,
const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- return mImpl->on_vkCreateDescriptorUpdateTemplate(
- pool, boxed_device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+ return mImpl->on_vkCreateDescriptorUpdateTemplate(pool, boxed_device, pCreateInfo, pAllocator,
+ pDescriptorUpdateTemplate);
}
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
+ android::base::BumpPool* pool, VkDevice boxed_device,
const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(
- pool, boxed_device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+ return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(pool, boxed_device, pCreateInfo,
+ pAllocator, pDescriptorUpdateTemplate);
}
void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- mImpl->on_vkDestroyDescriptorUpdateTemplate(
- pool, boxed_device, descriptorUpdateTemplate, pAllocator);
+ android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
+ mImpl->on_vkDestroyDescriptorUpdateTemplate(pool, boxed_device, descriptorUpdateTemplate,
+ pAllocator);
}
void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(
- pool, boxed_device, descriptorUpdateTemplate, pAllocator);
+ android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
+ mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(pool, boxed_device, descriptorUpdateTemplate,
+ pAllocator);
}
void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- uint32_t imageInfoCount,
- uint32_t bufferInfoCount,
- uint32_t bufferViewCount,
- const uint32_t* pImageInfoEntryIndices,
- const uint32_t* pBufferInfoEntryIndices,
- const uint32_t* pBufferViewEntryIndices,
- const VkDescriptorImageInfo* pImageInfos,
- const VkDescriptorBufferInfo* pBufferInfos,
+ android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
+ uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
+ const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
+ const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
const VkBufferView* pBufferViews) {
mImpl->on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
- pool, boxed_device,
- descriptorSet,
- descriptorUpdateTemplate,
- imageInfoCount,
- bufferInfoCount,
- bufferViewCount,
- pImageInfoEntryIndices,
- pBufferInfoEntryIndices,
- pBufferViewEntryIndices,
- pImageInfos,
- pBufferInfos,
- pBufferViews);
+ pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
+ bufferInfoCount, bufferViewCount, pImageInfoEntryIndices, pBufferInfoEntryIndices,
+ pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews);
}
-VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
+VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo* pBeginInfo) {
return mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo);
}
void VkDecoderGlobalState::on_vkBeginCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
+ android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
const VkCommandBufferBeginInfo* pBeginInfo) {
mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo);
}
-VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer) {
- return mImpl->on_vkEndCommandBuffer(
- pool, commandBuffer);
+VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer) {
+ return mImpl->on_vkEndCommandBuffer(pool, commandBuffer);
}
-void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer) {
- mImpl->on_vkEndCommandBufferAsyncGOOGLE(
- pool, commandBuffer);
+void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer) {
+ mImpl->on_vkEndCommandBufferAsyncGOOGLE(pool, commandBuffer);
}
-void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
- mImpl->on_vkResetCommandBufferAsyncGOOGLE(
- pool, commandBuffer, flags);
+void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags) {
+ mImpl->on_vkResetCommandBufferAsyncGOOGLE(pool, commandBuffer, flags);
}
-void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
+void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ uint32_t needHostSync,
+ uint32_t sequenceNumber) {
mImpl->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
}
VkResult VkDecoderGlobalState::on_vkCreateImageWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
+ android::base::BumpPool* pool, VkDevice device, const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImage* pImage,
VkMemoryRequirements* pMemoryRequirements) {
- return mImpl->on_vkCreateImageWithRequirementsGOOGLE(
- pool, device, pCreateInfo, pAllocator, pImage, pMemoryRequirements);
+ return mImpl->on_vkCreateImageWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
+ pImage, pMemoryRequirements);
}
VkResult VkDecoderGlobalState::on_vkCreateBufferWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
+ android::base::BumpPool* pool, VkDevice device, const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer,
VkMemoryRequirements* pMemoryRequirements) {
- return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(
- pool, device, pCreateInfo, pAllocator, pBuffer, pMemoryRequirements);
+ return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
+ pBuffer, pMemoryRequirements);
}
-void VkDecoderGlobalState::on_vkCmdBindPipeline(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint,
- pipeline);
+void VkDecoderGlobalState::on_vkCmdBindPipeline(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
+ mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint, pipeline);
}
void VkDecoderGlobalState::on_vkCmdBindDescriptorSets(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint,
- layout, firstSet, descriptorSetCount,
- pDescriptorSets, dynamicOffsetCount,
+ android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet,
+ uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets,
+ uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
+ mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint, layout, firstSet,
+ descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
pDynamicOffsets);
}
-VkResult VkDecoderGlobalState::on_vkCreateRenderPass(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo,
- pAllocator, pRenderPass);
+VkResult VkDecoderGlobalState::on_vkCreateRenderPass(android::base::BumpPool* pool,
+ VkDevice boxed_device,
+ const VkRenderPassCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkRenderPass* pRenderPass) {
+ return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
}
-void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t needHostSync,
- uint32_t sequenceNumber) {
+void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t needHostSync,
+ uint32_t sequenceNumber) {
mImpl->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
}
-void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
+void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t submitCount,
+ const VkSubmitInfo* pSubmits,
+ VkFence fence) {
mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}
-void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue) {
+void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool,
+ VkQueue queue) {
mImpl->on_vkQueueWaitIdle(pool, queue);
}
-void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo, VkFence fence) {
+void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool,
+ VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo* pBindInfo,
+ VkFence fence) {
mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}
-void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkDeviceSize* pOffset,
- VkDeviceSize* pRowPitchAlignment) {
+void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool,
+ VkDevice device, VkFormat format,
+ VkDeviceSize* pOffset,
+ VkDeviceSize* pRowPitchAlignment) {
mImpl->on_vkGetLinearImageLayoutGOOGLE(pool, device, format, pOffset, pRowPitchAlignment);
}
@@ -7975,62 +7082,52 @@
mImpl->on_vkGetLinearImageLayout2GOOGLE(pool, device, pCreateInfo, pOffset, pRowPitchAlignment);
}
-void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- VkCommandBuffer commandBuffer,
- VkDeviceSize dataSize,
- const void* pData) {
+void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool,
+ VkQueue queue,
+ VkCommandBuffer commandBuffer,
+ VkDeviceSize dataSize, const void* pData) {
mImpl->on_vkQueueFlushCommandsGOOGLE(pool, queue, commandBuffer, dataSize, pData);
}
void VkDecoderGlobalState::on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t descriptorPoolCount,
- const VkDescriptorPool* pDescriptorPools,
- uint32_t descriptorSetCount,
- const VkDescriptorSetLayout* pDescriptorSetLayouts,
- const uint64_t* pDescriptorSetPoolIds,
- const uint32_t* pDescriptorSetWhichPool,
- const uint32_t* pDescriptorSetPendingAllocation,
- const uint32_t* pDescriptorWriteStartingIndices,
- uint32_t pendingDescriptorWriteCount,
+ android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
+ const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
+ const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
+ const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
+ const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
const VkWriteDescriptorSet* pPendingDescriptorWrites) {
- mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount, pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool, pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices, pendingDescriptorWriteCount, pPendingDescriptorWrites);
+ mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
+ pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
+ pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
+ pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
+ pendingDescriptorWriteCount, pPendingDescriptorWrites);
}
-void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t* pPoolIdCount,
- uint64_t* pPoolIds) {
- mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount, pPoolIds);
+void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool,
+ VkDevice device,
+ VkDescriptorPool descriptorPool,
+ uint32_t* pPoolIdCount,
+ uint64_t* pPoolIds) {
+ mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount,
+ pPoolIds);
}
-VkResult VkDecoderGlobalState::on_vkQueueBindSparse(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo, VkFence fence) {
+VkResult VkDecoderGlobalState::on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t bindInfoCount,
+ const VkBindSparseInfo* pBindInfo,
+ VkFence fence) {
return mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}
void VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- VkImage image) {
+ android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, VkImage image) {
int fenceFd;
- mImpl->on_vkQueueSignalReleaseImageANDROID(
- pool, queue, waitSemaphoreCount, pWaitSemaphores,
- image, &fenceFd);
+ mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount, pWaitSemaphores,
+ image, &fenceFd);
}
-VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence,
- uint64_t timeout) {
+VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence, uint64_t timeout) {
return mImpl->waitForFence(boxed_fence, timeout);
}
@@ -8043,141 +7140,142 @@
return mImpl->registerQsriCallback(image, std::move(callback));
}
-void VkDecoderGlobalState::deviceMemoryTransform_tohost(
- VkDeviceMemory* memory, uint32_t memoryCount,
- VkDeviceSize* offset, uint32_t offsetCount,
- VkDeviceSize* size, uint32_t sizeCount,
- uint32_t* typeIndex, uint32_t typeIndexCount,
- uint32_t* typeBits, uint32_t typeBitsCount) {
+void VkDecoderGlobalState::deviceMemoryTransform_tohost(VkDeviceMemory* memory,
+ uint32_t memoryCount, VkDeviceSize* offset,
+ uint32_t offsetCount, VkDeviceSize* size,
+ uint32_t sizeCount, uint32_t* typeIndex,
+ uint32_t typeIndexCount, uint32_t* typeBits,
+ uint32_t typeBitsCount) {
// Not used currently
- (void)memory; (void)memoryCount;
- (void)offset; (void)offsetCount;
- (void)size; (void)sizeCount;
- (void)typeIndex; (void)typeIndexCount;
- (void)typeBits; (void)typeBitsCount;
+ (void)memory;
+ (void)memoryCount;
+ (void)offset;
+ (void)offsetCount;
+ (void)size;
+ (void)sizeCount;
+ (void)typeIndex;
+ (void)typeIndexCount;
+ (void)typeBits;
+ (void)typeBitsCount;
}
void VkDecoderGlobalState::deviceMemoryTransform_fromhost(
- VkDeviceMemory* memory, uint32_t memoryCount,
- VkDeviceSize* offset, uint32_t offsetCount,
- VkDeviceSize* size, uint32_t sizeCount,
- uint32_t* typeIndex, uint32_t typeIndexCount,
+ VkDeviceMemory* memory, uint32_t memoryCount, VkDeviceSize* offset, uint32_t offsetCount,
+ VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex, uint32_t typeIndexCount,
uint32_t* typeBits, uint32_t typeBitsCount) {
// Not used currently
- (void)memory; (void)memoryCount;
- (void)offset; (void)offsetCount;
- (void)size; (void)sizeCount;
- (void)typeIndex; (void)typeIndexCount;
- (void)typeBits; (void)typeBitsCount;
+ (void)memory;
+ (void)memoryCount;
+ (void)offset;
+ (void)offsetCount;
+ (void)size;
+ (void)sizeCount;
+ (void)typeIndex;
+ (void)typeIndexCount;
+ (void)typeBits;
+ (void)typeBitsCount;
}
-VkDecoderSnapshot* VkDecoderGlobalState::snapshot() {
- return mImpl->snapshot();
-}
+VkDecoderSnapshot* VkDecoderGlobalState::snapshot() { return mImpl->snapshot(); }
-#define DEFINE_TRANSFORMED_TYPE_IMPL(type) \
- void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) { \
- mImpl->transformImpl_##type##_tohost(val, count); \
- } \
+#define DEFINE_TRANSFORMED_TYPE_IMPL(type) \
+ void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) { \
+ mImpl->transformImpl_##type##_tohost(val, count); \
+ } \
void VkDecoderGlobalState::transformImpl_##type##_fromhost(const type* val, uint32_t count) { \
- mImpl->transformImpl_##type##_fromhost(val, count); \
- } \
+ mImpl->transformImpl_##type##_fromhost(val, count); \
+ }
LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)
-#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type) \
- type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) { \
- return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch); \
- } \
- void VkDecoderGlobalState::delete_##type(type boxed) { \
- mImpl->delete_##type(boxed); \
- } \
- type VkDecoderGlobalState::unbox_##type(type boxed) { \
- return mImpl->unbox_##type(boxed); \
- } \
- type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) { \
- return mImpl->unboxed_to_boxed_##type(unboxed); \
- } \
- VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) { \
- return mImpl->dispatch_##type(boxed); \
- } \
+#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type) \
+ type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch, \
+ bool ownDispatch) { \
+ return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch); \
+ } \
+ void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); } \
+ type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
+ type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) { \
+ return mImpl->unboxed_to_boxed_##type(unboxed); \
+ } \
+ VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) { \
+ return mImpl->dispatch_##type(boxed); \
+ }
-#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type) \
- type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) { \
- return mImpl->new_boxed_non_dispatchable_##type(underlying); \
- } \
- void VkDecoderGlobalState::delete_##type(type boxed) { \
- mImpl->delete_##type(boxed); \
- } \
- type VkDecoderGlobalState::unbox_##type(type boxed) { \
- return mImpl->unbox_##type(boxed); \
- } \
- type VkDecoderGlobalState::unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
- return mImpl->unboxed_to_boxed_non_dispatchable_##type(unboxed); \
- } \
+#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type) \
+ type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) { \
+ return mImpl->new_boxed_non_dispatchable_##type(underlying); \
+ } \
+ void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); } \
+ type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
+ type VkDecoderGlobalState::unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
+ return mImpl->unboxed_to_boxed_non_dispatchable_##type(unboxed); \
+ }
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF)
-#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type) \
- type unbox_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return VK_NULL_HANDLE; \
- return (type)elt->underlying; \
- } \
- VulkanDispatch* dispatch_##type(type boxed) { \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) { fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed); return nullptr; } \
- return elt->dispatch; \
- } \
- void delete_##type(type boxed) { \
- if (!boxed) return; \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) return; \
- releaseOrderMaintInfo(elt->ordMaintInfo); \
- if (elt->readStream) { \
- sReadStreamRegistry.push(elt->readStream); \
- elt->readStream = nullptr; \
- } \
- sBoxedHandleManager.remove((uint64_t)boxed); \
- } \
- type unboxed_to_boxed_##type(type unboxed) { \
- AutoLock lock(sBoxedHandleManager.lock); \
- return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked( \
- (uint64_t)(uintptr_t)unboxed); \
- } \
+#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type) \
+ type unbox_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return VK_NULL_HANDLE; \
+ return (type)elt->underlying; \
+ } \
+ VulkanDispatch* dispatch_##type(type boxed) { \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) { \
+ fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed); \
+ return nullptr; \
+ } \
+ return elt->dispatch; \
+ } \
+ void delete_##type(type boxed) { \
+ if (!boxed) return; \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) return; \
+ releaseOrderMaintInfo(elt->ordMaintInfo); \
+ if (elt->readStream) { \
+ sReadStreamRegistry.push(elt->readStream); \
+ elt->readStream = nullptr; \
+ } \
+ sBoxedHandleManager.remove((uint64_t)boxed); \
+ } \
+ type unboxed_to_boxed_##type(type unboxed) { \
+ AutoLock lock(sBoxedHandleManager.lock); \
+ return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
+ }
-#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type) \
- type new_boxed_non_dispatchable_##type(type underlying) { \
- return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying); \
- } \
- void delete_##type(type boxed) { \
- if (!boxed) return; \
- sBoxedHandleManager.remove((uint64_t)boxed); \
- } \
- void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) { \
- sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback); \
- } \
- type unbox_##type(type boxed) { \
- if (!boxed) return boxed; \
- auto elt = sBoxedHandleManager.get( \
- (uint64_t)(uintptr_t)boxed); \
- if (!elt) { fprintf(stderr, "%s: unbox %p failed, not found\n", __func__, boxed); abort(); return VK_NULL_HANDLE; } \
- return (type)elt->underlying; \
- } \
- type unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
- AutoLock lock(sBoxedHandleManager.lock); \
- return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked( \
- (uint64_t)(uintptr_t)unboxed); \
- } \
+#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type) \
+ type new_boxed_non_dispatchable_##type(type underlying) { \
+ return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying); \
+ } \
+ void delete_##type(type boxed) { \
+ if (!boxed) return; \
+ sBoxedHandleManager.remove((uint64_t)boxed); \
+ } \
+ void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) { \
+ sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback); \
+ } \
+ type unbox_##type(type boxed) { \
+ if (!boxed) return boxed; \
+ auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed); \
+ if (!elt) { \
+ fprintf(stderr, "%s: unbox %p failed, not found\n", __func__, boxed); \
+ abort(); \
+ return VK_NULL_HANDLE; \
+ } \
+ return (type)elt->underlying; \
+ } \
+ type unboxed_to_boxed_non_dispatchable_##type(type unboxed) { \
+ AutoLock lock(sBoxedHandleManager.lock); \
+ return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
+ }
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
-void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool, uint64_t** bufPtr) {
+void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool,
+ uint64_t** bufPtr) {
mPool = pool;
mPreserveBufPtr = bufPtr;
}
@@ -8186,53 +7284,95 @@
*mPreserveBufPtr = (uint64_t*)mPool->alloc(count * sizeof(uint64_t));
}
-#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name) \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(type_name* handles, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]); \
- if (handles[i]) { handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); } else { handles[i] = (type_name)nullptr; } ; \
- } \
- } \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
- if (handles[i]) { handle_u64s[i] = (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); } else { handle_u64s[i] = 0; } \
- } \
- } \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
- if (handle_u64s[i]) { handles[i] = VkDecoderGlobalState::get()->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); } else { handles[i] = (type_name)nullptr; } \
- } \
- } \
+#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name) \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name( \
+ type_name* handles, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]); \
+ if (handles[i]) { \
+ handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
+ } else { \
+ handles[i] = (type_name) nullptr; \
+ }; \
+ } \
+ } \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64( \
+ const type_name* handles, uint64_t* handle_u64s, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
+ if (handles[i]) { \
+ handle_u64s[i] = \
+ (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
+ } else { \
+ handle_u64s[i] = 0; \
+ } \
+ } \
+ } \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name( \
+ const uint64_t* handle_u64s, type_name* handles, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
+ if (handle_u64s[i]) { \
+ handles[i] = VkDecoderGlobalState::get()->unbox_##type_name( \
+ (type_name)(uintptr_t)handle_u64s[i]); \
+ } else { \
+ handles[i] = (type_name) nullptr; \
+ } \
+ } \
+ }
-#define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name) \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(type_name* handles, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]); \
- if (handles[i]) { auto boxed = handles[i]; handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); delete_##type_name(boxed); } else { handles[i] = (type_name)nullptr; }; \
- } \
- } \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
- if (handles[i]) { auto boxed = handles[i]; handle_u64s[i] = (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); delete_##type_name(boxed); } else { handle_u64s[i] = 0; } \
- } \
- } \
- void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) { \
- allocPreserve(count); \
- for (size_t i = 0; i < count; ++i) { \
- (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
- if (handle_u64s[i]) { auto boxed = (type_name)(uintptr_t)handle_u64s[i]; handles[i] = VkDecoderGlobalState::get()->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); delete_##type_name(boxed); } else { handles[i] = (type_name)nullptr; } \
- } \
- } \
+#define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name) \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name( \
+ type_name* handles, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]); \
+ if (handles[i]) { \
+ auto boxed = handles[i]; \
+ handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
+ delete_##type_name(boxed); \
+ } else { \
+ handles[i] = (type_name) nullptr; \
+ }; \
+ } \
+ } \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64( \
+ const type_name* handles, uint64_t* handle_u64s, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
+ if (handles[i]) { \
+ auto boxed = handles[i]; \
+ handle_u64s[i] = \
+ (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
+ delete_##type_name(boxed); \
+ } else { \
+ handle_u64s[i] = 0; \
+ } \
+ } \
+ } \
+ void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name( \
+ const uint64_t* handle_u64s, type_name* handles, size_t count) { \
+ allocPreserve(count); \
+ for (size_t i = 0; i < count; ++i) { \
+ (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]); \
+ if (handle_u64s[i]) { \
+ auto boxed = (type_name)(uintptr_t)handle_u64s[i]; \
+ handles[i] = VkDecoderGlobalState::get()->unbox_##type_name( \
+ (type_name)(uintptr_t)handle_u64s[i]); \
+ delete_##type_name(boxed); \
+ } else { \
+ handles[i] = (type_name) nullptr; \
+ } \
+ } \
+ }
-GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
-GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
+GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(
+ BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
+GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(
+ BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
} // namespace goldfish_vk
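Note: for readability, here is a hand-expanded sketch of what
BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL produces for a single
handle type. VkFence is used purely as an illustration (assuming it appears in
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES); the real instantiations come from the
X-macro invocations above. The mapping records the boxed handle in the preserve buffer,
unboxes it in place, then frees the boxed wrapper:

    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_VkFence(VkFence* handles,
                                                                            size_t count) {
        allocPreserve(count);
        for (size_t i = 0; i < count; ++i) {
            // Remember the original boxed handle so callers can still refer to it later.
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);
            if (handles[i]) {
                auto boxed = handles[i];
                // Replace the boxed handle with the underlying host handle...
                handles[i] = VkDecoderGlobalState::get()->unbox_VkFence(handles[i]);
                // ...and release the boxed wrapper itself.
                delete_VkFence(boxed);
            } else {
                handles[i] = (VkFence) nullptr;
            }
        }
    }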
diff --git a/stream-servers/vulkan/VkDecoderGlobalState.h b/stream-servers/vulkan/VkDecoderGlobalState.h
index a32d4aa..78f3831 100644
--- a/stream-servers/vulkan/VkDecoderGlobalState.h
+++ b/stream-servers/vulkan/VkDecoderGlobalState.h
@@ -29,12 +29,11 @@
namespace base {
class BumpPool;
class Stream;
-} // namespace android
-} // namespace base
+} // namespace base
+} // namespace android
namespace goldfish_vk {
-
// Class for tracking host-side state. Currently we only care about
// tracking VkDeviceMemory to make it easier to pass the right data
// from mapped pointers to the guest, but this could get more stuff
@@ -45,7 +44,7 @@
// Currently, it works by interfacing with VkDecoder calling on_<apicall>
// functions.
class VkDecoderGlobalState {
-public:
+ public:
VkDecoderGlobalState();
~VkDecoderGlobalState();
@@ -75,9 +74,7 @@
size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer);
void clearCreatedHandlesForSnapshotLoad();
- VkResult on_vkEnumerateInstanceVersion(
- android::base::BumpPool* pool,
- uint32_t* pApiVersion);
+ VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion);
// Fast way to get dispatch tables associated with a Vulkan object.
// VkInstance
@@ -86,314 +83,204 @@
// VkQueue
// VkCommandBuffer
- VkResult on_vkCreateInstance(
- android::base::BumpPool* pool,
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
+ VkResult on_vkCreateInstance(android::base::BumpPool* pool,
+ const VkInstanceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
- void on_vkDestroyInstance(
- android::base::BumpPool* pool,
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkEnumeratePhysicalDevices(
- android::base::BumpPool* pool,
- VkInstance instance,
- uint32_t* physicalDeviceCount,
- VkPhysicalDevice* physicalDevices);
+ VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance instance,
+ uint32_t* physicalDeviceCount,
+ VkPhysicalDevice* physicalDevices);
// Override features
- void on_vkGetPhysicalDeviceFeatures(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures);
- void on_vkGetPhysicalDeviceFeatures2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
- void on_vkGetPhysicalDeviceFeatures2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
+ void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures* pFeatures);
+ void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures2* pFeatures);
+ void on_vkGetPhysicalDeviceFeatures2KHR(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures2* pFeatures);
// Override image format properties
VkResult on_vkGetPhysicalDeviceImageFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
+ VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
VkImageFormatProperties* pImageFormatProperties);
VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties);
VkResult on_vkGetPhysicalDeviceImageFormatProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties);
// Override format features
- void on_vkGetPhysicalDeviceFormatProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties);
- void on_vkGetPhysicalDeviceFormatProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
- void on_vkGetPhysicalDeviceFormatProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
+ void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties* pFormatProperties);
+ void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice, VkFormat format,
+ VkFormatProperties2* pFormatProperties);
+ void on_vkGetPhysicalDeviceFormatProperties2KHR(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkFormat format,
+ VkFormatProperties2* pFormatProperties);
// Override API version
- void on_vkGetPhysicalDeviceProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties);
- void on_vkGetPhysicalDeviceProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
- void on_vkGetPhysicalDeviceProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
+ void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties* pProperties);
+ void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties2* pProperties);
+ void on_vkGetPhysicalDeviceProperties2KHR(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties2* pProperties);
// Override memory types advertised from host
//
void on_vkGetPhysicalDeviceMemoryProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties* pMemoryProperties);
void on_vkGetPhysicalDeviceMemoryProperties2(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
void on_vkGetPhysicalDeviceMemoryProperties2KHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
// Override supported device extension lists
- VkResult on_vkEnumerateDeviceExtensionProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
+ VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
+ VkPhysicalDevice physicalDevice,
+ const char* pLayerName,
+ uint32_t* pPropertyCount,
+ VkExtensionProperties* pProperties);
- VkResult on_vkCreateDevice(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice);
+ VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
- void on_vkGetDeviceQueue(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
+ void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
+ uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
- void on_vkDestroyDevice(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkCreateBuffer(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
+ VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
+ const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
- void on_vkDestroyBuffer(
- android::base::BumpPool* pool,
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkBindBufferMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
- VkResult on_vkBindBufferMemory2(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
- VkResult on_vkBindBufferMemory2KHR(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
+ VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
+ VkDeviceMemory memory, VkDeviceSize memoryOffset);
+ VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice device,
+ uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos);
+ VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice device,
+ uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfo* pBindInfos);
- VkResult on_vkCreateImage(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
+ VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImage* pImage);
- void on_vkDestroyImage(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device, VkImage image,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkBindImageMemory(android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
+ VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device, VkImage image,
+ VkDeviceMemory memory, VkDeviceSize memoryOffset);
- VkResult on_vkCreateImageView(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
+ VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
+ const VkImageViewCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImageView* pView);
- void on_vkDestroyImageView(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
+ VkImageView imageView, const VkAllocationCallbacks* pAllocator);
- VkResult on_vkCreateSampler(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
+ VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
+ const VkSamplerCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
- void on_vkDestroySampler(
- android::base::BumpPool* pool,
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device, VkSampler sampler,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkCreateDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
+ VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice device,
+ const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorSetLayout* pSetLayout);
- void on_vkDestroyDescriptorSetLayout(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorSetLayout descriptorSetLayout,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkCreateDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
+ VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice device,
+ const VkDescriptorPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorPool* pDescriptorPool);
- void on_vkDestroyDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkResetDescriptorPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
+ VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags);
- VkResult on_vkAllocateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
+ VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
+ const VkDescriptorSetAllocateInfo* pAllocateInfo,
+ VkDescriptorSet* pDescriptorSets);
- VkResult on_vkFreeDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
+ VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+ const VkDescriptorSet* pDescriptorSets);
- void on_vkUpdateDescriptorSets(
- android::base::BumpPool* pool,
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
+ void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies);
- void on_vkCmdCopyBufferToImage(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
- void on_vkCmdCopyImage(android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
- void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
+ void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkBuffer srcBuffer, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
const VkBufferImageCopy* pRegions);
- void on_vkGetImageMemoryRequirements(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
+ void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageCopy* pRegions);
+ void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkImage srcImage, VkImageLayout srcImageLayout,
+ VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferImageCopy* pRegions);
- void on_vkGetImageMemoryRequirements2(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
+ void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice device,
+ VkImage image, VkMemoryRequirements* pMemoryRequirements);
- void on_vkGetImageMemoryRequirements2KHR(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
+ void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice device,
+ const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
- void on_vkCmdPipelineBarrier(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
+ void on_vkGetImageMemoryRequirements2KHR(android::base::BumpPool* pool, VkDevice device,
+ const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+
+ void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask,
+ VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
+ const VkMemoryBarrier* pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier* pImageMemoryBarriers);
// Do we need to wrap vk(Create|Destroy)Instance to
// update our maps of VkDevices? Spec suggests no:
@@ -405,31 +292,18 @@
// This suggests that we should emulate the invalid behavior by
// not destroying our own VkDevice maps on instance destruction.
- VkResult on_vkAllocateMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
+ VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
+ const VkMemoryAllocateInfo* pAllocateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
- void on_vkFreeMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkMapMemory(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
+ VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory,
+ VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
+ void** ppData);
- void on_vkUnmapMemory(
- android::base::BumpPool* pool,
- VkDevice device, VkDeviceMemory memory);
+ void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory);
uint8_t* getMappedHostPointer(VkDeviceMemory memory);
VkDeviceSize getDeviceMemorySize(VkDeviceMemory memory);
@@ -450,336 +324,208 @@
HostFeatureSupport getHostFeatureSupport() const;
// VK_ANDROID_native_buffer
- VkResult on_vkGetSwapchainGrallocUsageANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- int* grallocUsage);
+ VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice device,
+ VkFormat format, VkImageUsageFlags imageUsage,
+ int* grallocUsage);
VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkImageUsageFlags imageUsage,
- VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
- uint64_t* grallocConsumerUsage,
- uint64_t* grallocProducerUsage);
- VkResult on_vkAcquireImageANDROID(
- android::base::BumpPool* pool,
- VkDevice device,
- VkImage image,
- int nativeFenceFd,
- VkSemaphore semaphore,
- VkFence fence);
- VkResult on_vkQueueSignalReleaseImageANDROID(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- VkImage image,
- int* pNativeFenceFd);
+ android::base::BumpPool* pool, VkDevice device, VkFormat format,
+ VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
+ uint64_t* grallocConsumerUsage, uint64_t* grallocProducerUsage);
+ VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice device, VkImage image,
+ int nativeFenceFd, VkSemaphore semaphore, VkFence fence);
+ VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores, VkImage image,
+ int* pNativeFenceFd);
// VK_GOOGLE_gfxstream
- VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- uint64_t* pAddress);
- VkResult on_vkGetMemoryHostAddressInfoGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- uint64_t* pAddress,
- uint64_t* pSize,
- uint64_t* pHostmemId);
+ VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory, uint64_t* pAddress);
+ VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory, uint64_t* pAddress,
+ uint64_t* pSize, uint64_t* pHostmemId);
// VK_GOOGLE_gfxstream
- VkResult on_vkFreeMemorySyncGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
+ VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkDeviceMemory memory,
+ const VkAllocationCallbacks* pAllocator);
// VK_GOOGLE_color_buffer
- VkResult on_vkRegisterImageColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkImage image, uint32_t colorBuffer);
- VkResult on_vkRegisterBufferColorBufferGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device, VkBuffer buffer, uint32_t colorBuffer);
+ VkResult on_vkRegisterImageColorBufferGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkImage image, uint32_t colorBuffer);
+ VkResult on_vkRegisterBufferColorBufferGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkBuffer buffer, uint32_t colorBuffer);
- VkResult on_vkAllocateCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
+ VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice device,
+ const VkCommandBufferAllocateInfo* pAllocateInfo,
+ VkCommandBuffer* pCommandBuffers);
- void on_vkCmdExecuteCommands(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
+ void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers);
- VkResult on_vkQueueSubmit(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
+ VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue, uint32_t submitCount,
+ const VkSubmitInfo* pSubmits, VkFence fence);
- VkResult on_vkQueueWaitIdle(
- android::base::BumpPool* pool,
- VkQueue queue);
+ VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue);
- VkResult on_vkResetCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
+ VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags);
- void on_vkFreeCommandBuffers(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
+ void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool, uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers);
- VkResult on_vkCreateCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
+ VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice device,
+ const VkCommandPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkCommandPool* pCommandPool);
- void on_vkDestroyCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool,
+ const VkAllocationCallbacks* pAllocator);
- VkResult on_vkResetCommandPool(
- android::base::BumpPool* pool,
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
+ VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
+ VkCommandPool commandPool, VkCommandPoolResetFlags flags);
void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
void on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- android::base::BumpPool* pool,
- VkPhysicalDevice physicalDevice,
+ android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
- VkResult on_vkCreateSemaphore(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
- VkResult on_vkImportSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
- VkResult on_vkGetSemaphoreFdKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
- void on_vkDestroySemaphore(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
+ VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
+ const VkSemaphoreCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+ VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice device,
+ const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
+ VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
+ const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
+ void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
- VkResult on_vkCreateFence(android::base::BumpPool* pool,
- VkDevice device,
+ VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
- VkResult on_vkResetFences(android::base::BumpPool* pool,
- VkDevice device,
- uint32_t fenceCount,
+ const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+ VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice device, uint32_t fenceCount,
const VkFence* pFences);
- void on_vkDestroyFence(android::base::BumpPool* pool,
- VkDevice device,
- VkFence fence,
+ void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device, VkFence fence,
const VkAllocationCallbacks* pAllocator);
// Descriptor update templates
VkResult on_vkCreateDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
+ android::base::BumpPool* pool, VkDevice boxed_device,
const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
VkResult on_vkCreateDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
+ android::base::BumpPool* pool, VkDevice boxed_device,
const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
- void on_vkDestroyDescriptorUpdateTemplate(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
+ void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VkAllocationCallbacks* pAllocator);
void on_vkDestroyDescriptorUpdateTemplateKHR(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
+ android::base::BumpPool* pool, VkDevice boxed_device,
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
const VkAllocationCallbacks* pAllocator);
void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
- android::base::BumpPool* pool,
- VkDevice boxed_device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- uint32_t imageInfoCount,
- uint32_t bufferInfoCount,
- uint32_t bufferViewCount,
- const uint32_t* pImageInfoEntryIndices,
- const uint32_t* pBufferInfoEntryIndices,
- const uint32_t* pBufferViewEntryIndices,
- const VkDescriptorImageInfo* pImageInfos,
- const VkDescriptorBufferInfo* pBufferInfos,
+ android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
+ uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
+ const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
+ const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
const VkBufferView* pBufferViews);
- VkResult on_vkBeginCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
- void on_vkBeginCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
- VkResult on_vkEndCommandBuffer(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer);
- void on_vkEndCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer);
- void on_vkResetCommandBufferAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
- void on_vkCommandBufferHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- uint32_t needHostSync,
- uint32_t sequenceNumber);
+ VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo* pBeginInfo);
+ void on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo* pBeginInfo);
+ VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool, VkCommandBuffer commandBuffer);
+ void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer);
+ void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags);
+ void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
+ VkCommandBuffer commandBuffer, uint32_t needHostSync,
+ uint32_t sequenceNumber);
- VkResult on_vkCreateImageWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkMemoryRequirements* pMemoryRequirements);
- VkResult on_vkCreateBufferWithRequirementsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkMemoryRequirements* pMemoryRequirements);
+ VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ const VkImageCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkImage* pImage,
+ VkMemoryRequirements* pMemoryRequirements);
+ VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ const VkBufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkBuffer* pBuffer,
+ VkMemoryRequirements* pMemoryRequirements);
- void on_vkCmdBindPipeline(android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
+ void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
- void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
+ void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+ uint32_t firstSet, uint32_t descriptorSetCount,
const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
+ uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
- VkResult on_vkCreateRenderPass(android::base::BumpPool* pool,
- VkDevice device,
+ VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice device,
const VkRenderPassCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass);
// VK_GOOGLE_gfxstream
- void on_vkQueueHostSyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t needHostSync,
- uint32_t sequenceNumber);
- void on_vkQueueSubmitAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
- void on_vkQueueWaitIdleAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue);
- void on_vkQueueBindSparseAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo, VkFence fence);
+ void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t needHostSync, uint32_t sequenceNumber);
+ void on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t submitCount, const VkSubmitInfo* pSubmits,
+ VkFence fence);
+ void on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue);
+ void on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+ VkFence fence);
// VK_GOOGLE_gfxstream
- void on_vkGetLinearImageLayoutGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkFormat format,
- VkDeviceSize* pOffset,
- VkDeviceSize* pRowPitchAlignment);
+ void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkFormat format, VkDeviceSize* pOffset,
+ VkDeviceSize* pRowPitchAlignment);
void on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool, VkDevice device,
const VkImageCreateInfo* pCreateInfo,
VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment);
// VK_GOOGLE_gfxstream
- void on_vkQueueFlushCommandsGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- VkCommandBuffer commandBuffer,
- VkDeviceSize dataSize,
- const void* pData);
+ void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
+ VkCommandBuffer commandBuffer, VkDeviceSize dataSize,
+ const void* pData);
void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t descriptorPoolCount,
- const VkDescriptorPool* pDescriptorPools,
- uint32_t descriptorSetCount,
- const VkDescriptorSetLayout* pDescriptorSetLayouts,
- const uint64_t* pDescriptorSetPoolIds,
- const uint32_t* pDescriptorSetWhichPool,
- const uint32_t* pDescriptorSetPendingAllocation,
- const uint32_t* pDescriptorWriteStartingIndices,
- uint32_t pendingDescriptorWriteCount,
+ android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
+ const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
+ const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
+ const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
+ const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
const VkWriteDescriptorSet* pPendingDescriptorWrites);
- void on_vkCollectDescriptorPoolIdsGOOGLE(
- android::base::BumpPool* pool,
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t* pPoolIdCount,
- uint64_t* pPoolIds);
- VkResult on_vkQueueBindSparse(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo, VkFence fence);
- void on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
- android::base::BumpPool* pool,
- VkQueue queue,
- uint32_t waitSemaphoreCount,
- const VkSemaphore* pWaitSemaphores,
- VkImage image);
+ void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
+ VkDescriptorPool descriptorPool,
+ uint32_t* pPoolIdCount, uint64_t* pPoolIds);
+ VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
+ uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+ VkFence fence);
+ void on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(android::base::BumpPool* pool,
+ VkQueue queue, uint32_t waitSemaphoreCount,
+ const VkSemaphore* pWaitSemaphores,
+ VkImage image);
// Fence waits
VkResult waitForFence(VkFence boxed_fence, uint64_t timeout);
@@ -794,183 +540,229 @@
VkResult registerQsriCallback(VkImage boxed_image, VkQsriTimeline::Callback callback);
// Transformations
- void deviceMemoryTransform_tohost(
- VkDeviceMemory* memory, uint32_t memoryCount,
- VkDeviceSize* offset, uint32_t offsetCount,
- VkDeviceSize* size, uint32_t sizeCount,
- uint32_t* typeIndex, uint32_t typeIndexCount,
- uint32_t* typeBits, uint32_t typeBitsCount);
- void deviceMemoryTransform_fromhost(
- VkDeviceMemory* memory, uint32_t memoryCount,
- VkDeviceSize* offset, uint32_t offsetCount,
- VkDeviceSize* size, uint32_t sizeCount,
- uint32_t* typeIndex, uint32_t typeIndexCount,
- uint32_t* typeBits, uint32_t typeBitsCount);
+ void deviceMemoryTransform_tohost(VkDeviceMemory* memory, uint32_t memoryCount,
+ VkDeviceSize* offset, uint32_t offsetCount,
+ VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex,
+ uint32_t typeIndexCount, uint32_t* typeBits,
+ uint32_t typeBitsCount);
+ void deviceMemoryTransform_fromhost(VkDeviceMemory* memory, uint32_t memoryCount,
+ VkDeviceSize* offset, uint32_t offsetCount,
+ VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex,
+ uint32_t typeIndexCount, uint32_t* typeBits,
+ uint32_t typeBitsCount);
// Snapshot access
VkDecoderSnapshot* snapshot();
-#define DEFINE_TRANSFORMED_TYPE_PROTOTYPE(type) \
+#define DEFINE_TRANSFORMED_TYPE_PROTOTYPE(type) \
void transformImpl_##type##_tohost(const type*, uint32_t); \
- void transformImpl_##type##_fromhost(const type*, uint32_t); \
-
-LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_PROTOTYPE)
+ void transformImpl_##type##_fromhost(const type*, uint32_t);
+
+ LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_PROTOTYPE)
// boxed handles
-#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DECL(type) \
+#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DECL(type) \
type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch); \
- void delete_##type(type boxed); \
- type unbox_##type(type boxed); \
- type unboxed_to_boxed_##type(type boxed); \
- VulkanDispatch* dispatch_##type(type boxed); \
+ void delete_##type(type boxed); \
+ type unbox_##type(type boxed); \
+ type unboxed_to_boxed_##type(type boxed); \
+ VulkanDispatch* dispatch_##type(type boxed);
-#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DECL(type) \
+#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DECL(type) \
type new_boxed_non_dispatchable_##type(type underlying); \
- void delete_##type(type boxed); \
- type unbox_##type(type boxed); \
- type unboxed_to_boxed_non_dispatchable_##type(type boxed); \
+ void delete_##type(type boxed); \
+ type unbox_##type(type boxed); \
+ type unboxed_to_boxed_non_dispatchable_##type(type boxed);
-GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DECL)
-GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DECL)
+ GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DECL)
+ GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DECL)
-private:
+ private:
class Impl;
std::unique_ptr<Impl> mImpl;
};
-#define MAKE_HANDLE_MAPPING_FOREACH(type_name, map_impl, map_to_u64_impl, map_from_u64_impl) \
- void mapHandles_##type_name(type_name* handles, size_t count) override { \
- for (size_t i = 0; i < count; ++i) { \
- map_impl; \
- } \
- } \
- void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) override { \
- for (size_t i = 0; i < count; ++i) { \
- map_to_u64_impl; \
- } \
- } \
- void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) override { \
- for (size_t i = 0; i < count; ++i) { \
- map_from_u64_impl; \
- } \
- } \
+#define MAKE_HANDLE_MAPPING_FOREACH(type_name, map_impl, map_to_u64_impl, map_from_u64_impl) \
+ void mapHandles_##type_name(type_name* handles, size_t count) override { \
+ for (size_t i = 0; i < count; ++i) { \
+ map_impl; \
+ } \
+ } \
+ void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, \
+ size_t count) override { \
+ for (size_t i = 0; i < count; ++i) { \
+ map_to_u64_impl; \
+ } \
+ } \
+ void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) \
+ override { \
+ for (size_t i = 0; i < count; ++i) { \
+ map_from_u64_impl; \
+ } \
+ }
-#define BOXED_DISPATCHABLE_UNWRAP_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- if (handles[i]) { handles[i] = m_state->unbox_##type_name(handles[i]); } else { handles[i] = (type_name)nullptr; } ;, \
- if (handles[i]) { handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); } else { handle_u64s[i] = 0; }, \
- if (handle_u64s[i]) { handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); } else { handles[i] = (type_name)nullptr; })
+#define BOXED_DISPATCHABLE_UNWRAP_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, if (handles[i]) { handles[i] = m_state->unbox_##type_name(handles[i]); } else { \
+ handles[i] = (type_name) nullptr; \
+ }; \
+ , \
+ if (handles[i]) { \
+ handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); \
+ } else { handle_u64s[i] = 0; }, \
+ if (handle_u64s[i]) { \
+ handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); \
+ } else { handles[i] = (type_name) nullptr; })
-#define BOXED_NON_DISPATCHABLE_UNWRAP_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- if (handles[i]) { handles[i] = m_state->unbox_##type_name(handles[i]); } else { handles[i] = (type_name)nullptr; } ;, \
- if (handles[i]) { handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); } else { handle_u64s[i] = 0; }, \
- if (handle_u64s[i]) { handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); } else { handles[i] = (type_name)nullptr; })
+#define BOXED_NON_DISPATCHABLE_UNWRAP_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, if (handles[i]) { handles[i] = m_state->unbox_##type_name(handles[i]); } else { \
+ handles[i] = (type_name) nullptr; \
+ }; \
+ , \
+ if (handles[i]) { \
+ handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); \
+ } else { handle_u64s[i] = 0; }, \
+ if (handle_u64s[i]) { \
+ handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); \
+ } else { handles[i] = (type_name) nullptr; })
class BoxedHandleUnwrapMapping : public VulkanHandleMapping {
-public:
- BoxedHandleUnwrapMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
- virtual ~BoxedHandleUnwrapMapping() { }
+ public:
+ BoxedHandleUnwrapMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) {}
+ virtual ~BoxedHandleUnwrapMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_UNWRAP_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_UNWRAP_IMPL)
};
-#define BOXED_DISPATCHABLE_WRAP_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- if (handles[i]) { handles[i] = m_state->unboxed_to_boxed_##type_name(handles[i]); } else { handles[i] = (type_name)nullptr; } ;, \
- if (handles[i]) { handle_u64s[i] = (uint64_t)m_state->unboxed_to_boxed_##type_name(handles[i]); } else { handle_u64s[i] = 0; }, \
- if (handle_u64s[i]) { handles[i] = m_state->unboxed_to_boxed_##type_name((type_name)(uintptr_t)handle_u64s[i]); } else { handles[i] = (type_name)nullptr; })
+#define BOXED_DISPATCHABLE_WRAP_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, \
+ if (handles[i]) { handles[i] = m_state->unboxed_to_boxed_##type_name(handles[i]); } else { \
+ handles[i] = (type_name) nullptr; \
+ }; \
+ , \
+ if (handles[i]) { \
+ handle_u64s[i] = (uint64_t)m_state->unboxed_to_boxed_##type_name(handles[i]); \
+ } else { handle_u64s[i] = 0; }, \
+ if (handle_u64s[i]) { \
+ handles[i] = \
+ m_state->unboxed_to_boxed_##type_name((type_name)(uintptr_t)handle_u64s[i]); \
+ } else { handles[i] = (type_name) nullptr; })
-#define BOXED_NON_DISPATCHABLE_WRAP_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- if (handles[i]) { handles[i] = m_state->unboxed_to_boxed_non_dispatchable_##type_name(handles[i]); } else { handles[i] = (type_name)nullptr; } ;, \
- if (handles[i]) { handle_u64s[i] = (uint64_t)m_state->unboxed_to_boxed_non_dispatchable_##type_name(handles[i]); } else { handle_u64s[i] = 0; }, \
- if (handle_u64s[i]) { handles[i] = m_state->unboxed_to_boxed_non_dispatchable_##type_name((type_name)(uintptr_t)handle_u64s[i]); } else { handles[i] = (type_name)nullptr; })
+#define BOXED_NON_DISPATCHABLE_WRAP_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, \
+ if (handles[i]) { \
+ handles[i] = m_state->unboxed_to_boxed_non_dispatchable_##type_name(handles[i]); \
+ } else { handles[i] = (type_name) nullptr; }; \
+ , \
+ if (handles[i]) { \
+ handle_u64s[i] = \
+ (uint64_t)m_state->unboxed_to_boxed_non_dispatchable_##type_name(handles[i]); \
+ } else { handle_u64s[i] = 0; }, \
+ if (handle_u64s[i]) { \
+ handles[i] = m_state->unboxed_to_boxed_non_dispatchable_##type_name( \
+ (type_name)(uintptr_t)handle_u64s[i]); \
+ } else { handles[i] = (type_name) nullptr; })
class BoxedHandleWrapMapping : public VulkanHandleMapping {
-public:
- BoxedHandleWrapMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
- virtual ~BoxedHandleWrapMapping() { }
+ public:
+ BoxedHandleWrapMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) {}
+ virtual ~BoxedHandleWrapMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_WRAP_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_WRAP_IMPL)
};
// Not used, so we do not define.
-#define BOXED_DISPATCHABLE_CREATE_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- (void)handles[i], \
- (void)handle_u64s[i], \
- (void)handles[i];)
+#define BOXED_DISPATCHABLE_CREATE_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH(type_name, (void)handles[i], (void)handle_u64s[i], \
+ (void)handles[i];)
// Not used, so we do not define.
-#define BOXED_DISPATCHABLE_DESTROY_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- (void)handles[i], \
- (void)handle_u64s[i], \
- (void)handles[i];)
+#define BOXED_DISPATCHABLE_DESTROY_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH(type_name, (void)handles[i], (void)handle_u64s[i], \
+ (void)handles[i];)
// We only use the create/destroy mappings for non-dispatchable handles.
-#define BOXED_NON_DISPATCHABLE_CREATE_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- handles[i] = new_boxed_non_dispatchable_##type_name(handles[i]);, \
- handle_u64s[i] = (uint64_t)new_boxed_non_dispatchable_##type_name(handles[i]), \
- handles[i] = (type_name)new_boxed_non_dispatchable_##type_name((type_name)(uintptr_t)handle_u64s[i]);)
+#define BOXED_NON_DISPATCHABLE_CREATE_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, handles[i] = new_boxed_non_dispatchable_##type_name(handles[i]); \
+ , handle_u64s[i] = (uint64_t)new_boxed_non_dispatchable_##type_name(handles[i]), \
+ handles[i] = (type_name)new_boxed_non_dispatchable_##type_name( \
+ (type_name)(uintptr_t)handle_u64s[i]);)
-#define BOXED_NON_DISPATCHABLE_DESTROY_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- delete_##type_name(handles[i]), \
- (void)handle_u64s[i]; delete_##type_name(handles[i]), \
- (void)handles[i]; delete_##type_name((type_name)handle_u64s[i]))
+#define BOXED_NON_DISPATCHABLE_DESTROY_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH(type_name, delete_##type_name(handles[i]), (void)handle_u64s[i]; \
+ delete_##type_name(handles[i]), (void)handles[i]; \
+ delete_##type_name((type_name)handle_u64s[i]))
-#define BOXED_NON_DISPATCHABLE_UNWRAP_AND_DELETE_IMPL(type_name) \
- MAKE_HANDLE_MAPPING_FOREACH(type_name, \
- if (handles[i]) { auto boxed = handles[i]; handles[i] = m_state->unbox_##type_name(handles[i]); delete_##type_name(boxed); } else { handles[i] = (type_name)nullptr; } ;, \
- if (handles[i]) { auto boxed = handles[i]; handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); delete_##type_name(boxed); } else { handle_u64s[i] = 0; }, \
- if (handle_u64s[i]) { auto boxed = (type_name)(uintptr_t)handle_u64s[i]; handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); delete_##type_name(boxed); } else { handles[i] = (type_name)nullptr; })
+#define BOXED_NON_DISPATCHABLE_UNWRAP_AND_DELETE_IMPL(type_name) \
+ MAKE_HANDLE_MAPPING_FOREACH( \
+ type_name, \
+ if (handles[i]) { \
+ auto boxed = handles[i]; \
+ handles[i] = m_state->unbox_##type_name(handles[i]); \
+ delete_##type_name(boxed); \
+ } else { handles[i] = (type_name) nullptr; }; \
+ , \
+ if (handles[i]) { \
+ auto boxed = handles[i]; \
+ handle_u64s[i] = (uint64_t)m_state->unbox_##type_name(handles[i]); \
+ delete_##type_name(boxed); \
+ } else { handle_u64s[i] = 0; }, \
+ if (handle_u64s[i]) { \
+ auto boxed = (type_name)(uintptr_t)handle_u64s[i]; \
+ handles[i] = m_state->unbox_##type_name((type_name)(uintptr_t)handle_u64s[i]); \
+ delete_##type_name(boxed); \
+ } else { handles[i] = (type_name) nullptr; })
class BoxedHandleCreateMapping : public VulkanHandleMapping {
-public:
- BoxedHandleCreateMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
- virtual ~BoxedHandleCreateMapping() { }
+ public:
+ BoxedHandleCreateMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) {}
+ virtual ~BoxedHandleCreateMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_CREATE_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_CREATE_IMPL)
};
class BoxedHandleDestroyMapping : public VulkanHandleMapping {
-public:
- BoxedHandleDestroyMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
- virtual ~BoxedHandleDestroyMapping() { }
+ public:
+ BoxedHandleDestroyMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) {}
+ virtual ~BoxedHandleDestroyMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_DESTROY_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_DESTROY_IMPL)
};
class BoxedHandleUnwrapAndDeleteMapping : public VulkanHandleMapping {
-public:
- BoxedHandleUnwrapAndDeleteMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
- virtual ~BoxedHandleUnwrapAndDeleteMapping() { }
+ public:
+ BoxedHandleUnwrapAndDeleteMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) {}
+ virtual ~BoxedHandleUnwrapAndDeleteMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(BOXED_DISPATCHABLE_DESTROY_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(BOXED_NON_DISPATCHABLE_UNWRAP_AND_DELETE_IMPL)
};
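Note: a rough usage sketch of the unwrap mapping declared above (illustrative only, not part
of this change; it assumes VkImage is listed in GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES
and that boxedImage0/boxedImage1 are placeholder boxed handles previously received from the
guest). The mapping converts a whole array of boxed handles in place before they reach the
host driver:

    BoxedHandleUnwrapMapping unwrapMapping(VkDecoderGlobalState::get());
    VkImage images[2] = {boxedImage0, boxedImage1};  // boxed guest-visible handles
    unwrapMapping.mapHandles_VkImage(images, 2);     // in place: now raw host VkImage handles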
-#define HANDLE_MAPPING_DECLS(type_name) \
- void mapHandles_##type_name(type_name* handles, size_t count) override; \
- void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) override; \
- void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) override; \
+#define HANDLE_MAPPING_DECLS(type_name) \
+ void mapHandles_##type_name(type_name* handles, size_t count) override; \
+ void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, \
+ size_t count) override; \
+ void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) \
+ override;
class BoxedHandleUnwrapAndDeletePreserveBoxedMapping : public VulkanHandleMapping {
-public:
- BoxedHandleUnwrapAndDeletePreserveBoxedMapping(VkDecoderGlobalState* state) : VulkanHandleMapping(state) { }
+ public:
+ BoxedHandleUnwrapAndDeletePreserveBoxedMapping(VkDecoderGlobalState* state)
+ : VulkanHandleMapping(state) {}
void setup(android::base::BumpPool* pool, uint64_t** bufPtr);
- virtual ~BoxedHandleUnwrapAndDeletePreserveBoxedMapping() { }
+ virtual ~BoxedHandleUnwrapAndDeletePreserveBoxedMapping() {}
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(HANDLE_MAPPING_DECLS)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(HANDLE_MAPPING_DECLS)
-private:
+ private:
void allocPreserve(size_t count);
android::base::BumpPool* mPool = nullptr;
uint64_t** mPreserveBufPtr = nullptr;
};
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VkFormatUtils.h b/stream-servers/vulkan/VkFormatUtils.h
index f17adf4..9cafb27 100644
--- a/stream-servers/vulkan/VkFormatUtils.h
+++ b/stream-servers/vulkan/VkFormatUtils.h
@@ -20,255 +20,254 @@
// - Macros to iterate over categories of formats
// - Add often-used parameters like the bytes per pixel and ASTC block size
-#define LIST_VK_FORMATS_LINEAR(f) \
- f(VK_FORMAT_UNDEFINED, 0) \
- f(VK_FORMAT_R4G4_UNORM_PACK8, 1) \
- f(VK_FORMAT_R4G4B4A4_UNORM_PACK16, 2) \
- f(VK_FORMAT_B4G4R4A4_UNORM_PACK16, 2) \
- f(VK_FORMAT_R5G6B5_UNORM_PACK16, 2) \
- f(VK_FORMAT_B5G6R5_UNORM_PACK16, 2) \
- f(VK_FORMAT_R5G5B5A1_UNORM_PACK16, 2) \
- f(VK_FORMAT_B5G5R5A1_UNORM_PACK16, 2) \
- f(VK_FORMAT_A1R5G5B5_UNORM_PACK16, 2) \
- f(VK_FORMAT_R8_UNORM, 1) \
- f(VK_FORMAT_R8_SNORM, 1) \
- f(VK_FORMAT_R8_USCALED, 1) \
- f(VK_FORMAT_R8_SSCALED, 1) \
- f(VK_FORMAT_R8_UINT, 1) \
- f(VK_FORMAT_R8_SINT, 1) \
- f(VK_FORMAT_R8_SRGB, 1) \
- f(VK_FORMAT_R8G8_UNORM, 2) \
- f(VK_FORMAT_R8G8_SNORM, 2) \
- f(VK_FORMAT_R8G8_USCALED, 2) \
- f(VK_FORMAT_R8G8_SSCALED, 2) \
- f(VK_FORMAT_R8G8_UINT, 2) \
- f(VK_FORMAT_R8G8_SINT, 2) \
- f(VK_FORMAT_R8G8_SRGB, 2) \
- f(VK_FORMAT_R8G8B8_UNORM, 3) \
- f(VK_FORMAT_R8G8B8_SNORM, 3) \
- f(VK_FORMAT_R8G8B8_USCALED, 3) \
- f(VK_FORMAT_R8G8B8_SSCALED, 3) \
- f(VK_FORMAT_R8G8B8_UINT, 3) \
- f(VK_FORMAT_R8G8B8_SINT, 3) \
- f(VK_FORMAT_R8G8B8_SRGB, 3) \
- f(VK_FORMAT_B8G8R8_UNORM, 3) \
- f(VK_FORMAT_B8G8R8_SNORM, 3) \
- f(VK_FORMAT_B8G8R8_USCALED, 3) \
- f(VK_FORMAT_B8G8R8_SSCALED, 3) \
- f(VK_FORMAT_B8G8R8_UINT, 3) \
- f(VK_FORMAT_B8G8R8_SINT, 3) \
- f(VK_FORMAT_B8G8R8_SRGB, 3) \
- f(VK_FORMAT_R8G8B8A8_UNORM, 4) \
- f(VK_FORMAT_R8G8B8A8_SNORM, 4) \
- f(VK_FORMAT_R8G8B8A8_USCALED, 4) \
- f(VK_FORMAT_R8G8B8A8_SSCALED, 4) \
- f(VK_FORMAT_R8G8B8A8_UINT, 4) \
- f(VK_FORMAT_R8G8B8A8_SINT, 4) \
- f(VK_FORMAT_R8G8B8A8_SRGB, 4) \
- f(VK_FORMAT_B8G8R8A8_UNORM, 4) \
- f(VK_FORMAT_B8G8R8A8_SNORM, 4) \
- f(VK_FORMAT_B8G8R8A8_USCALED, 4) \
- f(VK_FORMAT_B8G8R8A8_SSCALED, 4) \
- f(VK_FORMAT_B8G8R8A8_UINT, 4) \
- f(VK_FORMAT_B8G8R8A8_SINT, 4) \
- f(VK_FORMAT_B8G8R8A8_SRGB, 4) \
- f(VK_FORMAT_A8B8G8R8_UNORM_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_SNORM_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_USCALED_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_SSCALED_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_UINT_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_SINT_PACK32, 4) \
- f(VK_FORMAT_A8B8G8R8_SRGB_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_UNORM_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_SNORM_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_USCALED_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_SSCALED_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_UINT_PACK32, 4) \
- f(VK_FORMAT_A2R10G10B10_SINT_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_UNORM_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_SNORM_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_USCALED_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_SSCALED_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_UINT_PACK32, 4) \
- f(VK_FORMAT_A2B10G10R10_SINT_PACK32, 4) \
- f(VK_FORMAT_R16_UNORM, 2) \
- f(VK_FORMAT_R16_SNORM, 2) \
- f(VK_FORMAT_R16_USCALED, 2) \
- f(VK_FORMAT_R16_SSCALED, 2) \
- f(VK_FORMAT_R16_UINT, 2) \
- f(VK_FORMAT_R16_SINT, 2) \
- f(VK_FORMAT_R16_SFLOAT, 2) \
- f(VK_FORMAT_R16G16_UNORM, 4) \
- f(VK_FORMAT_R16G16_SNORM, 4) \
- f(VK_FORMAT_R16G16_USCALED, 4) \
- f(VK_FORMAT_R16G16_SSCALED, 4) \
- f(VK_FORMAT_R16G16_UINT, 4) \
- f(VK_FORMAT_R16G16_SINT, 4) \
- f(VK_FORMAT_R16G16_SFLOAT, 4) \
- f(VK_FORMAT_R16G16B16_UNORM, 6) \
- f(VK_FORMAT_R16G16B16_SNORM, 6) \
- f(VK_FORMAT_R16G16B16_USCALED, 6) \
- f(VK_FORMAT_R16G16B16_SSCALED, 6) \
- f(VK_FORMAT_R16G16B16_UINT, 6) \
- f(VK_FORMAT_R16G16B16_SINT, 6) \
- f(VK_FORMAT_R16G16B16_SFLOAT, 6) \
- f(VK_FORMAT_R16G16B16A16_UNORM, 8) \
- f(VK_FORMAT_R16G16B16A16_SNORM, 8) \
- f(VK_FORMAT_R16G16B16A16_USCALED, 8) \
- f(VK_FORMAT_R16G16B16A16_SSCALED, 8) \
- f(VK_FORMAT_R16G16B16A16_UINT, 8) \
- f(VK_FORMAT_R16G16B16A16_SINT, 8) \
- f(VK_FORMAT_R16G16B16A16_SFLOAT, 8) \
- f(VK_FORMAT_R32_UINT, 4) \
- f(VK_FORMAT_R32_SINT, 4) \
- f(VK_FORMAT_R32_SFLOAT, 4) \
- f(VK_FORMAT_R32G32_UINT, 8) \
- f(VK_FORMAT_R32G32_SINT, 8) \
- f(VK_FORMAT_R32G32_SFLOAT, 8) \
- f(VK_FORMAT_R32G32B32_UINT, 8) \
- f(VK_FORMAT_R32G32B32_SINT, 8) \
- f(VK_FORMAT_R32G32B32_SFLOAT, 8) \
- f(VK_FORMAT_R32G32B32A32_UINT, 16) \
- f(VK_FORMAT_R32G32B32A32_SINT, 16) \
- f(VK_FORMAT_R32G32B32A32_SFLOAT, 16) \
- f(VK_FORMAT_R64_UINT, 8) \
- f(VK_FORMAT_R64_SINT, 8) \
- f(VK_FORMAT_R64_SFLOAT, 8) \
- f(VK_FORMAT_R64G64_UINT, 16) \
- f(VK_FORMAT_R64G64_SINT, 16) \
- f(VK_FORMAT_R64G64_SFLOAT, 16) \
- f(VK_FORMAT_R64G64B64_UINT, 24) \
- f(VK_FORMAT_R64G64B64_SINT, 24) \
- f(VK_FORMAT_R64G64B64_SFLOAT, 24) \
- f(VK_FORMAT_R64G64B64A64_UINT, 32) \
- f(VK_FORMAT_R64G64B64A64_SINT, 32) \
- f(VK_FORMAT_R64G64B64A64_SFLOAT, 32) \
- f(VK_FORMAT_B10G11R11_UFLOAT_PACK32, 4) \
- f(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, 4) \
- f(VK_FORMAT_D16_UNORM, 2) \
- f(VK_FORMAT_X8_D24_UNORM_PACK32, 4) \
- f(VK_FORMAT_D32_SFLOAT, 4) \
- f(VK_FORMAT_S8_UINT, 1) \
- f(VK_FORMAT_D16_UNORM_S8_UINT, 3) \
- f(VK_FORMAT_D24_UNORM_S8_UINT, 4) \
- f(VK_FORMAT_D32_SFLOAT_S8_UINT, 5) \
- f(VK_FORMAT_R10X6_UNORM_PACK16, 2) \
- f(VK_FORMAT_R10X6G10X6_UNORM_2PACK16, 4) \
+#define LIST_VK_FORMATS_LINEAR(f) \
+ f(VK_FORMAT_UNDEFINED, 0) \
+ f(VK_FORMAT_R4G4_UNORM_PACK8, 1) \
+ f(VK_FORMAT_R4G4B4A4_UNORM_PACK16, 2) \
+ f(VK_FORMAT_B4G4R4A4_UNORM_PACK16, 2) \
+ f(VK_FORMAT_R5G6B5_UNORM_PACK16, 2) \
+ f(VK_FORMAT_B5G6R5_UNORM_PACK16, 2) \
+ f(VK_FORMAT_R5G5B5A1_UNORM_PACK16, 2) \
+ f(VK_FORMAT_B5G5R5A1_UNORM_PACK16, 2) \
+ f(VK_FORMAT_A1R5G5B5_UNORM_PACK16, 2) \
+ f(VK_FORMAT_R8_UNORM, 1) \
+ f(VK_FORMAT_R8_SNORM, 1) \
+ f(VK_FORMAT_R8_USCALED, 1) \
+ f(VK_FORMAT_R8_SSCALED, 1) \
+ f(VK_FORMAT_R8_UINT, 1) \
+ f(VK_FORMAT_R8_SINT, 1) \
+ f(VK_FORMAT_R8_SRGB, 1) \
+ f(VK_FORMAT_R8G8_UNORM, 2) \
+ f(VK_FORMAT_R8G8_SNORM, 2) \
+ f(VK_FORMAT_R8G8_USCALED, 2) \
+ f(VK_FORMAT_R8G8_SSCALED, 2) \
+ f(VK_FORMAT_R8G8_UINT, 2) \
+ f(VK_FORMAT_R8G8_SINT, 2) \
+ f(VK_FORMAT_R8G8_SRGB, 2) \
+ f(VK_FORMAT_R8G8B8_UNORM, 3) \
+ f(VK_FORMAT_R8G8B8_SNORM, 3) \
+ f(VK_FORMAT_R8G8B8_USCALED, 3) \
+ f(VK_FORMAT_R8G8B8_SSCALED, 3) \
+ f(VK_FORMAT_R8G8B8_UINT, 3) \
+ f(VK_FORMAT_R8G8B8_SINT, 3) \
+ f(VK_FORMAT_R8G8B8_SRGB, 3) \
+ f(VK_FORMAT_B8G8R8_UNORM, 3) \
+ f(VK_FORMAT_B8G8R8_SNORM, 3) \
+ f(VK_FORMAT_B8G8R8_USCALED, 3) \
+ f(VK_FORMAT_B8G8R8_SSCALED, 3) \
+ f(VK_FORMAT_B8G8R8_UINT, 3) \
+ f(VK_FORMAT_B8G8R8_SINT, 3) \
+ f(VK_FORMAT_B8G8R8_SRGB, 3) \
+ f(VK_FORMAT_R8G8B8A8_UNORM, 4) \
+ f(VK_FORMAT_R8G8B8A8_SNORM, 4) \
+ f(VK_FORMAT_R8G8B8A8_USCALED, 4) \
+ f(VK_FORMAT_R8G8B8A8_SSCALED, 4) \
+ f(VK_FORMAT_R8G8B8A8_UINT, 4) \
+ f(VK_FORMAT_R8G8B8A8_SINT, 4) \
+ f(VK_FORMAT_R8G8B8A8_SRGB, 4) \
+ f(VK_FORMAT_B8G8R8A8_UNORM, 4) \
+ f(VK_FORMAT_B8G8R8A8_SNORM, 4) \
+ f(VK_FORMAT_B8G8R8A8_USCALED, 4) \
+ f(VK_FORMAT_B8G8R8A8_SSCALED, 4) \
+ f(VK_FORMAT_B8G8R8A8_UINT, 4) \
+ f(VK_FORMAT_B8G8R8A8_SINT, 4) \
+ f(VK_FORMAT_B8G8R8A8_SRGB, 4) \
+ f(VK_FORMAT_A8B8G8R8_UNORM_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_SNORM_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_USCALED_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_SSCALED_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_UINT_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_SINT_PACK32, 4) \
+ f(VK_FORMAT_A8B8G8R8_SRGB_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_UNORM_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_SNORM_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_USCALED_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_SSCALED_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_UINT_PACK32, 4) \
+ f(VK_FORMAT_A2R10G10B10_SINT_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_UNORM_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_SNORM_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_USCALED_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_SSCALED_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_UINT_PACK32, 4) \
+ f(VK_FORMAT_A2B10G10R10_SINT_PACK32, 4) \
+ f(VK_FORMAT_R16_UNORM, 2) \
+ f(VK_FORMAT_R16_SNORM, 2) \
+ f(VK_FORMAT_R16_USCALED, 2) \
+ f(VK_FORMAT_R16_SSCALED, 2) \
+ f(VK_FORMAT_R16_UINT, 2) \
+ f(VK_FORMAT_R16_SINT, 2) \
+ f(VK_FORMAT_R16_SFLOAT, 2) \
+ f(VK_FORMAT_R16G16_UNORM, 4) \
+ f(VK_FORMAT_R16G16_SNORM, 4) \
+ f(VK_FORMAT_R16G16_USCALED, 4) \
+ f(VK_FORMAT_R16G16_SSCALED, 4) \
+ f(VK_FORMAT_R16G16_UINT, 4) \
+ f(VK_FORMAT_R16G16_SINT, 4) \
+ f(VK_FORMAT_R16G16_SFLOAT, 4) \
+ f(VK_FORMAT_R16G16B16_UNORM, 6) \
+ f(VK_FORMAT_R16G16B16_SNORM, 6) \
+ f(VK_FORMAT_R16G16B16_USCALED, 6) \
+ f(VK_FORMAT_R16G16B16_SSCALED, 6) \
+ f(VK_FORMAT_R16G16B16_UINT, 6) \
+ f(VK_FORMAT_R16G16B16_SINT, 6) \
+ f(VK_FORMAT_R16G16B16_SFLOAT, 6) \
+ f(VK_FORMAT_R16G16B16A16_UNORM, 8) \
+ f(VK_FORMAT_R16G16B16A16_SNORM, 8) \
+ f(VK_FORMAT_R16G16B16A16_USCALED, 8) \
+ f(VK_FORMAT_R16G16B16A16_SSCALED, 8) \
+ f(VK_FORMAT_R16G16B16A16_UINT, 8) \
+ f(VK_FORMAT_R16G16B16A16_SINT, 8) \
+ f(VK_FORMAT_R16G16B16A16_SFLOAT, 8) \
+ f(VK_FORMAT_R32_UINT, 4) \
+ f(VK_FORMAT_R32_SINT, 4) \
+ f(VK_FORMAT_R32_SFLOAT, 4) \
+ f(VK_FORMAT_R32G32_UINT, 8) \
+ f(VK_FORMAT_R32G32_SINT, 8) \
+ f(VK_FORMAT_R32G32_SFLOAT, 8) \
+ f(VK_FORMAT_R32G32B32_UINT, 8) \
+ f(VK_FORMAT_R32G32B32_SINT, 8) \
+ f(VK_FORMAT_R32G32B32_SFLOAT, 8) \
+ f(VK_FORMAT_R32G32B32A32_UINT, 16) \
+ f(VK_FORMAT_R32G32B32A32_SINT, 16) \
+ f(VK_FORMAT_R32G32B32A32_SFLOAT, 16) \
+ f(VK_FORMAT_R64_UINT, 8) \
+ f(VK_FORMAT_R64_SINT, 8) \
+ f(VK_FORMAT_R64_SFLOAT, 8) \
+ f(VK_FORMAT_R64G64_UINT, 16) \
+ f(VK_FORMAT_R64G64_SINT, 16) \
+ f(VK_FORMAT_R64G64_SFLOAT, 16) \
+ f(VK_FORMAT_R64G64B64_UINT, 24) \
+ f(VK_FORMAT_R64G64B64_SINT, 24) \
+ f(VK_FORMAT_R64G64B64_SFLOAT, 24) \
+ f(VK_FORMAT_R64G64B64A64_UINT, 32) \
+ f(VK_FORMAT_R64G64B64A64_SINT, 32) \
+ f(VK_FORMAT_R64G64B64A64_SFLOAT, 32) \
+ f(VK_FORMAT_B10G11R11_UFLOAT_PACK32, 4) \
+ f(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, 4) \
+ f(VK_FORMAT_D16_UNORM, 2) \
+ f(VK_FORMAT_X8_D24_UNORM_PACK32, 4) \
+ f(VK_FORMAT_D32_SFLOAT, 4) \
+ f(VK_FORMAT_S8_UINT, 1) \
+ f(VK_FORMAT_D16_UNORM_S8_UINT, 3) \
+ f(VK_FORMAT_D24_UNORM_S8_UINT, 4) \
+ f(VK_FORMAT_D32_SFLOAT_S8_UINT, 5) \
+ f(VK_FORMAT_R10X6_UNORM_PACK16, 2) \
+ f(VK_FORMAT_R10X6G10X6_UNORM_2PACK16, 4) \
f(VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, 8) \
- f(VK_FORMAT_R12X4_UNORM_PACK16, 2) \
- f(VK_FORMAT_R12X4G12X4_UNORM_2PACK16, 4) \
- f(VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, 8) \
+ f(VK_FORMAT_R12X4_UNORM_PACK16, 2) \
+ f(VK_FORMAT_R12X4G12X4_UNORM_2PACK16, 4) \
+ f(VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, 8)
// TODO: BC formats
#define LIST_VK_FORMATS_COMPRESSED_BC(f) \
- f(VK_FORMAT_BC1_RGB_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC1_RGB_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_BC1_RGB_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC1_RGB_SRGB_BLOCK, 0) \
f(VK_FORMAT_BC1_RGBA_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC1_RGBA_SRGB_BLOCK, 0) \
- f(VK_FORMAT_BC2_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC2_SRGB_BLOCK, 0) \
- f(VK_FORMAT_BC3_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC3_SRGB_BLOCK, 0) \
- f(VK_FORMAT_BC4_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC4_SNORM_BLOCK, 0) \
- f(VK_FORMAT_BC5_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC5_SNORM_BLOCK, 0) \
- f(VK_FORMAT_BC6H_UFLOAT_BLOCK, 0) \
- f(VK_FORMAT_BC6H_SFLOAT_BLOCK, 0) \
- f(VK_FORMAT_BC7_UNORM_BLOCK, 0) \
- f(VK_FORMAT_BC7_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_BC1_RGBA_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_BC2_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC2_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_BC3_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC3_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_BC4_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC4_SNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC5_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC5_SNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC6H_UFLOAT_BLOCK, 0) \
+ f(VK_FORMAT_BC6H_SFLOAT_BLOCK, 0) \
+ f(VK_FORMAT_BC7_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_BC7_SRGB_BLOCK, 0)
// TODO: ETC2 format parameters
-#define LIST_VK_FORMATS_COMPRESSED_ETC2(f) \
- f(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, 0) \
- f(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, 0) \
+#define LIST_VK_FORMATS_COMPRESSED_ETC2(f) \
+ f(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, 0) \
f(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, 0) \
- f(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, 0) \
f(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, 0) \
- f(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, 0) \
+ f(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, 0)
// TODO: EAC format parameters
-#define LIST_VK_FORMATS_COMPRESSED_EAC(f) \
- f(VK_FORMAT_EAC_R11_UNORM_BLOCK, 0) \
- f(VK_FORMAT_EAC_R11_SNORM_BLOCK, 0) \
+#define LIST_VK_FORMATS_COMPRESSED_EAC(f) \
+ f(VK_FORMAT_EAC_R11_UNORM_BLOCK, 0) \
+ f(VK_FORMAT_EAC_R11_SNORM_BLOCK, 0) \
f(VK_FORMAT_EAC_R11G11_UNORM_BLOCK, 0) \
- f(VK_FORMAT_EAC_R11G11_SNORM_BLOCK, 0) \
+ f(VK_FORMAT_EAC_R11G11_SNORM_BLOCK, 0)
// ASTC: block dimensions, then whether it is SRGB
-#define LIST_VK_FORMATS_COMPRESSED_ASTC(f) \
- f(VK_FORMAT_ASTC_4x4_UNORM_BLOCK, 4, 4, 0) \
- f(VK_FORMAT_ASTC_4x4_SRGB_BLOCK, 4, 4, 1) \
- f(VK_FORMAT_ASTC_5x4_UNORM_BLOCK, 5, 4, 0) \
- f(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, 5, 4, 1) \
- f(VK_FORMAT_ASTC_5x5_UNORM_BLOCK, 5, 5, 0) \
- f(VK_FORMAT_ASTC_5x5_SRGB_BLOCK, 5, 5, 1) \
- f(VK_FORMAT_ASTC_6x5_UNORM_BLOCK, 6, 5, 0) \
- f(VK_FORMAT_ASTC_6x5_SRGB_BLOCK, 6, 5, 1) \
- f(VK_FORMAT_ASTC_6x6_UNORM_BLOCK, 6, 6, 0) \
- f(VK_FORMAT_ASTC_6x6_SRGB_BLOCK, 6, 6, 1) \
- f(VK_FORMAT_ASTC_8x5_UNORM_BLOCK, 8, 5, 0) \
- f(VK_FORMAT_ASTC_8x5_SRGB_BLOCK, 8, 5, 1) \
- f(VK_FORMAT_ASTC_8x6_UNORM_BLOCK, 8, 6, 0) \
- f(VK_FORMAT_ASTC_8x6_SRGB_BLOCK, 8, 6, 1) \
- f(VK_FORMAT_ASTC_8x8_UNORM_BLOCK, 8, 8, 0) \
- f(VK_FORMAT_ASTC_8x8_SRGB_BLOCK, 8, 8, 1) \
- f(VK_FORMAT_ASTC_10x5_UNORM_BLOCK, 10, 5, 0) \
- f(VK_FORMAT_ASTC_10x5_SRGB_BLOCK, 10, 5, 1) \
- f(VK_FORMAT_ASTC_10x6_UNORM_BLOCK, 10, 6, 0) \
- f(VK_FORMAT_ASTC_10x6_SRGB_BLOCK, 10, 6, 1) \
- f(VK_FORMAT_ASTC_10x8_UNORM_BLOCK, 10, 8, 0) \
- f(VK_FORMAT_ASTC_10x8_SRGB_BLOCK, 10, 8, 1) \
+#define LIST_VK_FORMATS_COMPRESSED_ASTC(f) \
+ f(VK_FORMAT_ASTC_4x4_UNORM_BLOCK, 4, 4, 0) \
+ f(VK_FORMAT_ASTC_4x4_SRGB_BLOCK, 4, 4, 1) \
+ f(VK_FORMAT_ASTC_5x4_UNORM_BLOCK, 5, 4, 0) \
+ f(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, 5, 4, 1) \
+ f(VK_FORMAT_ASTC_5x5_UNORM_BLOCK, 5, 5, 0) \
+ f(VK_FORMAT_ASTC_5x5_SRGB_BLOCK, 5, 5, 1) \
+ f(VK_FORMAT_ASTC_6x5_UNORM_BLOCK, 6, 5, 0) \
+ f(VK_FORMAT_ASTC_6x5_SRGB_BLOCK, 6, 5, 1) \
+ f(VK_FORMAT_ASTC_6x6_UNORM_BLOCK, 6, 6, 0) \
+ f(VK_FORMAT_ASTC_6x6_SRGB_BLOCK, 6, 6, 1) \
+ f(VK_FORMAT_ASTC_8x5_UNORM_BLOCK, 8, 5, 0) \
+ f(VK_FORMAT_ASTC_8x5_SRGB_BLOCK, 8, 5, 1) \
+ f(VK_FORMAT_ASTC_8x6_UNORM_BLOCK, 8, 6, 0) \
+ f(VK_FORMAT_ASTC_8x6_SRGB_BLOCK, 8, 6, 1) \
+ f(VK_FORMAT_ASTC_8x8_UNORM_BLOCK, 8, 8, 0) \
+ f(VK_FORMAT_ASTC_8x8_SRGB_BLOCK, 8, 8, 1) \
+ f(VK_FORMAT_ASTC_10x5_UNORM_BLOCK, 10, 5, 0) \
+ f(VK_FORMAT_ASTC_10x5_SRGB_BLOCK, 10, 5, 1) \
+ f(VK_FORMAT_ASTC_10x6_UNORM_BLOCK, 10, 6, 0) \
+ f(VK_FORMAT_ASTC_10x6_SRGB_BLOCK, 10, 6, 1) \
+ f(VK_FORMAT_ASTC_10x8_UNORM_BLOCK, 10, 8, 0) \
+ f(VK_FORMAT_ASTC_10x8_SRGB_BLOCK, 10, 8, 1) \
f(VK_FORMAT_ASTC_10x10_UNORM_BLOCK, 10, 10, 0) \
- f(VK_FORMAT_ASTC_10x10_SRGB_BLOCK, 10, 10, 1) \
+ f(VK_FORMAT_ASTC_10x10_SRGB_BLOCK, 10, 10, 1) \
f(VK_FORMAT_ASTC_12x10_UNORM_BLOCK, 12, 10, 0) \
- f(VK_FORMAT_ASTC_12x10_SRGB_BLOCK, 12, 10, 1) \
+ f(VK_FORMAT_ASTC_12x10_SRGB_BLOCK, 12, 10, 1) \
f(VK_FORMAT_ASTC_12x12_UNORM_BLOCK, 12, 12, 0) \
- f(VK_FORMAT_ASTC_12x12_SRGB_BLOCK, 12, 12, 1) \
+ f(VK_FORMAT_ASTC_12x12_SRGB_BLOCK, 12, 12, 1)
// TODO: Come up with parameters to describe multiplane formats
-#define LIST_VK_FORMATS_MULTIPLANE(f) \
- f(VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, 0) \
- f(VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, 0) \
+#define LIST_VK_FORMATS_MULTIPLANE(f) \
+ f(VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, 0) \
+ f(VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, 0) \
f(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, 0) \
+ f(VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, 0) \
f(VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, 0) \
- f(VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, 0) \
- f(VK_FORMAT_G8B8G8R8_422_UNORM, 0) \
- f(VK_FORMAT_B8G8R8G8_422_UNORM, 0) \
- f(VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, 0) \
- f(VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, 0) \
+ f(VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, 0) \
+ f(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, 0) \
+ f(VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, 0) \
+ f(VK_FORMAT_G8B8G8R8_422_UNORM, 0) \
+ f(VK_FORMAT_B8G8R8G8_422_UNORM, 0) \
+ f(VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, 0) \
+ f(VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, 0) \
f(VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, 0) \
- f(VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, 0) \
- f(VK_FORMAT_G16B16G16R16_422_UNORM, 0) \
- f(VK_FORMAT_B16G16R16G16_422_UNORM, 0) \
- f(VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, 0) \
- f(VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, 0) \
+ f(VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, 0) \
+ f(VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, 0) \
+ f(VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, 0) \
+ f(VK_FORMAT_G16B16G16R16_422_UNORM, 0) \
+ f(VK_FORMAT_B16G16R16G16_422_UNORM, 0) \
+ f(VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, 0) \
+ f(VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, 0) \
f(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, 0) \
- f(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, 0) \
+ f(VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, 0) \
+ f(VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, 0) \
+ f(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, 0) \
f(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, 0) \
+ f(VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, 0) \
f(VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, 0) \
- f(VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, 0) \
+ f(VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, 0)
// TODO: Come up with parameters to describe PVRTC formats
-#define LIST_VK_FORMATS_COMPRESSED_PVRTC(f) \
+#define LIST_VK_FORMATS_COMPRESSED_PVRTC(f) \
f(VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, 0) \
f(VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, 0) \
f(VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, 0) \
f(VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, 0) \
- f(VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, 0) \
- f(VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, 0) \
- f(VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, 0) \
- f(VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, 0) \
+ f(VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, 0) \
+ f(VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, 0) \
+ f(VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, 0) \
+ f(VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, 0)
static inline int getLinearFormatPixelSize(VkFormat format) {
-
#define VK_FORMATS_LINEAR_GET_PIXEL_SIZE(f, size) \
if (format == f) return size;
diff --git a/stream-servers/vulkan/VkReconstruction.cpp b/stream-servers/vulkan/VkReconstruction.cpp
index c6fc2737..ea96d88 100644
--- a/stream-servers/vulkan/VkReconstruction.cpp
+++ b/stream-servers/vulkan/VkReconstruction.cpp
@@ -13,38 +13,37 @@
// limitations under the License.
#include "VkReconstruction.h"
-#include "base/EntityManager.h"
-
-#include "VkDecoder.h"
-#include "IOStream.h"
+#include <string.h>
#include <unordered_map>
-#include <string.h>
+#include "IOStream.h"
+#include "VkDecoder.h"
+#include "base/EntityManager.h"
#define DEBUG_RECONSTRUCTION 0
#if DEBUG_RECONSTRUCTION
-#define DEBUG_RECON(fmt,...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
+#define DEBUG_RECON(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#else
-#define DEBUG_RECON(fmt,...)
+#define DEBUG_RECON(fmt, ...)
#endif
VkReconstruction::VkReconstruction() = default;
std::vector<uint64_t> typeTagSortedHandles(const std::vector<uint64_t>& handles) {
- using EntityManagerTypeForHandles =
- android::base::EntityManager<32, 16, 16, int>;
+ using EntityManagerTypeForHandles = android::base::EntityManager<32, 16, 16, int>;
std::vector<uint64_t> res = handles;
std::sort(res.begin(), res.end(), [](uint64_t lhs, uint64_t rhs) {
return EntityManagerTypeForHandles::getHandleType(lhs) <
- EntityManagerTypeForHandles::getHandleType(rhs); });
+ EntityManagerTypeForHandles::getHandleType(rhs);
+ });
return res;
}
@@ -59,23 +58,24 @@
std::unordered_map<uint64_t, uint64_t> backDeps;
mHandleReconstructions.forEachLiveComponent_const(
- [&backDeps](bool live, uint64_t componentHandle, uint64_t entityHandle, const HandleReconstruction& item) {
- for (auto handle : item.childHandles) {
- backDeps[handle] = entityHandle;
- }
- });
+ [&backDeps](bool live, uint64_t componentHandle, uint64_t entityHandle,
+ const HandleReconstruction& item) {
+ for (auto handle : item.childHandles) {
+ backDeps[handle] = entityHandle;
+ }
+ });
std::vector<uint64_t> topoOrder;
mHandleReconstructions.forEachLiveComponent_const(
- [&topoOrder, &backDeps](bool live, uint64_t componentHandle, uint64_t entityHandle, const HandleReconstruction& item) {
- // Start with populating the roots
- if (backDeps.find(entityHandle) == backDeps.end()) {
- DEBUG_RECON("found root: 0x%llx",
- (unsigned long long)entityHandle);
- topoOrder.push_back(entityHandle);
- }
- });
+ [&topoOrder, &backDeps](bool live, uint64_t componentHandle, uint64_t entityHandle,
+ const HandleReconstruction& item) {
+ // Start with populating the roots
+ if (backDeps.find(entityHandle) == backDeps.end()) {
+ DEBUG_RECON("found root: 0x%llx", (unsigned long long)entityHandle);
+ topoOrder.push_back(entityHandle);
+ }
+ });
std::vector<uint64_t> next;
@@ -88,19 +88,15 @@
topoOrder = typeTagSortedHandles(topoOrder);
while (!topoOrder.empty()) {
-
next.clear();
for (auto handle : topoOrder) {
auto item = mHandleReconstructions.get(handle);
for (auto apiHandle : item->apiRefs) {
-
if (uniqApiRefsToTopoOrder.find(apiHandle) == uniqApiRefsToTopoOrder.end()) {
- DEBUG_RECON("level %zu: 0x%llx api ref: 0x%llx",
- topoLevel,
- (unsigned long long)handle,
- (unsigned long long)apiHandle);
+ DEBUG_RECON("level %zu: 0x%llx api ref: 0x%llx", topoLevel,
+ (unsigned long long)handle, (unsigned long long)apiHandle);
auto& refs = uniqApiRefsByTopoOrder[topoLevel];
refs.push_back(apiHandle);
}
@@ -122,20 +118,18 @@
uniqApiRefsByTopoOrder[topoLevel] = getOrderedUniqueModifyApis();
++topoLevel;
- size_t totalApiTraceSize = 0; // 4 bytes to store size of created handles
+ size_t totalApiTraceSize = 0; // 4 bytes to store size of created handles
for (size_t i = 0; i < topoLevel; ++i) {
for (auto apiHandle : uniqApiRefsByTopoOrder[i]) {
auto item = mApiTrace.get(apiHandle);
- totalApiTraceSize += 4; // opcode
- totalApiTraceSize += 4; // buffer size of trace
- totalApiTraceSize += item->traceBytes; // the actual trace
+ totalApiTraceSize += 4; // opcode
+ totalApiTraceSize += 4; // buffer size of trace
+ totalApiTraceSize += item->traceBytes; // the actual trace
}
}
- DEBUG_RECON(
- "total api trace size: %zu",
- totalApiTraceSize);
+ DEBUG_RECON("total api trace size: %zu", totalApiTraceSize);
std::vector<uint64_t> createdHandleBuffer;
@@ -161,33 +155,34 @@
memcpy(apiTracePtr, &item->opCode, sizeof(uint32_t));
apiTracePtr += 4;
uint32_t traceBytesForSnapshot = item->traceBytes + 8;
- memcpy(apiTracePtr, &traceBytesForSnapshot, sizeof(uint32_t)); // and 8 bytes for 'self' struct of { opcode, packetlen } as that is what decoder expects
+ memcpy(apiTracePtr, &traceBytesForSnapshot,
+ sizeof(uint32_t)); // and 8 bytes for 'self' struct of { opcode, packetlen } as
+ // that is what decoder expects
apiTracePtr += 4;
memcpy(apiTracePtr, item->trace.data(), item->traceBytes);
apiTracePtr += item->traceBytes;
}
}
- DEBUG_RECON(
- "created handle buffer size: %zu trace: %zu",
- createdHandleBuffer.size(), apiTraceBuffer.size());
+ DEBUG_RECON("created handle buffer size: %zu trace: %zu", createdHandleBuffer.size(),
+ apiTraceBuffer.size());
- android::base::saveBufferRaw(stream, (char*)(createdHandleBuffer.data()), createdHandleBuffer.size() * sizeof(uint64_t));
+ android::base::saveBufferRaw(stream, (char*)(createdHandleBuffer.data()),
+ createdHandleBuffer.size() * sizeof(uint64_t));
android::base::saveBufferRaw(stream, (char*)(apiTraceBuffer.data()), apiTraceBuffer.size());
}
class TrivialStream : public IOStream {
-public:
- TrivialStream() : IOStream(4) { }
+ public:
+ TrivialStream() : IOStream(4) {}
virtual ~TrivialStream() = default;
void* allocBuffer(size_t minSize) {
size_t allocSize = (m_bufsize < minSize ? minSize : m_bufsize);
if (!m_buf) {
- m_buf = (unsigned char *)malloc(allocSize);
- }
- else if (m_bufsize < allocSize) {
- unsigned char *p = (unsigned char *)realloc(m_buf, allocSize);
+ m_buf = (unsigned char*)malloc(allocSize);
+ } else if (m_bufsize < allocSize) {
+ unsigned char* p = (unsigned char*)realloc(m_buf, allocSize);
if (p != NULL) {
m_buf = p;
m_bufsize = allocSize;
@@ -207,22 +202,16 @@
return writeFully(m_buf, size);
}
- int writeFully(const void *buf, size_t len) {
- return 0;
- }
+ int writeFully(const void* buf, size_t len) { return 0; }
- const unsigned char* readFully(void *buf, size_t len) {
- return NULL;
- }
+ const unsigned char* readFully(void* buf, size_t len) { return NULL; }
virtual void* getDmaForReading(uint64_t guest_paddr) { return nullptr; }
- virtual void unlockDma(uint64_t guest_paddr) { }
+ virtual void unlockDma(uint64_t guest_paddr) {}
-protected:
- virtual const unsigned char *readRaw(void *buf, size_t *inout_len) {
- return nullptr;
- }
- virtual void onSave(android::base::Stream* stream) { }
+ protected:
+ virtual const unsigned char* readRaw(void* buf, size_t* inout_len) { return nullptr; }
+ virtual void onSave(android::base::Stream* stream) {}
virtual unsigned char* onLoad(android::base::Stream* stream) { return nullptr; }
};
@@ -237,20 +226,19 @@
android::base::loadBuffer(stream, &createdHandleBuffer);
android::base::loadBuffer(stream, &apiTraceBuffer);
- DEBUG_RECON(
- "created handle buffer size: %zu trace: %zu",
- createdHandleBuffer.size(), apiTraceBuffer.size());
+ DEBUG_RECON("created handle buffer size: %zu trace: %zu", createdHandleBuffer.size(),
+ apiTraceBuffer.size());
uint32_t createdHandleBufferSize = createdHandleBuffer.size();
mLoadedTrace.resize(4 + createdHandleBufferSize + apiTraceBuffer.size());
- unsigned char* finalTraceData =
- (unsigned char*)(mLoadedTrace.data());
+ unsigned char* finalTraceData = (unsigned char*)(mLoadedTrace.data());
memcpy(finalTraceData, &createdHandleBufferSize, sizeof(uint32_t));
memcpy(finalTraceData + 4, createdHandleBuffer.data(), createdHandleBufferSize);
- memcpy(finalTraceData + 4 + createdHandleBufferSize, apiTraceBuffer.data(), apiTraceBuffer.size());
+ memcpy(finalTraceData + 4 + createdHandleBufferSize, apiTraceBuffer.data(),
+ apiTraceBuffer.size());
VkDecoder decoderForLoading;
// A decoder that is set for snapshot load will load up the created handles first,
@@ -287,7 +275,8 @@
return mApiTrace.get(h);
}
-void VkReconstruction::setApiTrace(VkReconstruction::ApiInfo* apiInfo, uint32_t opCode, const uint8_t* traceBegin, size_t traceBytes) {
+void VkReconstruction::setApiTrace(VkReconstruction::ApiInfo* apiInfo, uint32_t opCode,
+ const uint8_t* traceBegin, size_t traceBytes) {
if (apiInfo->trace.size() < traceBytes) apiInfo->trace.resize(traceBytes);
apiInfo->opCode = opCode;
memcpy(apiInfo->trace.data(), traceBegin, traceBytes);
@@ -299,29 +288,42 @@
size_t traceBytesTotal = 0;
- mApiTrace.forEachLiveEntry_const([&traceBytesTotal](bool live, uint64_t handle, const ApiInfo& info) {
- fprintf(stderr, "VkReconstruction::%s: api handle 0x%llx: %s\n", __func__, (unsigned long long)handle, goldfish_vk::api_opcode_to_string(info.opCode));
- traceBytesTotal += info.traceBytes;
- });
+ mApiTrace.forEachLiveEntry_const(
+ [&traceBytesTotal](bool live, uint64_t handle, const ApiInfo& info) {
+ fprintf(stderr, "VkReconstruction::%s: api handle 0x%llx: %s\n", __func__,
+ (unsigned long long)handle, goldfish_vk::api_opcode_to_string(info.opCode));
+ traceBytesTotal += info.traceBytes;
+ });
- mHandleReconstructions.forEachLiveComponent_const([this](bool live, uint64_t componentHandle, uint64_t entityHandle, const HandleReconstruction& reconstruction) {
- fprintf(stderr, "VkReconstruction::%s: %p handle 0x%llx api refs:\n", __func__, this, (unsigned long long)entityHandle);
- for (auto apiHandle : reconstruction.apiRefs) {
- auto apiInfo = mApiTrace.get(apiHandle);
- const char* apiName = apiInfo ? goldfish_vk::api_opcode_to_string(apiInfo->opCode) : "unalloced";
- fprintf(stderr, "VkReconstruction::%s: 0x%llx: %s\n", __func__, (unsigned long long)apiHandle, apiName);
- for (auto createdHandle : apiInfo->createdHandles) {
- fprintf(stderr, "VkReconstruction::%s: created 0x%llx\n", __func__, (unsigned long long)createdHandle);
+ mHandleReconstructions.forEachLiveComponent_const(
+ [this](bool live, uint64_t componentHandle, uint64_t entityHandle,
+ const HandleReconstruction& reconstruction) {
+ fprintf(stderr, "VkReconstruction::%s: %p handle 0x%llx api refs:\n", __func__, this,
+ (unsigned long long)entityHandle);
+ for (auto apiHandle : reconstruction.apiRefs) {
+ auto apiInfo = mApiTrace.get(apiHandle);
+ const char* apiName =
+ apiInfo ? goldfish_vk::api_opcode_to_string(apiInfo->opCode) : "unalloced";
+ fprintf(stderr, "VkReconstruction::%s: 0x%llx: %s\n", __func__,
+ (unsigned long long)apiHandle, apiName);
+ for (auto createdHandle : apiInfo->createdHandles) {
+ fprintf(stderr, "VkReconstruction::%s: created 0x%llx\n", __func__,
+ (unsigned long long)createdHandle);
+ }
}
- }
- });
+ });
- mHandleModifications.forEachLiveComponent_const([this](bool live, uint64_t componentHandle, uint64_t entityHandle, const HandleModification& modification) {
- fprintf(stderr, "VkReconstruction::%s: mod: %p handle 0x%llx api refs:\n", __func__, this, (unsigned long long)entityHandle);
+ mHandleModifications.forEachLiveComponent_const([this](bool live, uint64_t componentHandle,
+ uint64_t entityHandle,
+ const HandleModification& modification) {
+ fprintf(stderr, "VkReconstruction::%s: mod: %p handle 0x%llx api refs:\n", __func__, this,
+ (unsigned long long)entityHandle);
for (auto apiHandle : modification.apiRefs) {
auto apiInfo = mApiTrace.get(apiHandle);
- const char* apiName = apiInfo ? goldfish_vk::api_opcode_to_string(apiInfo->opCode) : "unalloced";
- fprintf(stderr, "VkReconstruction::%s: mod: 0x%llx: %s\n", __func__, (unsigned long long)apiHandle, apiName);
+ const char* apiName =
+ apiInfo ? goldfish_vk::api_opcode_to_string(apiInfo->opCode) : "unalloced";
+ fprintf(stderr, "VkReconstruction::%s: mod: 0x%llx: %s\n", __func__,
+ (unsigned long long)apiHandle, apiName);
}
});
@@ -356,7 +358,8 @@
}
}
-void VkReconstruction::forEachHandleAddApi(const uint64_t* toProcess, uint32_t count, uint64_t apiHandle) {
+void VkReconstruction::forEachHandleAddApi(const uint64_t* toProcess, uint32_t count,
+ uint64_t apiHandle) {
if (!toProcess) return;
for (uint32_t i = 0; i < count; ++i) {
@@ -372,7 +375,6 @@
if (!toProcess) return;
for (uint32_t i = 0; i < count; ++i) {
-
auto item = mHandleReconstructions.get(toProcess[i]);
if (!item) continue;
@@ -391,7 +393,8 @@
}
}
-void VkReconstruction::addHandleDependency(const uint64_t* handles, uint32_t count, uint64_t parentHandle) {
+void VkReconstruction::addHandleDependency(const uint64_t* handles, uint32_t count,
+ uint64_t parentHandle) {
if (!handles) return;
auto item = mHandleReconstructions.get(parentHandle);
@@ -403,7 +406,8 @@
}
}
-void VkReconstruction::setCreatedHandlesForApi(uint64_t apiHandle, const uint64_t* created, uint32_t count) {
+void VkReconstruction::setCreatedHandlesForApi(uint64_t apiHandle, const uint64_t* created,
+ uint32_t count) {
if (!created) return;
auto item = mApiTrace.get(apiHandle);
@@ -415,7 +419,8 @@
}
}
-void VkReconstruction::forEachHandleAddModifyApi(const uint64_t* toProcess, uint32_t count, uint64_t apiHandle) {
+void VkReconstruction::forEachHandleAddModifyApi(const uint64_t* toProcess, uint32_t count,
+ uint64_t apiHandle) {
if (!toProcess) return;
for (uint32_t i = 0; i < count; ++i) {
@@ -434,18 +439,17 @@
// Now add all handle modifications to the trace, ordered by the .order field.
mHandleModifications.forEachLiveComponent_const(
- [&orderedModifies](bool live, uint64_t componentHandle, uint64_t entityHandle, const HandleModification& mod) {
- orderedModifies.push_back(mod);
- });
+ [&orderedModifies](bool live, uint64_t componentHandle, uint64_t entityHandle,
+ const HandleModification& mod) { orderedModifies.push_back(mod); });
// Sort by the |order| field for each modify API
// since it may be important to apply modifies in a particular
// order (e.g., when dealing with descriptor set updates
// or commands in a command buffer).
std::sort(orderedModifies.begin(), orderedModifies.end(),
- [](const HandleModification& lhs, const HandleModification& rhs) {
- return lhs.order < rhs.order;
- });
+ [](const HandleModification& lhs, const HandleModification& rhs) {
+ return lhs.order < rhs.order;
+ });
std::unordered_set<uint64_t> usedModifyApis;
std::vector<uint64_t> orderedUniqueModifyApis;
diff --git a/stream-servers/vulkan/VkReconstruction.h b/stream-servers/vulkan/VkReconstruction.h
index 35bec41..233f5f9 100644
--- a/stream-servers/vulkan/VkReconstruction.h
+++ b/stream-servers/vulkan/VkReconstruction.h
@@ -13,18 +13,16 @@
// limitations under the License.
#pragma once
-#include "base/Stream.h"
-#include "base/EntityManager.h"
-
#include "VulkanHandleMapping.h"
#include "VulkanHandles.h"
-
+#include "base/EntityManager.h"
+#include "base/Stream.h"
#include "common/goldfish_vk_marshaling.h"
// A class that captures all important data structures for
// reconstructing a Vulkan system state via trimmed API record and replay.
class VkReconstruction {
-public:
+ public:
VkReconstruction();
void save(android::base::Stream* stream);
@@ -39,8 +37,7 @@
std::vector<uint64_t> createdHandles;
};
- using ApiTrace =
- android::base::EntityManager<32, 16, 16, ApiInfo>;
+ using ApiTrace = android::base::EntityManager<32, 16, 16, ApiInfo>;
using ApiHandle = ApiTrace::EntityHandle;
struct HandleReconstruction {
@@ -64,7 +61,8 @@
ApiInfo* getApiInfo(ApiHandle h);
- void setApiTrace(ApiInfo* apiInfo, uint32_t opcode, const uint8_t* traceBegin, size_t traceBytes);
+ void setApiTrace(ApiInfo* apiInfo, uint32_t opcode, const uint8_t* traceBegin,
+ size_t traceBytes);
void dump();
@@ -81,8 +79,8 @@
void forEachHandleAddModifyApi(const uint64_t* toProcess, uint32_t count, uint64_t apiHandle);
void setModifiedHandlesForApi(uint64_t apiHandle, const uint64_t* modified, uint32_t count);
-private:
+ private:
std::vector<uint64_t> getOrderedUniqueModifyApis() const;
ApiTrace mApiTrace;
diff --git a/stream-servers/vulkan/VulkanDispatch.cpp b/stream-servers/vulkan/VulkanDispatch.cpp
index c44268f..a4539f4 100644
--- a/stream-servers/vulkan/VulkanDispatch.cpp
+++ b/stream-servers/vulkan/VulkanDispatch.cpp
@@ -14,10 +14,10 @@
#include "VulkanDispatch.h"
-#include "base/PathUtils.h"
-#include "base/System.h"
#include "base/Lock.h"
+#include "base/PathUtils.h"
#include "base/SharedLibrary.h"
+#include "base/System.h"
using android::base::AutoLock;
using android::base::Lock;
@@ -34,9 +34,7 @@
android::base::setEnvironmentVariable("VK_ICD_FILENAMES", path);
}
-static std::string icdJsonNameToProgramAndLauncherPaths(
- const std::string& icdFilename) {
-
+static std::string icdJsonNameToProgramAndLauncherPaths(const std::string& icdFilename) {
std::string suffix = pj({"lib64", "vulkan", icdFilename});
return pj({android::base::getProgramDirectory(), suffix}) + ":" +
@@ -53,13 +51,17 @@
if (forTesting || androidIcd == "swiftshader") {
auto res = pj({android::base::getProgramDirectory(), "lib64", "vulkan"});
// LOG(VERBOSE) << "In test environment or ICD set to swiftshader, using "
- "Swiftshader ICD";
- auto libPath = pj({android::base::getProgramDirectory(), "lib64", "vulkan", "libvk_swiftshader.so"});;
+ "Swiftshader ICD";
+ auto libPath = pj(
+ {android::base::getProgramDirectory(), "lib64", "vulkan", "libvk_swiftshader.so"});
+ ;
if (android::base::pathExists(libPath.c_str())) {
// LOG(VERBOSE) << "Swiftshader library exists";
} else {
// LOG(VERBOSE) << "Swiftshader library doesn't exist, trying launcher path";
- libPath = pj({android::base::getLauncherDirectory(), "lib64", "vulkan", "libvk_swiftshader.so"});;
+ libPath = pj({android::base::getLauncherDirectory(), "lib64", "vulkan",
+ "libvk_swiftshader.so"});
+ ;
if (android::base::pathExists(libPath.c_str())) {
// LOG(VERBOSE) << "Swiftshader library found in launcher path";
} else {
@@ -70,11 +72,11 @@
android::base::setEnvironmentVariable("ANDROID_EMU_VK_ICD", "swiftshader");
} else {
// LOG(VERBOSE) << "Not in test environment. ICD (blank for default): ["
- // << androidIcd << "]";
+ // << androidIcd << "]";
// Mac: Use MoltenVK by default unless GPU mode is set to swiftshader,
// and switch between that and gfx-rs libportability-icd depending on
// the environment variable setting.
- #ifdef __APPLE__
+#ifdef __APPLE__
if (androidIcd == "portability") {
setIcdPath(icdJsonNameToProgramAndLauncherPaths("portability-macos.json"));
} else if (androidIcd == "portability-debug") {
@@ -150,7 +152,7 @@
#endif
class SharedLibraries {
-public:
+ public:
explicit SharedLibraries(size_t sizeLimit = 1) : mSizeLimit(sizeLimit) {}
size_t size() const { return mLibs.size(); }
@@ -166,8 +168,7 @@
mLibs.push_back(library);
fprintf(stderr, "added library %s\n", path.c_str());
return true;
- }
- else {
+ } else {
fprintf(stderr, "cannot add library %s: failed\n", path.c_str());
return false;
}
@@ -185,7 +186,7 @@
return nullptr;
}
-private:
+ private:
size_t mSizeLimit;
std::vector<android::base::SharedLibrary*> mLibs;
};
@@ -203,7 +204,7 @@
}
class VulkanDispatchImpl {
-public:
+ public:
VulkanDispatchImpl() : mVulkanLibs(getVulkanLibraryNumLimits()) {}
void initialize(bool forTesting);
@@ -220,9 +221,8 @@
}
#else
mVulkanLibs.addLibrary(VULKAN_LOADER_FILENAME);
-#endif // __linux__
- }
- else {
+#endif // __linux__
+ } else {
auto loaderPath = getLoaderPath(android::base::getProgramDirectory(), mForTesting);
bool success = mVulkanLibs.addLibrary(loaderPath);
@@ -235,10 +235,11 @@
// On Linux, it might not be called libvulkan.so.
// Try libvulkan.so.1 if that doesn't work.
if (!success) {
- loaderPath = pj({android::base::getLauncherDirectory(), "lib64", "vulkan", "libvulkan.so.1"});
+ loaderPath = pj({android::base::getLauncherDirectory(), "lib64", "vulkan",
+ "libvulkan.so.1"});
mVulkanLibs.addLibrary(loaderPath);
}
-#endif // __linux__
+#endif // __linux__
#ifdef __APPLE__
// On macOS it is possible that we are using MoltenVK as the
// ICD. In that case we need to add MoltenVK libraries to
@@ -254,7 +255,7 @@
success = mVulkanLibs.addLibrary(mvkPath);
}
}
-#endif // __APPLE__
+#endif // __APPLE__
}
}
return static_cast<void*>(&mVulkanLibs);
@@ -266,7 +267,7 @@
VulkanDispatch* dispatch() { return &mDispatch; }
-private:
+ private:
Lock mLock;
bool mForTesting = false;
bool mInitialized = false;
@@ -279,9 +280,7 @@
return impl;
}
-static void* sVulkanDispatchDlOpen() {
- return sVulkanDispatchImpl()->dlopen();
-}
+static void* sVulkanDispatchDlOpen() { return sVulkanDispatchImpl()->dlopen(); }
static void* sVulkanDispatchDlSym(void* lib, const char* sym) {
return sVulkanDispatchImpl()->dlsym(lib, sym);
@@ -297,10 +296,8 @@
mForTesting = forTesting;
initIcdPaths(mForTesting);
- goldfish_vk::init_vulkan_dispatch_from_system_loader(
- sVulkanDispatchDlOpen,
- sVulkanDispatchDlSym,
- &mDispatch);
+ goldfish_vk::init_vulkan_dispatch_from_system_loader(sVulkanDispatchDlOpen,
+ sVulkanDispatchDlSym, &mDispatch);
mInitialized = true;
}
@@ -312,8 +309,7 @@
bool vkDispatchValid(const VulkanDispatch* vk) {
return vk->vkEnumerateInstanceExtensionProperties != nullptr ||
- vk->vkGetInstanceProcAddr != nullptr ||
- vk->vkGetDeviceProcAddr != nullptr;
+ vk->vkGetInstanceProcAddr != nullptr || vk->vkGetDeviceProcAddr != nullptr;
}
-}
+} // namespace emugl
diff --git a/stream-servers/vulkan/VulkanDispatch.h b/stream-servers/vulkan/VulkanDispatch.h
index 4027e05..f238fb8 100644
--- a/stream-servers/vulkan/VulkanDispatch.h
+++ b/stream-servers/vulkan/VulkanDispatch.h
@@ -22,4 +22,4 @@
VulkanDispatch* vkDispatch(bool forTesting = false);
bool vkDispatchValid(const VulkanDispatch* vk);
-} // namespace emugl
\ No newline at end of file
+} // namespace emugl
\ No newline at end of file
diff --git a/stream-servers/vulkan/VulkanHandleMapping.cpp b/stream-servers/vulkan/VulkanHandleMapping.cpp
index 5b2222e..a91f5ed 100644
--- a/stream-servers/vulkan/VulkanHandleMapping.cpp
+++ b/stream-servers/vulkan/VulkanHandleMapping.cpp
@@ -12,21 +12,27 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-#include <vulkan/vulkan.h>
-
#include "VulkanHandleMapping.h"
+#include <vulkan/vulkan.h>
+
namespace goldfish_vk {
-#define DEFAULT_HANDLE_MAP_DEFINE(type) \
-void DefaultHandleMapping::mapHandles_##type(type*, size_t) { return; } \
-void DefaultHandleMapping::mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, size_t count) { \
- for (size_t i = 0; i < count; ++i) { handle_u64s[i] = (uint64_t)(uintptr_t)handles[i]; } \
-} \
-void DefaultHandleMapping::mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, size_t count) { \
- for (size_t i = 0; i < count; ++i) { handles[i] = (type)(uintptr_t)handle_u64s[i]; } \
-} \
+#define DEFAULT_HANDLE_MAP_DEFINE(type) \
+ void DefaultHandleMapping::mapHandles_##type(type*, size_t) { return; } \
+ void DefaultHandleMapping::mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, \
+ size_t count) { \
+ for (size_t i = 0; i < count; ++i) { \
+ handle_u64s[i] = (uint64_t)(uintptr_t)handles[i]; \
+ } \
+ } \
+ void DefaultHandleMapping::mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, \
+ size_t count) { \
+ for (size_t i = 0; i < count; ++i) { \
+ handles[i] = (type)(uintptr_t)handle_u64s[i]; \
+ } \
+ }
GOLDFISH_VK_LIST_HANDLE_TYPES(DEFAULT_HANDLE_MAP_DEFINE)
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VulkanHandleMapping.h b/stream-servers/vulkan/VulkanHandleMapping.h
index 8973e9b..a992336 100644
--- a/stream-servers/vulkan/VulkanHandleMapping.h
+++ b/stream-servers/vulkan/VulkanHandleMapping.h
@@ -16,58 +16,61 @@
#include <vulkan/vulkan.h>
-#include "VulkanHandles.h"
-#include "VulkanDispatch.h"
-
#include <functional>
+#include "VulkanDispatch.h"
+#include "VulkanHandles.h"
+
namespace goldfish_vk {
class VkDecoderGlobalState;
class VulkanHandleMapping {
-public:
- VulkanHandleMapping(VkDecoderGlobalState* state) : m_state (state) { }
- virtual ~VulkanHandleMapping() { }
+ public:
+ VulkanHandleMapping(VkDecoderGlobalState* state) : m_state(state) {}
+ virtual ~VulkanHandleMapping() {}
-#define DECLARE_HANDLE_MAP_PURE_VIRTUAL_METHOD(type) \
- virtual void mapHandles_##type(type* handles, size_t count = 1) = 0; \
- virtual void mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, size_t count = 1) = 0; \
- virtual void mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, size_t count = 1) = 0; \
+#define DECLARE_HANDLE_MAP_PURE_VIRTUAL_METHOD(type) \
+ virtual void mapHandles_##type(type* handles, size_t count = 1) = 0; \
+ virtual void mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, \
+ size_t count = 1) = 0; \
+ virtual void mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, \
+ size_t count = 1) = 0;
GOLDFISH_VK_LIST_HANDLE_TYPES(DECLARE_HANDLE_MAP_PURE_VIRTUAL_METHOD)
-protected:
+ protected:
VkDecoderGlobalState* m_state;
};
class DefaultHandleMapping : public VulkanHandleMapping {
-public:
- DefaultHandleMapping() : VulkanHandleMapping(nullptr) { }
- virtual ~DefaultHandleMapping() { }
+ public:
+ DefaultHandleMapping() : VulkanHandleMapping(nullptr) {}
+ virtual ~DefaultHandleMapping() {}
-#define DECLARE_HANDLE_MAP_OVERRIDE(type) \
- void mapHandles_##type(type* handles, size_t count) override; \
- void mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, size_t count) override; \
- void mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, size_t count) override; \
+#define DECLARE_HANDLE_MAP_OVERRIDE(type) \
+ void mapHandles_##type(type* handles, size_t count) override; \
+ void mapHandles_##type##_u64(const type* handles, uint64_t* handle_u64s, size_t count) \
+ override; \
+ void mapHandles_u64_##type(const uint64_t* handle_u64s, type* handles, size_t count) override;
GOLDFISH_VK_LIST_HANDLE_TYPES(DECLARE_HANDLE_MAP_OVERRIDE)
};
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DECL(type) \
- type unbox_##type(type boxed); \
- type unboxed_to_boxed_##type(type boxed); \
- void delete_##type(type boxed); \
- VulkanDispatch* dispatch_##type(type boxed); \
+ type unbox_##type(type boxed); \
+ type unboxed_to_boxed_##type(type boxed); \
+ void delete_##type(type boxed); \
+ VulkanDispatch* dispatch_##type(type boxed);
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DECL)
-#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DECL(type) \
- type new_boxed_non_dispatchable_##type(type underlying); \
- void delete_##type(type boxed); \
+#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DECL(type) \
+ type new_boxed_non_dispatchable_##type(type underlying); \
+ void delete_##type(type boxed); \
void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback); \
- type unbox_##type(type boxed); \
- type unboxed_to_boxed_non_dispatchable_##type(type boxed); \
+ type unbox_##type(type boxed); \
+ type unboxed_to_boxed_non_dispatchable_##type(type boxed);
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DECL)
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VulkanHandles.h b/stream-servers/vulkan/VulkanHandles.h
index a3d96e0..29cfc1b 100644
--- a/stream-servers/vulkan/VulkanHandles.h
+++ b/stream-servers/vulkan/VulkanHandles.h
@@ -17,92 +17,92 @@
#include <vulkan/vulkan.h>
#define GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(f) \
- f(VkInstance) \
- f(VkPhysicalDevice) \
- f(VkDevice) \
- f(VkQueue) \
- f(VkCommandBuffer) \
+ f(VkInstance) \
+ f(VkPhysicalDevice) \
+ f(VkDevice) \
+ f(VkQueue) \
+ f(VkCommandBuffer)
#define GOLDFISH_VK_LIST_TRIVIAL_NON_DISPATCHABLE_HANDLE_TYPES(f) \
- f(VkBuffer) \
- f(VkBufferView) \
- f(VkImage) \
- f(VkImageView) \
- f(VkShaderModule) \
- f(VkDescriptorPool) \
- f(VkDescriptorSetLayout) \
- f(VkDescriptorSet) \
- f(VkSampler) \
- f(VkPipeline) \
- f(VkPipelineCache) \
- f(VkPipelineLayout) \
- f(VkRenderPass) \
- f(VkFramebuffer) \
- f(VkCommandPool) \
- f(VkFence) \
- f(VkSemaphore) \
- f(VkEvent) \
- f(VkQueryPool) \
- f(VkSamplerYcbcrConversion) \
- f(VkDescriptorUpdateTemplate) \
- f(VkSurfaceKHR) \
- f(VkSwapchainKHR) \
- f(VkDisplayKHR) \
- f(VkDisplayModeKHR) \
- f(VkValidationCacheEXT) \
- f(VkDebugReportCallbackEXT) \
- f(VkDebugUtilsMessengerEXT) \
- f(VkAccelerationStructureNV) \
- f(VkIndirectCommandsLayoutNV) \
- f(VkAccelerationStructureKHR) \
- f(VkCuModuleNVX) \
- f(VkCuFunctionNVX) \
+ f(VkBuffer) \
+ f(VkBufferView) \
+ f(VkImage) \
+ f(VkImageView) \
+ f(VkShaderModule) \
+ f(VkDescriptorPool) \
+ f(VkDescriptorSetLayout) \
+ f(VkDescriptorSet) \
+ f(VkSampler) \
+ f(VkPipeline) \
+ f(VkPipelineCache) \
+ f(VkPipelineLayout) \
+ f(VkRenderPass) \
+ f(VkFramebuffer) \
+ f(VkCommandPool) \
+ f(VkFence) \
+ f(VkSemaphore) \
+ f(VkEvent) \
+ f(VkQueryPool) \
+ f(VkSamplerYcbcrConversion) \
+ f(VkDescriptorUpdateTemplate) \
+ f(VkSurfaceKHR) \
+ f(VkSwapchainKHR) \
+ f(VkDisplayKHR) \
+ f(VkDisplayModeKHR) \
+ f(VkValidationCacheEXT) \
+ f(VkDebugReportCallbackEXT) \
+ f(VkDebugUtilsMessengerEXT) \
+ f(VkAccelerationStructureNV) \
+ f(VkIndirectCommandsLayoutNV) \
+ f(VkAccelerationStructureKHR) \
+ f(VkCuModuleNVX) \
+ f(VkCuFunctionNVX)
#define GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(f) \
- f(VkDeviceMemory) \
+ f(VkDeviceMemory) \
GOLDFISH_VK_LIST_TRIVIAL_NON_DISPATCHABLE_HANDLE_TYPES(f)
-#define GOLDFISH_VK_LIST_HANDLE_TYPES(f) \
+#define GOLDFISH_VK_LIST_HANDLE_TYPES(f) \
GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(f) \
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(f)
#define GOLDFISH_VK_LIST_HANDLE_TYPES_BY_STAGE(f) \
- f(VkInstance) \
- f(VkPhysicalDevice) \
- f(VkDevice) \
- f(VkQueue) \
- f(VkBuffer) \
- f(VkImage) \
- f(VkDeviceMemory) \
- f(VkBufferView) \
- f(VkImageView) \
- f(VkShaderModule) \
- f(VkDescriptorSetLayout) \
- f(VkDescriptorPool) \
- f(VkDescriptorSet) \
- f(VkSampler) \
- f(VkSamplerYcbcrConversion) \
- f(VkDescriptorUpdateTemplate) \
- f(VkRenderPass) \
- f(VkFramebuffer) \
- f(VkPipelineLayout) \
- f(VkPipelineCache) \
- f(VkPipeline) \
- f(VkFence) \
- f(VkSemaphore) \
- f(VkEvent) \
- f(VkQueryPool) \
- f(VkSurfaceKHR) \
- f(VkSwapchainKHR) \
- f(VkDisplayKHR) \
- f(VkDisplayModeKHR) \
- f(VkValidationCacheEXT) \
- f(VkDebugReportCallbackEXT) \
- f(VkDebugUtilsMessengerEXT) \
- f(VkCommandPool) \
- f(VkCommandBuffer) \
- f(VkAccelerationStructureNV) \
- f(VkIndirectCommandsLayoutNV) \
- f(VkAccelerationStructureKHR) \
- f(VkCuModuleNVX) \
+ f(VkInstance) \
+ f(VkPhysicalDevice) \
+ f(VkDevice) \
+ f(VkQueue) \
+ f(VkBuffer) \
+ f(VkImage) \
+ f(VkDeviceMemory) \
+ f(VkBufferView) \
+ f(VkImageView) \
+ f(VkShaderModule) \
+ f(VkDescriptorSetLayout) \
+ f(VkDescriptorPool) \
+ f(VkDescriptorSet) \
+ f(VkSampler) \
+ f(VkSamplerYcbcrConversion) \
+ f(VkDescriptorUpdateTemplate) \
+ f(VkRenderPass) \
+ f(VkFramebuffer) \
+ f(VkPipelineLayout) \
+ f(VkPipelineCache) \
+ f(VkPipeline) \
+ f(VkFence) \
+ f(VkSemaphore) \
+ f(VkEvent) \
+ f(VkQueryPool) \
+ f(VkSurfaceKHR) \
+ f(VkSwapchainKHR) \
+ f(VkDisplayKHR) \
+ f(VkDisplayModeKHR) \
+ f(VkValidationCacheEXT) \
+ f(VkDebugReportCallbackEXT) \
+ f(VkDebugUtilsMessengerEXT) \
+ f(VkCommandPool) \
+ f(VkCommandBuffer) \
+ f(VkAccelerationStructureNV) \
+ f(VkIndirectCommandsLayoutNV) \
+ f(VkAccelerationStructureKHR) \
+ f(VkCuModuleNVX) \
f(VkCuFunctionNVX)
diff --git a/stream-servers/vulkan/VulkanStream.cpp b/stream-servers/vulkan/VulkanStream.cpp
index 003dfe0..da202ae 100644
--- a/stream-servers/vulkan/VulkanStream.cpp
+++ b/stream-servers/vulkan/VulkanStream.cpp
@@ -13,25 +13,23 @@
// limitations under the License.
#include "VulkanStream.h"
-#include "IOStream.h"
-
-#include "base/BumpPool.h"
-
-#include "host-common/feature_control.h"
-#include "host-common/GfxstreamFatalError.h"
+#include <inttypes.h>
#include <vector>
-#include <inttypes.h>
+#include "IOStream.h"
+#include "base/BumpPool.h"
+#include "host-common/GfxstreamFatalError.h"
+#include "host-common/feature_control.h"
-#define E(fmt,...) fprintf(stderr, fmt "\n", ##__VA_ARGS__)
+#define E(fmt, ...) fprintf(stderr, fmt "\n", ##__VA_ARGS__)
using emugl::ABORT_REASON_OTHER;
using emugl::FatalError;
namespace goldfish_vk {
-VulkanStream::VulkanStream(IOStream *stream) : mStream(stream) {
+VulkanStream::VulkanStream(IOStream* stream) : mStream(stream) {
unsetHandleMapping();
if (feature_is_enabled(kFeature_VulkanNullOptionalStrings)) {
@@ -47,13 +45,9 @@
VulkanStream::~VulkanStream() = default;
-void VulkanStream::setStream(IOStream* stream) {
- mStream = stream;
-}
+void VulkanStream::setStream(IOStream* stream) { mStream = stream; }
-bool VulkanStream::valid() {
- return true;
-}
+bool VulkanStream::valid() { return true; }
void VulkanStream::alloc(void** ptrAddr, size_t bytes) {
if (!bytes) {
@@ -64,8 +58,7 @@
*ptrAddr = mPool.alloc(bytes);
if (!*ptrAddr) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "alloc failed. Wanted size: " << bytes;
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "alloc failed. Wanted size: " << bytes;
}
}
@@ -89,7 +82,7 @@
alloc((void**)forOutput, count * sizeof(char*));
- char **stringsForOutput = *forOutput;
+ char** stringsForOutput = *forOutput;
for (size_t i = 0; i < count; i++) {
loadStringInPlace(stringsForOutput + i);
@@ -103,8 +96,8 @@
android::base::Stream::fromBe32((uint8_t*)&len);
if (len == UINT32_MAX) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "VulkanStream can't allocate UINT32_MAX bytes";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
+ << "VulkanStream can't allocate UINT32_MAX bytes";
}
alloc((void**)forOutput, len + 1);
@@ -129,14 +122,14 @@
alloc((void**)forOutput, count * sizeof(char*));
- char **stringsForOutput = *forOutput;
+ char** stringsForOutput = *forOutput;
for (size_t i = 0; i < count; i++) {
loadStringInPlaceWithStreamPtr(stringsForOutput + i, streamPtr);
}
}
-ssize_t VulkanStream::read(void *buffer, size_t size) {
+ssize_t VulkanStream::read(void* buffer, size_t size) {
commitWrite();
if (!mStream->readFully(buffer, size)) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
@@ -145,11 +138,9 @@
return size;
}
-size_t VulkanStream::remainingWriteBufferSize() const {
- return mWriteBuffer.size() - mWritePos;
-}
+size_t VulkanStream::remainingWriteBufferSize() const { return mWriteBuffer.size() - mWritePos; }
-ssize_t VulkanStream::bufferedWrite(const void *buffer, size_t size) {
+ssize_t VulkanStream::bufferedWrite(const void* buffer, size_t size) {
if (size > remainingWriteBufferSize()) {
mWriteBuffer.resize((mWritePos + size) << 1);
}
@@ -158,18 +149,15 @@
return size;
}
-ssize_t VulkanStream::write(const void *buffer, size_t size) {
- return bufferedWrite(buffer, size);
-}
+ssize_t VulkanStream::write(const void* buffer, size_t size) { return bufferedWrite(buffer, size); }
void VulkanStream::commitWrite() {
if (!valid()) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "Tried to commit write to vulkan pipe with invalid pipe!";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
+ << "Tried to commit write to vulkan pipe with invalid pipe!";
}
- int written =
- mStream->writeFully(mWriteBuffer.data(), mWritePos);
+ int written = mStream->writeFully(mWriteBuffer.data(), mWritePos);
if (written) {
GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
@@ -178,34 +166,24 @@
mWritePos = 0;
}
-void VulkanStream::clearPool() {
- mPool.freeAll();
-}
+void VulkanStream::clearPool() { mPool.freeAll(); }
void VulkanStream::setHandleMapping(VulkanHandleMapping* mapping) {
mCurrentHandleMapping = mapping;
}
-void VulkanStream::unsetHandleMapping() {
- mCurrentHandleMapping = &mDefaultHandleMapping;
-}
+void VulkanStream::unsetHandleMapping() { mCurrentHandleMapping = &mDefaultHandleMapping; }
-VulkanHandleMapping* VulkanStream::handleMapping() const {
- return mCurrentHandleMapping;
-}
+VulkanHandleMapping* VulkanStream::handleMapping() const { return mCurrentHandleMapping; }
-uint32_t VulkanStream::getFeatureBits() const {
- return mFeatureBits;
-}
+uint32_t VulkanStream::getFeatureBits() const { return mFeatureBits; }
-android::base::BumpPool* VulkanStream::pool() {
- return &mPool;
-}
+android::base::BumpPool* VulkanStream::pool() { return &mPool; }
VulkanMemReadingStream::VulkanMemReadingStream(uint8_t* start)
: VulkanStream(nullptr), mStart(start) {}
-VulkanMemReadingStream::~VulkanMemReadingStream() { }
+VulkanMemReadingStream::~VulkanMemReadingStream() {}
void VulkanMemReadingStream::setBuf(uint8_t* buf) {
mStart = buf;
@@ -213,13 +191,9 @@
resetTrace();
}
-uint8_t* VulkanMemReadingStream::getBuf() {
- return mStart;
-}
+uint8_t* VulkanMemReadingStream::getBuf() { return mStart; }
-void VulkanMemReadingStream::setReadPos(uintptr_t pos) {
- mReadPos = pos;
-}
+void VulkanMemReadingStream::setReadPos(uintptr_t pos) { mReadPos = pos; }
ssize_t VulkanMemReadingStream::read(void* buffer, size_t size) {
memcpy(buffer, mStart + mReadPos, size);
@@ -228,8 +202,8 @@
}
ssize_t VulkanMemReadingStream::write(const void* buffer, size_t size) {
- GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) <<
- "VulkanMemReadingStream does not support writing";
+ GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
+ << "VulkanMemReadingStream does not support writing";
}
uint8_t* VulkanMemReadingStream::beginTrace() {
@@ -243,8 +217,6 @@
return res;
}
-void VulkanMemReadingStream::resetTrace() {
- mTraceStart = mStart + mReadPos;
-}
+void VulkanMemReadingStream::resetTrace() { mTraceStart = mStart + mReadPos; }
-} // namespace goldfish_vk
+} // namespace goldfish_vk
diff --git a/stream-servers/vulkan/VulkanStream.h b/stream-servers/vulkan/VulkanStream.h
index 6944a23..4442dfc 100644
--- a/stream-servers/vulkan/VulkanStream.h
+++ b/stream-servers/vulkan/VulkanStream.h
@@ -13,32 +13,31 @@
// limitations under the License.
#pragma once
-#include "base/BumpPool.h"
-#include "base/Stream.h"
-#include "base/StreamSerializing.h"
-
-#include "VulkanHandleMapping.h"
-#include "common/goldfish_vk_private_defs.h"
+#include <inttypes.h>
#include <memory>
#include <vector>
-#include <inttypes.h>
+#include "VulkanHandleMapping.h"
+#include "base/BumpPool.h"
+#include "base/Stream.h"
+#include "base/StreamSerializing.h"
+#include "common/goldfish_vk_private_defs.h"
-#define E(fmt,...) fprintf(stderr, fmt "\n", ##__VA_ARGS__)
+#define E(fmt, ...) fprintf(stderr, fmt "\n", ##__VA_ARGS__)
class IOStream;
namespace android {
namespace base {
class BumpPool;
-} // namespace android
-} // namespace base
+} // namespace base
+} // namespace android
namespace goldfish_vk {
class VulkanStream : public android::base::Stream {
-public:
+ public:
VulkanStream(IOStream* stream);
~VulkanStream();
@@ -59,8 +58,8 @@
void loadStringInPlaceWithStreamPtr(char** forOutput, uint8_t** streamPtr);
void loadStringArrayInPlaceWithStreamPtr(char*** forOutput, uint8_t** streamPtr);
- virtual ssize_t read(void *buffer, size_t size);
- virtual ssize_t write(const void *buffer, size_t size);
+ virtual ssize_t read(void* buffer, size_t size);
+ virtual ssize_t write(const void* buffer, size_t size);
void commitWrite();
@@ -75,9 +74,9 @@
android::base::BumpPool* pool();
-private:
+ private:
size_t remainingWriteBufferSize() const;
- ssize_t bufferedWrite(const void *buffer, size_t size);
+ ssize_t bufferedWrite(const void* buffer, size_t size);
android::base::BumpPool mPool;
size_t mWritePos = 0;
std::vector<uint8_t> mWriteBuffer;
@@ -88,7 +87,7 @@
};
class VulkanMemReadingStream : public VulkanStream {
-public:
+ public:
VulkanMemReadingStream(uint8_t* start);
~VulkanMemReadingStream();
@@ -96,13 +95,13 @@
uint8_t* getBuf();
void setReadPos(uintptr_t pos);
- ssize_t read(void *buffer, size_t size) override;
- ssize_t write(const void *buffer, size_t size) override;
+ ssize_t read(void* buffer, size_t size) override;
+ ssize_t write(const void* buffer, size_t size) override;
uint8_t* beginTrace();
size_t endTrace();
-
-private:
+
+ private:
void resetTrace();
uint8_t* mStart;
@@ -110,4 +109,4 @@
uintptr_t mReadPos = 0;
};
-} // namespace goldfish_vk
+} // namespace goldfish_vk
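A hypothetical usage of the VulkanMemReadingStream interface declared above, assuming read() advances the internal cursor and endTrace() reports the bytes consumed since the matching beginTrace() (consistent with resetTrace() setting mTraceStart to mStart + mReadPos). The packet/opcode/payloadSize names are illustrative:

#include <cstddef>
#include <cstdint>

#include "VulkanStream.h"

void readHeader(uint8_t* packet) {
    goldfish_vk::VulkanMemReadingStream stream(packet);

    uint32_t opcode = 0;
    uint32_t payloadSize = 0;
    stream.beginTrace();                        // mark the current read position
    stream.read(&opcode, sizeof(opcode));
    stream.read(&payloadSize, sizeof(payloadSize));
    std::size_t consumed = stream.endTrace();   // expected: 2 * sizeof(uint32_t)
    (void)consumed;
}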
diff --git a/stream-servers/vulkan/VulkanStream_unittest.cpp b/stream-servers/vulkan/VulkanStream_unittest.cpp
index 96bffff..eb38747 100644
--- a/stream-servers/vulkan/VulkanStream_unittest.cpp
+++ b/stream-servers/vulkan/VulkanStream_unittest.cpp
@@ -11,35 +11,34 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+#include <gtest/gtest.h>
+
#include "VulkanStream.h"
-#include "IOStream.h"
+#include <string.h>
+#include <vulkan.h>
+#include "IOStream.h"
+#include "android/base/ArraySize.h"
+#include "android/base/BumpPool.h"
#include "common/goldfish_vk_deepcopy.h"
#include "common/goldfish_vk_extension_structs.h"
#include "common/goldfish_vk_marshaling.h"
#include "common/goldfish_vk_reserved_marshaling.h"
#include "common/goldfish_vk_testing.h"
-#include "android/base/ArraySize.h"
-#include "android/base/BumpPool.h"
-
-#include <gtest/gtest.h>
-#include <string.h>
-#include <vulkan.h>
-
using android::base::arraySize;
namespace goldfish_vk {
class TestStream : public IOStream {
-public:
+ public:
static constexpr size_t kBufSize = 1024;
- TestStream() : IOStream(kBufSize) { }
-protected:
+ TestStream() : IOStream(kBufSize) {}
+ protected:
void* getDmaForReading(uint64_t guest_paddr) override { return nullptr; }
- void unlockDma(uint64_t guest_paddr) override { }
+ void unlockDma(uint64_t guest_paddr) override {}
// VulkanStream should never use these functions.
void* allocBuffer(size_t minSize) override {
@@ -52,7 +51,7 @@
abort();
}
- const unsigned char *readRaw(void *buf, size_t *inout_len) override {
+ const unsigned char* readRaw(void* buf, size_t* inout_len) override {
fprintf(stderr, "%s: FATAL: not intended for use!\n", __func__);
abort();
}
@@ -60,7 +59,6 @@
void onSave(android::base::Stream*) override {
fprintf(stderr, "%s: FATAL: not intended for use!\n", __func__);
abort();
-
}
virtual unsigned char* onLoad(android::base::Stream*) override {
@@ -95,7 +93,7 @@
return (unsigned char*)buf;
}
-private:
+ private:
void clear() {
mBuffer.clear();
mReadCursor = 0;
@@ -128,11 +126,11 @@
VkApplicationInfo appInfo = {
VK_STRUCTURE_TYPE_APPLICATION_INFO,
- 0, // pNext
- "VulkanStreamTest", // application name
- 6, // application version
- "VulkanStreamTestEngine", //engine name
- 4, // engine version,
+ 0, // pNext
+ "VulkanStreamTest", // application name
+ 6, // application version
+ "VulkanStreamTestEngine", // engine name
+ 4, // engine version,
VK_API_VERSION_1_0,
};
@@ -147,16 +145,14 @@
"VK_MVK_macos_surface",
};
- VkInstanceCreateInfo forMarshaling = {
- VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
- 0, // pNext
- 0, // flags,
- &appInfo, // pApplicationInfo,
- arraySize(layerNames),
- layerNames,
- arraySize(extensionNames),
- extensionNames
- };
+ VkInstanceCreateInfo forMarshaling = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ 0, // pNext
+ 0, // flags,
+ &appInfo, // pApplicationInfo,
+ arraySize(layerNames),
+ layerNames,
+ arraySize(extensionNames),
+ extensionNames};
marshal_VkInstanceCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
@@ -166,11 +162,11 @@
// Before unmarshaling, these structs should be different.
// Test that the generated comparator can detect inequality.
int inequalities = 0;
- checkEqual_VkInstanceCreateInfo(
- &forMarshaling, &forUnmarshaling, [&inequalities](const char* errMsg) {
- (void)errMsg;
- ++inequalities;
- });
+ checkEqual_VkInstanceCreateInfo(&forMarshaling, &forUnmarshaling,
+ [&inequalities](const char* errMsg) {
+ (void)errMsg;
+ ++inequalities;
+ });
EXPECT_GT(inequalities, 0);
@@ -178,38 +174,30 @@
// Check that the strings are equal as well.
- EXPECT_STREQ(
- forMarshaling.pApplicationInfo->pApplicationName,
- forUnmarshaling.pApplicationInfo->pApplicationName);
+ EXPECT_STREQ(forMarshaling.pApplicationInfo->pApplicationName,
+ forUnmarshaling.pApplicationInfo->pApplicationName);
- EXPECT_STREQ(
- forMarshaling.pApplicationInfo->pEngineName,
- forUnmarshaling.pApplicationInfo->pEngineName);
+ EXPECT_STREQ(forMarshaling.pApplicationInfo->pEngineName,
+ forUnmarshaling.pApplicationInfo->pEngineName);
for (size_t i = 0; i < arraySize(layerNames); ++i) {
- EXPECT_STREQ(
- forMarshaling.ppEnabledLayerNames[i],
- forUnmarshaling.ppEnabledLayerNames[i]);
+ EXPECT_STREQ(forMarshaling.ppEnabledLayerNames[i], forUnmarshaling.ppEnabledLayerNames[i]);
}
for (size_t i = 0; i < arraySize(extensionNames); ++i) {
- EXPECT_STREQ(
- forMarshaling.ppEnabledExtensionNames[i],
- forUnmarshaling.ppEnabledExtensionNames[i]);
+ EXPECT_STREQ(forMarshaling.ppEnabledExtensionNames[i],
+ forUnmarshaling.ppEnabledExtensionNames[i]);
}
EXPECT_EQ(forMarshaling.sType, forUnmarshaling.sType);
EXPECT_EQ(forMarshaling.pNext, forUnmarshaling.pNext);
EXPECT_EQ(forMarshaling.flags, forUnmarshaling.flags);
- EXPECT_EQ(forMarshaling.pApplicationInfo->sType,
- forUnmarshaling.pApplicationInfo->sType);
+ EXPECT_EQ(forMarshaling.pApplicationInfo->sType, forUnmarshaling.pApplicationInfo->sType);
EXPECT_EQ(forMarshaling.pApplicationInfo->apiVersion,
forUnmarshaling.pApplicationInfo->apiVersion);
- checkEqual_VkInstanceCreateInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkInstanceCreateInfo(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Try a Vulkan struct that has non-ptr structs in it
@@ -226,119 +214,119 @@
"Intel740",
"123456789abcdef",
{
- 0x00, // maxImageDimension1D;
- 0x01, // maxImageDimension2D;
- 0x02, // maxImageDimension3D;
- 0x03, // maxImageDimensionCube;
- 0x04, // maxImageArrayLayers;
- 0x05, // maxTexelBufferElements;
- 0x06, // maxUniformBufferRange;
- 0x07, // maxStorageBufferRange;
- 0x08, // maxPushConstantsSize;
- 0x09, // maxMemoryAllocationCount;
- 0x0a, // maxSamplerAllocationCount;
- 0x0b, // bufferImageGranularity;
- 0x0c, // sparseAddressSpaceSize;
- 0x0d, // maxBoundDescriptorSets;
- 0x0e, // maxPerStageDescriptorSamplers;
- 0x0f, // maxPerStageDescriptorUniformBuffers;
- 0x10, // maxPerStageDescriptorStorageBuffers;
- 0x11, // maxPerStageDescriptorSampledImages;
- 0x12, // maxPerStageDescriptorStorageImages;
- 0x13, // maxPerStageDescriptorInputAttachments;
- 0x14, // maxPerStageResources;
- 0x15, // maxDescriptorSetSamplers;
- 0x16, // maxDescriptorSetUniformBuffers;
- 0x17, // maxDescriptorSetUniformBuffersDynamic;
- 0x18, // maxDescriptorSetStorageBuffers;
- 0x19, // maxDescriptorSetStorageBuffersDynamic;
- 0x1a, // maxDescriptorSetSampledImages;
- 0x1b, // maxDescriptorSetStorageImages;
- 0x1c, // maxDescriptorSetInputAttachments;
- 0x1d, // maxVertexInputAttributes;
- 0x1e, // maxVertexInputBindings;
- 0x1f, // maxVertexInputAttributeOffset;
- 0x20, // maxVertexInputBindingStride;
- 0x21, // maxVertexOutputComponents;
- 0x22, // maxTessellationGenerationLevel;
- 0x23, // maxTessellationPatchSize;
- 0x24, // maxTessellationControlPerVertexInputComponents;
- 0x25, // maxTessellationControlPerVertexOutputComponents;
- 0x26, // maxTessellationControlPerPatchOutputComponents;
- 0x27, // maxTessellationControlTotalOutputComponents;
- 0x28, // maxTessellationEvaluationInputComponents;
- 0x29, // maxTessellationEvaluationOutputComponents;
- 0x2a, // maxGeometryShaderInvocations;
- 0x2b, // maxGeometryInputComponents;
- 0x2c, // maxGeometryOutputComponents;
- 0x2d, // maxGeometryOutputVertices;
- 0x2e, // maxGeometryTotalOutputComponents;
- 0x2f, // maxFragmentInputComponents;
- 0x30, // maxFragmentOutputAttachments;
- 0x31, // maxFragmentDualSrcAttachments;
- 0x32, // maxFragmentCombinedOutputResources;
- 0x33, // maxComputeSharedMemorySize;
- { 0x1, 0x2, 0x3 }, // maxComputeWorkGroupCount[3];
- 0x35, // maxComputeWorkGroupInvocations;
- { 0x4, 0x5, 0x6 }, // maxComputeWorkGroupSize[3];
- 0x37, // subPixelPrecisionBits;
- 0x38, // subTexelPrecisionBits;
- 0x39, // mipmapPrecisionBits;
- 0x3a, // maxDrawIndexedIndexValue;
- 0x3b, // maxDrawIndirectCount;
- 1.0f, // maxSamplerLodBias;
- 1.0f, // maxSamplerAnisotropy;
- 0x3e, // maxViewports;
- { 0x7, 0x8 }, // maxViewportDimensions[2];
- { 0.4f, 0.5f }, // viewportBoundsRange[2];
- 0x41, // viewportSubPixelBits;
- 0x42, // minMemoryMapAlignment;
- 0x43, // minTexelBufferOffsetAlignment;
- 0x44, // minUniformBufferOffsetAlignment;
- 0x45, // minStorageBufferOffsetAlignment;
- 0x46, // minTexelOffset;
- 0x47, // maxTexelOffset;
- 0x48, // minTexelGatherOffset;
- 0x49, // maxTexelGatherOffset;
- 10.0f, // minInterpolationOffset;
- 11.0f, // maxInterpolationOffset;
- 0x4c, // subPixelInterpolationOffsetBits;
- 0x4d, // maxFramebufferWidth;
- 0x4e, // maxFramebufferHeight;
- 0x4f, // maxFramebufferLayers;
- 0x50, // framebufferColorSampleCounts;
- 0x51, // framebufferDepthSampleCounts;
- 0x52, // framebufferStencilSampleCounts;
- 0x53, // framebufferNoAttachmentsSampleCounts;
- 0x54, // maxColorAttachments;
- 0x55, // sampledImageColorSampleCounts;
- 0x56, // sampledImageIntegerSampleCounts;
- 0x57, // sampledImageDepthSampleCounts;
- 0x58, // sampledImageStencilSampleCounts;
- 0x59, // storageImageSampleCounts;
- 0x5a, // maxSampleMaskWords;
- 0x5b, // timestampComputeAndGraphics;
- 100.0f, // timestampPeriod;
- 0x5d, // maxClipDistances;
- 0x5e, // maxCullDistances;
- 0x5f, // maxCombinedClipAndCullDistances;
- 0x60, // discreteQueuePriorities;
- { 0.0f, 1.0f }, // pointSizeRange[2];
- { 1.0f, 2.0f }, // lineWidthRange[2];
- 3.0f, // pointSizeGranularity;
- 4.0f, // lineWidthGranularity;
- 0x65, // strictLines;
- 0x66, // standardSampleLocations;
- 0x67, // optimalBufferCopyOffsetAlignment;
- 0x68, // optimalBufferCopyRowPitchAlignment;
- 0x69, // nonCoherentAtomSize;
+ 0x00, // maxImageDimension1D;
+ 0x01, // maxImageDimension2D;
+ 0x02, // maxImageDimension3D;
+ 0x03, // maxImageDimensionCube;
+ 0x04, // maxImageArrayLayers;
+ 0x05, // maxTexelBufferElements;
+ 0x06, // maxUniformBufferRange;
+ 0x07, // maxStorageBufferRange;
+ 0x08, // maxPushConstantsSize;
+ 0x09, // maxMemoryAllocationCount;
+ 0x0a, // maxSamplerAllocationCount;
+ 0x0b, // bufferImageGranularity;
+ 0x0c, // sparseAddressSpaceSize;
+ 0x0d, // maxBoundDescriptorSets;
+ 0x0e, // maxPerStageDescriptorSamplers;
+ 0x0f, // maxPerStageDescriptorUniformBuffers;
+ 0x10, // maxPerStageDescriptorStorageBuffers;
+ 0x11, // maxPerStageDescriptorSampledImages;
+ 0x12, // maxPerStageDescriptorStorageImages;
+ 0x13, // maxPerStageDescriptorInputAttachments;
+ 0x14, // maxPerStageResources;
+ 0x15, // maxDescriptorSetSamplers;
+ 0x16, // maxDescriptorSetUniformBuffers;
+ 0x17, // maxDescriptorSetUniformBuffersDynamic;
+ 0x18, // maxDescriptorSetStorageBuffers;
+ 0x19, // maxDescriptorSetStorageBuffersDynamic;
+ 0x1a, // maxDescriptorSetSampledImages;
+ 0x1b, // maxDescriptorSetStorageImages;
+ 0x1c, // maxDescriptorSetInputAttachments;
+ 0x1d, // maxVertexInputAttributes;
+ 0x1e, // maxVertexInputBindings;
+ 0x1f, // maxVertexInputAttributeOffset;
+ 0x20, // maxVertexInputBindingStride;
+ 0x21, // maxVertexOutputComponents;
+ 0x22, // maxTessellationGenerationLevel;
+ 0x23, // maxTessellationPatchSize;
+ 0x24, // maxTessellationControlPerVertexInputComponents;
+ 0x25, // maxTessellationControlPerVertexOutputComponents;
+ 0x26, // maxTessellationControlPerPatchOutputComponents;
+ 0x27, // maxTessellationControlTotalOutputComponents;
+ 0x28, // maxTessellationEvaluationInputComponents;
+ 0x29, // maxTessellationEvaluationOutputComponents;
+ 0x2a, // maxGeometryShaderInvocations;
+ 0x2b, // maxGeometryInputComponents;
+ 0x2c, // maxGeometryOutputComponents;
+ 0x2d, // maxGeometryOutputVertices;
+ 0x2e, // maxGeometryTotalOutputComponents;
+ 0x2f, // maxFragmentInputComponents;
+ 0x30, // maxFragmentOutputAttachments;
+ 0x31, // maxFragmentDualSrcAttachments;
+ 0x32, // maxFragmentCombinedOutputResources;
+ 0x33, // maxComputeSharedMemorySize;
+ {0x1, 0x2, 0x3}, // maxComputeWorkGroupCount[3];
+ 0x35, // maxComputeWorkGroupInvocations;
+ {0x4, 0x5, 0x6}, // maxComputeWorkGroupSize[3];
+ 0x37, // subPixelPrecisionBits;
+ 0x38, // subTexelPrecisionBits;
+ 0x39, // mipmapPrecisionBits;
+ 0x3a, // maxDrawIndexedIndexValue;
+ 0x3b, // maxDrawIndirectCount;
+ 1.0f, // maxSamplerLodBias;
+ 1.0f, // maxSamplerAnisotropy;
+ 0x3e, // maxViewports;
+ {0x7, 0x8}, // maxViewportDimensions[2];
+ {0.4f, 0.5f}, // viewportBoundsRange[2];
+ 0x41, // viewportSubPixelBits;
+ 0x42, // minMemoryMapAlignment;
+ 0x43, // minTexelBufferOffsetAlignment;
+ 0x44, // minUniformBufferOffsetAlignment;
+ 0x45, // minStorageBufferOffsetAlignment;
+ 0x46, // minTexelOffset;
+ 0x47, // maxTexelOffset;
+ 0x48, // minTexelGatherOffset;
+ 0x49, // maxTexelGatherOffset;
+ 10.0f, // minInterpolationOffset;
+ 11.0f, // maxInterpolationOffset;
+ 0x4c, // subPixelInterpolationOffsetBits;
+ 0x4d, // maxFramebufferWidth;
+ 0x4e, // maxFramebufferHeight;
+ 0x4f, // maxFramebufferLayers;
+ 0x50, // framebufferColorSampleCounts;
+ 0x51, // framebufferDepthSampleCounts;
+ 0x52, // framebufferStencilSampleCounts;
+ 0x53, // framebufferNoAttachmentsSampleCounts;
+ 0x54, // maxColorAttachments;
+ 0x55, // sampledImageColorSampleCounts;
+ 0x56, // sampledImageIntegerSampleCounts;
+ 0x57, // sampledImageDepthSampleCounts;
+ 0x58, // sampledImageStencilSampleCounts;
+ 0x59, // storageImageSampleCounts;
+ 0x5a, // maxSampleMaskWords;
+ 0x5b, // timestampComputeAndGraphics;
+ 100.0f, // timestampPeriod;
+ 0x5d, // maxClipDistances;
+ 0x5e, // maxCullDistances;
+ 0x5f, // maxCombinedClipAndCullDistances;
+ 0x60, // discreteQueuePriorities;
+ {0.0f, 1.0f}, // pointSizeRange[2];
+ {1.0f, 2.0f}, // lineWidthRange[2];
+ 3.0f, // pointSizeGranularity;
+ 4.0f, // lineWidthGranularity;
+ 0x65, // strictLines;
+ 0x66, // standardSampleLocations;
+ 0x67, // optimalBufferCopyOffsetAlignment;
+ 0x68, // optimalBufferCopyRowPitchAlignment;
+ 0x69, // nonCoherentAtomSize;
},
{
- 0xff, // residencyStandard2DBlockShape;
- 0x00, // residencyStandard2DMultisampleBlockShape;
- 0x11, // residencyStandard3DBlockShape;
- 0x22, // residencyAlignedMipSize;
- 0x33, // residencyNonResidentStrict;
+ 0xff, // residencyStandard2DBlockShape;
+ 0x00, // residencyStandard2DMultisampleBlockShape;
+ 0x11, // residencyStandard3DBlockShape;
+ 0x22, // residencyAlignedMipSize;
+ 0x33, // residencyNonResidentStrict;
},
};
@@ -349,11 +337,11 @@
// Test the autogenerated testing code
int inequalities = 0;
- checkEqual_VkPhysicalDeviceProperties(
- &forMarshaling, &forUnmarshaling, [&inequalities](const char* errMsg) {
- (void)errMsg;
- ++inequalities;
- });
+ checkEqual_VkPhysicalDeviceProperties(&forMarshaling, &forUnmarshaling,
+ [&inequalities](const char* errMsg) {
+ (void)errMsg;
+ ++inequalities;
+ });
EXPECT_GT(inequalities, 0);
@@ -365,10 +353,8 @@
EXPECT_EQ(2.0f, forUnmarshaling.limits.lineWidthRange[1]);
EXPECT_EQ(11.0f, forUnmarshaling.limits.maxInterpolationOffset);
- checkEqual_VkPhysicalDeviceProperties(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkPhysicalDeviceProperties(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Try a Vulkan struct that has ptr fields with count (dynamic arrays)
@@ -384,15 +370,14 @@
VkSparseImageMemoryBind sparseBind = {
// VkImageSubresource subresource
{
- VK_IMAGE_ASPECT_COLOR_BIT |
- VK_IMAGE_ASPECT_DEPTH_BIT,
+ VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT,
i,
i * 2,
},
// VkOffset3D offset
- { 1, 2 + (int32_t)i, 3},
+ {1, 2 + (int32_t)i, 3},
// VkExtent3D extent
- { 10, 20 * i, 30},
+ {10, 20 * i, 30},
// VkDeviceMemory memory
(VkDeviceMemory)(uintptr_t)(0xff - i),
// VkDeviceSize memoryOffset
@@ -413,7 +398,9 @@
marshal_VkSparseImageMemoryBindInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
VkSparseImageMemoryBindInfo forUnmarshaling = {
- 0, 0, nullptr,
+ 0,
+ 0,
+ nullptr,
};
unmarshal_VkSparseImageMemoryBindInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forUnmarshaling);
@@ -424,18 +411,14 @@
// Test some values in there so we know the autogenerated
// compare code works.
for (uint32_t i = 0; i < bindCount; i++) {
- EXPECT_EQ(forMarshaling.pBinds[i].memoryOffset,
- forUnmarshaling.pBinds[i].memoryOffset);
- EXPECT_EQ(forMarshaling.pBinds[i].memoryOffset,
- forUnmarshaling.pBinds[i].memoryOffset);
+ EXPECT_EQ(forMarshaling.pBinds[i].memoryOffset, forUnmarshaling.pBinds[i].memoryOffset);
+ EXPECT_EQ(forMarshaling.pBinds[i].memoryOffset, forUnmarshaling.pBinds[i].memoryOffset);
EXPECT_EQ(forMarshaling.pBinds[i].subresource.arrayLayer,
forUnmarshaling.pBinds[i].subresource.arrayLayer);
}
checkEqual_VkSparseImageMemoryBindInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ &forMarshaling, &forUnmarshaling, [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Try a Vulkan struct that has ptr fields that are not structs
@@ -452,21 +435,11 @@
}
VkDeviceQueueCreateInfo forMarshaling = {
- VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
- 0,
- 0,
- 1,
- queueCount,
- queuePriorities.data(),
+ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, 0, 0, 1, queueCount, queuePriorities.data(),
};
VkDeviceQueueCreateInfo forUnmarshaling = {
- VK_STRUCTURE_TYPE_APPLICATION_INFO,
- 0,
- 0,
- 0,
- 0,
- nullptr,
+ VK_STRUCTURE_TYPE_APPLICATION_INFO, 0, 0, 0, 0, nullptr,
};
marshal_VkDeviceQueueCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
@@ -477,10 +450,8 @@
EXPECT_EQ(forMarshaling.pQueuePriorities[i], forUnmarshaling.pQueuePriorities[i]);
}
- checkEqual_VkDeviceQueueCreateInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkDeviceQueueCreateInfo(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Vulkan struct with a void* field that refers to actual data
@@ -520,20 +491,16 @@
memset(&forUnmarshaling, 0x0, sizeof(VkSpecializationInfo));
int inequalities = 0;
- checkEqual_VkSpecializationInfo(
- &forMarshaling, &forUnmarshaling, [&inequalities](const char* errMsg) {
- ++inequalities;
- });
+ checkEqual_VkSpecializationInfo(&forMarshaling, &forUnmarshaling,
+ [&inequalities](const char* errMsg) { ++inequalities; });
EXPECT_GT(inequalities, 0);
marshal_VkSpecializationInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
unmarshal_VkSpecializationInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forUnmarshaling);
- checkEqual_VkSpecializationInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkSpecializationInfo(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Tests that marshal + unmarshal is equivalent to deepcopy.
@@ -544,11 +511,11 @@
VkApplicationInfo appInfo = {
VK_STRUCTURE_TYPE_APPLICATION_INFO,
- 0, // pNext
- "VulkanStreamTest", // application name
- 6, // application version
- "VulkanStreamTestEngine", //engine name
- 4, // engine version,
+ 0, // pNext
+ "VulkanStreamTest", // application name
+ 6, // application version
+ "VulkanStreamTestEngine", // engine name
+ 4, // engine version,
VK_API_VERSION_1_0,
};
@@ -563,16 +530,14 @@
"VK_MVK_macos_surface",
};
- VkInstanceCreateInfo forMarshaling = {
- VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
- 0, // pNext
- 0, // flags,
- &appInfo, // pApplicationInfo,
- arraySize(layerNames),
- layerNames,
- arraySize(extensionNames),
- extensionNames
- };
+ VkInstanceCreateInfo forMarshaling = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ 0, // pNext
+ 0, // flags,
+ &appInfo, // pApplicationInfo,
+ arraySize(layerNames),
+ layerNames,
+ arraySize(extensionNames),
+ extensionNames};
marshal_VkInstanceCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
@@ -582,15 +547,11 @@
unmarshal_VkInstanceCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forUnmarshaling);
deepcopy_VkInstanceCreateInfo(&pool, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling, &forDeepcopy);
- checkEqual_VkInstanceCreateInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkInstanceCreateInfo(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
- checkEqual_VkInstanceCreateInfo(
- &forMarshaling, &forDeepcopy, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkInstanceCreateInfo(&forMarshaling, &forDeepcopy,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
// Tests that a struct with an extension struct attached
@@ -604,8 +565,10 @@
VkBuffer buffer = (VkBuffer)2;
VkMemoryDedicatedAllocateInfo dedicatedAllocInfo = {
- VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, 0,
- image, buffer,
+ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ 0,
+ image,
+ buffer,
};
VkMemoryAllocateInfo forMarshaling = {
@@ -623,31 +586,24 @@
VkMemoryDedicatedAllocateInfo* copiedDedicated =
(VkMemoryDedicatedAllocateInfo*)forUnmarshaling.pNext;
- EXPECT_EQ(VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
- copiedDedicated->sType);
+ EXPECT_EQ(VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, copiedDedicated->sType);
EXPECT_EQ(image, copiedDedicated->image);
EXPECT_EQ(buffer, copiedDedicated->buffer);
- checkEqual_VkMemoryAllocateInfo(
- &forMarshaling, &forUnmarshaling, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkMemoryAllocateInfo(&forMarshaling, &forUnmarshaling,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
VkMemoryAllocateInfo forDeepcopy;
deepcopy_VkMemoryAllocateInfo(&pool, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling, &forDeepcopy);
- copiedDedicated =
- (VkMemoryDedicatedAllocateInfo*)forDeepcopy.pNext;
+ copiedDedicated = (VkMemoryDedicatedAllocateInfo*)forDeepcopy.pNext;
- EXPECT_EQ(VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
- copiedDedicated->sType);
+ EXPECT_EQ(VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, copiedDedicated->sType);
EXPECT_EQ(image, copiedDedicated->image);
EXPECT_EQ(buffer, copiedDedicated->buffer);
- checkEqual_VkMemoryAllocateInfo(
- &forMarshaling, &forDeepcopy, [](const char* errMsg) {
- EXPECT_TRUE(false) << errMsg;
- });
+ checkEqual_VkMemoryAllocateInfo(&forMarshaling, &forDeepcopy,
+ [](const char* errMsg) { EXPECT_TRUE(false) << errMsg; });
}
TEST(VulkanStream, testConflictStructExtensions_marshaling) {
@@ -669,11 +625,12 @@
.memoryTypeIndex = 0xabcd,
};
marshal_VkMemoryAllocateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
-
+
VkMemoryAllocateInfo forUnmarshaling;
unmarshal_VkMemoryAllocateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forUnmarshaling);
ASSERT_TRUE(forUnmarshaling.pNext);
- const VkImportColorBufferGOOGLE* ext = reinterpret_cast<const VkImportColorBufferGOOGLE*>(forUnmarshaling.pNext);
+ const VkImportColorBufferGOOGLE* ext =
+ reinterpret_cast<const VkImportColorBufferGOOGLE*>(forUnmarshaling.pNext);
EXPECT_EQ(ext->sType, VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE);
EXPECT_EQ(ext->pNext, nullptr);
@@ -692,17 +649,20 @@
.pNext = &densityMapFeatures,
};
marshal_VkDeviceCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forMarshaling);
-
+
VkDeviceCreateInfo forUnmarshaling;
unmarshal_VkDeviceCreateInfo(&stream, VK_STRUCTURE_TYPE_MAX_ENUM, &forUnmarshaling);
ASSERT_TRUE(forUnmarshaling.pNext);
- const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* ext = reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>(forUnmarshaling.pNext);
-
+ const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* ext =
+ reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>(
+ forUnmarshaling.pNext);
+
EXPECT_EQ(ext->sType, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT);
EXPECT_EQ(ext->pNext, nullptr);
EXPECT_EQ(ext->fragmentDensityMap, densityMapFeatures.fragmentDensityMap);
EXPECT_EQ(ext->fragmentDensityMapDynamic, densityMapFeatures.fragmentDensityMapDynamic);
- EXPECT_EQ(ext->fragmentDensityMapNonSubsampledImages, densityMapFeatures.fragmentDensityMapNonSubsampledImages);
+ EXPECT_EQ(ext->fragmentDensityMapNonSubsampledImages,
+ densityMapFeatures.fragmentDensityMapNonSubsampledImages);
}
}
@@ -741,9 +701,10 @@
.pNext = &densityMapFeatures,
};
- size_t size = goldfish_vk_extension_struct_size(deviceCreateInfo.sType, &densityMapFeatures);
+ size_t size =
+ goldfish_vk_extension_struct_size(deviceCreateInfo.sType, &densityMapFeatures);
EXPECT_EQ(size, sizeof(VkPhysicalDeviceFragmentDensityMapFeaturesEXT));
}
}
-} // namespace goldfish_vk
+} // namespace goldfish_vk
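The extension-struct tests above all follow the same pNext pattern: chain an extension struct onto a parent struct, push it through marshal/unmarshal, then recover it by sType. A sketch of just the chaining and lookup, in plain Vulkan with no marshaling (findDedicatedInfo and chainExample are illustrative helpers, not code from this change):

#include <vulkan/vulkan.h>

static const VkMemoryDedicatedAllocateInfo* findDedicatedInfo(const VkMemoryAllocateInfo* allocInfo) {
    // Walk the pNext chain and match on sType, as the tests do after unmarshaling.
    for (const VkBaseInStructure* s = reinterpret_cast<const VkBaseInStructure*>(allocInfo->pNext);
         s != nullptr; s = s->pNext) {
        if (s->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO) {
            return reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>(s);
        }
    }
    return nullptr;
}

static void chainExample() {
    VkMemoryDedicatedAllocateInfo dedicated = {
        VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        nullptr,
        (VkImage)1,
        (VkBuffer)2,
    };
    VkMemoryAllocateInfo alloc = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        &dedicated,  // extension struct attached via pNext
        4096,        // allocationSize
        0,           // memoryTypeIndex
    };
    const VkMemoryDedicatedAllocateInfo* found = findDedicatedInfo(&alloc);
    (void)found;  // found == &dedicated
}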
diff --git a/stream-servers/vulkan/vk_format_info.h b/stream-servers/vulkan/vk_format_info.h
index c0548a0..e9b41ba 100644
--- a/stream-servers/vulkan/vk_format_info.h
+++ b/stream-servers/vulkan/vk_format_info.h
@@ -27,126 +27,112 @@
#define VK_FORMAT_INFO_H
#include <stdbool.h>
-#include <vulkan/vulkan.h>
#include <vndk/hardware_buffer.h>
+#include <vulkan/vulkan.h>
/* See i915_private_android_types.h in minigbm. */
#define HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL 0x100
-static inline VkFormat
-vk_format_from_android(unsigned android_format)
-{
- switch (android_format) {
- case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
- return VK_FORMAT_R8G8B8A8_UNORM;
- case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
- case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
- return VK_FORMAT_R8G8B8_UNORM;
- case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
- return VK_FORMAT_R5G6B5_UNORM_PACK16;
- case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
- return VK_FORMAT_R16G16B16A16_SFLOAT;
- case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
- return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
- case HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL:
- return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
- case AHARDWAREBUFFER_FORMAT_BLOB:
- default:
- return VK_FORMAT_UNDEFINED;
- }
+static inline VkFormat vk_format_from_android(unsigned android_format) {
+ switch (android_format) {
+ case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
+ return VK_FORMAT_R8G8B8A8_UNORM;
+ case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
+ case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
+ return VK_FORMAT_R8G8B8_UNORM;
+ case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
+ return VK_FORMAT_R5G6B5_UNORM_PACK16;
+ case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
+ return VK_FORMAT_R16G16B16A16_SFLOAT;
+ case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
+ return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
+ case HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL:
+ return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
+ case AHARDWAREBUFFER_FORMAT_BLOB:
+ default:
+ return VK_FORMAT_UNDEFINED;
+ }
}
-static inline unsigned
-android_format_from_vk(unsigned vk_format)
-{
- switch (vk_format) {
- case VK_FORMAT_R8G8B8A8_UNORM:
- return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
- case VK_FORMAT_R8G8B8_UNORM:
- return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
- case VK_FORMAT_R5G6B5_UNORM_PACK16:
- return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
- case VK_FORMAT_R16G16B16A16_SFLOAT:
- return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
- case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
- return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
- case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
- return HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL;
- default:
- return AHARDWAREBUFFER_FORMAT_BLOB;
- }
+static inline unsigned android_format_from_vk(unsigned vk_format) {
+ switch (vk_format) {
+ case VK_FORMAT_R8G8B8A8_UNORM:
+ return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+ case VK_FORMAT_R8G8B8_UNORM:
+ return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
+ case VK_FORMAT_R5G6B5_UNORM_PACK16:
+ return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
+ case VK_FORMAT_R16G16B16A16_SFLOAT:
+ return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
+ case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+ return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ return HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL;
+ default:
+ return AHARDWAREBUFFER_FORMAT_BLOB;
+ }
}
-static inline VkImageAspectFlags
-vk_format_aspects(VkFormat format)
-{
- switch (format) {
- case VK_FORMAT_UNDEFINED:
- return 0;
+static inline VkImageAspectFlags vk_format_aspects(VkFormat format) {
+ switch (format) {
+ case VK_FORMAT_UNDEFINED:
+ return 0;
- case VK_FORMAT_S8_UINT:
- return VK_IMAGE_ASPECT_STENCIL_BIT;
+ case VK_FORMAT_S8_UINT:
+ return VK_IMAGE_ASPECT_STENCIL_BIT;
- case VK_FORMAT_D16_UNORM_S8_UINT:
- case VK_FORMAT_D24_UNORM_S8_UINT:
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- case VK_FORMAT_D16_UNORM:
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- case VK_FORMAT_D32_SFLOAT:
- return VK_IMAGE_ASPECT_DEPTH_BIT;
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ return VK_IMAGE_ASPECT_DEPTH_BIT;
- case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
- case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
- case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
- case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
- case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
- case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
- return (VK_IMAGE_ASPECT_PLANE_0_BIT |
- VK_IMAGE_ASPECT_PLANE_1_BIT |
- VK_IMAGE_ASPECT_PLANE_2_BIT);
+ case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
+ case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
+ case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
+ case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+ case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+ case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
+ case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
+ case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
+ return (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT |
+ VK_IMAGE_ASPECT_PLANE_2_BIT);
- case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
- case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
- case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
- case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
- case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
- case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
- return (VK_IMAGE_ASPECT_PLANE_0_BIT |
- VK_IMAGE_ASPECT_PLANE_1_BIT);
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+ case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+ case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
+ case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+ case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+ case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
+ case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
+ return (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
- default:
- return VK_IMAGE_ASPECT_COLOR_BIT;
- }
+ default:
+ return VK_IMAGE_ASPECT_COLOR_BIT;
+ }
}
-static inline bool
-vk_format_is_color(VkFormat format)
-{
- return vk_format_aspects(format) == VK_IMAGE_ASPECT_COLOR_BIT;
+static inline bool vk_format_is_color(VkFormat format) {
+ return vk_format_aspects(format) == VK_IMAGE_ASPECT_COLOR_BIT;
}
-static inline bool
-vk_format_is_depth_or_stencil(VkFormat format)
-{
- const VkImageAspectFlags aspects = vk_format_aspects(format);
- return aspects & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
+static inline bool vk_format_is_depth_or_stencil(VkFormat format) {
+ const VkImageAspectFlags aspects = vk_format_aspects(format);
+ return aspects & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
}
-static inline bool
-vk_format_has_depth(VkFormat format)
-{
- const VkImageAspectFlags aspects = vk_format_aspects(format);
- return aspects & VK_IMAGE_ASPECT_DEPTH_BIT;
+static inline bool vk_format_has_depth(VkFormat format) {
+ const VkImageAspectFlags aspects = vk_format_aspects(format);
+ return aspects & VK_IMAGE_ASPECT_DEPTH_BIT;
}
#endif /* VK_FORMAT_INFO_H */
\ No newline at end of file
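A short sketch of how the reformatted helpers above compose: map an AHardwareBuffer format to a VkFormat, then derive the aspect mask a caller would use for barriers or image views. aspectsForAndroidFormat is an illustrative wrapper, not part of this change:

#include <vulkan/vulkan.h>

#include "vk_format_info.h"

static VkImageAspectFlags aspectsForAndroidFormat(unsigned androidFormat) {
    VkFormat format = vk_format_from_android(androidFormat);
    // AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM -> VK_FORMAT_R8G8B8A8_UNORM -> COLOR_BIT
    // HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL   -> VK_FORMAT_G8_B8R8_2PLANE_420_UNORM
    //                                          -> PLANE_0_BIT | PLANE_1_BIT
    return vk_format_aspects(format);
}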
diff --git a/stream-servers/vulkan/vk_util.h b/stream-servers/vulkan/vk_util.h
index c96cef7..202e6ec 100644
--- a/stream-servers/vulkan/vk_util.h
+++ b/stream-servers/vulkan/vk_util.h
@@ -25,14 +25,13 @@
/* common inlines and macros for vulkan drivers */
+#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
-
#include <vulkan/vulkan.h>
#include <chrono>
#include <functional>
-#include <inttypes.h>
#include <memory>
#include <optional>
#include <string>
@@ -41,30 +40,28 @@
#include <type_traits>
#include <vector>
+#include "VulkanDispatch.h"
#include "base/Lock.h"
#include "common/vk_struct_id.h"
#include "host-common/GfxstreamFatalError.h"
#include "host-common/logging.h"
#include "vk_fn_info.h"
-#include "VulkanDispatch.h"
struct vk_struct_common {
VkStructureType sType;
- struct vk_struct_common *pNext;
+ struct vk_struct_common* pNext;
};
struct vk_struct_chain_iterator {
- vk_struct_common *value;
+ vk_struct_common* value;
};
-#define vk_foreach_struct(__iter, __start) \
- for (struct vk_struct_common *__iter = \
- (struct vk_struct_common *)(__start); \
- __iter; __iter = __iter->pNext)
+#define vk_foreach_struct(__iter, __start) \
+ for (struct vk_struct_common* __iter = (struct vk_struct_common*)(__start); __iter; \
+ __iter = __iter->pNext)
-#define vk_foreach_struct_const(__iter, __start) \
- for (const struct vk_struct_common *__iter = \
- (const struct vk_struct_common *)(__start); \
+#define vk_foreach_struct_const(__iter, __start) \
+ for (const struct vk_struct_common* __iter = (const struct vk_struct_common*)(__start); \
__iter; __iter = __iter->pNext)
/**
@@ -98,7 +95,7 @@
*/
struct __vk_outarray {
/** May be null. */
- void *data;
+ void* data;
/**
* Capacity, in number of elements. Capacity is unlimited (UINT32_MAX) if
@@ -110,7 +107,7 @@
* Count of elements successfully written to the array. Every write is
* considered successful if data is null.
*/
- uint32_t *filled_len;
+ uint32_t* filled_len;
/**
* Count of elements that would have been written to the array if its
@@ -120,8 +117,7 @@
uint32_t wanted_len;
};
-static inline void __vk_outarray_init(struct __vk_outarray *a, void *data,
- uint32_t *len) {
+static inline void __vk_outarray_init(struct __vk_outarray* a, void* data, uint32_t* len) {
a->data = data;
a->cap = *len;
a->filled_len = len;
@@ -131,23 +127,21 @@
if (a->data == NULL) a->cap = UINT32_MAX;
}
-static inline VkResult __vk_outarray_status(const struct __vk_outarray *a) {
+static inline VkResult __vk_outarray_status(const struct __vk_outarray* a) {
if (*a->filled_len < a->wanted_len)
return VK_INCOMPLETE;
else
return VK_SUCCESS;
}
-static inline void *__vk_outarray_next(struct __vk_outarray *a,
- size_t elem_size) {
- void *p = NULL;
+static inline void* __vk_outarray_next(struct __vk_outarray* a, size_t elem_size) {
+ void* p = NULL;
a->wanted_len += 1;
if (*a->filled_len >= a->cap) return NULL;
- if (a->data != NULL)
- p = ((uint8_t *)a->data) + (*a->filled_len) * elem_size;
+ if (a->data != NULL) p = ((uint8_t*)a->data) + (*a->filled_len) * elem_size;
*a->filled_len += 1;
@@ -163,8 +157,7 @@
#define vk_outarray_typeof_elem(a) __typeof__((a)->meta[0])
#define vk_outarray_sizeof_elem(a) sizeof((a)->meta[0])
-#define vk_outarray_init(a, data, len) \
- __vk_outarray_init(&(a)->base, (data), (len))
+#define vk_outarray_init(a, data, len) __vk_outarray_init(&(a)->base, (data), (len))
#define VK_OUTARRAY_MAKE(name, data, len) \
vk_outarray(__typeof__((data)[0])) name; \
@@ -172,9 +165,8 @@
#define vk_outarray_status(a) __vk_outarray_status(&(a)->base)
-#define vk_outarray_next(a) \
- ((vk_outarray_typeof_elem(a) *)__vk_outarray_next( \
- &(a)->base, vk_outarray_sizeof_elem(a)))
+#define vk_outarray_next(a) \
+ ((vk_outarray_typeof_elem(a)*)__vk_outarray_next(&(a)->base, vk_outarray_sizeof_elem(a)))
/**
* Append to a Vulkan output array.
@@ -195,11 +187,10 @@
* executes the block. When the block is executed, `elem` is non-null and
* points to the newly appended element.
*/
-#define vk_outarray_append(a, elem) \
- for (vk_outarray_typeof_elem(a) *elem = vk_outarray_next(a); elem != NULL; \
- elem = NULL)
+#define vk_outarray_append(a, elem) \
+ for (vk_outarray_typeof_elem(a)* elem = vk_outarray_next(a); elem != NULL; elem = NULL)
-static inline void *__vk_find_struct(void *start, VkStructureType sType) {
+static inline void* __vk_find_struct(void* start, VkStructureType sType) {
vk_foreach_struct(s, start) {
if (s->sType == sType) return s;
}
@@ -208,18 +199,16 @@
}
template <class T, class H>
-T *vk_find_struct(H *head) {
+T* vk_find_struct(H* head) {
(void)vk_get_vk_struct_id<H>::id;
- return static_cast<T *>(__vk_find_struct(static_cast<void *>(head),
- vk_get_vk_struct_id<T>::id));
+ return static_cast<T*>(__vk_find_struct(static_cast<void*>(head), vk_get_vk_struct_id<T>::id));
}
template <class T, class H>
-const T *vk_find_struct(const H *head) {
+const T* vk_find_struct(const H* head) {
(void)vk_get_vk_struct_id<H>::id;
- return static_cast<const T *>(
- __vk_find_struct(const_cast<void *>(static_cast<const void *>(head)),
- vk_get_vk_struct_id<T>::id));
+ return static_cast<const T*>(__vk_find_struct(const_cast<void*>(static_cast<const void*>(head)),
+ vk_get_vk_struct_id<T>::id));
}
uint32_t vk_get_driver_version(void);
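Hypothetical use of the vk_find_struct template above, assuming common/vk_struct_id.h registers the VkStructureType for both the head and the extension type (the (void)vk_get_vk_struct_id<H>::id line enforces that at compile time). findDedicatedExample is an illustrative name:

#include <vulkan/vulkan.h>

#include "vk_util.h"

static void findDedicatedExample(const VkMemoryAllocateInfo* allocInfo) {
    // Assumes both VkMemoryAllocateInfo and VkMemoryDedicatedAllocateInfo have
    // vk_get_vk_struct_id specializations; otherwise this does not compile.
    const VkMemoryDedicatedAllocateInfo* dedicated =
        vk_find_struct<VkMemoryDedicatedAllocateInfo>(allocInfo);
    if (dedicated) {
        // dedicated->image / dedicated->buffer identify the dedicated resource.
    }
}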
@@ -229,41 +218,39 @@
#define VK_EXT_OFFSET (1000000000UL)
#define VK_ENUM_EXTENSION(__enum) \
((__enum) >= VK_EXT_OFFSET ? ((((__enum)-VK_EXT_OFFSET) / 1000UL) + 1) : 0)
-#define VK_ENUM_OFFSET(__enum) \
- ((__enum) >= VK_EXT_OFFSET ? ((__enum) % 1000) : (__enum))
+#define VK_ENUM_OFFSET(__enum) ((__enum) >= VK_EXT_OFFSET ? ((__enum) % 1000) : (__enum))
template <class T>
-T vk_make_orphan_copy(const T &vk_struct) {
+T vk_make_orphan_copy(const T& vk_struct) {
T copy = vk_struct;
copy.pNext = NULL;
return copy;
}
template <class T>
-vk_struct_chain_iterator vk_make_chain_iterator(T *vk_struct) {
+vk_struct_chain_iterator vk_make_chain_iterator(T* vk_struct) {
vk_get_vk_struct_id<T>::id;
- vk_struct_chain_iterator result = {
- reinterpret_cast<vk_struct_common *>(vk_struct)};
+ vk_struct_chain_iterator result = {reinterpret_cast<vk_struct_common*>(vk_struct)};
return result;
}
template <class T>
-void vk_append_struct(vk_struct_chain_iterator *i, T *vk_struct) {
+void vk_append_struct(vk_struct_chain_iterator* i, T* vk_struct) {
vk_get_vk_struct_id<T>::id;
- vk_struct_common *p = i->value;
+ vk_struct_common* p = i->value;
if (p->pNext) {
::abort();
}
- p->pNext = reinterpret_cast<vk_struct_common *>(vk_struct);
+ p->pNext = reinterpret_cast<vk_struct_common*>(vk_struct);
vk_struct->pNext = NULL;
*i = vk_make_chain_iterator(vk_struct);
}
-template <class S, class T> void vk_struct_chain_remove(S* unwanted, T* vk_struct)
-{
+template <class S, class T>
+void vk_struct_chain_remove(S* unwanted, T* vk_struct) {
if (!unwanted) return;
vk_foreach_struct(current, vk_struct) {
@@ -316,7 +303,7 @@
CallbacksWrapper(std::unique_ptr<T> callbacks) : mCallbacks(std::move(callbacks)) {}
// function should be a member function pointer to T.
template <class U, class... Args>
- void callIfExists(U function, Args &&...args) const {
+ void callIfExists(U function, Args&&... args) const {
if (mCallbacks && (*mCallbacks.*function)) {
(*mCallbacks.*function)(std::forward(args)...);
}
@@ -327,24 +314,22 @@
};
void setVkCheckCallbacks(std::unique_ptr<VkCheckCallbacks>);
-const CallbacksWrapper<VkCheckCallbacks> &getVkCheckCallbacks();
+const CallbacksWrapper<VkCheckCallbacks>& getVkCheckCallbacks();
class CRTPBase {};
template <class T, class U = CRTPBase>
class FindMemoryType : public U {
protected:
- std::optional<uint32_t> findMemoryType(
- uint32_t typeFilter, VkMemoryPropertyFlags properties) const {
- const T &self = static_cast<const T &>(*this);
+ std::optional<uint32_t> findMemoryType(uint32_t typeFilter,
+ VkMemoryPropertyFlags properties) const {
+ const T& self = static_cast<const T&>(*this);
VkPhysicalDeviceMemoryProperties memProperties;
- self.m_vk.vkGetPhysicalDeviceMemoryProperties(self.m_vkPhysicalDevice,
- &memProperties);
+ self.m_vk.vkGetPhysicalDeviceMemoryProperties(self.m_vkPhysicalDevice, &memProperties);
for (uint32_t i = 0; i < memProperties.memoryTypeCount; i++) {
if ((typeFilter & (1 << i)) &&
- (memProperties.memoryTypes[i].propertyFlags & properties) ==
- properties) {
+ (memProperties.memoryTypes[i].propertyFlags & properties) == properties) {
return i;
}
}
@@ -355,21 +340,18 @@
template <class T, class U = CRTPBase>
class RunSingleTimeCommand : public U {
protected:
- void runSingleTimeCommands(
- VkQueue queue, std::shared_ptr<android::base::Lock> queueLock,
- std::function<void(const VkCommandBuffer &commandBuffer)> f) const {
- const T &self = static_cast<const T &>(*this);
+ void runSingleTimeCommands(VkQueue queue, std::shared_ptr<android::base::Lock> queueLock,
+ std::function<void(const VkCommandBuffer& commandBuffer)> f) const {
+ const T& self = static_cast<const T&>(*this);
VkCommandBuffer cmdBuff;
VkCommandBufferAllocateInfo cmdBuffAllocInfo = {
.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
.commandPool = self.m_vkCommandPool,
.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
.commandBufferCount = 1};
- VK_CHECK(self.m_vk.vkAllocateCommandBuffers(
- self.m_vkDevice, &cmdBuffAllocInfo, &cmdBuff));
- VkCommandBufferBeginInfo beginInfo = {
- .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
- .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT};
+ VK_CHECK(self.m_vk.vkAllocateCommandBuffers(self.m_vkDevice, &cmdBuffAllocInfo, &cmdBuff));
+ VkCommandBufferBeginInfo beginInfo = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT};
VK_CHECK(self.m_vk.vkBeginCommandBuffer(cmdBuff, &beginInfo));
f(cmdBuff);
VK_CHECK(self.m_vk.vkEndCommandBuffer(cmdBuff));
@@ -381,57 +363,51 @@
if (queueLock) {
lock = std::make_unique<android::base::AutoLock>(*queueLock);
}
- VK_CHECK(
- self.m_vk.vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
+ VK_CHECK(self.m_vk.vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
VK_CHECK(self.m_vk.vkQueueWaitIdle(queue));
}
- self.m_vk.vkFreeCommandBuffers(self.m_vkDevice, self.m_vkCommandPool, 1,
- &cmdBuff);
+ self.m_vk.vkFreeCommandBuffers(self.m_vkDevice, self.m_vkCommandPool, 1, &cmdBuff);
}
};
template <class T, class U = CRTPBase>
class RecordImageLayoutTransformCommands : public U {
protected:
- void recordImageLayoutTransformCommands(VkCommandBuffer cmdBuff,
- VkImage image,
+ void recordImageLayoutTransformCommands(VkCommandBuffer cmdBuff, VkImage image,
VkImageLayout oldLayout,
VkImageLayout newLayout) const {
- const T &self = static_cast<const T &>(*this);
+ const T& self = static_cast<const T&>(*this);
VkImageMemoryBarrier imageBarrier = {
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
- .srcAccessMask =
- VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
- .dstAccessMask =
- VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
+ .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
+ .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
.oldLayout = oldLayout,
.newLayout = newLayout,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = image,
.subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
- .baseMipLevel = 0,
- .levelCount = 1,
- .baseArrayLayer = 0,
- .layerCount = 1}};
- self.m_vk.vkCmdPipelineBarrier(cmdBuff,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &imageBarrier);
+ .baseMipLevel = 0,
+ .levelCount = 1,
+ .baseArrayLayer = 0,
+ .layerCount = 1}};
+ self.m_vk.vkCmdPipelineBarrier(cmdBuff, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
+ nullptr, 1, &imageBarrier);
}
};
template <class T>
typename vk_fn_info::GetVkFnInfo<T>::type getVkInstanceProcAddrWithFallback(
- const std::vector<std::function<std::remove_pointer_t<PFN_vkGetInstanceProcAddr>>>
- &vkGetInstanceProcAddrs,
+ const std::vector<std::function<std::remove_pointer_t<PFN_vkGetInstanceProcAddr>>>&
+ vkGetInstanceProcAddrs,
VkInstance instance) {
- for (const auto &vkGetInstanceProcAddr : vkGetInstanceProcAddrs) {
+ for (const auto& vkGetInstanceProcAddr : vkGetInstanceProcAddrs) {
if (!vkGetInstanceProcAddr) {
continue;
}
PFN_vkVoidFunction resWithCurrentVkGetInstanceProcAddr = std::apply(
- [&vkGetInstanceProcAddr, instance](auto &&...names) -> PFN_vkVoidFunction {
- for (const char *name : {names...}) {
+ [&vkGetInstanceProcAddr, instance](auto&&... names) -> PFN_vkVoidFunction {
+ for (const char* name : {names...}) {
if (PFN_vkVoidFunction resWithCurrentName =
vkGetInstanceProcAddr(instance, name)) {
return resWithCurrentName;