blob: cf5fae7a7164307de95ea5956b295bbd1be31e04 [file] [log] [blame]
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001// Copyright 2019 The Android Open Source Project
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
Kaiyi Lifab51002021-08-21 15:12:02 -070014#include <vulkan/vulkan.h>
Lingfeng Yanga285eb42020-10-30 12:39:56 -070015
Gurchetan Singh01b8b482023-07-20 07:39:20 -070016#include <cstdarg>
17#include <cstdio>
Lingfeng Yanga285eb42020-10-30 12:39:56 -070018#include <deque>
Kaiyi Li083a66a2022-08-11 16:23:37 -070019#include <type_traits>
Lingfeng Yanga285eb42020-10-30 12:39:56 -070020#include <unordered_map>
Jason Macnak77ddccd2024-03-15 15:53:30 -070021#include <variant>
Kaiyi Lifab51002021-08-21 15:12:02 -070022
Gurchetan Singh619646f2024-06-28 09:30:12 -070023#include "ExternalObjectManager.h"
Idan Raiterf6377592022-10-31 23:53:02 -070024#include "FrameBuffer.h"
25#include "GfxStreamAgents.h"
Kaiyi Lifab51002021-08-21 15:12:02 -070026#include "VirtioGpuTimelines.h"
Gurchetan Singhd22867a2023-04-10 10:11:41 -070027#include "VkCommonOperations.h"
Joshua Duongef2bbc22022-10-05 11:59:15 -070028#include "aemu/base/AlignedBuf.h"
Joshua Duongef2bbc22022-10-05 11:59:15 -070029#include "aemu/base/ManagedDescriptor.hpp"
Idan Raiter995ba8a2022-12-04 16:44:59 -080030#include "aemu/base/Metrics.h"
Joshua Duongef2bbc22022-10-05 11:59:15 -070031#include "aemu/base/Tracing.h"
Idan Raiter995ba8a2022-12-04 16:44:59 -080032#include "aemu/base/memory/SharedMemory.h"
33#include "aemu/base/synchronization/Lock.h"
Jason Macnak77ddccd2024-03-15 15:53:30 -070034#include "aemu/base/threads/WorkerThread.h"
Jason Macnakf044f012024-02-21 17:25:11 -080035#include "gfxstream/Strings.h"
Jason Macnak26872122024-02-23 10:46:09 -080036#include "gfxstream/host/Features.h"
Kaiyi Lifab51002021-08-21 15:12:02 -070037#include "host-common/AddressSpaceService.h"
Kaiyi Li1fdd22e2022-03-31 10:17:23 -070038#include "host-common/GfxstreamFatalError.h"
Kaiyi Lifab51002021-08-21 15:12:02 -070039#include "host-common/address_space_device.h"
40#include "host-common/android_pipe_common.h"
Idan Raiterf6377592022-10-31 23:53:02 -070041#include "host-common/android_pipe_device.h"
Gurchetan Singh2bc98312022-08-11 09:10:25 -070042#include "host-common/feature_control.h"
Idan Raiterf6377592022-10-31 23:53:02 -070043#include "host-common/globals.h"
Idan Raiterf6377592022-10-31 23:53:02 -070044#include "host-common/opengles-pipe.h"
Idan Raiter995ba8a2022-12-04 16:44:59 -080045#include "host-common/opengles.h"
Idan Raiterf6377592022-10-31 23:53:02 -070046#include "host-common/refcount-pipe.h"
Kaiyi Lifab51002021-08-21 15:12:02 -070047#include "host-common/vm_operations.h"
Gurchetan Singhd88da7d2023-04-12 13:34:01 -070048#include "virgl_hw.h"
Kaiyi Li083a66a2022-08-11 16:23:37 -070049#include "virtgpu_gfxstream_protocol.h"
Idan Raiterf6377592022-10-31 23:53:02 -070050#include "vk_util.h"
Lingfeng Yanga285eb42020-10-30 12:39:56 -070051
Gurchetan Singh29946e82024-02-08 08:51:09 -080052#ifdef GFXSTREAM_ENABLE_HOST_VK_SNAPSHOT
53#include "aemu/base/files/StdioStream.h"
54#endif
55
Lingfeng Yanga285eb42020-10-30 12:39:56 -070056extern "C" {
Lingfeng Yanga285eb42020-10-30 12:39:56 -070057#include "drm_fourcc.h"
Jason Macnake886da92023-09-19 09:08:26 -070058#include "gfxstream/virtio-gpu-gfxstream-renderer-unstable.h"
59#include "gfxstream/virtio-gpu-gfxstream-renderer.h"
Lingfeng Yanga285eb42020-10-30 12:39:56 -070060#include "host-common/goldfish_pipe.h"
Gurchetan Singh9d89d5a2023-07-21 08:51:15 -070061#include "virgl_hw.h"
Lingfeng Yanga285eb42020-10-30 12:39:56 -070062} // extern "C"
63
#if defined(_WIN32)
// Windows has no <sys/uio.h>; provide a minimal iovec so the scatter-gather
// transfer code below compiles unmodified on that platform.
struct iovec {
    void* iov_base; /* Starting address */
    size_t iov_len; /* Length in bytes */
};
#else
#include <unistd.h>
#endif  // _WIN32
72
Gurchetan Singh01b8b482023-07-20 07:39:20 -070073#define MAX_DEBUG_BUFFER_SIZE 512
Lingfeng Yanga285eb42020-10-30 12:39:56 -070074
Gurchetan Singh01b8b482023-07-20 07:39:20 -070075void* globalUserData = nullptr;
76stream_renderer_debug_callback globalDebugCallback = nullptr;
Lingfeng Yanga285eb42020-10-30 12:39:56 -070077
Jason Macnak2c826b72024-02-27 16:10:55 -080078void stream_renderer_log(uint32_t type, const char* format, ...) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -070079 char buf[MAX_DEBUG_BUFFER_SIZE];
80 va_list args;
81 va_start(args, format);
82 vsnprintf(buf, MAX_DEBUG_BUFFER_SIZE, format, args);
83 va_end(args);
Lingfeng Yanga285eb42020-10-30 12:39:56 -070084
Gurchetan Singh01b8b482023-07-20 07:39:20 -070085 if (globalUserData && globalDebugCallback) {
86 struct stream_renderer_debug debug = {0};
87 debug.debug_type = type;
88 debug.message = &buf[0];
89
90 globalDebugCallback(globalUserData, &debug);
91 } else {
92 fprintf(stderr, "%s\n", buf);
93 }
94}
95
// Leveled logging macros. Each level is compiled in only when the build-time
// STREAM_RENDERER_LOG_LEVEL is at least that severity; otherwise the macro
// expands to nothing, making disabled log statements zero-cost.
#if STREAM_RENDERER_LOG_LEVEL >= STREAM_RENDERER_DEBUG_ERROR
#define stream_renderer_error(format, ...)                                                \
    do {                                                                                  \
        stream_renderer_log(STREAM_RENDERER_DEBUG_ERROR, "[%s(%d)] %s " format, __FILE__, \
                            __LINE__, __PRETTY_FUNCTION__, ##__VA_ARGS__);                \
    } while (0)
#else
#define stream_renderer_error(format, ...)
#endif

#if STREAM_RENDERER_LOG_LEVEL >= STREAM_RENDERER_DEBUG_WARN
#define stream_renderer_warn(format, ...)                                                          \
    do {                                                                                           \
        stream_renderer_log(STREAM_RENDERER_DEBUG_WARN, "[%s(%d)] %s " format, __FILE__, __LINE__, \
                            __PRETTY_FUNCTION__, ##__VA_ARGS__);                                   \
    } while (0)
#else
#define stream_renderer_warn(format, ...)
#endif

#if STREAM_RENDERER_LOG_LEVEL >= STREAM_RENDERER_DEBUG_INFO
// NOTE(review): info uses __FUNCTION__ while the other levels use
// __PRETTY_FUNCTION__ — confirm whether the shorter form is intentional.
#define stream_renderer_info(format, ...)                                                          \
    do {                                                                                           \
        stream_renderer_log(STREAM_RENDERER_DEBUG_INFO, "[%s(%d)] %s " format, __FILE__, __LINE__, \
                            __FUNCTION__, ##__VA_ARGS__);                                          \
    } while (0)
#else
#define stream_renderer_info(format, ...)
#endif

#if STREAM_RENDERER_LOG_LEVEL >= STREAM_RENDERER_DEBUG_DEBUG
#define stream_renderer_debug(format, ...)                                                \
    do {                                                                                  \
        stream_renderer_log(STREAM_RENDERER_DEBUG_DEBUG, "[%s(%d)] %s " format, __FILE__, \
                            __LINE__, __PRETTY_FUNCTION__, ##__VA_ARGS__);                \
    } while (0)
#else
#define stream_renderer_debug(format, ...)
#endif
135
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700136// Virtio Goldfish Pipe: Overview-----------------------------------------------
137//
138// Virtio Goldfish Pipe is meant for running goldfish pipe services with a
139// stock Linux kernel that is already capable of virtio-gpu. It runs DRM
140// VIRTGPU ioctls on top of a custom implementation of virglrenderer on the
141// host side that doesn't (directly) do any rendering, but instead talks to
142// host-side pipe services.
143//
144// This is mainly used for graphics at the moment, though it's possible to run
145// other pipe services over virtio-gpu as well. virtio-gpu is selected over
146// other devices primarily because of the existence of an API (virglrenderer)
147// that is already somewhat separate from virtio-gpu, and not needing to create
148// a new virtio device to handle goldfish pipe.
149//
150// How it works is, existing virglrenderer API are remapped to perform pipe
151// operations. First of all, pipe operations consist of the following:
152//
153// - open() / close(): Starts or stops an instance of a pipe service.
154//
155// - write(const void* buf, size_t len) / read(const void* buf, size_t len):
156// Sends or receives data over the pipe. The first write() is the name of the
157// pipe service. After the pipe service is determined, the host calls
158// resetPipe() to replace the host-side pipe instance with an instance of the
159// pipe service.
160//
161// - reset(void* initialPipe, void* actualPipe): the operation that replaces an
162// initial pipe with an instance of a pipe service.
163//
164// Next, here's how the pipe operations map to virglrenderer commands:
165//
166// - open() -> virgl_renderer_context_create(),
167// virgl_renderer_resource_create(),
168// virgl_renderer_resource_attach_iov()
169//
170// The open() corresponds to a guest-side open of a rendernode, which triggers
171// context creation. Each pipe corresponds 1:1 with a drm virtgpu context id.
172// We also associate an R8 resource with each pipe as the backing data for
173// write/read.
174//
175// - close() -> virgl_rendrerer_resource_unref(),
176// virgl_renderer_context_destroy()
177//
178// The close() corresponds to undoing the operations of open().
179//
180// - write() -> virgl_renderer_transfer_write_iov() OR
181// virgl_renderer_submit_cmd()
182//
183// Pipe write() operation corresponds to performing a TRANSFER_TO_HOST ioctl on
184// the resource created alongside open(), OR an EXECBUFFER ioctl.
185//
186// - read() -> virgl_renderer_transfer_read_iov()
187//
188// Pipe read() operation corresponds to performing a TRANSFER_FROM_HOST ioctl on
189// the resource created alongside open().
190//
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700191// Details on transfer mechanism: mapping 2D transfer to 1D ones----------------
192//
193// Resource objects are typically 2D textures, while we're wanting to transmit
194// 1D buffers to the pipe services on the host. DRM VIRTGPU uses the concept
195// of a 'box' to represent transfers that do not involve an entire resource
196// object. Each box has a x, y, width and height parameter to define the
197// extent of the transfer for a 2D texture. In our use case, we only use the x
198// and width parameters. We've also created the resource with R8 format
199// (byte-by-byte) with width equal to the total size of the transfer buffer we
200// want (around 1 MB).
201//
202// The resource object itself is currently backed via plain guest RAM, which
203// can be physically not-contiguous from the guest POV, and therefore
204// corresponds to a possibly-long list of pointers and sizes (iov) on the host
// side. The sync_iov helper function converts the list of pointers
206// to one contiguous buffer on the host (or vice versa), at the cost of a copy.
207// (TODO: see if we can use host coherent memory to do away with the copy).
208//
209// We can see this abstraction in use via the implementation of
210// transferWriteIov and transferReadIov below, which sync the iovec to/from a
// linear buffer if necessary, and then perform a corresponding pipe operation
212// based on the box parameter's x and width values.
213
Idan Raiterf6377592022-10-31 23:53:02 -0700214using android::AndroidPipe;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700215using android::base::AutoLock;
Gurchetan Singh424f0672022-08-26 11:10:12 -0700216using android::base::DescriptorType;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700217using android::base::Lock;
Gurchetan Singh424f0672022-08-26 11:10:12 -0700218using android::base::ManagedDescriptor;
Idan Raiterf6377592022-10-31 23:53:02 -0700219using android::base::MetricsLogger;
Gurchetan Singh2bc98312022-08-11 09:10:25 -0700220using android::base::SharedMemory;
221
Kaiyi Li4935d312022-01-12 16:57:24 -0800222using emugl::FatalError;
Gurchetan Singh619646f2024-06-28 09:30:12 -0700223using gfxstream::BlobDescriptorInfo;
224using gfxstream::ExternalObjectManager;
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -0700225using gfxstream::SyncDescriptorInfo;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700226
Gurchetan Singhf3ad8892022-03-17 14:59:58 -0700227using VirtioGpuResId = uint32_t;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700228
// Sentinel returned by pipe read/write attempts to tell the caller to retry
// later (the goldfish pipe equivalent of EAGAIN).
static constexpr int kPipeTryAgain = -2;

// Header of a guest-issued command; the variable-length payload follows the
// header immediately in `buf`.
struct VirtioGpuCmd {
    uint32_t op;           // opcode identifying the command
    uint32_t cmdSize;      // total size in bytes, including this header
    unsigned char buf[0];  // flexible payload (zero-length array extension)
} __attribute__((packed));
236
// Per-context bookkeeping; each guest virtio-gpu context maps 1:1 to a pipe.
struct PipeCtxEntry {
    std::string name;      // context name supplied at creation
    uint32_t capsetId;     // capability set the context was created with
    VirtioGpuCtxId ctxId;  // virtio-gpu context id
    GoldfishHostPipe* hostPipe;  // host-side pipe backing this context
    int fence;             // NOTE(review): presumably the last fence id seen — confirm
    uint32_t addressSpaceHandle;  // legacy single address-space handle
    bool hasAddressSpaceHandle;   // whether addressSpaceHandle is valid
    // Address-space device handles, keyed by the backing resource id.
    std::unordered_map<VirtioGpuResId, uint32_t> addressSpaceHandles;
    // Blob creation args recorded per blob — NOTE(review): key looks like a
    // blob id; confirm against the create-blob path.
    std::unordered_map<uint32_t, struct stream_renderer_resource_create_args> blobMap;
    // Most recently created exportable fence for this context, if any.
    std::shared_ptr<gfxstream::SyncDescriptorInfo> latestFence;
};
249
// Classifies how a virtio-gpu resource is backed and used on the host.
enum class ResType {
    // Used as a communication channel between the guest and the host
    // which does not need an allocation on the host GPU.
    PIPE,
    // Used as a GPU data buffer.
    BUFFER,
    // Used as a GPU texture.
    COLOR_BUFFER,
    // Used as a blob and not known to FrameBuffer.
    BLOB,
};
261
// RAII wrapper over an aligned host allocation. Used as ring-blob backing
// when the memory does not need to be exportable to other processes.
struct AlignedMemory {
    void* addr = nullptr;  // nullptr if allocation failed

    AlignedMemory(size_t align, size_t size)
        : addr(android::aligned_buf_alloc(align, size)) {}

    ~AlignedMemory() {
        if (addr != nullptr) {
            android::aligned_buf_free(addr);
        }
    }

    // AlignedMemory is neither copyable nor movable.
    AlignedMemory(const AlignedMemory& other) = delete;
    AlignedMemory& operator=(const AlignedMemory& other) = delete;
    AlignedMemory(AlignedMemory&& other) = delete;
    AlignedMemory& operator=(AlignedMemory&& other) = delete;
};
280
// Memory used as a ring buffer for communication between the guest and host.
// Backed either by process-local AlignedMemory (not exportable) or by
// SharedMemory (exportable as an OS handle).
class RingBlob : public std::variant<std::unique_ptr<AlignedMemory>,
                                     std::unique_ptr<SharedMemory>> {
  public:
    using BaseType = std::variant<std::unique_ptr<AlignedMemory>,
                                  std::unique_ptr<SharedMemory>>;
    // Inherit constructors.
    using BaseType::BaseType;

    // True when the backing store is SharedMemory, i.e. can be exported.
    bool isExportable() const {
        return std::holds_alternative<std::unique_ptr<SharedMemory>>(*this);
    }

    // Releases ownership of the underlying OS handle. Returns the invalid
    // handle when this blob is not exportable.
    SharedMemory::handle_type releaseHandle() {
        if (!isExportable()) {
            return SharedMemory::invalidHandle();
        }
        return std::get<std::unique_ptr<SharedMemory>>(*this)->releaseHandle();
    }
};
301
302
// Per-resource bookkeeping for a virtio-gpu resource.
struct PipeResEntry {
    stream_renderer_resource_create_args args;  // creation args (id, format, dims, ...)
    iovec* iov;          // guest backing pages attached via attach_iov
    uint32_t numIovs;    // number of entries in iov
    void* linear;        // host-side contiguous staging copy (see sync_iov)
    size_t linearSize;   // size of the staging copy in bytes
    GoldfishHostPipe* hostPipe;  // pipe serviced by this resource (PIPE type)
    VirtioGpuCtxId ctxId;        // owning context, if attached
    void* hva;           // host virtual address for mapped blobs
    uint64_t hvaSize;    // size of the hva mapping
    uint64_t blobId;     // blob id this resource was created from, if any
    uint32_t blobMem;    // blob memory type supplied at blob creation
    uint32_t blobFlags;  // blob flags supplied at blob creation
    uint32_t caching;    // cache attribute reported for guest mappings
    ResType type;        // PIPE / BUFFER / COLOR_BUFFER / BLOB
    std::shared_ptr<RingBlob> ringBlob;  // set when this resource is a ring blob
    bool externalAddr = false;  // true when hva is externally-owned memory
    std::shared_ptr<BlobDescriptorInfo> descriptorInfo = nullptr;  // exportable descriptor, if any
};
322
// Rounds `n` up to the next multiple of `a` (any non-zero alignment).
static inline uint32_t align_up(uint32_t n, uint32_t a) {
    const uint32_t quotient = (n + a - 1) / a;
    return quotient * a;
}
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700324
// Rounds `n` up to the next multiple of `a`, where `a` must be a power of two.
static inline uint32_t align_up_power_of_2(uint32_t n, uint32_t a) {
    const uint32_t mask = a - 1;
    return (n + mask) & ~mask;
}
328
// Virgl format values used here but not exposed by the bundled virgl_hw.h.
#define VIRGL_FORMAT_NV12 166
#define VIRGL_FORMAT_YV12 163
#define VIRGL_FORMAT_P010 314

// GL enum values spelled out locally to avoid a GL header dependency.
const uint32_t kGlBgra = 0x80e1;              // GL_BGRA
const uint32_t kGlRgba = 0x1908;              // GL_RGBA
const uint32_t kGlRgba16f = 0x881A;           // GL_RGBA16F
const uint32_t kGlRgb565 = 0x8d62;            // GL_RGB565
const uint32_t kGlRgba1010102 = 0x8059;       // GL_RGB10_A2
const uint32_t kGlR8 = 0x8229;                // GL_R8
const uint32_t kGlR16 = 0x822A;               // GL_R16
const uint32_t kGlRg8 = 0x822b;               // GL_RG8
const uint32_t kGlLuminance = 0x1909;         // GL_LUMINANCE
const uint32_t kGlLuminanceAlpha = 0x190a;    // GL_LUMINANCE_ALPHA
const uint32_t kGlUnsignedByte = 0x1401;      // GL_UNSIGNED_BYTE
const uint32_t kGlUnsignedShort565 = 0x8363;  // GL_UNSIGNED_SHORT_5_6_5
const uint32_t kGlDepth16 = 0x81A5;           // GL_DEPTH_COMPONENT16
const uint32_t kGlDepth24 = 0x81A6;           // GL_DEPTH_COMPONENT24
const uint32_t kGlDepth24Stencil8 = 0x88F0;   // GL_DEPTH24_STENCIL8
const uint32_t kGlDepth32f = 0x8CAC;          // GL_DEPTH_COMPONENT32F
// NOTE(review): "Stencil78" looks like a typo for "Stencil8"; 0x8CAD is
// GL_DEPTH32F_STENCIL8. Renaming requires touching all uses.
const uint32_t kGlDepth32fStencil78 = 0x8CAD;

// Framework-side format identifiers (see virgl_format_to_fwk_format below).
constexpr uint32_t kFwkFormatGlCompat = 0;
constexpr uint32_t kFwkFormatYV12 = 1;
// constexpr uint32_t kFwkFormatYUV420888 = 2;
constexpr uint32_t kFwkFormatNV12 = 3;
constexpr uint32_t kFwkFormatP010 = 4;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700356
357static inline bool virgl_format_is_yuv(uint32_t format) {
358 switch (format) {
359 case VIRGL_FORMAT_B8G8R8X8_UNORM:
360 case VIRGL_FORMAT_B8G8R8A8_UNORM:
361 case VIRGL_FORMAT_R8G8B8X8_UNORM:
362 case VIRGL_FORMAT_R8G8B8A8_UNORM:
363 case VIRGL_FORMAT_B5G6R5_UNORM:
364 case VIRGL_FORMAT_R8_UNORM:
Jason Macnakaa8bfb92021-02-11 19:27:46 -0800365 case VIRGL_FORMAT_R16_UNORM:
366 case VIRGL_FORMAT_R16G16B16A16_FLOAT:
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700367 case VIRGL_FORMAT_R8G8_UNORM:
Jason Macnakc69b5b32021-09-15 17:03:06 -0700368 case VIRGL_FORMAT_R10G10B10A2_UNORM:
Jean-François Thiberta1539a92024-06-13 13:28:59 -0400369 case VIRGL_FORMAT_Z16_UNORM:
370 case VIRGL_FORMAT_Z24X8_UNORM:
371 case VIRGL_FORMAT_Z24_UNORM_S8_UINT:
372 case VIRGL_FORMAT_Z32_FLOAT:
373 case VIRGL_FORMAT_Z32_FLOAT_S8X24_UINT:
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700374 return false;
375 case VIRGL_FORMAT_NV12:
Jason Macnak98ec0262022-03-30 12:36:52 -0700376 case VIRGL_FORMAT_P010:
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700377 case VIRGL_FORMAT_YV12:
378 return true;
379 default:
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700380 stream_renderer_error("Unknown virgl format 0x%x", format);
Doug Horn0c2ea5a2021-10-29 17:30:16 -0700381 return false;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700382 }
383}
384
// Maps a virgl resource format to the GL internal format used when the
// resource is realized host-side. YUV formats are emulated as RGBA8888;
// unknown formats fall back to RGBA.
static inline uint32_t virgl_format_to_gl(uint32_t virgl_format) {
    switch (virgl_format) {
        case VIRGL_FORMAT_B8G8R8X8_UNORM:
        case VIRGL_FORMAT_B8G8R8A8_UNORM:
            return kGlBgra;
        case VIRGL_FORMAT_R8G8B8X8_UNORM:
        case VIRGL_FORMAT_R8G8B8A8_UNORM:
            return kGlRgba;
        case VIRGL_FORMAT_B5G6R5_UNORM:
            return kGlRgb565;
        case VIRGL_FORMAT_R16_UNORM:
            return kGlR16;
        case VIRGL_FORMAT_R16G16B16A16_FLOAT:
            return kGlRgba16f;
        case VIRGL_FORMAT_R8_UNORM:
            return kGlR8;
        case VIRGL_FORMAT_R8G8_UNORM:
            return kGlRg8;
        case VIRGL_FORMAT_NV12:
        case VIRGL_FORMAT_P010:
        case VIRGL_FORMAT_YV12:
            // emulated as RGBA8888
            return kGlRgba;
        case VIRGL_FORMAT_R10G10B10A2_UNORM:
            return kGlRgba1010102;
        case VIRGL_FORMAT_Z16_UNORM:
            return kGlDepth16;
        case VIRGL_FORMAT_Z24X8_UNORM:
            return kGlDepth24;
        case VIRGL_FORMAT_Z24_UNORM_S8_UINT:
            return kGlDepth24Stencil8;
        case VIRGL_FORMAT_Z32_FLOAT:
            return kGlDepth32f;
        case VIRGL_FORMAT_Z32_FLOAT_S8X24_UINT:
            // NOTE(review): constant name looks like a typo for "Stencil8".
            return kGlDepth32fStencil78;
        default:
            return kGlRgba;
    }
}
424
425static inline uint32_t virgl_format_to_fwk_format(uint32_t virgl_format) {
426 switch (virgl_format) {
427 case VIRGL_FORMAT_NV12:
428 return kFwkFormatNV12;
Jason Macnak98ec0262022-03-30 12:36:52 -0700429 case VIRGL_FORMAT_P010:
430 return kFwkFormatP010;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700431 case VIRGL_FORMAT_YV12:
432 return kFwkFormatYV12;
433 case VIRGL_FORMAT_R8_UNORM:
Jason Macnakaa8bfb92021-02-11 19:27:46 -0800434 case VIRGL_FORMAT_R16_UNORM:
435 case VIRGL_FORMAT_R16G16B16A16_FLOAT:
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700436 case VIRGL_FORMAT_R8G8_UNORM:
437 case VIRGL_FORMAT_B8G8R8X8_UNORM:
438 case VIRGL_FORMAT_B8G8R8A8_UNORM:
439 case VIRGL_FORMAT_R8G8B8X8_UNORM:
440 case VIRGL_FORMAT_R8G8B8A8_UNORM:
441 case VIRGL_FORMAT_B5G6R5_UNORM:
Jason Macnakc69b5b32021-09-15 17:03:06 -0700442 case VIRGL_FORMAT_R10G10B10A2_UNORM:
Jean-François Thiberta1539a92024-06-13 13:28:59 -0400443 case VIRGL_FORMAT_Z16_UNORM:
444 case VIRGL_FORMAT_Z24X8_UNORM:
445 case VIRGL_FORMAT_Z24_UNORM_S8_UINT:
446 case VIRGL_FORMAT_Z32_FLOAT:
447 case VIRGL_FORMAT_Z32_FLOAT_S8X24_UINT:
Idan Raiter995ba8a2022-12-04 16:44:59 -0800448 default: // kFwkFormatGlCompat: No extra conversions needed
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700449 return kFwkFormatGlCompat;
450 }
451}
452
453static inline uint32_t gl_format_to_natural_type(uint32_t format) {
454 switch (format) {
455 case kGlBgra:
456 case kGlRgba:
457 case kGlLuminance:
458 case kGlLuminanceAlpha:
459 return kGlUnsignedByte;
460 case kGlRgb565:
461 return kGlUnsignedShort565;
462 default:
463 return kGlUnsignedByte;
464 }
465}
466
// Computes the starting byte offset of box (x, y) within the linear copy of a
// resource. YUV transfers always start at offset 0 (handled as whole-image);
// otherwise the offset is y * stride + x * bpp with the stride derived from
// the full resource width. Returns 0 (with an error log) for unknown formats.
// NOTE(review): the depth (Z*) formats handled elsewhere in this file fall
// into the error default here — confirm they never take this path.
static inline size_t virgl_format_to_linear_base(uint32_t format, uint32_t totalWidth,
                                                 uint32_t totalHeight, uint32_t x, uint32_t y,
                                                 uint32_t w, uint32_t h) {
    if (virgl_format_is_yuv(format)) {
        return 0;
    } else {
        uint32_t bpp = 4;  // bytes per pixel
        switch (format) {
            case VIRGL_FORMAT_R16G16B16A16_FLOAT:
                bpp = 8;
                break;
            case VIRGL_FORMAT_B8G8R8X8_UNORM:
            case VIRGL_FORMAT_B8G8R8A8_UNORM:
            case VIRGL_FORMAT_R8G8B8X8_UNORM:
            case VIRGL_FORMAT_R8G8B8A8_UNORM:
            case VIRGL_FORMAT_R10G10B10A2_UNORM:
                bpp = 4;
                break;
            case VIRGL_FORMAT_B5G6R5_UNORM:
            case VIRGL_FORMAT_R8G8_UNORM:
            case VIRGL_FORMAT_R16_UNORM:
                bpp = 2;
                break;
            case VIRGL_FORMAT_R8_UNORM:
                bpp = 1;
                break;
            default:
                stream_renderer_error("Unknown virgl format: 0x%x", format);
                return 0;
        }

        uint32_t stride = totalWidth * bpp;
        return y * stride + x * bpp;
    }
    return 0;  // unreachable: both branches above return
}
503
// Computes the total number of bytes a box transfer covers in the linear
// buffer. For YUV formats the whole image (Y plane + chroma planes, with
// format-specific strides) is transferred; for linear RGB(A) formats the
// length spans from the box's first byte to its last, where the final row
// only occupies w * bpp rather than the full stride.
static inline size_t virgl_format_to_total_xfer_len(uint32_t format, uint32_t totalWidth,
                                                    uint32_t totalHeight, uint32_t x, uint32_t y,
                                                    uint32_t w, uint32_t h) {
    if (virgl_format_is_yuv(format)) {
        // P010 uses 16 bits per sample; NV12/YV12 use 8.
        uint32_t bpp = format == VIRGL_FORMAT_P010 ? 2 : 1;

        uint32_t yWidth = totalWidth;
        uint32_t yHeight = totalHeight;
        uint32_t yStridePixels;
        if (format == VIRGL_FORMAT_NV12) {
            yStridePixels = yWidth;
        } else if (format == VIRGL_FORMAT_P010) {
            yStridePixels = yWidth;
        } else if (format == VIRGL_FORMAT_YV12) {
            // YV12 requires the Y stride to be aligned to 32 pixels.
            yStridePixels = align_up_power_of_2(yWidth, 32);
        } else {
            stream_renderer_error("Unknown virgl format: 0x%x", format);
            return 0;
        }
        uint32_t yStrideBytes = yStridePixels * bpp;
        uint32_t ySize = yStrideBytes * yHeight;

        // Chroma layout: NV12/P010 have one interleaved UV plane at full Y
        // stride; YV12 has two planar U/V planes at half the Y stride.
        uint32_t uvStridePixels;
        uint32_t uvPlaneCount;
        if (format == VIRGL_FORMAT_NV12) {
            uvStridePixels = yStridePixels;
            uvPlaneCount = 1;
        } else if (format == VIRGL_FORMAT_P010) {
            uvStridePixels = yStridePixels;
            uvPlaneCount = 1;
        } else if (format == VIRGL_FORMAT_YV12) {
            uvStridePixels = yStridePixels / 2;
            uvPlaneCount = 2;
        } else {
            stream_renderer_error("Unknown virgl yuv format: 0x%x", format);
            return 0;
        }
        uint32_t uvStrideBytes = uvStridePixels * bpp;
        uint32_t uvHeight = totalHeight / 2;  // 4:2:0 chroma subsampling
        uint32_t uvSize = uvStrideBytes * uvHeight * uvPlaneCount;

        uint32_t dataSize = ySize + uvSize;
        return dataSize;
    } else {
        uint32_t bpp = 4;  // bytes per pixel
        switch (format) {
            case VIRGL_FORMAT_R16G16B16A16_FLOAT:
                bpp = 8;
                break;
            case VIRGL_FORMAT_B8G8R8X8_UNORM:
            case VIRGL_FORMAT_B8G8R8A8_UNORM:
            case VIRGL_FORMAT_R8G8B8X8_UNORM:
            case VIRGL_FORMAT_R8G8B8A8_UNORM:
            case VIRGL_FORMAT_R10G10B10A2_UNORM:
                bpp = 4;
                break;
            case VIRGL_FORMAT_B5G6R5_UNORM:
            case VIRGL_FORMAT_R16_UNORM:
            case VIRGL_FORMAT_R8G8_UNORM:
                bpp = 2;
                break;
            case VIRGL_FORMAT_R8_UNORM:
                bpp = 1;
                break;
            default:
                // NOTE(review): depth (Z*) formats also land here — confirm
                // they are never box-transferred through this path.
                stream_renderer_error("Unknown virgl format: 0x%x", format);
                return 0;
        }

        uint32_t stride = totalWidth * bpp;
        // Last row is partial: (h - 1) full strides plus w * bpp bytes.
        return (h - 1U) * stride + w * bpp;
    }
    return 0;  // unreachable: both branches above return
}
578
// Direction of a synchronization between the guest iovec list backing a
// resource and the host-side contiguous ("linear") staging buffer.
enum IovSyncDir {
    IOV_TO_LINEAR = 0,
    LINEAR_TO_IOV = 1,
};

// Copies the bytes covered by `box` between res->iov[] (guest pages) and
// res->linear (host staging buffer) in the direction given by `dir`.
// Returns 0 on success, -EINVAL on any bounds violation.
// NOTE(review): `offset` is only logged, never used for addressing — the
// linear range is derived from `box` alone; confirm that is intended.
static int sync_iov(PipeResEntry* res, uint64_t offset, const stream_renderer_box* box,
                    IovSyncDir dir) {
    stream_renderer_debug("offset: 0x%llx box: %u %u %u %u size %u x %u iovs %u linearSize %zu",
                          (unsigned long long)offset, box->x, box->y, box->w, box->h,
                          res->args.width, res->args.height, res->numIovs, res->linearSize);

    // Validate the box against the resource's dimensions.
    if (box->x > res->args.width || box->y > res->args.height) {
        stream_renderer_error("Box out of range of resource");
        return -EINVAL;
    }
    if (box->w == 0U || box->h == 0U) {
        stream_renderer_error("Empty transfer");
        return -EINVAL;
    }
    if (box->x + box->w > res->args.width) {
        stream_renderer_error("Box overflows resource width");
        return -EINVAL;
    }

    // [start, end) is the byte range of the transfer within the linear buffer.
    size_t linearBase = virgl_format_to_linear_base(
        res->args.format, res->args.width, res->args.height, box->x, box->y, box->w, box->h);
    size_t start = linearBase;
    // height - 1 in order to treat the (w * bpp) row specially
    // (i.e., the last row does not occupy the full stride)
    size_t length = virgl_format_to_total_xfer_len(
        res->args.format, res->args.width, res->args.height, box->x, box->y, box->w, box->h);
    size_t end = start + length;

    if (start == end) {
        stream_renderer_error("nothing to transfer");
        return -EINVAL;
    }

    if (end > res->linearSize) {
        stream_renderer_error("start + length overflows!");
        return -EINVAL;
    }

    // Walk the iovec list in order, copying the overlap of each iovec's byte
    // range [iovOffset, iovOffset + iovLen) with the transfer range
    // [start, end).
    uint32_t iovIndex = 0;
    size_t iovOffset = 0;
    size_t written = 0;
    char* linear = static_cast<char*>(res->linear);

    while (written < length) {
        if (iovIndex >= res->numIovs) {
            stream_renderer_error("write request overflowed numIovs");
            return -EINVAL;
        }

        const char* iovBase_const = static_cast<const char*>(res->iov[iovIndex].iov_base);
        char* iovBase = static_cast<char*>(res->iov[iovIndex].iov_base);
        size_t iovLen = res->iov[iovIndex].iov_len;
        size_t iovOffsetEnd = iovOffset + iovLen;

        // Intersect this iovec's range with the transfer range.
        auto lower_intersect = std::max(iovOffset, start);
        auto upper_intersect = std::min(iovOffsetEnd, end);
        if (lower_intersect < upper_intersect) {
            size_t toWrite = upper_intersect - lower_intersect;
            switch (dir) {
                case IOV_TO_LINEAR:
                    memcpy(linear + lower_intersect, iovBase_const + lower_intersect - iovOffset,
                           toWrite);
                    break;
                case LINEAR_TO_IOV:
                    memcpy(iovBase + lower_intersect - iovOffset, linear + lower_intersect,
                           toWrite);
                    break;
                default:
                    stream_renderer_error("Invalid synchronization dir");
                    return -EINVAL;
            }
            written += toWrite;
        }
        ++iovIndex;
        iovOffset += iovLen;
    }

    return 0;
}
663
// Assembles a 64-bit value from two 32-bit halves: |lo| supplies bits 0-31
// and |hi| supplies bits 32-63.
static uint64_t convert32to64(uint32_t lo, uint32_t hi) {
    uint64_t combined = (uint64_t)hi;
    combined <<= 32;
    combined |= (uint64_t)lo;
    return combined;
}
667
// Runs cleanup callbacks on a dedicated background worker thread so that
// potentially slow teardown work does not block virtio-gpu command processing.
// Tasks are processed strictly in enqueue order; an Exit sentinel stops the
// worker. Destruction joins the worker thread.
class CleanupThread {
   public:
    // A cleanup job: any callable with no arguments and no result.
    using GenericCleanup = std::function<void()>;

    // Starts the worker immediately. The handler keeps consuming tasks until
    // it sees an Exit sentinel, at which point the worker loop stops.
    CleanupThread() : mWorker([](CleanupTask task) {
        return std::visit([](auto&& work) {
            using T = std::decay_t<decltype(work)>;
            if constexpr (std::is_same_v<T, GenericCleanup>) {
                work();
                return android::base::WorkerProcessingResult::Continue;
            } else if constexpr (std::is_same_v<T, Exit>) {
                return android::base::WorkerProcessingResult::Stop;
            }
        }, std::move(task));
    }) {
        mWorker.start();
    }

    // Joins the worker via stop(). NOTE(review): calling stop() manually and
    // then destroying the object enqueues Exit twice / joins twice — confirm
    // WorkerThread tolerates that.
    ~CleanupThread() { stop(); }

    // CleanupThread is neither copyable nor movable.
    CleanupThread(const CleanupThread& other) = delete;
    CleanupThread& operator=(const CleanupThread& other) = delete;
    CleanupThread(CleanupThread&& other) = delete;
    CleanupThread& operator=(CleanupThread&& other) = delete;

    // Queues |command| to run asynchronously on the worker thread.
    void enqueueCleanup(GenericCleanup command) {
        mWorker.enqueue(std::move(command));
    }

    // Drains all previously enqueued work, then stops and joins the worker.
    void stop() {
        mWorker.enqueue(Exit{});
        mWorker.join();
    }

   private:
    struct Exit {};  // Sentinel task that terminates the worker loop.
    using CleanupTask = std::variant<GenericCleanup, Exit>;
    android::base::WorkerThread<CleanupTask> mWorker;
};
708
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700709class PipeVirglRenderer {
Idan Raiter995ba8a2022-12-04 16:44:59 -0800710 public:
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700711 PipeVirglRenderer() = default;
712
Jason Macnak26872122024-02-23 10:46:09 -0800713 int init(void* cookie, gfxstream::host::FeatureSet features, stream_renderer_fence_callback fence_callback) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800714 stream_renderer_debug("cookie: %p", cookie);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700715 mCookie = cookie;
Jason Macnak26872122024-02-23 10:46:09 -0800716 mFeatures = features;
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700717 mFenceCallback = fence_callback;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700718 mAddressSpaceDeviceControlOps = get_address_space_device_control_ops();
719 if (!mAddressSpaceDeviceControlOps) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700720 stream_renderer_error("Could not get address space device control ops!");
721 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700722 }
Gurchetan Singh9d89d5a2023-07-21 08:51:15 -0700723 mVirtioGpuTimelines = VirtioGpuTimelines::create(true);
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700724 mVirtioGpuTimelines = VirtioGpuTimelines::create(true);
Gurchetan Singhee6107d2023-08-21 10:59:39 -0700725
726#if !defined(_WIN32)
727 mPageSize = getpagesize();
728#endif
729
Jason Macnak77ddccd2024-03-15 15:53:30 -0700730 mCleanupThread.reset(new CleanupThread());
731
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700732 return 0;
733 }
734
Jason Macnak77ddccd2024-03-15 15:53:30 -0700735 void teardown() {
736 mCleanupThread.reset();
737 }
738
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700739 int resetPipe(GoldfishHwPipe* hwPipe, GoldfishHostPipe* hostPipe) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800740 stream_renderer_debug("Want to reset hwpipe %p to hostpipe %p", hwPipe, hostPipe);
Gurchetan Singhd8485dd2022-03-17 14:58:54 -0700741 VirtioGpuCtxId asCtxId = (VirtioGpuCtxId)(uintptr_t)hwPipe;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700742 auto it = mContexts.find(asCtxId);
743 if (it == mContexts.end()) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700744 stream_renderer_error("fatal: pipe id %u", asCtxId);
745 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700746 }
747
748 auto& entry = it->second;
Jason Macnak2c826b72024-02-27 16:10:55 -0800749 stream_renderer_debug("ctxid: %u prev hostpipe: %p", asCtxId, entry.hostPipe);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700750 entry.hostPipe = hostPipe;
Jason Macnak2c826b72024-02-27 16:10:55 -0800751 stream_renderer_debug("ctxid: %u next hostpipe: %p", asCtxId, entry.hostPipe);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700752
753 // Also update any resources associated with it
754 auto resourcesIt = mContextResources.find(asCtxId);
755
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700756 if (resourcesIt == mContextResources.end()) {
757 return 0;
758 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700759
760 const auto& resIds = resourcesIt->second;
761
762 for (auto resId : resIds) {
763 auto resEntryIt = mResources.find(resId);
764 if (resEntryIt == mResources.end()) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700765 stream_renderer_error("entry with res id %u not found", resId);
766 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700767 }
768
769 auto& resEntry = resEntryIt->second;
770 resEntry.hostPipe = hostPipe;
771 }
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700772
773 return 0;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700774 }
775
    // Creates a virtio-gpu context: opens a host pipe keyed by |ctx_id| and
    // records a PipeCtxEntry for it. |context_init| carries the capset id.
    // Returns 0 on success, -EINVAL if the host pipe cannot be opened.
    int createContext(VirtioGpuCtxId ctx_id, uint32_t nlen, const char* name,
                      uint32_t context_init) {
        std::string contextName(name, nlen);

        stream_renderer_debug("ctxid: %u len: %u name: %s", ctx_id, nlen, contextName.c_str());
        auto ops = ensureAndGetServiceOps();
        // The context id itself is smuggled through as the "hw pipe" pointer;
        // resetPipe() performs the inverse cast.
        auto hostPipe = ops->guest_open_with_flags(reinterpret_cast<GoldfishHwPipe*>(ctx_id),
                                                   0x1 /* is virtio */);

        if (!hostPipe) {
            stream_renderer_error("failed to create hw pipe!");
            return -EINVAL;
        }
        std::unordered_map<uint32_t, uint32_t> map;
        std::unordered_map<uint32_t, struct stream_renderer_resource_create_args> blobMap;

        // Positional aggregate init: order must match PipeCtxEntry's fields.
        PipeCtxEntry res = {
            std::move(contextName),  // contextName
            context_init,            // capsetId
            ctx_id,                  // ctxId
            hostPipe,                // hostPipe
            0,                       // fence
            0,                       // AS handle
            false,                   // does not have an AS handle
            map,                     // resourceId --> ASG handle map
            blobMap,                 // blobId -> resource create args
        };

        stream_renderer_debug("initial host pipe for ctxid %u: %p", ctx_id, hostPipe);
        mContexts[ctx_id] = res;
        android_onGuestGraphicsProcessCreate(ctx_id);
        return 0;
    }
809
Gurchetan Singhd8485dd2022-03-17 14:58:54 -0700810 int destroyContext(VirtioGpuCtxId handle) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800811 stream_renderer_debug("ctxid: %u", handle);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700812
813 auto it = mContexts.find(handle);
814 if (it == mContexts.end()) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800815 stream_renderer_error("could not find context handle %u", handle);
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700816 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700817 }
818
819 if (it->second.hasAddressSpaceHandle) {
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700820 for (auto const& [resourceId, handle] : it->second.addressSpaceHandles) {
Jason Macnak77ddccd2024-03-15 15:53:30 -0700821 // Note: this can hang as is but this has only been observed to
822 // happen during shutdown. See b/329287602#comment8.
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700823 mAddressSpaceDeviceControlOps->destroy_handle(handle);
824 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700825 }
826
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700827 auto hostPipe = it->second.hostPipe;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700828 if (!hostPipe) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700829 stream_renderer_error("0 is not a valid hostpipe");
830 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700831 }
832
Jason Macnak459dd872023-09-13 08:27:41 -0700833 auto ops = ensureAndGetServiceOps();
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700834 ops->guest_close(hostPipe, GOLDFISH_PIPE_CLOSE_GRACEFUL);
Jason Macnak459dd872023-09-13 08:27:41 -0700835
Gurchetan Singh19587942023-09-15 10:33:59 -0700836 android_cleanupProcGLObjects(handle);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700837 mContexts.erase(it);
838 return 0;
839 }
840
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700841 int setContextAddressSpaceHandleLocked(VirtioGpuCtxId ctxId, uint32_t handle,
842 uint32_t resourceId) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700843 auto ctxIt = mContexts.find(ctxId);
844 if (ctxIt == mContexts.end()) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700845 stream_renderer_error("ctx id %u is not found", ctxId);
846 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700847 }
848
849 auto& ctxEntry = ctxIt->second;
850 ctxEntry.addressSpaceHandle = handle;
851 ctxEntry.hasAddressSpaceHandle = true;
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700852 ctxEntry.addressSpaceHandles[resourceId] = handle;
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700853 return 0;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700854 }
855
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700856 uint32_t getAddressSpaceHandleLocked(VirtioGpuCtxId ctxId, uint32_t resourceId) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700857 auto ctxIt = mContexts.find(ctxId);
858 if (ctxIt == mContexts.end()) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700859 stream_renderer_error("ctx id %u is not found", ctxId);
860 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700861 }
862
863 auto& ctxEntry = ctxIt->second;
864
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700865 if (!ctxEntry.addressSpaceHandles.count(resourceId)) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700866 stream_renderer_error("ASG context with resource id %u", resourceId);
867 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700868 }
869
Gurchetan Singhe2fe8e32022-08-22 08:53:45 -0700870 return ctxEntry.addressSpaceHandles[resourceId];
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700871 }
872
// Reads a packed guest command struct out of a raw dword buffer: declares a
// zero-initialized local |variable| of |type| and memcpy's sizeof(type) bytes
// from |input| into it (memcpy avoids alignment/aliasing violations).
#define DECODE(variable, type, input) \
    type variable = {};               \
    memcpy(&variable, input, sizeof(type));
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700876
    // Handles address-space-graphics (ASG) commands embedded in a guest
    // command stream. GFXSTREAM_CONTEXT_CREATE builds an ASG instance over the
    // coherent resource's host mapping; GFXSTREAM_CONTEXT_PING notifies the
    // ASG consumer that data is available. Unknown opcodes are ignored.
    // Returns 0 on success or ignored opcode, -EINVAL on lookup failure.
    int addressSpaceProcessCmd(VirtioGpuCtxId ctxId, uint32_t* dwords) {
        DECODE(header, gfxstream::gfxstreamHeader, dwords)

        switch (header.opCode) {
            case GFXSTREAM_CONTEXT_CREATE: {
                DECODE(contextCreate, gfxstream::gfxstreamContextCreate, dwords)

                auto resEntryIt = mResources.find(contextCreate.resourceId);
                if (resEntryIt == mResources.end()) {
                    stream_renderer_error("ASG coherent resource %u not found",
                                          contextCreate.resourceId);
                    return -EINVAL;
                }

                auto ctxIt = mContexts.find(ctxId);
                if (ctxIt == mContexts.end()) {
                    stream_renderer_error("ctx id %u not found", ctxId);
                    return -EINVAL;
                }

                auto& ctxEntry = ctxIt->second;
                auto& resEntry = resEntryIt->second;

                std::string name = ctxEntry.name + "-" + std::to_string(contextCreate.resourceId);

                // Note: resource ids can not be used as ASG handles because ASGs may outlive the
                // containing resource due asynchronous ASG destruction.
                uint32_t handle = mAddressSpaceDeviceControlOps->gen_handle();

                struct AddressSpaceCreateInfo createInfo = {
                    .handle = handle,
                    .type = android::emulation::VirtioGpuGraphics,
                    .createRenderThread = true,
                    // The ASG rides directly on the resource's host mapping.
                    .externalAddr = resEntry.hva,
                    .externalAddrSize = resEntry.hvaSize,
                    .virtioGpuContextId = ctxId,
                    .virtioGpuCapsetId = ctxEntry.capsetId,
                    // NOTE(review): contextName points at the suffixed |name|
                    // but contextNameSize is the *unsuffixed* ctxEntry.name
                    // length — looks inconsistent; confirm which is intended.
                    .contextName = name.c_str(),
                    .contextNameSize = static_cast<uint32_t>(ctxEntry.name.size()),
                };

                mAddressSpaceDeviceControlOps->create_instance(createInfo);
                if (setContextAddressSpaceHandleLocked(ctxId, handle, contextCreate.resourceId)) {
                    return -EINVAL;
                }
                break;
            }
            case GFXSTREAM_CONTEXT_PING: {
                DECODE(contextPing, gfxstream::gfxstreamContextPing, dwords)

                struct android::emulation::AddressSpaceDevicePingInfo ping = {0};
                ping.metadata = ASG_NOTIFY_AVAILABLE;

                mAddressSpaceDeviceControlOps->ping_at_hva(
                    getAddressSpaceHandleLocked(ctxId, contextPing.resourceId), &ping);
                break;
            }
            default:
                break;
        }

        return 0;
    }
940
Gurchetan Singh561daa72023-07-11 08:54:01 -0700941 int submitCmd(struct stream_renderer_command* cmd) {
942 if (!cmd) return -EINVAL;
943
944 void* buffer = reinterpret_cast<void*>(cmd->cmd);
945
Jason Macnake95cdca2022-07-12 11:41:23 -0700946 VirtioGpuRing ring = VirtioGpuRingGlobal{};
Jason Macnak2c826b72024-02-27 16:10:55 -0800947 stream_renderer_debug("ctx: % u, ring: %s buffer: %p dwords: %d", cmd->ctx_id,
948 to_string(ring).c_str(), buffer, cmd->cmd_size);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700949
950 if (!buffer) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800951 stream_renderer_error("error: buffer null");
Gurchetan Singh561daa72023-07-11 08:54:01 -0700952 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700953 }
954
Gurchetan Singh561daa72023-07-11 08:54:01 -0700955 if (cmd->cmd_size < 4) {
Jason Macnak2c826b72024-02-27 16:10:55 -0800956 stream_renderer_error("error: not enough bytes (got %d)", cmd->cmd_size);
Gurchetan Singh561daa72023-07-11 08:54:01 -0700957 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700958 }
959
Jason Macnaked0c9e62023-03-30 15:58:24 -0700960 DECODE(header, gfxstream::gfxstreamHeader, buffer);
Gurchetan Singha7b07f52022-08-08 17:48:26 -0700961 switch (header.opCode) {
962 case GFXSTREAM_CONTEXT_CREATE:
963 case GFXSTREAM_CONTEXT_PING:
964 case GFXSTREAM_CONTEXT_PING_WITH_RESPONSE:
Gurchetan Singh01b8b482023-07-20 07:39:20 -0700965 if (addressSpaceProcessCmd(cmd->ctx_id, (uint32_t*)buffer)) {
966 return -EINVAL;
967 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700968 break;
Gurchetan Singha7b07f52022-08-08 17:48:26 -0700969 case GFXSTREAM_CREATE_EXPORT_SYNC: {
Jason Macnaked0c9e62023-03-30 15:58:24 -0700970 DECODE(exportSync, gfxstream::gfxstreamCreateExportSync, buffer)
Gurchetan Singha7b07f52022-08-08 17:48:26 -0700971
Idan Raiter995ba8a2022-12-04 16:44:59 -0800972 uint64_t sync_handle =
973 convert32to64(exportSync.syncHandleLo, exportSync.syncHandleHi);
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700974
Jason Macnak2c826b72024-02-27 16:10:55 -0800975 stream_renderer_debug("wait for gpu ring %s", to_string(ring).c_str());
Kaiyi Li1fdd22e2022-03-31 10:17:23 -0700976 auto taskId = mVirtioGpuTimelines->enqueueTask(ring);
Gurchetan Singhc25eeb62024-06-21 14:36:45 -0700977#if GFXSTREAM_ENABLE_HOST_GLES
978 gfxstream::FrameBuffer::getFB()->asyncWaitForGpuWithCb(sync_handle, [this, taskId] {
Kaiyi Li1fdd22e2022-03-31 10:17:23 -0700979 mVirtioGpuTimelines->notifyTaskCompletion(taskId);
980 });
Gurchetan Singhc25eeb62024-06-21 14:36:45 -0700981#endif
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700982 break;
983 }
Gurchetan Singha7b07f52022-08-08 17:48:26 -0700984 case GFXSTREAM_CREATE_EXPORT_SYNC_VK:
985 case GFXSTREAM_CREATE_IMPORT_SYNC_VK: {
Jason Macnakb777f002023-05-10 11:13:35 -0700986 // The guest sync export assumes fence context support and always uses
987 // VIRTGPU_EXECBUF_RING_IDX. With this, the task created here must use
988 // the same ring as the fence created for the virtio gpu command or the
989 // fence may be signaled without properly waiting for the task to complete.
990 ring = VirtioGpuRingContextSpecific{
Gurchetan Singh561daa72023-07-11 08:54:01 -0700991 .mCtxId = cmd->ctx_id,
Jason Macnakb777f002023-05-10 11:13:35 -0700992 .mRingIdx = 0,
993 };
994
Jason Macnaked0c9e62023-03-30 15:58:24 -0700995 DECODE(exportSyncVK, gfxstream::gfxstreamCreateExportSyncVK, buffer)
Lingfeng Yanga285eb42020-10-30 12:39:56 -0700996
Idan Raiter995ba8a2022-12-04 16:44:59 -0800997 uint64_t device_handle =
998 convert32to64(exportSyncVK.deviceHandleLo, exportSyncVK.deviceHandleHi);
Gurchetan Singha7b07f52022-08-08 17:48:26 -0700999
Idan Raiter995ba8a2022-12-04 16:44:59 -08001000 uint64_t fence_handle =
1001 convert32to64(exportSyncVK.fenceHandleLo, exportSyncVK.fenceHandleHi);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001002
Jason Macnak2c826b72024-02-27 16:10:55 -08001003 stream_renderer_debug("wait for gpu ring %s", to_string(ring).c_str());
Kaiyi Li1fdd22e2022-03-31 10:17:23 -07001004 auto taskId = mVirtioGpuTimelines->enqueueTask(ring);
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001005 gfxstream::FrameBuffer::getFB()->asyncWaitForGpuVulkanWithCb(
Kaiyi Li1fdd22e2022-03-31 10:17:23 -07001006 device_handle, fence_handle,
1007 [this, taskId] { mVirtioGpuTimelines->notifyTaskCompletion(taskId); });
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001008 break;
1009 }
Gurchetan Singha7b07f52022-08-08 17:48:26 -07001010 case GFXSTREAM_CREATE_QSRI_EXPORT_VK: {
Jason Macnake95cdca2022-07-12 11:41:23 -07001011 // The guest QSRI export assumes fence context support and always uses
1012 // VIRTGPU_EXECBUF_RING_IDX. With this, the task created here must use
1013 // the same ring as the fence created for the virtio gpu command or the
1014 // fence may be signaled without properly waiting for the task to complete.
1015 ring = VirtioGpuRingContextSpecific{
Gurchetan Singh561daa72023-07-11 08:54:01 -07001016 .mCtxId = cmd->ctx_id,
Jason Macnake95cdca2022-07-12 11:41:23 -07001017 .mRingIdx = 0,
1018 };
1019
Jason Macnaked0c9e62023-03-30 15:58:24 -07001020 DECODE(exportQSRI, gfxstream::gfxstreamCreateQSRIExportVK, buffer)
Gurchetan Singha7b07f52022-08-08 17:48:26 -07001021
Idan Raiter995ba8a2022-12-04 16:44:59 -08001022 uint64_t image_handle =
1023 convert32to64(exportQSRI.imageHandleLo, exportQSRI.imageHandleHi);
Gurchetan Singha7b07f52022-08-08 17:48:26 -07001024
Jason Macnak2c826b72024-02-27 16:10:55 -08001025 stream_renderer_debug("wait for gpu vk qsri ring %u image 0x%llx",
1026 to_string(ring).c_str(), (unsigned long long)image_handle);
Kaiyi Li1fdd22e2022-03-31 10:17:23 -07001027 auto taskId = mVirtioGpuTimelines->enqueueTask(ring);
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001028 gfxstream::FrameBuffer::getFB()->asyncWaitForGpuVulkanQsriWithCb(
1029 image_handle,
1030 [this, taskId] { mVirtioGpuTimelines->notifyTaskCompletion(taskId); });
Lingfeng Yang9e750252021-07-15 16:50:37 -07001031 break;
1032 }
Gurchetan Singhef5181f2024-06-25 14:10:19 -07001033 case GFXSTREAM_RESOURCE_CREATE_3D: {
1034 DECODE(create3d, gfxstream::gfxstreamResourceCreate3d, buffer)
1035 struct stream_renderer_resource_create_args rc3d = {0};
1036
1037 rc3d.target = create3d.target;
1038 rc3d.format = create3d.format;
1039 rc3d.bind = create3d.bind;
1040 rc3d.width = create3d.width;
1041 rc3d.height = create3d.height;
1042 rc3d.depth = create3d.depth;
1043 rc3d.array_size = create3d.arraySize;
1044 rc3d.last_level = create3d.lastLevel;
1045 rc3d.nr_samples = create3d.nrSamples;
1046 rc3d.flags = create3d.flags;
1047
1048 auto ctxIt = mContexts.find(cmd->ctx_id);
1049 if (ctxIt == mContexts.end()) {
1050 stream_renderer_error("ctx id %u is not found", cmd->ctx_id);
1051 return -EINVAL;
1052 }
1053
1054 auto& ctxEntry = ctxIt->second;
1055 if (ctxEntry.blobMap.count(create3d.blobId)) {
1056 stream_renderer_error("blob ID already in use");
1057 return -EINVAL;
1058 }
1059
1060 ctxEntry.blobMap[create3d.blobId] = rc3d;
1061 break;
1062 }
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -07001063 case GFXSTREAM_ACQUIRE_SYNC: {
1064 DECODE(acquireSync, gfxstream::gfxstreamAcquireSync, buffer);
1065
1066 auto ctxIt = mContexts.find(cmd->ctx_id);
1067 if (ctxIt == mContexts.end()) {
1068 stream_renderer_error("ctx id %u is not found", cmd->ctx_id);
1069 return -EINVAL;
1070 }
1071
1072 auto& ctxEntry = ctxIt->second;
1073 if (ctxEntry.latestFence) {
1074 stream_renderer_error("expected latest fence to empty");
1075 return -EINVAL;
1076 }
1077
1078 auto syncDescriptorInfoOpt = ExternalObjectManager::get()->removeSyncDescriptorInfo(
1079 cmd->ctx_id, acquireSync.syncId);
1080 if (syncDescriptorInfoOpt) {
1081 ctxEntry.latestFence = std::make_shared<gfxstream::SyncDescriptorInfo>(
1082 std::move(*syncDescriptorInfoOpt));
1083 } else {
1084 stream_renderer_error("failed to get sync descriptor info");
1085 return -EINVAL;
1086 }
1087
1088 break;
1089 }
Gurchetan Singh47215132023-04-04 02:53:32 +00001090 case GFXSTREAM_PLACEHOLDER_COMMAND_VK: {
1091 // Do nothing, this is a placeholder command
1092 break;
1093 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001094 default:
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001095 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001096 }
1097
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001098 return 0;
1099 }
1100
    // Enqueues a fence on |ring|. The fence's completion callback reports back
    // to the embedder through mFenceCallback: global-ring fences carry only
    // the fence id, while context-specific fences also carry ctx_id/ring_idx
    // and the RING_IDX flag. Returns 0 on success, -EINVAL if no callback
    // could be built for the ring variant.
    int createFence(uint64_t fence_id, const VirtioGpuRing& ring) {
        stream_renderer_debug("fenceid: %llu ring: %s", (unsigned long long)fence_id,
                              to_string(ring).c_str());

        // Visitor over the ring variant: produces the completion callback
        // appropriate for the ring type.
        struct {
            FenceCompletionCallback operator()(const VirtioGpuRingGlobal&) {
                return [renderer = mRenderer, fenceId = mFenceId] {
                    struct stream_renderer_fence fence = {0};
                    fence.fence_id = fenceId;
                    fence.flags = STREAM_RENDERER_FLAG_FENCE;
                    renderer->mFenceCallback(renderer->mCookie, &fence);
                };
            }
            FenceCompletionCallback operator()(const VirtioGpuRingContextSpecific& ring) {
                return [renderer = mRenderer, fenceId = mFenceId, ring] {
                    struct stream_renderer_fence fence = {0};
                    fence.fence_id = fenceId;
                    fence.flags = STREAM_RENDERER_FLAG_FENCE | STREAM_RENDERER_FLAG_FENCE_RING_IDX;
                    fence.ctx_id = ring.mCtxId;
                    fence.ring_idx = ring.mRingIdx;
                    renderer->mFenceCallback(renderer->mCookie, &fence);
                };
            }

            PipeVirglRenderer* mRenderer;
            VirtioGpuTimelines::FenceId mFenceId;
        } visitor{
            .mRenderer = this,
            .mFenceId = fence_id,
        };
        FenceCompletionCallback callback = std::visit(visitor, ring);
        if (!callback) {
            return -EINVAL;
        }
        // The timeline fires the callback once all tasks enqueued on |ring|
        // before this fence have completed.
        mVirtioGpuTimelines->enqueueFence(ring, fence_id, std::move(callback));

        return 0;
    }
1139
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -07001140 int acquireContextFence(uint32_t ctx_id, uint64_t fenceId) {
1141 auto ctxIt = mContexts.find(ctx_id);
1142 if (ctxIt == mContexts.end()) {
1143 stream_renderer_error("ctx id %u is not found", ctx_id);
1144 return -EINVAL;
1145 }
1146
1147 auto& ctxEntry = ctxIt->second;
1148 if (ctxEntry.latestFence) {
1149 mSyncMap[fenceId] = ctxEntry.latestFence;
1150 ctxEntry.latestFence = nullptr;
1151 } else {
1152 stream_renderer_error("Failed to acquire sync descriptor");
Gurchetan Singhee6d3212024-07-11 15:29:28 -07001153 return -EINVAL;
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -07001154 }
1155
1156 return 0;
1157 }
1158
Kaiyi Li1fdd22e2022-03-31 10:17:23 -07001159 void poll() { mVirtioGpuTimelines->poll(); }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001160
    // Mirror of gallium's pipe_texture_target values; getResourceType()
    // compares stream_renderer_resource_create_args::target against these.
    enum pipe_texture_target {
        PIPE_BUFFER,
        PIPE_TEXTURE_1D,
        PIPE_TEXTURE_2D,
        PIPE_TEXTURE_3D,
        PIPE_TEXTURE_CUBE,
        PIPE_TEXTURE_RECT,
        PIPE_TEXTURE_1D_ARRAY,
        PIPE_TEXTURE_2D_ARRAY,
        PIPE_TEXTURE_CUBE_ARRAY,
        PIPE_MAX_TEXTURE_TYPES,
    };
1173
1174 /**
1175 * * Resource binding flags -- state tracker must specify in advance all
1176 * * the ways a resource might be used.
1177 * */
Idan Raiter995ba8a2022-12-04 16:44:59 -08001178#define PIPE_BIND_DEPTH_STENCIL (1 << 0) /* create_surface */
1179#define PIPE_BIND_RENDER_TARGET (1 << 1) /* create_surface */
1180#define PIPE_BIND_BLENDABLE (1 << 2) /* create_surface */
1181#define PIPE_BIND_SAMPLER_VIEW (1 << 3) /* create_sampler_view */
1182#define PIPE_BIND_VERTEX_BUFFER (1 << 4) /* set_vertex_buffers */
1183#define PIPE_BIND_INDEX_BUFFER (1 << 5) /* draw_elements */
1184#define PIPE_BIND_CONSTANT_BUFFER (1 << 6) /* set_constant_buffer */
1185#define PIPE_BIND_DISPLAY_TARGET (1 << 7) /* flush_front_buffer */
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001186 /* gap */
Idan Raiter995ba8a2022-12-04 16:44:59 -08001187#define PIPE_BIND_STREAM_OUTPUT (1 << 10) /* set_stream_output_buffers */
1188#define PIPE_BIND_CURSOR (1 << 11) /* mouse cursor */
1189#define PIPE_BIND_CUSTOM (1 << 12) /* state-tracker/winsys usages */
1190#define PIPE_BIND_GLOBAL (1 << 13) /* set_global_binding */
1191#define PIPE_BIND_SHADER_BUFFER (1 << 14) /* set_shader_buffers */
1192#define PIPE_BIND_SHADER_IMAGE (1 << 15) /* set_shader_images */
1193#define PIPE_BIND_COMPUTE_RESOURCE (1 << 16) /* set_compute_resources */
1194#define PIPE_BIND_COMMAND_ARGS_BUFFER (1 << 17) /* pipe_draw_info.indirect */
1195#define PIPE_BIND_QUERY_BUFFER (1 << 18) /* get_query_result_resource */
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001196
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001197 ResType getResourceType(const struct stream_renderer_resource_create_args& args) const {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001198 if (args.target == PIPE_BUFFER) {
1199 return ResType::PIPE;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001200 }
1201
Jason Macnak6402a1b2022-06-02 15:29:05 -07001202 if (args.format != VIRGL_FORMAT_R8_UNORM) {
1203 return ResType::COLOR_BUFFER;
1204 }
1205 if (args.bind & VIRGL_BIND_SAMPLER_VIEW) {
1206 return ResType::COLOR_BUFFER;
1207 }
1208 if (args.bind & VIRGL_BIND_RENDER_TARGET) {
1209 return ResType::COLOR_BUFFER;
1210 }
1211 if (args.bind & VIRGL_BIND_SCANOUT) {
1212 return ResType::COLOR_BUFFER;
1213 }
1214 if (args.bind & VIRGL_BIND_CURSOR) {
1215 return ResType::COLOR_BUFFER;
1216 }
1217 if (!(args.bind & VIRGL_BIND_LINEAR)) {
1218 return ResType::COLOR_BUFFER;
1219 }
1220
1221 return ResType::BUFFER;
1222 }
1223
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001224 void handleCreateResourceBuffer(struct stream_renderer_resource_create_args* args) {
Jason Macnak2aaf6ee2024-04-02 10:25:53 -07001225 stream_renderer_debug("w:%u h:%u handle:%u", args->handle, args->width, args->height);
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001226 gfxstream::FrameBuffer::getFB()->createBufferWithHandle(args->width * args->height,
1227 args->handle);
Jason Macnak6402a1b2022-06-02 15:29:05 -07001228 }
1229
    // Creates the host-side ColorBuffer backing this virtio-gpu resource,
    // translating the virgl format to GL and framework formats, and marks
    // ColorBuffer lifetime as guest-managed.
    void handleCreateResourceColorBuffer(struct stream_renderer_resource_create_args* args) {
        stream_renderer_debug("w h %u %u resid %u -> CreateColorBufferWithHandle", args->width,
                              args->height, args->handle);

        const uint32_t glformat = virgl_format_to_gl(args->format);
        const uint32_t fwkformat = virgl_format_to_fwk_format(args->format);

        // Guest-controlled tiling is honored only when the build opts in;
        // otherwise the ColorBuffer is always created non-linear.
        const bool linear =
#ifdef GFXSTREAM_ENABLE_GUEST_VIRTIO_RESOURCE_TILING_CONTROL
            !!(args->bind & VIRGL_BIND_LINEAR);
#else
            false;
#endif
        gfxstream::FrameBuffer::getFB()->createColorBufferWithHandle(
            args->width, args->height, glformat, (gfxstream::FrameworkFormat)fwkformat,
            args->handle, linear);
        gfxstream::FrameBuffer::getFB()->setGuestManagedColorBufferLifetime(
            true /* guest manages lifetime */);
        gfxstream::FrameBuffer::getFB()->openColorBuffer(args->handle);
    }
1250
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001251 int createResource(struct stream_renderer_resource_create_args* args, struct iovec* iov,
Idan Raiter995ba8a2022-12-04 16:44:59 -08001252 uint32_t num_iovs) {
Jason Macnak2c826b72024-02-27 16:10:55 -08001253 stream_renderer_debug("handle: %u. num iovs: %u", args->handle, num_iovs);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001254
Jason Macnak6402a1b2022-06-02 15:29:05 -07001255 const auto resType = getResourceType(*args);
1256 switch (resType) {
Gurchetan Singhef5181f2024-06-25 14:10:19 -07001257 case ResType::BLOB:
1258 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001259 case ResType::PIPE:
1260 break;
1261 case ResType::BUFFER:
1262 handleCreateResourceBuffer(args);
1263 break;
1264 case ResType::COLOR_BUFFER:
1265 handleCreateResourceColorBuffer(args);
1266 break;
1267 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001268
1269 PipeResEntry e;
1270 e.args = *args;
1271 e.linear = 0;
1272 e.hostPipe = 0;
Gurchetan Singhd59f3052022-08-09 18:13:36 -07001273 e.hva = nullptr;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001274 e.hvaSize = 0;
Gurchetan Singh701f8a62022-04-19 17:19:12 -07001275 e.blobId = 0;
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001276 e.blobMem = 0;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001277 e.type = resType;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001278 allocResource(e, iov, num_iovs);
1279
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001280 mResources[args->handle] = e;
1281 return 0;
1282 }
1283
    // Tears down the resource |toUnrefId|: drops its context associations,
    // releases any host-side buffer/color-buffer object, and frees its linear
    // staging buffer and iovec array before erasing the bookkeeping entry.
    // Silently does nothing if the resource is unknown.
    void unrefResource(uint32_t toUnrefId) {
        stream_renderer_debug("handle: %u", toUnrefId);

        auto it = mResources.find(toUnrefId);
        if (it == mResources.end()) return;

        // Remove the resource -> contexts mapping, if any.
        auto contextsIt = mResourceContexts.find(toUnrefId);
        if (contextsIt != mResourceContexts.end()) {
            mResourceContexts.erase(contextsIt->first);
        }

        // Detach this resource from every context that still references it.
        for (auto& ctxIdResources : mContextResources) {
            detachResourceLocked(ctxIdResources.first, toUnrefId);
        }

        auto& entry = it->second;
        // BLOB and PIPE resources own no separate host object to close.
        switch (entry.type) {
            case ResType::BLOB:
            case ResType::PIPE:
                break;
            case ResType::BUFFER:
                gfxstream::FrameBuffer::getFB()->closeBuffer(toUnrefId);
                break;
            case ResType::COLOR_BUFFER:
                gfxstream::FrameBuffer::getFB()->closeColorBuffer(toUnrefId);
                break;
        }

        // Free the linear staging copy of the guest pages.
        if (entry.linear) {
            free(entry.linear);
            entry.linear = nullptr;
        }

        // Free the duplicated iovec array (see allocResource).
        if (entry.iov) {
            free(entry.iov);
            entry.iov = nullptr;
            entry.numIovs = 0;
        }

        // hva is a borrowed/shared pointer (ring blob or external mapping);
        // it is not freed here, only cleared.
        entry.hva = nullptr;
        entry.hvaSize = 0;
        entry.blobId = 0;

        mResources.erase(it);
    }
1329
1330 int attachIov(int resId, iovec* iov, int num_iovs) {
Jason Macnak2c826b72024-02-27 16:10:55 -08001331 stream_renderer_debug("resid: %d numiovs: %d", resId, num_iovs);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001332
1333 auto it = mResources.find(resId);
1334 if (it == mResources.end()) return ENOENT;
1335
1336 auto& entry = it->second;
Jason Macnak2c826b72024-02-27 16:10:55 -08001337 stream_renderer_debug("res linear: %p", entry.linear);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001338 if (!entry.linear) allocResource(entry, iov, num_iovs);
1339
Jason Macnak2c826b72024-02-27 16:10:55 -08001340 stream_renderer_debug("done");
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001341 return 0;
1342 }
1343
    // Detaches the guest backing pages from resource |resId|. Reports the
    // previous iovec count through |num_iovs| (if non-null), then frees the
    // duplicated iovec array and drops the linear buffer by re-running
    // allocResource with a null iov list.
    void detachIov(int resId, iovec** iov, int* num_iovs) {
        auto it = mResources.find(resId);
        if (it == mResources.end()) return;

        auto& entry = it->second;

        if (num_iovs) {
            *num_iovs = entry.numIovs;
            stream_renderer_debug("resid: %d numIovs: %d", resId, *num_iovs);
        } else {
            stream_renderer_debug("resid: %d numIovs: 0", resId);
        }

        entry.numIovs = 0;

        if (entry.iov) free(entry.iov);
        entry.iov = nullptr;

        // NOTE(review): entry.iov was just freed and nulled, so the caller
        // always receives nullptr here — the renderer owns (and has already
        // released) its copy of the iovec array; confirm callers expect this.
        if (iov) {
            *iov = entry.iov;
        }

        // Re-run allocation with (nullptr, 0) to release the linear buffer.
        allocResource(entry, entry.iov, entry.numIovs);
        stream_renderer_debug("done");
    }
1369
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001370 int handleTransferReadPipe(PipeResEntry* res, uint64_t offset, stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001371 if (res->type != ResType::PIPE) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001372 stream_renderer_error("resid: %d not a PIPE resource", res->args.handle);
1373 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001374 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001375
Jason Macnak6402a1b2022-06-02 15:29:05 -07001376 // Do the pipe service op here, if there is an associated hostpipe.
1377 auto hostPipe = res->hostPipe;
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001378 if (!hostPipe) return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001379
1380 auto ops = ensureAndGetServiceOps();
1381
1382 size_t readBytes = 0;
1383 size_t wantedBytes = readBytes + (size_t)box->w;
1384
1385 while (readBytes < wantedBytes) {
1386 GoldfishPipeBuffer buf = {
1387 ((char*)res->linear) + box->x + readBytes,
1388 wantedBytes - readBytes,
1389 };
1390 auto status = ops->guest_recv(hostPipe, &buf, 1);
1391
1392 if (status > 0) {
1393 readBytes += status;
Joshua Duong7131c142023-08-18 14:04:53 -07001394 } else if (status == kPipeTryAgain) {
1395 ops->wait_guest_recv(hostPipe);
1396 } else {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001397 return EIO;
1398 }
1399 }
1400
1401 return 0;
1402 }
1403
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001404 int handleTransferWritePipe(PipeResEntry* res, uint64_t offset, stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001405 if (res->type != ResType::PIPE) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001406 stream_renderer_error("resid: %d not a PIPE resource", res->args.handle);
1407 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001408 }
1409
1410 // Do the pipe service op here, if there is an associated hostpipe.
1411 auto hostPipe = res->hostPipe;
1412 if (!hostPipe) {
Jason Macnak2c826b72024-02-27 16:10:55 -08001413 stream_renderer_error("No hostPipe");
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001414 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001415 }
1416
Jason Macnak2c826b72024-02-27 16:10:55 -08001417 stream_renderer_debug("resid: %d offset: 0x%llx hostpipe: %p", res->args.handle,
1418 (unsigned long long)offset, hostPipe);
Jason Macnak6402a1b2022-06-02 15:29:05 -07001419
1420 auto ops = ensureAndGetServiceOps();
1421
1422 size_t writtenBytes = 0;
1423 size_t wantedBytes = (size_t)box->w;
1424
1425 while (writtenBytes < wantedBytes) {
1426 GoldfishPipeBuffer buf = {
1427 ((char*)res->linear) + box->x + writtenBytes,
1428 wantedBytes - writtenBytes,
1429 };
1430
1431 // guest_send can now reallocate the pipe.
1432 void* hostPipeBefore = hostPipe;
1433 auto status = ops->guest_send(&hostPipe, &buf, 1);
1434 if (hostPipe != hostPipeBefore) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001435 if (resetPipe((GoldfishHwPipe*)(uintptr_t)(res->ctxId), hostPipe)) {
1436 return -EINVAL;
1437 }
1438
Jason Macnak6402a1b2022-06-02 15:29:05 -07001439 auto it = mResources.find(res->args.handle);
1440 res = &it->second;
1441 }
1442
1443 if (status > 0) {
1444 writtenBytes += status;
Joshua Duong7131c142023-08-18 14:04:53 -07001445 } else if (status == kPipeTryAgain) {
1446 ops->wait_guest_send(hostPipe);
1447 } else {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001448 return EIO;
1449 }
1450 }
1451
1452 return 0;
1453 }
1454
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001455 int handleTransferReadBuffer(PipeResEntry* res, uint64_t offset, stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001456 if (res->type != ResType::BUFFER) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001457 stream_renderer_error("resid: %d not a BUFFER resource", res->args.handle);
1458 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001459 }
1460
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001461 gfxstream::FrameBuffer::getFB()->readBuffer(
1462 res->args.handle, 0, res->args.width * res->args.height, res->linear);
Jason Macnak6402a1b2022-06-02 15:29:05 -07001463 return 0;
1464 }
1465
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001466 int handleTransferWriteBuffer(PipeResEntry* res, uint64_t offset, stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001467 if (res->type != ResType::BUFFER) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001468 stream_renderer_error("resid: %d not a BUFFER resource", res->args.handle);
1469 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001470 }
1471
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001472 gfxstream::FrameBuffer::getFB()->updateBuffer(
1473 res->args.handle, 0, res->args.width * res->args.height, res->linear);
Jason Macnak6402a1b2022-06-02 15:29:05 -07001474 return 0;
1475 }
1476
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001477 int handleTransferReadColorBuffer(PipeResEntry* res, uint64_t offset,
1478 stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001479 if (res->type != ResType::COLOR_BUFFER) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001480 stream_renderer_error("resid: %d not a COLOR_BUFFER resource", res->args.handle);
1481 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001482 }
1483
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001484 auto glformat = virgl_format_to_gl(res->args.format);
1485 auto gltype = gl_format_to_natural_type(glformat);
1486
1487 // We always xfer the whole thing again from GL
1488 // since it's fiddly to calc / copy-out subregions
1489 if (virgl_format_is_yuv(res->args.format)) {
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001490 gfxstream::FrameBuffer::getFB()->readColorBufferYUV(res->args.handle, 0, 0,
1491 res->args.width, res->args.height,
1492 res->linear, res->linearSize);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001493 } else {
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001494 gfxstream::FrameBuffer::getFB()->readColorBuffer(res->args.handle, 0, 0,
1495 res->args.width, res->args.height,
1496 glformat, gltype, res->linear);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001497 }
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001498
1499 return 0;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001500 }
1501
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001502 int handleTransferWriteColorBuffer(PipeResEntry* res, uint64_t offset,
1503 stream_renderer_box* box) {
Jason Macnak6402a1b2022-06-02 15:29:05 -07001504 if (res->type != ResType::COLOR_BUFFER) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001505 stream_renderer_error("resid: %d not a COLOR_BUFFER resource", res->args.handle);
1506 return -EINVAL;
Jason Macnak6402a1b2022-06-02 15:29:05 -07001507 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001508
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001509 auto glformat = virgl_format_to_gl(res->args.format);
1510 auto gltype = gl_format_to_natural_type(glformat);
1511
1512 // We always xfer the whole thing again to GL
1513 // since it's fiddly to calc / copy-out subregions
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001514 gfxstream::FrameBuffer::getFB()->updateColorBuffer(res->args.handle, 0, 0, res->args.width,
1515 res->args.height, glformat, gltype,
1516 res->linear);
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001517 return 0;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001518 }
1519
    // Transfer-from-host entry point: pulls resource data from the host into
    // the linear buffer (per resource type), then copies linear -> iovecs.
    // |iov|/|iovec_cnt| may override the resource's attached backing pages.
    // NOTE(review): returns positive EINVAL for a missing resource but
    // negative -EINVAL for blob resources — callers appear to test ret != 0,
    // but confirm before relying on the sign.
    int transferReadIov(int resId, uint64_t offset, stream_renderer_box* box, struct iovec* iov,
                        int iovec_cnt) {
        auto it = mResources.find(resId);
        if (it == mResources.end()) return EINVAL;

        int ret = 0;

        auto& entry = it->second;
        switch (entry.type) {
            case ResType::BLOB:
                return -EINVAL;
            case ResType::PIPE:
                ret = handleTransferReadPipe(&entry, offset, box);
                break;
            case ResType::BUFFER:
                ret = handleTransferReadBuffer(&entry, offset, box);
                break;
            case ResType::COLOR_BUFFER:
                ret = handleTransferReadColorBuffer(&entry, offset, box);
                break;
        }

        if (ret != 0) {
            return ret;
        }

        // Copy from the linear buffer out to the guest pages. When the caller
        // supplied its own iovecs, build a temporary entry that borrows the
        // resource's linear buffer but targets the caller's pages.
        if (iovec_cnt) {
            PipeResEntry e = {
                entry.args, iov, (uint32_t)iovec_cnt, entry.linear, entry.linearSize,
            };
            ret = sync_iov(&e, offset, box, LINEAR_TO_IOV);
        } else {
            ret = sync_iov(&entry, offset, box, LINEAR_TO_IOV);
        }

        return ret;
    }
1557
    // Transfer-to-host entry point: copies iovecs -> linear buffer first, then
    // pushes the linear data to the host per resource type. |iov|/|iovec_cnt|
    // may override the resource's attached backing pages.
    // NOTE(review): returns positive EINVAL for a missing resource but
    // negative -EINVAL for blob resources — same mixed-sign convention as
    // transferReadIov; confirm callers only test ret != 0.
    int transferWriteIov(int resId, uint64_t offset, stream_renderer_box* box, struct iovec* iov,
                         int iovec_cnt) {
        auto it = mResources.find(resId);
        if (it == mResources.end()) return EINVAL;

        auto& entry = it->second;

        // Stage guest pages into the linear buffer. A caller-supplied iovec
        // list is wrapped in a temporary entry that shares the linear buffer.
        int ret = 0;
        if (iovec_cnt) {
            PipeResEntry e = {
                entry.args, iov, (uint32_t)iovec_cnt, entry.linear, entry.linearSize,
            };
            ret = sync_iov(&e, offset, box, IOV_TO_LINEAR);
        } else {
            ret = sync_iov(&entry, offset, box, IOV_TO_LINEAR);
        }

        if (ret != 0) {
            return ret;
        }

        // Forward the staged data to the host-side object.
        switch (entry.type) {
            case ResType::BLOB:
                return -EINVAL;
            case ResType::PIPE:
                ret = handleTransferWritePipe(&entry, offset, box);
                break;
            case ResType::BUFFER:
                ret = handleTransferWriteBuffer(&entry, offset, box);
                break;
            case ResType::COLOR_BUFFER:
                ret = handleTransferWriteColorBuffer(&entry, offset, box);
                break;
        }

        return ret;
    }
1595
Gurchetan Singh9d89d5a2023-07-21 08:51:15 -07001596 void getCapset(uint32_t set, uint32_t* max_size) {
Gurchetan Singh76498b82023-09-13 12:14:55 -07001597 switch (set) {
1598 case VIRTGPU_CAPSET_GFXSTREAM_VULKAN:
1599 *max_size = sizeof(struct gfxstream::vulkanCapset);
1600 break;
1601 case VIRTGPU_CAPSET_GFXSTREAM_MAGMA:
1602 *max_size = sizeof(struct gfxstream::magmaCapset);
1603 break;
1604 case VIRTGPU_CAPSET_GFXSTREAM_GLES:
1605 *max_size = sizeof(struct gfxstream::glesCapset);
1606 break;
1607 case VIRTGPU_CAPSET_GFXSTREAM_COMPOSER:
1608 *max_size = sizeof(struct gfxstream::composerCapset);
1609 break;
1610 default:
Joshua Duong0cbac122024-05-31 11:31:14 -07001611 stream_renderer_error("Incorrect capability set specified (%u)", set);
Gurchetan Singh76498b82023-09-13 12:14:55 -07001612 }
Gurchetan Singhcd144002023-03-27 15:49:40 -07001613 }
1614
    // Fills the caller-provided |caps| buffer for capability set |set|.
    // The buffer must be at least the size reported by getCapset().
    // NOTE(review): only the Vulkan branch memsets the capset to zero; the
    // magma/gles/composer branches assume the caller zeroed |caps| — confirm.
    void fillCaps(uint32_t set, void* caps) {
        switch (set) {
            case VIRTGPU_CAPSET_GFXSTREAM_VULKAN: {
                struct gfxstream::vulkanCapset* capset =
                    reinterpret_cast<struct gfxstream::vulkanCapset*>(caps);

                memset(capset, 0, sizeof(*capset));

                capset->protocolVersion = 1;
                capset->ringSize = 12288;
                capset->bufferSize = 1048576;

                // Advertise the representative color-buffer memory type only
                // when live Vulkan emulation has computed one.
                auto vk_emu = gfxstream::vk::getGlobalVkEmulation();
                if (vk_emu && vk_emu->live && vk_emu->representativeColorBufferMemoryTypeInfo) {
                    capset->colorBufferMemoryIndex =
                        vk_emu->representativeColorBufferMemoryTypeInfo->guestMemoryTypeIndex;
                }

                capset->noRenderControlEnc = 1;
                capset->blobAlignment = mPageSize;
                if (vk_emu && vk_emu->live) {
                    capset->deferredMapping = 1;
                }

// Unstable features are only advertised when compiled in.
#if GFXSTREAM_UNSTABLE_VULKAN_DMABUF_WINSYS
                capset->alwaysBlob = 1;
#endif

#if GFXSTREAM_UNSTABLE_VULKAN_EXTERNAL_SYNC
                capset->externalSync = 1;
#endif
                break;
            }
            case VIRTGPU_CAPSET_GFXSTREAM_MAGMA: {
                struct gfxstream::magmaCapset* capset =
                    reinterpret_cast<struct gfxstream::magmaCapset*>(caps);

                capset->protocolVersion = 1;
                capset->ringSize = 12288;
                capset->bufferSize = 1048576;
                capset->blobAlignment = mPageSize;
                break;
            }
            case VIRTGPU_CAPSET_GFXSTREAM_GLES: {
                struct gfxstream::glesCapset* capset =
                    reinterpret_cast<struct gfxstream::glesCapset*>(caps);

                capset->protocolVersion = 1;
                capset->ringSize = 12288;
                capset->bufferSize = 1048576;
                capset->blobAlignment = mPageSize;
                break;
            }
            case VIRTGPU_CAPSET_GFXSTREAM_COMPOSER: {
                struct gfxstream::composerCapset* capset =
                    reinterpret_cast<struct gfxstream::composerCapset*>(caps);

                capset->protocolVersion = 1;
                capset->ringSize = 12288;
                capset->bufferSize = 1048576;
                capset->blobAlignment = mPageSize;
                break;
            }
            default:
                stream_renderer_error("Incorrect capability set specified");
        }
    }
1682
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001683 void attachResource(uint32_t ctxId, uint32_t resId) {
Jason Macnak2c826b72024-02-27 16:10:55 -08001684 stream_renderer_debug("ctxid: %u resid: %u", ctxId, resId);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001685
1686 auto resourcesIt = mContextResources.find(ctxId);
1687
1688 if (resourcesIt == mContextResources.end()) {
Gurchetan Singhf3ad8892022-03-17 14:59:58 -07001689 std::vector<VirtioGpuResId> ids;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001690 ids.push_back(resId);
1691 mContextResources[ctxId] = ids;
1692 } else {
1693 auto& ids = resourcesIt->second;
1694 auto idIt = std::find(ids.begin(), ids.end(), resId);
Idan Raiter995ba8a2022-12-04 16:44:59 -08001695 if (idIt == ids.end()) ids.push_back(resId);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001696 }
1697
1698 auto contextsIt = mResourceContexts.find(resId);
1699
1700 if (contextsIt == mResourceContexts.end()) {
Gurchetan Singhd8485dd2022-03-17 14:58:54 -07001701 std::vector<VirtioGpuCtxId> ids;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001702 ids.push_back(ctxId);
1703 mResourceContexts[resId] = ids;
1704 } else {
1705 auto& ids = contextsIt->second;
Gurchetan Singh2089f052022-01-06 16:42:03 -08001706 auto idIt = std::find(ids.begin(), ids.end(), ctxId);
Idan Raiter995ba8a2022-12-04 16:44:59 -08001707 if (idIt == ids.end()) ids.push_back(ctxId);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001708 }
1709
1710 // Associate the host pipe of the resource entry with the host pipe of
1711 // the context entry. That is, the last context to call attachResource
1712 // wins if there is any conflict.
Idan Raiter995ba8a2022-12-04 16:44:59 -08001713 auto ctxEntryIt = mContexts.find(ctxId);
1714 auto resEntryIt = mResources.find(resId);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001715
Idan Raiter995ba8a2022-12-04 16:44:59 -08001716 if (ctxEntryIt == mContexts.end() || resEntryIt == mResources.end()) return;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001717
Jason Macnak2c826b72024-02-27 16:10:55 -08001718 stream_renderer_debug("hostPipe: %p", ctxEntryIt->second.hostPipe);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001719 resEntryIt->second.hostPipe = ctxEntryIt->second.hostPipe;
1720 resEntryIt->second.ctxId = ctxId;
1721 }
1722
    // Removes the association between context |ctxId| and resource
    // |toUnrefId|; thin logging wrapper around detachResourceLocked.
    void detachResource(uint32_t ctxId, uint32_t toUnrefId) {
        stream_renderer_debug("ctxid: %u resid: %u", ctxId, toUnrefId);
        detachResourceLocked(ctxId, toUnrefId);
    }
1727
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07001728 int getResourceInfo(uint32_t resId, struct stream_renderer_resource_info* info) {
Jason Macnak2c826b72024-02-27 16:10:55 -08001729 stream_renderer_debug("resid: %u", resId);
Idan Raiter995ba8a2022-12-04 16:44:59 -08001730 if (!info) return EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001731
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001732 auto it = mResources.find(resId);
Idan Raiter995ba8a2022-12-04 16:44:59 -08001733 if (it == mResources.end()) return ENOENT;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001734
1735 auto& entry = it->second;
1736
1737 uint32_t bpp = 4U;
1738 switch (entry.args.format) {
1739 case VIRGL_FORMAT_B8G8R8A8_UNORM:
Gurchetan Singh8581cc62021-11-15 11:15:42 -08001740 info->drm_fourcc = DRM_FORMAT_ARGB8888;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001741 break;
Aaron Ruby9ee7b002024-06-03 11:19:58 -04001742 case VIRGL_FORMAT_B8G8R8X8_UNORM:
1743 info->drm_fourcc = DRM_FORMAT_XRGB8888;
1744 break;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001745 case VIRGL_FORMAT_B5G6R5_UNORM:
Gurchetan Singh8581cc62021-11-15 11:15:42 -08001746 info->drm_fourcc = DRM_FORMAT_RGB565;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001747 bpp = 2U;
1748 break;
1749 case VIRGL_FORMAT_R8G8B8A8_UNORM:
Gurchetan Singh8581cc62021-11-15 11:15:42 -08001750 info->drm_fourcc = DRM_FORMAT_ABGR8888;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001751 break;
1752 case VIRGL_FORMAT_R8G8B8X8_UNORM:
Gurchetan Singh8581cc62021-11-15 11:15:42 -08001753 info->drm_fourcc = DRM_FORMAT_XBGR8888;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001754 break;
Gurchetan Singhf8f3c842021-11-04 16:43:48 -07001755 case VIRGL_FORMAT_R8_UNORM:
1756 info->drm_fourcc = DRM_FORMAT_R8;
Gurchetan Singhbc306a22021-11-16 17:47:18 -08001757 bpp = 1U;
Gurchetan Singhf8f3c842021-11-04 16:43:48 -07001758 break;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001759 default:
1760 return EINVAL;
1761 }
1762
1763 info->stride = align_up(entry.args.width * bpp, 16U);
1764 info->virgl_format = entry.args.format;
1765 info->handle = entry.args.handle;
1766 info->height = entry.args.height;
1767 info->width = entry.args.width;
1768 info->depth = entry.args.depth;
1769 info->flags = entry.args.flags;
1770 info->tex_id = 0;
1771 return 0;
1772 }
1773
    // Posts resource |res_handle| to the display and signals the global
    // virtio-gpu timeline once the GPU work completes. The callback captures
    // |this|, so the timeline object must outlive any in-flight posts.
    void flushResource(uint32_t res_handle) {
        auto taskId = mVirtioGpuTimelines->enqueueTask(VirtioGpuRingGlobal{});
        gfxstream::FrameBuffer::getFB()->postWithCallback(
            res_handle, [this, taskId](std::shared_future<void> waitForGpu) {
                waitForGpu.wait();
                mVirtioGpuTimelines->notifyTaskCompletion(taskId);
            });
    }
1782
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001783 int createRingBlob(PipeResEntry& entry, uint32_t res_handle,
1784 const struct stream_renderer_create_blob* create_blob,
1785 const struct stream_renderer_handle* handle) {
Jason Macnak26872122024-02-23 10:46:09 -08001786 if (mFeatures.ExternalBlob.enabled) {
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001787 std::string name = "shared-memory-" + std::to_string(res_handle);
Jason Macnak77ddccd2024-03-15 15:53:30 -07001788 auto shmem = std::make_unique<SharedMemory>(name, create_blob->size);
1789 int ret = shmem->create(0600);
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001790 if (ret) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001791 stream_renderer_error("Failed to create shared memory blob");
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001792 return ret;
1793 }
1794
Jason Macnak77ddccd2024-03-15 15:53:30 -07001795 entry.hva = shmem->get();
1796 entry.ringBlob = std::make_shared<RingBlob>(std::move(shmem));
1797
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001798 } else {
Jason Macnak77ddccd2024-03-15 15:53:30 -07001799 auto mem = std::make_unique<AlignedMemory>(mPageSize, create_blob->size);
1800 if (mem->addr == nullptr) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001801 stream_renderer_error("Failed to allocate ring blob");
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001802 return -ENOMEM;
1803 }
1804
Jason Macnak77ddccd2024-03-15 15:53:30 -07001805 entry.hva = mem->addr;
1806 entry.ringBlob = std::make_shared<RingBlob>(std::move(mem));
Gurchetan Singh2bc98312022-08-11 09:10:25 -07001807 }
1808
1809 entry.hvaSize = create_blob->size;
1810 entry.externalAddr = true;
1811 entry.caching = STREAM_RENDERER_MAP_CACHE_CACHED;
1812
1813 return 0;
1814 }
1815
    // Creates a blob resource for context |ctx_id|. Three backing paths:
    //   1. blob_id == 0: a ring blob allocated by createRingBlob().
    //   2. ExternalBlob enabled: backed by an exported/imported descriptor.
    //   3. Otherwise: backed by a previously registered host mapping.
    // If the context staged a resource-create for this blob_id (blobMap),
    // the corresponding buffer/color-buffer host object is created first and
    // its export descriptor reused. Returns 0 on success, negative errno
    // otherwise.
    int createBlob(uint32_t ctx_id, uint32_t res_handle,
                   const struct stream_renderer_create_blob* create_blob,
                   const struct stream_renderer_handle* handle) {
        stream_renderer_debug("ctx:%u res:%u blob-id:%u blob-size:%u", ctx_id, res_handle,
                              create_blob->blob_id, create_blob->size);

        PipeResEntry e;
        struct stream_renderer_resource_create_args args = {0};
        std::optional<BlobDescriptorInfo> descriptorInfoOpt = std::nullopt;
        e.args = args;
        e.hostPipe = 0;

        auto ctxIt = mContexts.find(ctx_id);
        if (ctxIt == mContexts.end()) {
            stream_renderer_error("ctx id %u is not found", ctx_id);
            return -EINVAL;
        }

        auto& ctxEntry = ctxIt->second;

        ResType blobType = ResType::BLOB;

        // A staged create-3d for this blob id means the blob is really a
        // buffer or color buffer; create the host object and export it.
        auto blobIt = ctxEntry.blobMap.find(create_blob->blob_id);
        if (blobIt != ctxEntry.blobMap.end()) {
            auto& create3d = blobIt->second;
            create3d.handle = res_handle;

            const auto resType = getResourceType(create3d);
            switch (resType) {
                case ResType::BLOB:
                    return -EINVAL;
                case ResType::PIPE:
                    // Fallthrough for pipe is intended for blob buffers.
                case ResType::BUFFER:
                    blobType = ResType::BUFFER;
                    handleCreateResourceBuffer(&create3d);
                    descriptorInfoOpt = gfxstream::FrameBuffer::getFB()->exportBuffer(res_handle);
                    break;
                case ResType::COLOR_BUFFER:
                    blobType = ResType::COLOR_BUFFER;
                    handleCreateResourceColorBuffer(&create3d);
                    descriptorInfoOpt =
                        gfxstream::FrameBuffer::getFB()->exportColorBuffer(res_handle);
                    break;
            }

            e.args = create3d;
            ctxEntry.blobMap.erase(create_blob->blob_id);
        }

        if (create_blob->blob_id == 0) {
            // Path 1: ring blob.
            int ret = createRingBlob(e, res_handle, create_blob, handle);
            if (ret) {
                return ret;
            }
        } else if (mFeatures.ExternalBlob.enabled) {
            // Path 2: descriptor-backed external blob.
            if (create_blob->blob_mem == STREAM_BLOB_MEM_GUEST &&
                (create_blob->blob_flags & STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE)) {
// Guest-handle import is only supported on platforms with usable fds.
#if defined(__linux__) || defined(__QNX__)
                ManagedDescriptor managedHandle(handle->os_handle);
                ExternalObjectManager::get()->addBlobDescriptorInfo(
                    ctx_id, create_blob->blob_id, std::move(managedHandle), handle->handle_type, 0,
                    std::nullopt);

                e.caching = STREAM_RENDERER_MAP_CACHE_CACHED;
#else
                return -EINVAL;
#endif
            } else {
                // Fall back to a descriptor previously registered for this
                // (ctx, blob) pair if the export above produced none.
                if (!descriptorInfoOpt) {
                    descriptorInfoOpt = ExternalObjectManager::get()->removeBlobDescriptorInfo(
                        ctx_id, create_blob->blob_id);
                }

                if (descriptorInfoOpt) {
                    e.descriptorInfo =
                        std::make_shared<BlobDescriptorInfo>(std::move(*descriptorInfoOpt));
                } else {
                    return -EINVAL;
                }

                e.caching = e.descriptorInfo->caching;
            }
        } else {
            // Path 3: host-mapping-backed blob.
            auto entryOpt =
                ExternalObjectManager::get()->removeMapping(ctx_id, create_blob->blob_id);
            if (entryOpt) {
                e.hva = entryOpt->addr;
                e.caching = entryOpt->caching;
                e.hvaSize = create_blob->size;
            } else {
                return -EINVAL;
            }
        }

        e.blobId = create_blob->blob_id;
        e.blobMem = create_blob->blob_mem;
        e.blobFlags = create_blob->blob_flags;
        e.type = blobType;
        e.iov = nullptr;
        e.numIovs = 0;
        e.linear = 0;
        e.linearSize = 0;

        mResources[res_handle] = e;
        return 0;
    }
1923
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001924 int resourceMap(uint32_t res_handle, void** hvaOut, uint64_t* sizeOut) {
Jason Macnak26872122024-02-23 10:46:09 -08001925 if (mFeatures.ExternalBlob.enabled) return -EINVAL;
Gurchetan Singh424f0672022-08-26 11:10:12 -07001926
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001927 auto it = mResources.find(res_handle);
1928 if (it == mResources.end()) {
1929 if (hvaOut) *hvaOut = nullptr;
1930 if (sizeOut) *sizeOut = 0;
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001931 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001932 }
1933
1934 const auto& entry = it->second;
1935
Gurchetan Singhd59f3052022-08-09 18:13:36 -07001936 if (hvaOut) *hvaOut = entry.hva;
1937 if (sizeOut) *sizeOut = entry.hvaSize;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001938 return 0;
1939 }
1940
1941 int resourceUnmap(uint32_t res_handle) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001942 auto it = mResources.find(res_handle);
1943 if (it == mResources.end()) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001944 return -EINVAL;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07001945 }
1946
1947 // TODO(lfy): Good place to run any registered cleanup callbacks.
1948 // No-op for now.
1949 return 0;
1950 }
1951
Josh Simonota1a29a82022-05-12 12:03:35 -04001952 int platformImportResource(int res_handle, int res_info, void* resource) {
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001953 auto it = mResources.find(res_handle);
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001954 if (it == mResources.end()) return -EINVAL;
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001955 bool success =
1956 gfxstream::FrameBuffer::getFB()->platformImportResource(res_handle, res_info, resource);
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001957 return success ? 0 : -1;
1958 }
1959
1960 int platformResourceInfo(int res_handle, int* width, int* height, int* internal_format) {
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001961 bool success = false;
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001962 auto it = mResources.find(res_handle);
Gurchetan Singh01b8b482023-07-20 07:39:20 -07001963 if (it == mResources.end()) return -EINVAL;
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001964#if GFXSTREAM_ENABLE_HOST_GLES
1965 success = gfxstream::FrameBuffer::getFB()->getColorBufferInfo(res_handle, width, height,
1966 internal_format);
1967#endif
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001968 return success ? 0 : -1;
1969 }
1970
1971 void* platformCreateSharedEglContext() {
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001972 void* ptr = nullptr;
1973#if GFXSTREAM_ENABLE_HOST_GLES
1974 ptr = gfxstream::FrameBuffer::getFB()->platformCreateSharedEglContext();
1975#endif
1976 return ptr;
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001977 }
1978
1979 int platformDestroySharedEglContext(void* context) {
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001980 bool success = false;
1981#if GFXSTREAM_ENABLE_HOST_GLES
1982 success = gfxstream::FrameBuffer::getFB()->platformDestroySharedEglContext(context);
1983#endif
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07001984 return success ? 0 : -1;
1985 }
1986
Aaron Ruby7609a832024-05-28 14:49:53 -04001987 int waitSyncResource(uint32_t res_handle) {
1988 auto it = mResources.find(res_handle);
1989 if (it == mResources.end()) {
1990 stream_renderer_error("waitSyncResource could not find resource: %d", res_handle);
1991 return -EINVAL;
1992 }
1993 auto& entry = it->second;
1994 if (ResType::COLOR_BUFFER != entry.type) {
1995 stream_renderer_error("waitSyncResource is undefined for non-ColorBuffer resource.");
1996 return -EINVAL;
1997 }
1998
Gurchetan Singhc25eeb62024-06-21 14:36:45 -07001999 return gfxstream::FrameBuffer::getFB()->waitSyncColorBuffer(res_handle);
Aaron Ruby7609a832024-05-28 14:49:53 -04002000 }
2001
Idan Raiter995ba8a2022-12-04 16:44:59 -08002002 int resourceMapInfo(uint32_t res_handle, uint32_t* map_info) {
Gurchetan Singh60737a62022-01-13 16:06:24 -08002003 auto it = mResources.find(res_handle);
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002004 if (it == mResources.end()) return -EINVAL;
Gurchetan Singh60737a62022-01-13 16:06:24 -08002005
2006 const auto& entry = it->second;
2007 *map_info = entry.caching;
2008 return 0;
2009 }
2010
Gurchetan Singh2bc98312022-08-11 09:10:25 -07002011 int exportBlob(uint32_t res_handle, struct stream_renderer_handle* handle) {
Gurchetan Singh2bc98312022-08-11 09:10:25 -07002012 auto it = mResources.find(res_handle);
2013 if (it == mResources.end()) {
2014 return -EINVAL;
2015 }
2016
Gurchetan Singh424f0672022-08-26 11:10:12 -07002017 auto& entry = it->second;
Jason Macnak77ddccd2024-03-15 15:53:30 -07002018 if (entry.ringBlob && entry.ringBlob->isExportable()) {
Gurchetan Singh2bc98312022-08-11 09:10:25 -07002019 // Handle ownership transferred to VMM, gfxstream keeps the mapping.
Kaiyi Li083a66a2022-08-11 16:23:37 -07002020#ifdef _WIN32
2021 handle->os_handle =
2022 static_cast<int64_t>(reinterpret_cast<intptr_t>(entry.ringBlob->releaseHandle()));
2023#else
2024 handle->os_handle = static_cast<int64_t>(entry.ringBlob->releaseHandle());
2025#endif
Gurchetan Singh2bc98312022-08-11 09:10:25 -07002026 handle->handle_type = STREAM_MEM_HANDLE_TYPE_SHM;
2027 return 0;
2028 }
2029
Gurchetan Singh424f0672022-08-26 11:10:12 -07002030 if (entry.descriptorInfo) {
Gurchetan Singh40b15cf2024-06-26 16:14:01 -07002031 DescriptorType rawDescriptor;
2032 auto rawDescriptorOpt = entry.descriptorInfo->descriptor.release();
2033 if (rawDescriptorOpt)
2034 rawDescriptor = *rawDescriptorOpt;
2035 else
Gurchetan Singh424f0672022-08-26 11:10:12 -07002036 return -EINVAL;
Gurchetan Singh424f0672022-08-26 11:10:12 -07002037
2038 handle->handle_type = entry.descriptorInfo->handleType;
2039
2040#ifdef _WIN32
Idan Raiter995ba8a2022-12-04 16:44:59 -08002041 handle->os_handle = static_cast<int64_t>(reinterpret_cast<intptr_t>(rawDescriptor));
Gurchetan Singh424f0672022-08-26 11:10:12 -07002042#else
2043 handle->os_handle = static_cast<int64_t>(rawDescriptor);
2044#endif
2045
2046 return 0;
2047 }
2048
2049 return -EINVAL;
2050 }
2051
Gurchetan Singhf4a01bc2024-07-01 19:23:52 -07002052 int exportFence(uint64_t fenceId, struct stream_renderer_handle* handle) {
2053 auto it = mSyncMap.find(fenceId);
2054 if (it == mSyncMap.end()) {
2055 return -EINVAL;
2056 }
2057
2058 auto& entry = it->second;
2059 DescriptorType rawDescriptor;
2060 auto rawDescriptorOpt = entry->descriptor.release();
2061 if (rawDescriptorOpt)
2062 rawDescriptor = *rawDescriptorOpt;
2063 else
2064 return -EINVAL;
2065
2066 handle->handle_type = entry->handleType;
2067
2068#ifdef _WIN32
2069 handle->os_handle = static_cast<int64_t>(reinterpret_cast<intptr_t>(rawDescriptor));
2070#else
2071 handle->os_handle = static_cast<int64_t>(rawDescriptor);
2072#endif
2073
2074 return 0;
2075 }
2076
Idan Raiter995ba8a2022-12-04 16:44:59 -08002077 int vulkanInfo(uint32_t res_handle, struct stream_renderer_vulkan_info* vulkan_info) {
Gurchetan Singh424f0672022-08-26 11:10:12 -07002078 auto it = mResources.find(res_handle);
2079 if (it == mResources.end()) return -EINVAL;
2080
2081 const auto& entry = it->second;
2082 if (entry.descriptorInfo && entry.descriptorInfo->vulkanInfoOpt) {
2083 vulkan_info->memory_index = (*entry.descriptorInfo->vulkanInfoOpt).memoryIndex;
Idan Raiter26ae9c42022-11-30 15:02:05 -08002084 memcpy(vulkan_info->device_id.device_uuid,
2085 (*entry.descriptorInfo->vulkanInfoOpt).deviceUUID,
2086 sizeof(vulkan_info->device_id.device_uuid));
2087 memcpy(vulkan_info->device_id.driver_uuid,
2088 (*entry.descriptorInfo->vulkanInfoOpt).driverUUID,
2089 sizeof(vulkan_info->device_id.driver_uuid));
Gurchetan Singh424f0672022-08-26 11:10:12 -07002090 return 0;
2091 }
2092
Gurchetan Singh2bc98312022-08-11 09:10:25 -07002093 return -EINVAL;
2094 }
2095
Joshua Duongc256a3c2023-05-09 08:14:24 -07002096#ifdef CONFIG_AEMU
Gurchetan Singh9d89d5a2023-07-21 08:51:15 -07002097 void setServiceOps(const GoldfishPipeServiceOps* ops) { mServiceOps = ops; }
Joshua Duongc256a3c2023-05-09 08:14:24 -07002098#endif // CONFIG_AEMU
Idan Raiter995ba8a2022-12-04 16:44:59 -08002099 private:
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002100 void allocResource(PipeResEntry& entry, iovec* iov, int num_iovs) {
Jason Macnak2c826b72024-02-27 16:10:55 -08002101 stream_renderer_debug("entry linear: %p", entry.linear);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002102 if (entry.linear) free(entry.linear);
2103
2104 size_t linearSize = 0;
2105 for (uint32_t i = 0; i < num_iovs; ++i) {
Jason Macnak2c826b72024-02-27 16:10:55 -08002106 stream_renderer_debug("iov base: %p", iov[i].iov_base);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002107 linearSize += iov[i].iov_len;
Jason Macnak2c826b72024-02-27 16:10:55 -08002108 stream_renderer_debug("has iov of %zu. linearSize current: %zu", iov[i].iov_len,
2109 linearSize);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002110 }
Jason Macnak2c826b72024-02-27 16:10:55 -08002111 stream_renderer_debug("final linearSize: %zu", linearSize);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002112
2113 void* linear = nullptr;
2114
2115 if (linearSize) linear = malloc(linearSize);
2116
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002117 entry.numIovs = num_iovs;
Joshua Duong0cbac122024-05-31 11:31:14 -07002118 entry.iov = (iovec*)malloc(sizeof(*iov) * num_iovs);
2119 if (entry.numIovs > 0) {
2120 memcpy(entry.iov, iov, num_iovs * sizeof(*iov));
2121 }
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002122 entry.linear = linear;
2123 entry.linearSize = linearSize;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002124 }
2125
    // Detaches resource `toUnrefId` from context `ctxId` (caller holds the lock).
    //
    // Steps:
    //  1. Remove the resource id from the context's resource list.
    //  2. Clear the resource's back-references (hostPipe / ctxId).
    //  3. If the resource backed an address-space-graphics handle for this
    //     context, destroy that ASG handle on the cleanup thread. The ring
    //     blob is captured by the lambda to keep its memory alive until the
    //     ASG handle destruction has actually run.
    void detachResourceLocked(uint32_t ctxId, uint32_t toUnrefId) {
        stream_renderer_debug("ctxid: %u resid: %u", ctxId, toUnrefId);

        auto it = mContextResources.find(ctxId);
        if (it == mContextResources.end()) return;

        // Rebuild the context's resource list without the detached id.
        std::vector<VirtioGpuResId> withoutRes;
        for (auto resId : it->second) {
            if (resId != toUnrefId) {
                withoutRes.push_back(resId);
            }
        }
        mContextResources[ctxId] = withoutRes;

        auto resourceIt = mResources.find(toUnrefId);
        if (resourceIt == mResources.end()) return;
        auto& resource = resourceIt->second;

        resource.hostPipe = 0;
        resource.ctxId = 0;

        auto ctxIt = mContexts.find(ctxId);
        if (ctxIt != mContexts.end()) {
            auto& ctxEntry = ctxIt->second;
            if (ctxEntry.addressSpaceHandles.count(toUnrefId)) {
                uint32_t asgHandle = ctxEntry.addressSpaceHandles[toUnrefId];

                // asgBlob keeps the ring-blob storage alive until the deferred
                // destroy has executed on the cleanup thread.
                mCleanupThread->enqueueCleanup([this, asgBlob = resource.ringBlob, asgHandle](){
                    mAddressSpaceDeviceControlOps->destroy_handle(asgHandle);
                });

                ctxEntry.addressSpaceHandles.erase(toUnrefId);
            }
        }
    }
2161
2162 inline const GoldfishPipeServiceOps* ensureAndGetServiceOps() {
2163 if (mServiceOps) return mServiceOps;
2164 mServiceOps = goldfish_pipe_get_service_ops();
2165 return mServiceOps;
2166 }
2167
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002168 void* mCookie = nullptr;
Jason Macnak26872122024-02-23 10:46:09 -08002169 gfxstream::host::FeatureSet mFeatures;
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002170 stream_renderer_fence_callback mFenceCallback;
Gurchetan Singhee6107d2023-08-21 10:59:39 -07002171 uint32_t mPageSize = 4096;
Idan Raiter995ba8a2022-12-04 16:44:59 -08002172 struct address_space_device_control_ops* mAddressSpaceDeviceControlOps = nullptr;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002173
2174 const GoldfishPipeServiceOps* mServiceOps = nullptr;
2175
Gurchetan Singhd8485dd2022-03-17 14:58:54 -07002176 std::unordered_map<VirtioGpuCtxId, PipeCtxEntry> mContexts;
Gurchetan Singhf3ad8892022-03-17 14:59:58 -07002177 std::unordered_map<VirtioGpuResId, PipeResEntry> mResources;
2178 std::unordered_map<VirtioGpuCtxId, std::vector<VirtioGpuResId>> mContextResources;
2179 std::unordered_map<VirtioGpuResId, std::vector<VirtioGpuCtxId>> mResourceContexts;
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -07002180 std::unordered_map<uint64_t, std::shared_ptr<SyncDescriptorInfo>> mSyncMap;
Kaiyi Lifab51002021-08-21 15:12:02 -07002181
Kaiyi Lifab51002021-08-21 15:12:02 -07002182 // When we wait for gpu or wait for gpu vulkan, the next (and subsequent)
2183 // fences created for that context should not be signaled immediately.
2184 // Rather, they should get in line.
2185 std::unique_ptr<VirtioGpuTimelines> mVirtioGpuTimelines = nullptr;
Jason Macnak77ddccd2024-03-15 15:53:30 -07002186
2187 std::unique_ptr<CleanupThread> mCleanupThread;
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002188};
2189
2190static PipeVirglRenderer* sRenderer() {
2191 static PipeVirglRenderer* p = new PipeVirglRenderer;
2192 return p;
2193}
2194
2195extern "C" {
2196
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002197VG_EXPORT int stream_renderer_resource_create(struct stream_renderer_resource_create_args* args,
2198 struct iovec* iov, uint32_t num_iovs) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002199 return sRenderer()->createResource(args, iov, num_iovs);
2200}
2201
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002202VG_EXPORT void stream_renderer_resource_unref(uint32_t res_handle) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002203 sRenderer()->unrefResource(res_handle);
2204}
2205
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002206VG_EXPORT void stream_renderer_context_destroy(uint32_t handle) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002207 sRenderer()->destroyContext(handle);
2208}
2209
Gurchetan Singh561daa72023-07-11 08:54:01 -07002210VG_EXPORT int stream_renderer_submit_cmd(struct stream_renderer_command* cmd) {
2211 return sRenderer()->submitCmd(cmd);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002212}
2213
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002214VG_EXPORT int stream_renderer_transfer_read_iov(uint32_t handle, uint32_t ctx_id, uint32_t level,
2215 uint32_t stride, uint32_t layer_stride,
2216 struct stream_renderer_box* box, uint64_t offset,
2217 struct iovec* iov, int iovec_cnt) {
Lingfeng Yangb3841682020-11-12 14:13:43 -08002218 return sRenderer()->transferReadIov(handle, offset, box, iov, iovec_cnt);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002219}
2220
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002221VG_EXPORT int stream_renderer_transfer_write_iov(uint32_t handle, uint32_t ctx_id, int level,
2222 uint32_t stride, uint32_t layer_stride,
2223 struct stream_renderer_box* box, uint64_t offset,
2224 struct iovec* iovec, unsigned int iovec_cnt) {
Lingfeng Yangb3841682020-11-12 14:13:43 -08002225 return sRenderer()->transferWriteIov(handle, offset, box, iovec, iovec_cnt);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002226}
2227
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002228VG_EXPORT void stream_renderer_get_cap_set(uint32_t set, uint32_t* max_ver, uint32_t* max_size) {
Gurchetan Singhcd144002023-03-27 15:49:40 -07002229 // `max_ver` not useful
2230 return sRenderer()->getCapset(set, max_size);
2231}
2232
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002233VG_EXPORT void stream_renderer_fill_caps(uint32_t set, uint32_t version, void* caps) {
Gurchetan Singhcd144002023-03-27 15:49:40 -07002234 // `version` not useful
2235 return sRenderer()->fillCaps(set, caps);
2236}
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002237
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002238VG_EXPORT int stream_renderer_resource_attach_iov(int res_handle, struct iovec* iov, int num_iovs) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002239 return sRenderer()->attachIov(res_handle, iov, num_iovs);
2240}
2241
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002242VG_EXPORT void stream_renderer_resource_detach_iov(int res_handle, struct iovec** iov,
2243 int* num_iovs) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002244 return sRenderer()->detachIov(res_handle, iov, num_iovs);
2245}
2246
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002247VG_EXPORT void stream_renderer_ctx_attach_resource(int ctx_id, int res_handle) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002248 sRenderer()->attachResource(ctx_id, res_handle);
2249}
2250
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002251VG_EXPORT void stream_renderer_ctx_detach_resource(int ctx_id, int res_handle) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002252 sRenderer()->detachResource(ctx_id, res_handle);
2253}
2254
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002255VG_EXPORT int stream_renderer_resource_get_info(int res_handle,
2256 struct stream_renderer_resource_info* info) {
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002257 return sRenderer()->getResourceInfo(res_handle, info);
2258}
2259
Gurchetan Singha600f842023-05-18 10:08:48 -07002260VG_EXPORT void stream_renderer_flush(uint32_t res_handle) {
2261 sRenderer()->flushResource(res_handle);
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002262}
2263
Gurchetan Singh56043bb2022-01-27 19:11:27 -08002264VG_EXPORT int stream_renderer_create_blob(uint32_t ctx_id, uint32_t res_handle,
2265 const struct stream_renderer_create_blob* create_blob,
2266 const struct iovec* iovecs, uint32_t num_iovs,
2267 const struct stream_renderer_handle* handle) {
Gurchetan Singh701f8a62022-04-19 17:19:12 -07002268 sRenderer()->createBlob(ctx_id, res_handle, create_blob, handle);
Gurchetan Singh56043bb2022-01-27 19:11:27 -08002269 return 0;
2270}
2271
// Exports an OS handle (shm / dma-buf / win32) backing a blob resource.
VG_EXPORT int stream_renderer_export_blob(uint32_t res_handle,
                                          struct stream_renderer_handle* handle) {
    return sRenderer()->exportBlob(res_handle, handle);
}

// Returns the host mapping of a resource (non-external-blob mode only).
VG_EXPORT int stream_renderer_resource_map(uint32_t res_handle, void** hvaOut, uint64_t* sizeOut) {
    return sRenderer()->resourceMap(res_handle, hvaOut, sizeOut);
}

// Releases a mapping obtained via stream_renderer_resource_map().
VG_EXPORT int stream_renderer_resource_unmap(uint32_t res_handle) {
    return sRenderer()->resourceUnmap(res_handle);
}

// Creates a virtio-gpu context with the given debug name and capset id.
VG_EXPORT int stream_renderer_context_create(uint32_t ctx_id, uint32_t nlen, const char* name,
                                             uint32_t context_init) {
    return sRenderer()->createContext(ctx_id, nlen, name, context_init);
}
2289
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002290VG_EXPORT int stream_renderer_create_fence(const struct stream_renderer_fence* fence) {
Gurchetan Singhcc5a7f12024-07-01 16:13:32 -07002291 if (fence->flags & STREAM_RENDERER_FLAG_FENCE_SHAREABLE) {
2292 int ret = sRenderer()->acquireContextFence(fence->ctx_id, fence->fence_id);
2293 if (ret) {
2294 return ret;
2295 }
2296 }
2297
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002298 if (fence->flags & STREAM_RENDERER_FLAG_FENCE_RING_IDX) {
2299 sRenderer()->createFence(fence->fence_id, VirtioGpuRingContextSpecific{
2300 .mCtxId = fence->ctx_id,
2301 .mRingIdx = fence->ring_idx,
2302 });
2303 } else {
2304 sRenderer()->createFence(fence->fence_id, VirtioGpuRingGlobal{});
2305 }
2306
Lingfeng Yangaf686802021-07-15 20:06:07 -07002307 return 0;
2308}
2309
Gurchetan Singhf4a01bc2024-07-01 19:23:52 -07002310VG_EXPORT int stream_renderer_export_fence(uint64_t fence_id,
2311 struct stream_renderer_handle* handle) {
2312 return sRenderer()->exportFence(fence_id, handle);
2313}
2314
Idan Raiter995ba8a2022-12-04 16:44:59 -08002315VG_EXPORT int stream_renderer_platform_import_resource(int res_handle, int res_info,
2316 void* resource) {
Josh Simonota1a29a82022-05-12 12:03:35 -04002317 return sRenderer()->platformImportResource(res_handle, res_info, resource);
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07002318}
2319
Idan Raiter995ba8a2022-12-04 16:44:59 -08002320VG_EXPORT int stream_renderer_platform_resource_info(int res_handle, int* width, int* height,
2321 int* internal_format) {
Lingfeng Yangda09c0b2021-07-22 16:05:38 -07002322 return sRenderer()->platformResourceInfo(res_handle, width, height, internal_format);
2323}
2324
2325VG_EXPORT void* stream_renderer_platform_create_shared_egl_context() {
2326 return sRenderer()->platformCreateSharedEglContext();
2327}
2328
2329VG_EXPORT int stream_renderer_platform_destroy_shared_egl_context(void* context) {
2330 return sRenderer()->platformDestroySharedEglContext(context);
2331}
Lingfeng Yanga285eb42020-10-30 12:39:56 -07002332
Aaron Ruby7609a832024-05-28 14:49:53 -04002333VG_EXPORT int stream_renderer_wait_sync_resource(uint32_t res_handle) {
2334 return sRenderer()->waitSyncResource(res_handle);
2335}
2336
Idan Raiter995ba8a2022-12-04 16:44:59 -08002337VG_EXPORT int stream_renderer_resource_map_info(uint32_t res_handle, uint32_t* map_info) {
Gurchetan Singh60737a62022-01-13 16:06:24 -08002338 return sRenderer()->resourceMapInfo(res_handle, map_info);
2339}
2340
Gurchetan Singh424f0672022-08-26 11:10:12 -07002341VG_EXPORT int stream_renderer_vulkan_info(uint32_t res_handle,
Idan Raiter995ba8a2022-12-04 16:44:59 -08002342 struct stream_renderer_vulkan_info* vulkan_info) {
Gurchetan Singh424f0672022-08-26 11:10:12 -07002343 return sRenderer()->vulkanInfo(res_handle, vulkan_info);
2344}
2345
Gurchetan Singh29946e82024-02-08 08:51:09 -08002346VG_EXPORT int stream_renderer_snapshot(const char* dir) {
2347#ifdef GFXSTREAM_ENABLE_HOST_VK_SNAPSHOT
2348 std::string dirString(dir);
2349
2350 std::string snapshotFileName = dirString + "snapshot.bin";
2351
2352 std::unique_ptr<android::base::StdioStream> stream(new android::base::StdioStream(
2353 fopen(snapshotFileName.c_str(), "wb"), android::base::StdioStream::kOwner));
2354
2355 android_getOpenglesRenderer()->pauseAllPreSave();
2356 android::snapshot::SnapshotSaveStream saveStream{
2357 .stream = stream.get(),
2358 };
2359
2360 android_getOpenglesRenderer()->save(saveStream.stream, saveStream.textureSaver);
2361 return 0;
2362#else
2363 stream_renderer_error("Snapshot save requested without support.");
2364 return -EINVAL;
2365#endif
2366}
2367
2368VG_EXPORT int stream_renderer_restore(const char* dir) {
2369#ifdef GFXSTREAM_ENABLE_HOST_VK_SNAPSHOT
2370 std::string dirString(dir);
2371 std::string snapshotFileName = dirString + "snapshot.bin";
2372
2373 std::unique_ptr<android::base::StdioStream> stream(new android::base::StdioStream(
2374 fopen(snapshotFileName.c_str(), "rb"), android::base::StdioStream::kOwner));
2375
2376 android::snapshot::SnapshotLoadStream loadStream{
2377 .stream = stream.get(),
2378 };
2379
2380 android_getOpenglesRenderer()->load(loadStream.stream, loadStream.textureLoader);
2381
2382 // In end2end tests, we don't really do snapshot save for render threads.
2383 // We will need to resume all render threads without waiting for snapshot.
2384 android_getOpenglesRenderer()->resumeAll(false);
2385 return 0;
2386#else
2387 stream_renderer_error("Snapshot save requested without support.");
2388 return -EINVAL;
2389#endif
2390}
2391
Idan Raiterf6377592022-10-31 23:53:02 -07002392static const GoldfishPipeServiceOps goldfish_pipe_service_ops = {
2393 // guest_open()
2394 [](GoldfishHwPipe* hwPipe) -> GoldfishHostPipe* {
Roman Kiryanovf8b329e2023-01-24 11:18:27 -08002395 return static_cast<GoldfishHostPipe*>(android_pipe_guest_open(hwPipe));
Idan Raiterf6377592022-10-31 23:53:02 -07002396 },
2397 // guest_open_with_flags()
2398 [](GoldfishHwPipe* hwPipe, uint32_t flags) -> GoldfishHostPipe* {
Roman Kiryanovf8b329e2023-01-24 11:18:27 -08002399 return static_cast<GoldfishHostPipe*>(android_pipe_guest_open_with_flags(hwPipe, flags));
Idan Raiterf6377592022-10-31 23:53:02 -07002400 },
2401 // guest_close()
2402 [](GoldfishHostPipe* hostPipe, GoldfishPipeCloseReason reason) {
Idan Raiter995ba8a2022-12-04 16:44:59 -08002403 static_assert((int)GOLDFISH_PIPE_CLOSE_GRACEFUL == (int)PIPE_CLOSE_GRACEFUL,
Idan Raiterf6377592022-10-31 23:53:02 -07002404 "Invalid PIPE_CLOSE_GRACEFUL value");
Idan Raiter995ba8a2022-12-04 16:44:59 -08002405 static_assert((int)GOLDFISH_PIPE_CLOSE_REBOOT == (int)PIPE_CLOSE_REBOOT,
2406 "Invalid PIPE_CLOSE_REBOOT value");
2407 static_assert((int)GOLDFISH_PIPE_CLOSE_LOAD_SNAPSHOT == (int)PIPE_CLOSE_LOAD_SNAPSHOT,
Idan Raiterf6377592022-10-31 23:53:02 -07002408 "Invalid PIPE_CLOSE_LOAD_SNAPSHOT value");
Idan Raiter995ba8a2022-12-04 16:44:59 -08002409 static_assert((int)GOLDFISH_PIPE_CLOSE_ERROR == (int)PIPE_CLOSE_ERROR,
2410 "Invalid PIPE_CLOSE_ERROR value");
Idan Raiterf6377592022-10-31 23:53:02 -07002411
Idan Raiter995ba8a2022-12-04 16:44:59 -08002412 android_pipe_guest_close(hostPipe, static_cast<PipeCloseReason>(reason));
Idan Raiterf6377592022-10-31 23:53:02 -07002413 },
2414 // guest_pre_load()
2415 [](QEMUFile* file) { (void)file; },
2416 // guest_post_load()
2417 [](QEMUFile* file) { (void)file; },
2418 // guest_pre_save()
2419 [](QEMUFile* file) { (void)file; },
2420 // guest_post_save()
2421 [](QEMUFile* file) { (void)file; },
2422 // guest_load()
Idan Raiter995ba8a2022-12-04 16:44:59 -08002423 [](QEMUFile* file, GoldfishHwPipe* hwPipe, char* force_close) -> GoldfishHostPipe* {
Idan Raiterf6377592022-10-31 23:53:02 -07002424 (void)file;
2425 (void)hwPipe;
2426 (void)force_close;
2427 return nullptr;
2428 },
2429 // guest_save()
2430 [](GoldfishHostPipe* hostPipe, QEMUFile* file) {
2431 (void)hostPipe;
2432 (void)file;
2433 },
2434 // guest_poll()
2435 [](GoldfishHostPipe* hostPipe) {
Idan Raiter995ba8a2022-12-04 16:44:59 -08002436 static_assert((int)GOLDFISH_PIPE_POLL_IN == (int)PIPE_POLL_IN, "invalid POLL_IN values");
2437 static_assert((int)GOLDFISH_PIPE_POLL_OUT == (int)PIPE_POLL_OUT, "invalid POLL_OUT values");
2438 static_assert((int)GOLDFISH_PIPE_POLL_HUP == (int)PIPE_POLL_HUP, "invalid POLL_HUP values");
Idan Raiterf6377592022-10-31 23:53:02 -07002439
Idan Raiter995ba8a2022-12-04 16:44:59 -08002440 return static_cast<GoldfishPipePollFlags>(android_pipe_guest_poll(hostPipe));
Idan Raiterf6377592022-10-31 23:53:02 -07002441 },
2442 // guest_recv()
Idan Raiter995ba8a2022-12-04 16:44:59 -08002443 [](GoldfishHostPipe* hostPipe, GoldfishPipeBuffer* buffers, int numBuffers) -> int {
Idan Raiterf6377592022-10-31 23:53:02 -07002444 // NOTE: Assumes that AndroidPipeBuffer and GoldfishPipeBuffer
2445 // have exactly the same layout.
Idan Raiter995ba8a2022-12-04 16:44:59 -08002446 static_assert(sizeof(AndroidPipeBuffer) == sizeof(GoldfishPipeBuffer),
2447 "Invalid PipeBuffer sizes");
Idan Raiterf6377592022-10-31 23:53:02 -07002448 // We can't use a static_assert with offsetof() because in msvc, it uses
2449 // reinterpret_cast.
2450 // TODO: Add runtime assertion instead?
2451 // https://developercommunity.visualstudio.com/content/problem/22196/static-assert-cannot-compile-constexprs-method-tha.html
2452#ifndef _MSC_VER
Idan Raiter995ba8a2022-12-04 16:44:59 -08002453 static_assert(offsetof(AndroidPipeBuffer, data) == offsetof(GoldfishPipeBuffer, data),
Idan Raiterf6377592022-10-31 23:53:02 -07002454 "Invalid PipeBuffer::data offsets");
Idan Raiter995ba8a2022-12-04 16:44:59 -08002455 static_assert(offsetof(AndroidPipeBuffer, size) == offsetof(GoldfishPipeBuffer, size),
Idan Raiterf6377592022-10-31 23:53:02 -07002456 "Invalid PipeBuffer::size offsets");
2457#endif
Idan Raiter995ba8a2022-12-04 16:44:59 -08002458 return android_pipe_guest_recv(hostPipe, reinterpret_cast<AndroidPipeBuffer*>(buffers),
2459 numBuffers);
Idan Raiterf6377592022-10-31 23:53:02 -07002460 },
Joshua Duong7131c142023-08-18 14:04:53 -07002461 // wait_guest_recv()
2462 [](GoldfishHostPipe* hostPipe) {
2463 android_pipe_wait_guest_recv(hostPipe);
2464 },
Idan Raiterf6377592022-10-31 23:53:02 -07002465 // guest_send()
Idan Raiter995ba8a2022-12-04 16:44:59 -08002466 [](GoldfishHostPipe** hostPipe, const GoldfishPipeBuffer* buffers, int numBuffers) -> int {
2467 return android_pipe_guest_send(reinterpret_cast<void**>(hostPipe),
2468 reinterpret_cast<const AndroidPipeBuffer*>(buffers),
2469 numBuffers);
Idan Raiterf6377592022-10-31 23:53:02 -07002470 },
Joshua Duong7131c142023-08-18 14:04:53 -07002471 // wait_guest_send()
2472 [](GoldfishHostPipe* hostPipe) {
2473 android_pipe_wait_guest_send(hostPipe);
2474 },
Idan Raiterf6377592022-10-31 23:53:02 -07002475 // guest_wake_on()
2476 [](GoldfishHostPipe* hostPipe, GoldfishPipeWakeFlags wakeFlags) {
2477 android_pipe_guest_wake_on(hostPipe, static_cast<int>(wakeFlags));
2478 },
2479 // dma_add_buffer()
2480 [](void* pipe, uint64_t paddr, uint64_t sz) {
2481 // not considered for virtio
2482 },
2483 // dma_remove_buffer()
2484 [](uint64_t paddr) {
2485 // not considered for virtio
2486 },
2487 // dma_invalidate_host_mappings()
2488 []() {
2489 // not considered for virtio
2490 },
2491 // dma_reset_host_mappings()
2492 []() {
2493 // not considered for virtio
2494 },
2495 // dma_save_mappings()
Idan Raiter995ba8a2022-12-04 16:44:59 -08002496 [](QEMUFile* file) { (void)file; },
Idan Raiterf6377592022-10-31 23:53:02 -07002497 // dma_load_mappings()
Idan Raiter995ba8a2022-12-04 16:44:59 -08002498 [](QEMUFile* file) { (void)file; },
Idan Raiterf6377592022-10-31 23:53:02 -07002499};
2500
Gurchetan Singh9d89d5a2023-07-21 08:51:15 -07002501static int stream_renderer_opengles_init(uint32_t display_width, uint32_t display_height,
Jason Macnak26872122024-02-23 10:46:09 -08002502 int renderer_flags, gfxstream::host::FeatureSet features) {
Jason Macnak2c826b72024-02-27 16:10:55 -08002503 stream_renderer_debug("start. display dimensions: width %u height %u, renderer flags: 0x%x",
2504 display_width, display_height, renderer_flags);
Idan Raiterf6377592022-10-31 23:53:02 -07002505
2506 // Flags processing
2507
2508 // TODO: hook up "gfxstream egl" to the renderer flags
Gurchetan Singh7ad27502023-04-24 10:05:14 -07002509 // STREAM_RENDERER_FLAGS_USE_EGL_BIT in crosvm
Idan Raiterf6377592022-10-31 23:53:02 -07002510 // as it's specified from launch_cvd.
2511 // At the moment, use ANDROID_GFXSTREAM_EGL=1
2512 // For test on GCE
2513 if (android::base::getEnvironmentVariable("ANDROID_GFXSTREAM_EGL") == "1") {
2514 android::base::setEnvironmentVariable("ANDROID_EGL_ON_EGL", "1");
2515 android::base::setEnvironmentVariable("ANDROID_EMUGL_LOG_PRINT", "1");
2516 android::base::setEnvironmentVariable("ANDROID_EMUGL_VERBOSE", "1");
2517 }
2518 // end for test on GCE
2519
2520 android::base::setEnvironmentVariable("ANDROID_EMU_HEADLESS", "1");
Idan Raiterf6377592022-10-31 23:53:02 -07002521
2522 bool egl2eglByEnv = android::base::getEnvironmentVariable("ANDROID_EGL_ON_EGL") == "1";
Gurchetan Singh7ad27502023-04-24 10:05:14 -07002523 bool egl2eglByFlag = renderer_flags & STREAM_RENDERER_FLAGS_USE_EGL_BIT;
Idan Raiterf6377592022-10-31 23:53:02 -07002524 bool enable_egl2egl = egl2eglByFlag || egl2eglByEnv;
2525 if (enable_egl2egl) {
2526 android::base::setEnvironmentVariable("ANDROID_GFXSTREAM_EGL", "1");
2527 android::base::setEnvironmentVariable("ANDROID_EGL_ON_EGL", "1");
2528 }
2529
Gurchetan Singh7ad27502023-04-24 10:05:14 -07002530 bool surfaceless = renderer_flags & STREAM_RENDERER_FLAGS_USE_SURFACELESS_BIT;
Jason Macnakf044f012024-02-21 17:25:11 -08002531
Idan Raiterf6377592022-10-31 23:53:02 -07002532 android::featurecontrol::productFeatureOverride();
2533
Jason Macnaked0c9e62023-03-30 15:58:24 -07002534 gfxstream::vk::vkDispatch(false /* don't use test ICD */);
Idan Raiterf6377592022-10-31 23:53:02 -07002535
2536 auto androidHw = aemu_get_android_hw();
2537
2538 androidHw->hw_gltransport_asg_writeBufferSize = 1048576;
2539 androidHw->hw_gltransport_asg_writeStepSize = 262144;
2540 androidHw->hw_gltransport_asg_dataRingSize = 524288;
2541 androidHw->hw_gltransport_drawFlushInterval = 10000;
2542
2543 EmuglConfig config;
2544
2545 // Make all the console agents available.
2546 android::emulation::injectGraphicsAgents(android::emulation::GfxStreamGraphicsAgentFactory());
2547
2548 emuglConfig_init(&config, true /* gpu enabled */, "auto",
Idan Raiter995ba8a2022-12-04 16:44:59 -08002549 enable_egl2egl ? "swiftshader_indirect" : "host", 64, /* bitness */
2550 surfaceless, /* no window */
2551 false, /* blocklisted */
2552 false, /* has guest renderer */
2553 WINSYS_GLESBACKEND_PREFERENCE_AUTO, true /* force host gpu vulkan */);
Idan Raiterf6377592022-10-31 23:53:02 -07002554
2555 emuglConfig_setupEnv(&config);
2556
2557 android_prepareOpenglesEmulation();
2558
2559 {
Jason Macnaked0c9e62023-03-30 15:58:24 -07002560 static gfxstream::RenderLibPtr renderLibPtr = gfxstream::initLibrary();
Jason Macnake70b8e32023-01-20 13:59:35 -08002561 android_setOpenglesEmulation(renderLibPtr.get(), nullptr, nullptr);
Idan Raiterf6377592022-10-31 23:53:02 -07002562 }
2563
2564 int maj;
2565 int min;
Idan Raiter995ba8a2022-12-04 16:44:59 -08002566 android_startOpenglesRenderer(display_width, display_height, 1, 28, getGraphicsAgents()->vm,
2567 getGraphicsAgents()->emu, getGraphicsAgents()->multi_display,
Jason Macnak26872122024-02-23 10:46:09 -08002568 &features, &maj, &min);
Idan Raiterf6377592022-10-31 23:53:02 -07002569
2570 char* vendor = nullptr;
2571 char* renderer = nullptr;
2572 char* version = nullptr;
2573
Idan Raiter995ba8a2022-12-04 16:44:59 -08002574 android_getOpenglesHardwareStrings(&vendor, &renderer, &version);
Idan Raiterf6377592022-10-31 23:53:02 -07002575
Jason Macnak2c826b72024-02-27 16:10:55 -08002576 stream_renderer_info("GL strings; [%s] [%s] [%s].", vendor, renderer, version);
Idan Raiterf6377592022-10-31 23:53:02 -07002577
2578 auto openglesRenderer = android_getOpenglesRenderer();
2579
2580 if (!openglesRenderer) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002581 stream_renderer_error("No renderer started, fatal");
2582 return -EINVAL;
Idan Raiterf6377592022-10-31 23:53:02 -07002583 }
2584
2585 address_space_set_vm_operations(getGraphicsAgents()->vm);
2586 android_init_opengles_pipe();
2587 android_opengles_pipe_set_recv_mode(2 /* virtio-gpu */);
2588 android_init_refcount_pipe();
2589
Joshua Duongc256a3c2023-05-09 08:14:24 -07002590 return 0;
2591}
2592
Jason Macnak26872122024-02-23 10:46:09 -08002593namespace {
2594
2595int parseGfxstreamFeatures(const int renderer_flags,
2596 const std::string& renderer_features,
2597 gfxstream::host::FeatureSet& features) {
2598 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2599 &features, ExternalBlob,
2600 renderer_flags & STREAM_RENDERER_FLAGS_USE_EXTERNAL_BLOB);
Gurchetan Singh3f2eda02024-06-28 12:02:11 -07002601 GFXSTREAM_SET_FEATURE_ON_CONDITION(&features, VulkanExternalSync,
2602 renderer_flags & STREAM_RENDERER_FLAGS_VULKAN_EXTERNAL_SYNC);
Jason Macnak26872122024-02-23 10:46:09 -08002603 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2604 &features, GlAsyncSwap, false);
2605 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2606 &features, GlDirectMem, false);
2607 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2608 &features, GlDma, false);
2609 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2610 &features, GlesDynamicVersion, true);
2611 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2612 &features, GlPipeChecksum, false);
2613 GFXSTREAM_SET_FEATURE_ON_CONDITION(
Aaron Ruby52cef352024-07-02 13:31:23 -04002614 &features, GuestVulkanOnly,
Jason Macnak26872122024-02-23 10:46:09 -08002615 (renderer_flags & STREAM_RENDERER_FLAGS_USE_VK_BIT) &&
2616 !(renderer_flags & STREAM_RENDERER_FLAGS_USE_GLES_BIT));
2617 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2618 &features, HostComposition, true);
2619 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2620 &features, NativeTextureDecompression, false);
2621 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2622 &features, NoDelayCloseColorBuffer, true);
2623 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2624 &features, PlayStoreImage,
2625 !(renderer_flags & STREAM_RENDERER_FLAGS_USE_GLES_BIT));
2626 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2627 &features, RefCountPipe,
2628 /*Resources are ref counted via guest file objects.*/false);
2629 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2630 &features, SystemBlob,
2631 renderer_flags & STREAM_RENDERER_FLAGS_USE_SYSTEM_BLOB);
2632 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2633 &features, VirtioGpuFenceContexts, true);
2634 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2635 &features, VirtioGpuNativeSync, true);
2636 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2637 &features, VirtioGpuNext, true);
2638 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2639 &features, Vulkan,
2640 renderer_flags & STREAM_RENDERER_FLAGS_USE_VK_BIT);
2641 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2642 &features, VulkanBatchedDescriptorSetUpdate, true);
2643 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2644 &features, VulkanIgnoredHandles, true);
2645 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2646 &features, VulkanNativeSwapchain,
2647 renderer_flags & STREAM_RENDERER_FLAGS_VULKAN_NATIVE_SWAPCHAIN_BIT);
2648 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2649 &features, VulkanNullOptionalStrings, true);
2650 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2651 &features, VulkanQueueSubmitWithCommands, true);
2652 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2653 &features, VulkanShaderFloat16Int8, true);
2654 GFXSTREAM_SET_FEATURE_ON_CONDITION(
2655 &features, VulkanSnapshots,
2656 android::base::getEnvironmentVariable("ANDROID_GFXSTREAM_CAPTURE_VK_SNAPSHOT") == "1");
2657
2658 for (const std::string& renderer_feature : gfxstream::Split(renderer_features, ",")) {
2659 if (renderer_feature.empty()) continue;
2660
2661 const std::vector<std::string>& parts = gfxstream::Split(renderer_feature, ":");
2662 if (parts.size() != 2) {
2663 stream_renderer_error("Error: invalid renderer features: %s",
2664 renderer_features.c_str());
2665 return -EINVAL;
2666 }
2667
2668 const std::string& feature_name = parts[0];
2669
2670 auto feature_it = features.map.find(feature_name);
2671 if (feature_it == features.map.end()) {
2672 stream_renderer_error("Error: invalid renderer feature: '%s'", feature_name.c_str());
2673 return -EINVAL;
2674 }
2675
2676 const std::string& feature_status = parts[1];
2677 if (feature_status != "enabled" && feature_status != "disabled") {
2678 stream_renderer_error("Error: invalid option %s for renderer feature: %s",
2679 feature_status.c_str(), feature_name.c_str());
2680 return -EINVAL;
2681 }
2682
2683 auto& feature_info = feature_it->second;
2684 feature_info->enabled = feature_status == "enabled";
2685 feature_info->reason = "Overridden via STREAM_RENDERER_PARAM_RENDERER_FEATURES";
2686
2687 stream_renderer_error("Gfxstream feature %s %s", feature_name.c_str(),
2688 feature_status.c_str());
2689 }
2690
2691 if (features.SystemBlob.enabled) {
2692 if(!features.ExternalBlob.enabled) {
2693 stream_renderer_error("The SystemBlob features requires the ExternalBlob feature.");
2694 return -EINVAL;
2695 }
2696#ifndef _WIN32
2697 stream_renderer_warn("Warning: USE_SYSTEM_BLOB has only been tested on Windows");
2698#endif
2699 }
2700 if (features.VulkanNativeSwapchain.enabled && !features.Vulkan.enabled) {
2701 stream_renderer_error("can't enable vulkan native swapchain, Vulkan is disabled");
2702 return -EINVAL;
2703 }
2704
2705 return 0;
2706}
2707
2708} // namespace
2709
Joshua Duongc256a3c2023-05-09 08:14:24 -07002710VG_EXPORT int stream_renderer_init(struct stream_renderer_param* stream_renderer_params,
2711 uint64_t num_params) {
2712 // Required parameters.
2713 std::unordered_set<uint64_t> required_params{STREAM_RENDERER_PARAM_USER_DATA,
2714 STREAM_RENDERER_PARAM_RENDERER_FLAGS,
2715 STREAM_RENDERER_PARAM_FENCE_CALLBACK};
2716
2717 // String names of the parameters.
2718 std::unordered_map<uint64_t, std::string> param_strings{
2719 {STREAM_RENDERER_PARAM_USER_DATA, "USER_DATA"},
2720 {STREAM_RENDERER_PARAM_RENDERER_FLAGS, "RENDERER_FLAGS"},
2721 {STREAM_RENDERER_PARAM_FENCE_CALLBACK, "FENCE_CALLBACK"},
2722 {STREAM_RENDERER_PARAM_WIN0_WIDTH, "WIN0_WIDTH"},
2723 {STREAM_RENDERER_PARAM_WIN0_HEIGHT, "WIN0_HEIGHT"},
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002724 {STREAM_RENDERER_PARAM_DEBUG_CALLBACK, "DEBUG_CALLBACK"},
Joshua Duongc256a3c2023-05-09 08:14:24 -07002725 {STREAM_RENDERER_SKIP_OPENGLES_INIT, "SKIP_OPENGLES_INIT"},
2726 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT,
2727 "METRICS_CALLBACK_ADD_INSTANT_EVENT"},
2728 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR,
2729 "METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR"},
2730 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC,
2731 "METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC"},
2732 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT,
2733 "METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT"},
2734 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_SET_ANNOTATION, "METRICS_CALLBACK_SET_ANNOTATION"},
2735 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ABORT, "METRICS_CALLBACK_ABORT"}};
2736
2737 // Print full values for these parameters:
2738 // Values here must not be pointers (e.g. callback functions), to avoid potentially identifying
2739 // someone via ASLR. Pointers in ASLR are randomized on boot, which means pointers may be
2740 // different between users but similar across a single user's sessions.
2741 // As a convenience, any value <= 4096 is also printed, to catch small or null pointer errors.
2742 std::unordered_set<uint64_t> printed_param_values{STREAM_RENDERER_PARAM_RENDERER_FLAGS,
2743 STREAM_RENDERER_PARAM_WIN0_WIDTH,
2744 STREAM_RENDERER_PARAM_WIN0_HEIGHT};
2745
2746 // We may have unknown parameters, so this function is lenient.
2747 auto get_param_string = [&](uint64_t key) -> std::string {
2748 auto param_string = param_strings.find(key);
2749 if (param_string != param_strings.end()) {
2750 return param_string->second;
2751 } else {
2752 return "Unknown param with key=" + std::to_string(key);
2753 }
2754 };
2755
2756 // Initialization data.
2757 uint32_t display_width = 0;
2758 uint32_t display_height = 0;
2759 void* renderer_cookie = nullptr;
2760 int renderer_flags = 0;
Jason Macnak26872122024-02-23 10:46:09 -08002761 std::string renderer_features_str;
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002762 stream_renderer_fence_callback fence_callback = nullptr;
Joshua Duongc256a3c2023-05-09 08:14:24 -07002763 bool skip_opengles = false;
2764
2765 // Iterate all parameters that we support.
Jason Macnak2c826b72024-02-27 16:10:55 -08002766 stream_renderer_debug("Reading stream renderer parameters:");
Joshua Duongc256a3c2023-05-09 08:14:24 -07002767 for (uint64_t i = 0; i < num_params; ++i) {
2768 stream_renderer_param& param = stream_renderer_params[i];
2769
2770 // Print out parameter we are processing. See comment above `printed_param_values` before
2771 // adding new prints.
2772 if (printed_param_values.find(param.key) != printed_param_values.end() ||
2773 param.value <= 4096) {
Jason Macnak2c826b72024-02-27 16:10:55 -08002774 stream_renderer_debug("%s - %llu", get_param_string(param.key).c_str(),
2775 static_cast<unsigned long long>(param.value));
Joshua Duongc256a3c2023-05-09 08:14:24 -07002776 } else {
2777 // If not full value, print that it was passed.
Jason Macnak2c826b72024-02-27 16:10:55 -08002778 stream_renderer_debug("%s", get_param_string(param.key).c_str());
Joshua Duongc256a3c2023-05-09 08:14:24 -07002779 }
2780
2781 // Removing every param we process will leave required_params empty if all provided.
2782 required_params.erase(param.key);
2783
2784 switch (param.key) {
Jason Macnak97743292024-02-27 16:27:12 -08002785 case STREAM_RENDERER_PARAM_NULL:
2786 break;
Joshua Duongc256a3c2023-05-09 08:14:24 -07002787 case STREAM_RENDERER_PARAM_USER_DATA: {
2788 renderer_cookie = reinterpret_cast<void*>(static_cast<uintptr_t>(param.value));
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002789 globalUserData = renderer_cookie;
Joshua Duongc256a3c2023-05-09 08:14:24 -07002790 break;
2791 }
2792 case STREAM_RENDERER_PARAM_RENDERER_FLAGS: {
2793 renderer_flags = static_cast<int>(param.value);
2794 break;
2795 }
2796 case STREAM_RENDERER_PARAM_FENCE_CALLBACK: {
2797 fence_callback = reinterpret_cast<stream_renderer_fence_callback>(
2798 static_cast<uintptr_t>(param.value));
2799 break;
2800 }
2801 case STREAM_RENDERER_PARAM_WIN0_WIDTH: {
2802 display_width = static_cast<uint32_t>(param.value);
2803 break;
2804 }
2805 case STREAM_RENDERER_PARAM_WIN0_HEIGHT: {
2806 display_height = static_cast<uint32_t>(param.value);
2807 break;
2808 }
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002809 case STREAM_RENDERER_PARAM_DEBUG_CALLBACK: {
2810 globalDebugCallback = reinterpret_cast<stream_renderer_debug_callback>(
2811 static_cast<uintptr_t>(param.value));
2812 break;
2813 }
Joshua Duongc256a3c2023-05-09 08:14:24 -07002814 case STREAM_RENDERER_SKIP_OPENGLES_INIT: {
2815 skip_opengles = static_cast<bool>(param.value);
2816 break;
2817 }
2818 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT: {
2819 MetricsLogger::add_instant_event_callback =
2820 reinterpret_cast<stream_renderer_param_metrics_callback_add_instant_event>(
2821 static_cast<uintptr_t>(param.value));
2822 break;
2823 }
2824 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR: {
2825 MetricsLogger::add_instant_event_with_descriptor_callback = reinterpret_cast<
2826 stream_renderer_param_metrics_callback_add_instant_event_with_descriptor>(
2827 static_cast<uintptr_t>(param.value));
2828 break;
2829 }
2830 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC: {
2831 MetricsLogger::add_instant_event_with_metric_callback = reinterpret_cast<
2832 stream_renderer_param_metrics_callback_add_instant_event_with_metric>(
2833 static_cast<uintptr_t>(param.value));
2834 break;
2835 }
2836 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT: {
2837 MetricsLogger::add_vulkan_out_of_memory_event = reinterpret_cast<
2838 stream_renderer_param_metrics_callback_add_vulkan_out_of_memory_event>(
2839 static_cast<uintptr_t>(param.value));
2840 break;
2841 }
Jason Macnakf044f012024-02-21 17:25:11 -08002842 case STREAM_RENDERER_PARAM_RENDERER_FEATURES: {
Jason Macnak26872122024-02-23 10:46:09 -08002843 renderer_features_str =
Jason Macnakf044f012024-02-21 17:25:11 -08002844 std::string(reinterpret_cast<const char*>(static_cast<uintptr_t>(param.value)));
2845 break;
2846 }
Joshua Duongc256a3c2023-05-09 08:14:24 -07002847 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_SET_ANNOTATION: {
2848 MetricsLogger::set_crash_annotation_callback =
2849 reinterpret_cast<stream_renderer_param_metrics_callback_set_annotation>(
2850 static_cast<uintptr_t>(param.value));
2851 break;
2852 }
2853 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ABORT: {
2854 emugl::setDieFunction(
2855 reinterpret_cast<stream_renderer_param_metrics_callback_abort>(
2856 static_cast<uintptr_t>(param.value)));
2857 break;
2858 }
2859 default: {
2860 // We skip any parameters we don't recognize.
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002861 stream_renderer_error(
2862 "Skipping unknown parameter key: %llu. May need to upgrade gfxstream.",
Joshua Duongc256a3c2023-05-09 08:14:24 -07002863 static_cast<unsigned long long>(param.key));
2864 break;
2865 }
2866 }
2867 }
Jason Macnak2c826b72024-02-27 16:10:55 -08002868 stream_renderer_debug("Finished reading parameters");
Joshua Duongc256a3c2023-05-09 08:14:24 -07002869
2870 // Some required params not found.
2871 if (required_params.size() > 0) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002872 stream_renderer_error("Missing required parameters:");
Joshua Duongc256a3c2023-05-09 08:14:24 -07002873 for (uint64_t param : required_params) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002874 stream_renderer_error("%s", get_param_string(param).c_str());
Joshua Duongc256a3c2023-05-09 08:14:24 -07002875 }
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002876 stream_renderer_error("Failing initialization intentionally");
2877 return -EINVAL;
Joshua Duongc256a3c2023-05-09 08:14:24 -07002878 }
2879
Gurchetan Singh3f2eda02024-06-28 12:02:11 -07002880#if GFXSTREAM_UNSTABLE_VULKAN_EXTERNAL_SYNC
2881 renderer_flags |= STREAM_RENDERER_FLAGS_VULKAN_EXTERNAL_SYNC;
2882#endif
2883
Jason Macnak26872122024-02-23 10:46:09 -08002884 gfxstream::host::FeatureSet features;
2885 int ret = parseGfxstreamFeatures(renderer_flags, renderer_features_str, features);
2886 if (ret) {
2887 stream_renderer_error("Failed to initialize: failed to parse Gfxstream features.");
2888 return ret;
2889 }
2890
2891 stream_renderer_info("Gfxstream features:");
2892 for (const auto& [_, featureInfo] : features.map) {
2893 stream_renderer_info(" %s: %s (%s)", featureInfo->name.c_str(),
2894 (featureInfo->enabled ? "enabled" : "disabled"), featureInfo->reason.c_str());
2895 }
2896
Joshua Duongc256a3c2023-05-09 08:14:24 -07002897 // Set non product-specific callbacks
2898 gfxstream::vk::vk_util::setVkCheckCallbacks(
2899 std::make_unique<gfxstream::vk::vk_util::VkCheckCallbacks>(
2900 gfxstream::vk::vk_util::VkCheckCallbacks{
Jason Macnak05eaa8e2024-07-01 10:55:23 -07002901 .onVkErrorDeviceLost =
2902 []() {
2903 auto fb = gfxstream::FrameBuffer::getFB();
2904 if (!fb) {
2905 ERR("FrameBuffer not yet initialized. Dropping device lost event");
2906 return;
2907 }
2908 fb->logVulkanDeviceLost();
2909 },
Joshua Duongc256a3c2023-05-09 08:14:24 -07002910 .onVkErrorOutOfMemory =
2911 [](VkResult result, const char* function, int line) {
2912 auto fb = gfxstream::FrameBuffer::getFB();
2913 if (!fb) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002914 stream_renderer_error(
2915 "FrameBuffer not yet initialized. Dropping out of memory event");
Joshua Duongc256a3c2023-05-09 08:14:24 -07002916 return;
2917 }
2918 fb->logVulkanOutOfMemory(result, function, line);
2919 },
2920 .onVkErrorOutOfMemoryOnAllocation =
2921 [](VkResult result, const char* function, int line,
2922 std::optional<uint64_t> allocationSize) {
2923 auto fb = gfxstream::FrameBuffer::getFB();
2924 if (!fb) {
Gurchetan Singh01b8b482023-07-20 07:39:20 -07002925 stream_renderer_error(
2926 "FrameBuffer not yet initialized. Dropping out of memory event");
Joshua Duongc256a3c2023-05-09 08:14:24 -07002927 return;
2928 }
2929 fb->logVulkanOutOfMemory(result, function, line, allocationSize);
2930 }}));
2931
2932 if (!skip_opengles) {
2933 // aemu currently does its own opengles initialization in
2934 // qemu/android/android-emu/android/opengles.cpp.
Jason Macnak26872122024-02-23 10:46:09 -08002935 int ret = stream_renderer_opengles_init(display_width, display_height, renderer_flags, features);
Joshua Duongc256a3c2023-05-09 08:14:24 -07002936 if (ret) {
2937 return ret;
2938 }
2939 }
2940
Jason Macnak26872122024-02-23 10:46:09 -08002941 sRenderer()->init(renderer_cookie, features, fence_callback);
Jason Macnake70b8e32023-01-20 13:59:35 -08002942 gfxstream::FrameBuffer::waitUntilInitialized();
2943
Jason Macnak2c826b72024-02-27 16:10:55 -08002944 stream_renderer_info("Gfxstream initialized successfully!");
Idan Raiterd8008052022-10-27 05:53:17 -07002945 return 0;
Idan Raiter7ed04b02022-10-26 15:26:32 -07002946}
2947
Idan Raiter995ba8a2022-12-04 16:44:59 -08002948VG_EXPORT void gfxstream_backend_setup_window(void* native_window_handle, int32_t window_x,
2949 int32_t window_y, int32_t window_width,
2950 int32_t window_height, int32_t fb_width,
2951 int32_t fb_height) {
2952 android_showOpenglesWindow(native_window_handle, window_x, window_y, window_width,
2953 window_height, fb_width, fb_height, 1.0f, 0, false, false);
Idan Raiterf6377592022-10-31 23:53:02 -07002954}
2955
Gurchetan Singh3a06e8c2023-04-12 20:15:33 -07002956VG_EXPORT void stream_renderer_teardown() {
Idan Raiterf6377592022-10-31 23:53:02 -07002957 android_finishOpenglesRenderer();
2958 android_hideOpenglesWindow();
2959 android_stopOpenglesRenderer(true);
Jason Macnak77ddccd2024-03-15 15:53:30 -07002960
2961 sRenderer()->teardown();
Jason Macnak244fd722024-04-02 13:26:50 -07002962 stream_renderer_info("Gfxstream shut down completed!");
Idan Raiterf6377592022-10-31 23:53:02 -07002963}
2964
Idan Raiter995ba8a2022-12-04 16:44:59 -08002965VG_EXPORT void gfxstream_backend_set_screen_mask(int width, int height,
2966 const unsigned char* rgbaData) {
Idan Raiterf6377592022-10-31 23:53:02 -07002967 android_setOpenglesScreenMask(width, height, rgbaData);
2968}
2969
Idan Raiter995ba8a2022-12-04 16:44:59 -08002970const GoldfishPipeServiceOps* goldfish_pipe_get_service_ops() { return &goldfish_pipe_service_ops; }
Idan Raiterf6377592022-10-31 23:53:02 -07002971
Gurchetan Singhdb375c92023-04-12 14:15:34 -07002972static_assert(sizeof(struct stream_renderer_device_id) == 32,
2973 "stream_renderer_device_id must be 32 bytes");
2974static_assert(offsetof(struct stream_renderer_device_id, device_uuid) == 0,
2975 "stream_renderer_device_id.device_uuid must be at offset 0");
2976static_assert(offsetof(struct stream_renderer_device_id, driver_uuid) == 16,
2977 "stream_renderer_device_id.driver_uuid must be at offset 16");
2978
2979static_assert(sizeof(struct stream_renderer_vulkan_info) == 36,
2980 "stream_renderer_vulkan_info must be 36 bytes");
2981static_assert(offsetof(struct stream_renderer_vulkan_info, memory_index) == 0,
2982 "stream_renderer_vulkan_info.memory_index must be at offset 0");
2983static_assert(offsetof(struct stream_renderer_vulkan_info, device_id) == 4,
2984 "stream_renderer_vulkan_info.device_id must be at offset 4");
2985
2986static_assert(sizeof(struct stream_renderer_param_host_visible_memory_mask_entry) == 36,
2987 "stream_renderer_param_host_visible_memory_mask_entry must be 36 bytes");
2988static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask_entry, device_id) == 0,
2989 "stream_renderer_param_host_visible_memory_mask_entry.device_id must be at offset 0");
2990static_assert(
2991 offsetof(struct stream_renderer_param_host_visible_memory_mask_entry, memory_type_mask) == 32,
2992 "stream_renderer_param_host_visible_memory_mask_entry.memory_type_mask must be at offset 32");
2993
2994static_assert(sizeof(struct stream_renderer_param_host_visible_memory_mask) == 16,
2995 "stream_renderer_param_host_visible_memory_mask must be 16 bytes");
2996static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask, entries) == 0,
2997 "stream_renderer_param_host_visible_memory_mask.entries must be at offset 0");
2998static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask, num_entries) == 8,
2999 "stream_renderer_param_host_visible_memory_mask.num_entries must be at offset 8");
3000
3001static_assert(sizeof(struct stream_renderer_param) == 16, "stream_renderer_param must be 16 bytes");
3002static_assert(offsetof(struct stream_renderer_param, key) == 0,
3003 "stream_renderer_param.key must be at offset 0");
3004static_assert(offsetof(struct stream_renderer_param, value) == 8,
3005 "stream_renderer_param.value must be at offset 8");
Joshua Duongc256a3c2023-05-09 08:14:24 -07003006
#ifdef CONFIG_AEMU

// AEMU-only hook: installs the goldfish pipe service-ops table on the
// renderer instance.
VG_EXPORT void stream_renderer_set_service_ops(const GoldfishPipeServiceOps* ops) {
    sRenderer()->setServiceOps(ops);
}

#endif  // CONFIG_AEMU
3014
Idan Raiter995ba8a2022-12-04 16:44:59 -08003015} // extern "C"