/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "RenderProxy.h"

#include <SkBitmap.h>
#include <SkImage.h>
#include <SkPicture.h>
#include <gui/TraceUtils.h>
#include <pthread.h>
#include <ui/GraphicBufferAllocator.h>

#include "DeferredLayerUpdater.h"
#include "DisplayList.h"
#include "Properties.h"
#include "Readback.h"
#include "Rect.h"
#include "WebViewFunctorManager.h"
#include "renderthread/CanvasContext.h"
#include "renderthread/RenderTask.h"
#include "renderthread/RenderThread.h"
#include "utils/Macros.h"
#include "utils/TimeUtils.h"

namespace android {
namespace uirenderer {
namespace renderthread {

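// The CanvasContext is created synchronously on the RenderThread so that mContext
// is valid (or null on failure) before this constructor returns; the hint session
// is then started from a separately posted, asynchronous task.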
RenderProxy::RenderProxy(bool translucent, RenderNode* rootRenderNode,
                         IContextFactory* contextFactory)
        : mRenderThread(RenderThread::getInstance()), mContext(nullptr) {
#ifdef __ANDROID__
    pid_t uiThreadId = pthread_gettid_np(pthread_self());
#else
    pid_t uiThreadId = 0;
#endif
    pid_t renderThreadId = getRenderThreadTid();
    mContext = mRenderThread.queue().runSync([=, this]() -> CanvasContext* {
        CanvasContext* context = CanvasContext::create(mRenderThread, translucent, rootRenderNode,
                                                       contextFactory, uiThreadId, renderThreadId);
        if (context != nullptr) {
            mRenderThread.queue().post([=] { context->startHintSession(); });
        }
        return context;
    });
    mDrawFrameTask.setContext(&mRenderThread, mContext, rootRenderNode);
}

RenderProxy::~RenderProxy() {
    destroyContext();
}

void RenderProxy::destroyContext() {
    if (mContext) {
        mDrawFrameTask.setContext(nullptr, nullptr, nullptr);
        // This is also a fence as we need to be certain that there are no
        // outstanding mDrawFrame tasks posted before it is destroyed
        mRenderThread.queue().runSync([this]() { delete mContext; });
        mContext = nullptr;
    }
}

void RenderProxy::setSwapBehavior(SwapBehavior swapBehavior) {
    mRenderThread.queue().post([this, swapBehavior]() { mContext->setSwapBehavior(swapBehavior); });
}

bool RenderProxy::loadSystemProperties() {
    return mRenderThread.queue().runSync([this]() -> bool {
        bool needsRedraw = Properties::load();
        if (mContext->profiler().consumeProperties()) {
            needsRedraw = true;
        }
        return needsRedraw;
    });
}

void RenderProxy::setName(const char* name) {
    // Block since the name pointer is owned by the caller.
    // TODO: Support move arguments
    mRenderThread.queue().runSync([this, name]() { mContext->setName(std::string(name)); });
}

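// setHardwareBuffer() and setSurface() acquire a reference on the calling thread
// and release it from inside the posted task, so the native object stays alive
// across the asynchronous hand-off to the RenderThread.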
void RenderProxy::setHardwareBuffer(AHardwareBuffer* buffer) {
#ifdef __ANDROID__
    if (buffer) {
        AHardwareBuffer_acquire(buffer);
    }
    mRenderThread.queue().post([this, hardwareBuffer = buffer]() mutable {
        mContext->setHardwareBuffer(hardwareBuffer);
        if (hardwareBuffer) {
            AHardwareBuffer_release(hardwareBuffer);
        }
    });
#endif
}

void RenderProxy::setSurface(ANativeWindow* window, bool enableTimeout) {
    if (window) { ANativeWindow_acquire(window); }
    mRenderThread.queue().post([this, win = window, enableTimeout]() mutable {
        mContext->setSurface(win, enableTimeout);
        if (win) { ANativeWindow_release(win); }
    });
}

void RenderProxy::setSurfaceControl(ASurfaceControl* surfaceControl) {
    auto funcs = mRenderThread.getASurfaceControlFunctions();
    if (surfaceControl) {
        funcs.acquireFunc(surfaceControl);
    }
    mRenderThread.queue().post([this, control = surfaceControl, funcs]() mutable {
        mContext->setSurfaceControl(control);
        if (control) {
            funcs.releaseFunc(control);
        }
    });
}

void RenderProxy::allocateBuffers() {
    mRenderThread.queue().post([this]() { mContext->allocateBuffers(); });
}

bool RenderProxy::pause() {
    return mRenderThread.queue().runSync([this]() -> bool { return mContext->pauseSurface(); });
}

void RenderProxy::setStopped(bool stopped) {
    mRenderThread.queue().runSync([this, stopped]() { mContext->setStopped(stopped); });
}

void RenderProxy::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mRenderThread.queue().post(
            [=, this]() { mContext->setLightAlpha(ambientShadowAlpha, spotShadowAlpha); });
}

void RenderProxy::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mRenderThread.queue().post(
            [=, this]() { mContext->setLightGeometry(lightCenter, lightRadius); });
}

void RenderProxy::setOpaque(bool opaque) {
    mRenderThread.queue().post([=, this]() { mContext->setOpaque(opaque); });
}

float RenderProxy::setColorMode(ColorMode mode) {
    // We only need to figure out what the renderer supports for HDR; otherwise this can stay
    // an async call since we already know the return value.
    if (mode == ColorMode::Hdr || mode == ColorMode::Hdr10) {
        return mRenderThread.queue().runSync(
                [=, this]() -> float { return mContext->setColorMode(mode); });
    } else {
        mRenderThread.queue().post([=, this]() { mContext->setColorMode(mode); });
        return 1.f;
    }
}

void RenderProxy::setRenderSdrHdrRatio(float ratio) {
    mDrawFrameTask.setRenderSdrHdrRatio(ratio);
}

int64_t* RenderProxy::frameInfo() {
    return mDrawFrameTask.frameInfo();
}

void RenderProxy::forceDrawNextFrame() {
    mDrawFrameTask.forceDrawNextFrame();
}

int RenderProxy::syncAndDrawFrame() {
    return mDrawFrameTask.drawFrame();
}

void RenderProxy::destroy() {
    // destroy() needs a fence: when it returns, the underlying BufferQueue is
    // going to be released out from under the render thread.
    mRenderThread.queue().runSync([this]() { mContext->destroy(); });
}

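// Uses the process-global RenderThread rather than mRenderThread, so no
// particular RenderProxy instance is required to tear down a WebView functor.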
void RenderProxy::destroyFunctor(int functor) {
    ATRACE_CALL();
    RenderThread& thread = RenderThread::getInstance();
    thread.queue().post([=]() { WebViewFunctorManager::instance().destroyFunctor(functor); });
}

DeferredLayerUpdater* RenderProxy::createTextureLayer() {
    return mRenderThread.queue().runSync([this]() -> auto {
        return mContext->createTextureLayer();
    });
}

void RenderProxy::buildLayer(RenderNode* node) {
    mRenderThread.queue().runSync([&]() { mContext->buildLayer(node); });
}

bool RenderProxy::copyLayerInto(DeferredLayerUpdater* layer, SkBitmap& bitmap) {
    ATRACE_NAME("TextureView#getBitmap");
    auto& thread = RenderThread::getInstance();
    return thread.queue().runSync([&]() -> bool {
        return thread.readback().copyLayerInto(layer, &bitmap) == CopyResult::Success;
    });
}

void RenderProxy::pushLayerUpdate(DeferredLayerUpdater* layer) {
    mDrawFrameTask.pushLayerUpdate(layer);
}

void RenderProxy::cancelLayerUpdate(DeferredLayerUpdater* layer) {
    mDrawFrameTask.removeLayerUpdate(layer);
}

void RenderProxy::detachSurfaceTexture(DeferredLayerUpdater* layer) {
#ifdef __ANDROID__
    return mRenderThread.queue().runSync([&]() { layer->detachSurfaceTexture(); });
#endif
}

void RenderProxy::destroyHardwareResources() {
    return mRenderThread.queue().runSync([&]() { mContext->destroyHardwareResources(); });
}

void RenderProxy::trimMemory(int level) {
    // Avoid creating a RenderThread to do a trimMemory.
    if (RenderThread::hasInstance()) {
        RenderThread& thread = RenderThread::getInstance();
        const auto trimLevel = static_cast<TrimLevel>(level);
        thread.queue().post([&thread, trimLevel]() { thread.trimMemory(trimLevel); });
    }
}

void RenderProxy::trimCaches(int level) {
    // Avoid creating a RenderThread just to trim caches.
    if (RenderThread::hasInstance()) {
        RenderThread& thread = RenderThread::getInstance();
        const auto trimLevel = static_cast<CacheTrimLevel>(level);
        thread.queue().post([&thread, trimLevel]() { thread.trimCaches(trimLevel); });
    }
}

void RenderProxy::purgeCaches() {
    if (RenderThread::hasInstance()) {
        RenderThread& thread = RenderThread::getInstance();
        thread.queue().post([&thread]() {
            if (thread.getGrContext()) {
                thread.cacheManager().trimMemory(TrimLevel::COMPLETE);
            }
        });
    }
}

void RenderProxy::overrideProperty(const char* name, const char* value) {
    // expensive, but block here since name/value pointers owned by caller
    RenderThread::getInstance().queue().runSync(
            [&]() { Properties::overrideProperty(name, value); });
}

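// Synchronously running an empty task acts as a barrier: by the time runSync()
// returns, every task queued ahead of it has been executed.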
void RenderProxy::fence() {
    mRenderThread.queue().runSync([]() {});
}

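// Queried from the RenderThread once and cached in a function-local static, so
// later calls avoid another blocking round-trip.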
int RenderProxy::maxTextureSize() {
    static int maxTextureSize = RenderThread::getInstance().queue().runSync(
            []() { return DeviceInfo::get()->maxTextureSize(); });
    return maxTextureSize;
}

void RenderProxy::stopDrawing() {
    mRenderThread.queue().runSync([this]() { mContext->stopDrawing(); });
}

void RenderProxy::notifyFramePending() {
    mRenderThread.queue().post([this]() { mContext->notifyFramePending(); });
}

void RenderProxy::notifyCallbackPending() {
    mRenderThread.queue().post([this]() { mContext->sendLoadResetHint(); });
}

void RenderProxy::notifyExpensiveFrame() {
    mRenderThread.queue().post([this]() { mContext->sendLoadIncreaseHint(); });
}

void RenderProxy::dumpProfileInfo(int fd, int dumpFlags) {
    mRenderThread.queue().runSync([&]() {
        std::lock_guard lock(mRenderThread.getJankDataMutex());
        mContext->profiler().dumpData(fd);
        if (dumpFlags & DumpFlags::FrameStats) {
            mContext->dumpFrames(fd);
        }
        if (dumpFlags & DumpFlags::JankStats) {
            mRenderThread.globalProfileData()->dump(fd);
        }
        if (dumpFlags & DumpFlags::Reset) {
            mContext->resetFrameStats();
        }
    });
}

void RenderProxy::resetProfileInfo() {
    mRenderThread.queue().runSync([this]() {
        std::lock_guard lock(mRenderThread.getJankDataMutex());
        mContext->resetFrameStats();
    });
}

uint32_t RenderProxy::frameTimePercentile(int percentile) {
    return mRenderThread.queue().runSync([&]() -> auto {
        std::lock_guard lock(mRenderThread.globalProfileData().getDataMutex());
        return mRenderThread.globalProfileData()->findPercentile(percentile);
    });
}

void RenderProxy::dumpGraphicsMemory(int fd, bool includeProfileData, bool resetProfile) {
    if (RenderThread::hasInstance()) {
        auto& thread = RenderThread::getInstance();
        thread.queue().runSync([&]() {
            thread.dumpGraphicsMemory(fd, includeProfileData);
            if (resetProfile) {
                thread.globalProfileData()->reset();
            }
        });
    }
#ifdef __ANDROID__
    if (!Properties::isolatedProcess) {
        std::string grallocInfo;
        GraphicBufferAllocator::getInstance().dump(grallocInfo);
        dprintf(fd, "%s\n", grallocInfo.c_str());
    }
#endif
}

void RenderProxy::getMemoryUsage(size_t* cpuUsage, size_t* gpuUsage) {
    if (RenderThread::hasInstance()) {
        auto& thread = RenderThread::getInstance();
        thread.queue().runSync([&]() { thread.getMemoryUsage(cpuUsage, gpuUsage); });
    }
}

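// The fd is dup()'d on the calling thread so the posted task owns its own
// descriptor; that duplicate is closed once the storage has been switched.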
void RenderProxy::setProcessStatsBuffer(int fd) {
    auto& rt = RenderThread::getInstance();
    rt.queue().post([&rt, fd = dup(fd)]() {
        rt.globalProfileData().switchStorageToAshmem(fd);
        close(fd);
    });
}

void RenderProxy::rotateProcessStatsBuffer() {
    auto& rt = RenderThread::getInstance();
    rt.queue().post([&rt]() { rt.globalProfileData().rotateStorage(); });
}

int RenderProxy::getRenderThreadTid() {
#ifdef __ANDROID__
    return mRenderThread.getTid();
#else
    return 0;
#endif
}

void RenderProxy::addRenderNode(RenderNode* node, bool placeFront) {
    mRenderThread.queue().post([=, this]() { mContext->addRenderNode(node, placeFront); });
}

void RenderProxy::removeRenderNode(RenderNode* node) {
    mRenderThread.queue().post([=, this]() { mContext->removeRenderNode(node); });
}

void RenderProxy::drawRenderNode(RenderNode* node) {
    mRenderThread.queue().runSync([=, this]() { mContext->prepareAndDraw(node); });
}

void RenderProxy::setContentDrawBounds(int left, int top, int right, int bottom) {
    mDrawFrameTask.setContentDrawBounds(left, top, right, bottom);
}

void RenderProxy::setHardwareBufferRenderParams(const HardwareBufferRenderParams& params) {
    mDrawFrameTask.setHardwareBufferRenderParams(params);
}

void RenderProxy::setPictureCapturedCallback(
        const std::function<void(sk_sp<SkPicture>&&)>& callback) {
    mRenderThread.queue().post(
            [this, cb = callback]() { mContext->setPictureCapturedCallback(cb); });
}

void RenderProxy::setASurfaceTransactionCallback(
        const std::function<bool(int64_t, int64_t, int64_t)>& callback) {
    mRenderThread.queue().post(
            [this, cb = callback]() { mContext->setASurfaceTransactionCallback(cb); });
}

void RenderProxy::setPrepareSurfaceControlForWebviewCallback(
        const std::function<void()>& callback) {
    mRenderThread.queue().post(
            [this, cb = callback]() { mContext->setPrepareSurfaceControlForWebviewCallback(cb); });
}

void RenderProxy::setFrameCallback(
        std::function<std::function<void(bool)>(int32_t, int64_t)>&& callback) {
    mDrawFrameTask.setFrameCallback(std::move(callback));
}

void RenderProxy::setFrameCommitCallback(std::function<void(bool)>&& callback) {
    mDrawFrameTask.setFrameCommitCallback(std::move(callback));
}

void RenderProxy::setFrameCompleteCallback(std::function<void()>&& callback) {
    mDrawFrameTask.setFrameCompleteCallback(std::move(callback));
}

void RenderProxy::addFrameMetricsObserver(FrameMetricsObserver* observerPtr) {
    mRenderThread.queue().post([this, observer = sp{observerPtr}]() {
        mContext->addFrameMetricsObserver(observer.get());
    });
}

void RenderProxy::removeFrameMetricsObserver(FrameMetricsObserver* observerPtr) {
    mRenderThread.queue().post([this, observer = sp{observerPtr}]() {
        mContext->removeFrameMetricsObserver(observer.get());
    });
}

void RenderProxy::setForceDark(ForceDarkType type) {
    mRenderThread.queue().post([this, type]() { mContext->setForceDark(type); });
}

void RenderProxy::copySurfaceInto(ANativeWindow* window, std::shared_ptr<CopyRequest>&& request) {
    auto& thread = RenderThread::getInstance();
    ANativeWindow_acquire(window);
    thread.queue().post([&thread, window, request = std::move(request)] {
        thread.readback().copySurfaceInto(window, request);
        ANativeWindow_release(window);
    });
}

void RenderProxy::prepareToDraw(Bitmap& bitmap) {
    // If we haven't spun up a hardware accelerated window yet, there's no
    // point in precaching these bitmaps as it can't impact jank.
    // We also don't know if we even will spin up a hardware-accelerated
    // window or not.
    if (!RenderThread::hasInstance()) return;
    RenderThread* renderThread = &RenderThread::getInstance();
    bitmap.ref();
    auto task = [renderThread, &bitmap]() {
        CanvasContext::prepareToDraw(*renderThread, &bitmap);
        bitmap.unref();
    };
    nsecs_t lastVsync = renderThread->timeLord().latestVsync();
    nsecs_t estimatedNextVsync = lastVsync + renderThread->timeLord().frameIntervalNanos();
    nsecs_t timeToNextVsync = estimatedNextVsync - systemTime(SYSTEM_TIME_MONOTONIC);
    // We expect the UI thread to take 4ms and for RT to be active from VSYNC+4ms to
    // VSYNC+12ms or so, so aim for the gap during which RT is expected to
    // be idle.
    // TODO: Make this concept a first-class supported thing? RT could use
    // knowledge of pending draws to better schedule this task.
    if (timeToNextVsync > -6_ms && timeToNextVsync < 1_ms) {
        renderThread->queue().postAt(estimatedNextVsync + 8_ms, task);
    } else {
        renderThread->queue().post(task);
    }
}

int RenderProxy::copyHWBitmapInto(Bitmap* hwBitmap, SkBitmap* bitmap) {
    ATRACE_NAME("HardwareBitmap readback");
    RenderThread& thread = RenderThread::getInstance();
    if (RenderThread::isCurrent()) {
        // TODO: fix everything that hits this. We should never be triggering a readback ourselves.
        return (int)thread.readback().copyHWBitmapInto(hwBitmap, bitmap);
    } else {
        return thread.queue().runSync(
                [&]() -> int { return (int)thread.readback().copyHWBitmapInto(hwBitmap, bitmap); });
    }
}

int RenderProxy::copyImageInto(const sk_sp<SkImage>& image, SkBitmap* bitmap) {
    RenderThread& thread = RenderThread::getInstance();
    if (RenderThread::isCurrent()) {
        // TODO: fix everything that hits this. We should never be triggering a readback ourselves.
        return (int)thread.readback().copyImageInto(image, bitmap);
    } else {
        return thread.queue().runSync(
                [&]() -> int { return (int)thread.readback().copyImageInto(image, bitmap); });
    }
}

void RenderProxy::disableVsync() {
    Properties::disableVsync = true;
}

void RenderProxy::preload() {
    // Create RenderThread object and start the thread. Then preload Vulkan/EGL driver.
    auto& thread = RenderThread::getInstance();
    thread.queue().post([&thread]() { thread.preload(); });
}

void RenderProxy::setRtAnimationsEnabled(bool enabled) {
    if (RenderThread::hasInstance()) {
        RenderThread::getInstance().queue().post(
                [enabled]() { Properties::enableRTAnimations = enabled; });
    } else {
        Properties::enableRTAnimations = enabled;
    }
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */