Diffstat (limited to 'libs/hwui/renderthread/OpenGLPipeline.cpp')
-rw-r--r-- | libs/hwui/renderthread/OpenGLPipeline.cpp | 185
1 files changed, 176 insertions, 9 deletions
diff --git a/libs/hwui/renderthread/OpenGLPipeline.cpp b/libs/hwui/renderthread/OpenGLPipeline.cpp
index e1ae58532a02..7283eb123d6a 100644
--- a/libs/hwui/renderthread/OpenGLPipeline.cpp
+++ b/libs/hwui/renderthread/OpenGLPipeline.cpp
@@ -58,7 +58,7 @@ Frame OpenGLPipeline::getFrame() {
 bool OpenGLPipeline::draw(const Frame& frame, const SkRect& screenDirty, const SkRect& dirty,
         const FrameBuilder::LightGeometry& lightGeometry,
         LayerUpdateQueue* layerUpdateQueue,
-        const Rect& contentDrawBounds, bool opaque,
+        const Rect& contentDrawBounds, bool opaque, bool wideColorGamut,
         const BakedOpRenderer::LightInfo& lightInfo,
         const std::vector< sp<RenderNode> >& renderNodes,
         FrameInfoVisualizer* profiler) {
@@ -77,7 +77,7 @@ bool OpenGLPipeline::draw(const Frame& frame, const SkRect& screenDirty, const S
     frameBuilder.deferRenderNodeScene(renderNodes, contentDrawBounds);
 
     BakedOpRenderer renderer(caches, mRenderThread.renderState(),
-            opaque, lightInfo);
+            opaque, wideColorGamut, lightInfo);
     frameBuilder.replayBakedOps<BakedOpDispatcher>(renderer);
     ProfileRenderer profileRenderer(renderer);
     profiler->draw(profileRenderer);
@@ -146,7 +146,7 @@ void OpenGLPipeline::onStop() {
     }
 }
 
-bool OpenGLPipeline::setSurface(Surface* surface, SwapBehavior swapBehavior) {
+bool OpenGLPipeline::setSurface(Surface* surface, SwapBehavior swapBehavior, ColorMode colorMode) {
 
     if (mEglSurface != EGL_NO_SURFACE) {
         mEglManager.destroySurface(mEglSurface);
@@ -154,7 +154,8 @@ bool OpenGLPipeline::setSurface(Surface* surface, SwapBehavior swapBehavior) {
     }
 
     if (surface) {
-        mEglSurface = mEglManager.createSurface(surface);
+        const bool wideColorGamut = colorMode == ColorMode::WideColorGamut;
+        mEglSurface = mEglManager.createSurface(surface, wideColorGamut);
     }
 
     if (mEglSurface != EGL_NO_SURFACE) {
@@ -183,14 +184,14 @@ void OpenGLPipeline::onDestroyHardwareResources() {
 }
 
 void OpenGLPipeline::renderLayers(const FrameBuilder::LightGeometry& lightGeometry,
-        LayerUpdateQueue* layerUpdateQueue, bool opaque,
+        LayerUpdateQueue* layerUpdateQueue, bool opaque, bool wideColorGamut,
         const BakedOpRenderer::LightInfo& lightInfo) {
     static const std::vector< sp<RenderNode> > emptyNodeList;
     auto& caches = Caches::getInstance();
     FrameBuilder frameBuilder(*layerUpdateQueue, lightGeometry, caches);
     layerUpdateQueue->clear();
-    BakedOpRenderer renderer(caches, mRenderThread.renderState(),
-            opaque, lightInfo);
+    // TODO: Handle wide color gamut contexts
+    BakedOpRenderer renderer(caches, mRenderThread.renderState(), opaque, wideColorGamut, lightInfo);
     LOG_ALWAYS_FATAL_IF(renderer.didDraw(), "shouldn't draw in buildlayer case");
     frameBuilder.replayBakedOps<BakedOpDispatcher>(renderer);
 }
@@ -204,12 +205,13 @@ static bool layerMatchesWH(OffscreenBuffer* layer, int width, int height) {
 }
 
 bool OpenGLPipeline::createOrUpdateLayer(RenderNode* node,
-        const DamageAccumulator& damageAccumulator) {
+        const DamageAccumulator& damageAccumulator, bool wideColorGamut) {
     RenderState& renderState = mRenderThread.renderState();
     OffscreenBufferPool& layerPool = renderState.layerPool();
     bool transformUpdateNeeded = false;
     if (node->getLayer() == nullptr) {
-        node->setLayer(layerPool.get(renderState, node->getWidth(), node->getHeight()));
+        node->setLayer(layerPool.get(renderState,
+                node->getWidth(), node->getHeight(), wideColorGamut));
         transformUpdateNeeded = true;
     } else if (!layerMatchesWH(node->getLayer(), node->getWidth(), node->getHeight())) {
         // TODO: remove now irrelevant, currently enqueued damage (respecting damage ordering)
@@ -267,6 +269,171 @@ void OpenGLPipeline::invokeFunctor(const RenderThread& thread, Functor* functor)
     thread.renderState().invokeFunctor(functor, mode, nullptr);
 }
 
+#define FENCE_TIMEOUT 2000000000
+
+class AutoEglFence {
+public:
+    AutoEglFence(EGLDisplay display)
+            : mDisplay(display) {
+        fence = eglCreateSyncKHR(mDisplay, EGL_SYNC_FENCE_KHR, NULL);
+    }
+
+    ~AutoEglFence() {
+        if (fence != EGL_NO_SYNC_KHR) {
+            eglDestroySyncKHR(mDisplay, fence);
+        }
+    }
+
+    EGLSyncKHR fence = EGL_NO_SYNC_KHR;
+private:
+    EGLDisplay mDisplay = EGL_NO_DISPLAY;
+};
+
+class AutoEglImage {
+public:
+    AutoEglImage(EGLDisplay display, EGLClientBuffer clientBuffer)
+            : mDisplay(display) {
+        EGLint imageAttrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
+        image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
+                EGL_NATIVE_BUFFER_ANDROID, clientBuffer, imageAttrs);
+    }
+
+    ~AutoEglImage() {
+        if (image != EGL_NO_IMAGE_KHR) {
+            eglDestroyImageKHR(mDisplay, image);
+        }
+    }
+
+    EGLImageKHR image = EGL_NO_IMAGE_KHR;
+private:
+    EGLDisplay mDisplay = EGL_NO_DISPLAY;
+};
+
+class AutoGlTexture {
+public:
+    AutoGlTexture(uirenderer::Caches& caches)
+            : mCaches(caches) {
+        glGenTextures(1, &mTexture);
+        caches.textureState().bindTexture(mTexture);
+    }
+
+    ~AutoGlTexture() {
+        mCaches.textureState().deleteTexture(mTexture);
+    }
+
+private:
+    uirenderer::Caches& mCaches;
+    GLuint mTexture = 0;
+};
+
+static bool uploadBitmapToGraphicBuffer(uirenderer::Caches& caches, SkBitmap& bitmap,
+        GraphicBuffer& buffer, GLint format, GLint type) {
+    EGLDisplay display = eglGetCurrentDisplay();
+    LOG_ALWAYS_FATAL_IF(display == EGL_NO_DISPLAY,
+            "Failed to get EGL_DEFAULT_DISPLAY! err=%s",
+            uirenderer::renderthread::EglManager::eglErrorString());
+    // We use an EGLImage to access the content of the GraphicBuffer
+    // The EGL image is later bound to a 2D texture
+    EGLClientBuffer clientBuffer = (EGLClientBuffer) buffer.getNativeBuffer();
+    AutoEglImage autoImage(display, clientBuffer);
+    if (autoImage.image == EGL_NO_IMAGE_KHR) {
+        ALOGW("Could not create EGL image, err =%s",
+                uirenderer::renderthread::EglManager::eglErrorString());
+        return false;
+    }
+    AutoGlTexture glTexture(caches);
+    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, autoImage.image);
+
+    GL_CHECKPOINT(MODERATE);
+
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, bitmap.width(), bitmap.height(),
+            format, type, bitmap.getPixels());
+
+    GL_CHECKPOINT(MODERATE);
+
+    // The fence is used to wait for the texture upload to finish
+    // properly. We cannot rely on glFlush() and glFinish() as
+    // some drivers completely ignore these API calls
+    AutoEglFence autoFence(display);
+    if (autoFence.fence == EGL_NO_SYNC_KHR) {
+        LOG_ALWAYS_FATAL("Could not create sync fence %#x", eglGetError());
+        return false;
+    }
+    // The flag EGL_SYNC_FLUSH_COMMANDS_BIT_KHR will trigger a
+    // pipeline flush (similar to what a glFlush() would do.)
+    EGLint waitStatus = eglClientWaitSyncKHR(display, autoFence.fence,
+            EGL_SYNC_FLUSH_COMMANDS_BIT_KHR, FENCE_TIMEOUT);
+    if (waitStatus != EGL_CONDITION_SATISFIED_KHR) {
+        LOG_ALWAYS_FATAL("Failed to wait for the fence %#x", eglGetError());
+        return false;
+    }
+    return true;
+}
+
+// TODO: handle SRGB sanely
+static PixelFormat internalFormatToPixelFormat(GLint internalFormat) {
+    switch (internalFormat) {
+    case GL_LUMINANCE:
+        return PIXEL_FORMAT_RGBA_8888;
+    case GL_SRGB8_ALPHA8:
+        return PIXEL_FORMAT_RGBA_8888;
+    case GL_RGBA:
+        return PIXEL_FORMAT_RGBA_8888;
+    case GL_RGB:
+        return PIXEL_FORMAT_RGB_565;
+    case GL_RGBA16F:
+        return PIXEL_FORMAT_RGBA_FP16;
+    default:
+        LOG_ALWAYS_FATAL("Unsupported bitmap colorType: %d", internalFormat);
+        return PIXEL_FORMAT_UNKNOWN;
+    }
+}
+
+sk_sp<Bitmap> OpenGLPipeline::allocateHardwareBitmap(RenderThread& renderThread,
+        SkBitmap& skBitmap) {
+    renderThread.eglManager().initialize();
+    uirenderer::Caches& caches = uirenderer::Caches::getInstance();
+
+    const SkImageInfo& info = skBitmap.info();
+    if (info.colorType() == kUnknown_SkColorType || info.colorType() == kAlpha_8_SkColorType) {
+        ALOGW("unable to create hardware bitmap of colortype: %d", info.colorType());
+        return nullptr;
+    }
+
+    bool needSRGB = uirenderer::transferFunctionCloseToSRGB(skBitmap.info().colorSpace());
+    bool hasLinearBlending = caches.extensions().hasLinearBlending();
+    GLint format, type, internalFormat;
+    uirenderer::Texture::colorTypeToGlFormatAndType(caches, skBitmap.colorType(),
+            needSRGB && hasLinearBlending, &internalFormat, &format, &type);
+
+    PixelFormat pixelFormat = internalFormatToPixelFormat(internalFormat);
+    sp<GraphicBuffer> buffer = new GraphicBuffer(info.width(), info.height(), pixelFormat,
+            GraphicBuffer::USAGE_HW_TEXTURE |
+            GraphicBuffer::USAGE_SW_WRITE_NEVER |
+            GraphicBuffer::USAGE_SW_READ_NEVER,
+            std::string("Bitmap::allocateHardwareBitmap pid [") + std::to_string(getpid()) + "]");
+
+    status_t error = buffer->initCheck();
+    if (error < 0) {
+        ALOGW("createGraphicBuffer() failed in GraphicBuffer.create()");
+        return nullptr;
+    }
+
+    SkBitmap bitmap;
+    if (CC_UNLIKELY(uirenderer::Texture::hasUnsupportedColorType(skBitmap.info(),
+            hasLinearBlending))) {
+        sk_sp<SkColorSpace> sRGB = SkColorSpace::MakeSRGB();
+        bitmap = uirenderer::Texture::uploadToN32(skBitmap, hasLinearBlending, std::move(sRGB));
+    } else {
+        bitmap = skBitmap;
+    }
+
+    if (!uploadBitmapToGraphicBuffer(caches, bitmap, *buffer, format, type)) {
+        return nullptr;
+    }
+    return sk_sp<Bitmap>(new Bitmap(buffer.get(), bitmap.info()));
+}
+
 } /* namespace renderthread */
 } /* namespace uirenderer */
 } /* namespace android */
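A note on the first half of the change: the new ColorMode parameter on setSurface() only computes a wideColorGamut flag here and forwards it to EglManager::createSurface(); the EglManager side of the change lives in a separate file and is not part of this diff. Purely as an illustrative sketch (not the actual EglManager code), requesting a wide-gamut window surface via the EGL_KHR_gl_colorspace / EGL_EXT_gl_colorspace_display_p3 extensions could look roughly like this:

// Illustrative only: one plausible way a createSurface(window, wideColorGamut)
// helper could honor the flag. Which colorspace and EGLConfig HWUI actually
// selects is decided inside EglManager and is not shown in this diff.
#include <EGL/egl.h>
#define EGL_EGLEXT_PROTOTYPES 1
#include <EGL/eglext.h>

EGLSurface createWindowSurface(EGLDisplay display, EGLConfig config,
        EGLNativeWindowType window, bool wideColorGamut) {
    const EGLint attribs[] = {
        EGL_GL_COLORSPACE_KHR,
        wideColorGamut ? EGL_GL_COLORSPACE_DISPLAY_P3_EXT : EGL_GL_COLORSPACE_SRGB_KHR,
        EGL_NONE
    };
    return eglCreateWindowSurface(display, config, window, attribs);
}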
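The bulk of the additions is the GraphicBuffer upload path behind allocateHardwareBitmap(). Condensed into a standalone sketch (the helper name uploadViaEglImage is made up here; the patch's RAII wrappers, Caches texture-state tracking, and logging are dropped), the technique is: wrap the buffer in an EGLImage, bind it to a GL texture, copy the pixels with glTexSubImage2D(), then block on an EGL fence, because glFlush()/glFinish() alone are ignored by some drivers:

// Minimal sketch of the zero-copy upload pattern, assuming a current EGL
// context and an EGLClientBuffer obtained from GraphicBuffer::getNativeBuffer().
#include <EGL/egl.h>
#define EGL_EGLEXT_PROTOTYPES 1
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#define GL_GLEXT_PROTOTYPES 1
#include <GLES2/gl2ext.h>

static bool uploadViaEglImage(EGLDisplay display, EGLClientBuffer clientBuffer,
        const void* pixels, int width, int height, GLenum format, GLenum type) {
    // Wrap the client buffer in an EGLImage so a GL texture can alias its storage.
    const EGLint imageAttrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
            EGL_NATIVE_BUFFER_ANDROID, clientBuffer, imageAttrs);
    if (image == EGL_NO_IMAGE_KHR) return false;

    // Bind the image to a temporary 2D texture and copy the CPU pixels into it.
    GLuint texture = 0;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES) image);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, format, type, pixels);

    // A fence sync is the reliable way to know the upload has completed;
    // EGL_SYNC_FLUSH_COMMANDS_BIT_KHR also flushes the pipeline.
    bool ok = false;
    EGLSyncKHR fence = eglCreateSyncKHR(display, EGL_SYNC_FENCE_KHR, nullptr);
    if (fence != EGL_NO_SYNC_KHR) {
        ok = eglClientWaitSyncKHR(display, fence, EGL_SYNC_FLUSH_COMMANDS_BIT_KHR,
                2000000000 /* 2s, mirrors FENCE_TIMEOUT */) == EGL_CONDITION_SATISFIED_KHR;
        eglDestroySyncKHR(display, fence);
    }

    glDeleteTextures(1, &texture);
    eglDestroyImageKHR(display, image);
    return ok;
}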
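On the caller side (not part of this file), a hedged usage sketch of the new entry point, assuming it is static as the RenderThread& parameter suggests and that it executes on the render thread; width, height and renderThread stand in for the caller's context:

// Hypothetical usage: produce a GraphicBuffer-backed Bitmap from CPU pixels.
SkBitmap src;
src.allocPixels(SkImageInfo::Make(width, height, kN32_SkColorType, kPremul_SkAlphaType));
// ... rasterize content into src ...

sk_sp<Bitmap> hardwareBitmap = OpenGLPipeline::allocateHardwareBitmap(renderThread, src);
if (!hardwareBitmap) {
    // Unsupported color type or GraphicBuffer allocation failure:
    // fall back to a regular heap-backed Bitmap.
}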