Android智能座驾,carlink场景截屏黑屏问题
-
背景
项目开发过程中,遇到如下问题:
【操作步骤】
1、建立导航+音乐分屏
2、连接Carlink,车机端打开任意Carlink应用,点击音乐图标回到分屏
【结果】
页面会先出现约 1s 的黑屏,然后才显示分屏。
详细分析
比较怀疑是截屏的方法拿到的图片就是黑色导致。截屏采用的SurfaceControl.captureDisplay
通过dumpsys SurfaceFlinger和dumpsys window可以发现:不能截图的页面,带有secure属性。
截屏的调用链如下(原文排版中模板参数丢失,此处已还原):
- status_t SurfaceFlinger::captureDisplay(const DisplayCaptureArgs& args, const sp<IScreenCaptureListener>& captureListener)
- ftl::SharedFuture<FenceResult> SurfaceFlinger::captureScreenCommon(RenderAreaFuture renderAreaFuture, TraverseLayersFunction traverseLayers, const std::shared_ptr<renderengine::ExternalTexture>& buffer, bool regionSampling, bool grayscale, const sp<IScreenCaptureListener>& captureListener)
- ftl::SharedFuture<FenceResult> SurfaceFlinger::renderScreenImpl(const RenderArea& renderArea, TraverseLayersFunction traverseLayers, const std::shared_ptr<renderengine::ExternalTexture>& buffer, bool canCaptureBlackoutContent, bool regionSampling, bool grayscale, ScreenCaptureResults& captureResults)
- std::optional<compositionengine::LayerFE::LayerSettings> BufferLayer::prepareClientComposition(compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings)
// Builds the GPU (client) composition settings for this buffer layer, used
// when SurfaceFlinger renders the layer via RenderEngine — including screen
// capture (captureDisplay -> renderScreenImpl). Returns std::nullopt when the
// base Layer declines composition; otherwise returns LayerSettings that either
// sample the layer's buffer or draw an opaque black "blackout" quad.
//
// NOTE(review): the "captureDisplay, prepareClientComposition*" ALOGE calls
// look like temporary debug logging added while tracing this black-screen
// issue — they fire on every client composition and should be removed or
// downgraded before shipping.
std::optional<compositionengine::LayerFE::LayerSettings> BufferLayer::prepareClientComposition(
compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) {
ATRACE_CALL();
ALOGE("captureDisplay, prepareClientComposition");
// Let the base Layer compute the common settings (geometry, alpha, etc.)
// first; bail out if it produced nothing.
std::optional<compositionengine::LayerFE::LayerSettings> result =
Layer::prepareClientComposition(targetSettings);
if (!result) {
return result;
}
if (CC_UNLIKELY(mBufferInfo.mBuffer == 0) && mSidebandStream != nullptr) {
// For surfaceview of tv sideband, there is no activeBuffer
// in bufferqueue, we need return LayerSettings.
return result;
}
// The layer is blacked out when:
//   - it is protected but the composition target cannot display protected
//     content, or
//   - it is secure (or protected) but the target is not secure.
// The second condition is the one hit in the Carlink capture scenario
// described above (isSecure()==1 while targetSettings.isSecure==0), which
// turns the screenshot black.
const bool blackOutLayer = (isProtected() && !targetSettings.supportsProtectedContent) ||
((isSecure() || isProtected()) && !targetSettings.isSecure);
// A buffer without USAGE_HW_TEXTURE cannot be sampled by the GPU, so it
// must also be blacked out.
const bool bufferCanBeUsedAsHwTexture =
mBufferInfo.mBuffer->getUsage() & GraphicBuffer::USAGE_HW_TEXTURE;
compositionengine::LayerFE::LayerSettings& layer = *result;
if (blackOutLayer || !bufferCanBeUsedAsHwTexture) {
ALOGE("captureDisplay, prepareClientComposition-2, blackOutLayer = %d, bufferCanBeUsedAsHwTexture=%d, \
isProtected()=%d, !targetSettings.supportsProtectedContent = %d, isSecure()=%d, !targetSettings.isSecure=%d",
blackOutLayer, bufferCanBeUsedAsHwTexture, isProtected(), (!targetSettings.supportsProtectedContent), isSecure(), (!targetSettings.isSecure));
ALOGE_IF(!bufferCanBeUsedAsHwTexture, "%s is blacked out as buffer is not gpu readable",
mName.c_str());
// Replace the layer content with an opaque black quad.
prepareClearClientComposition(layer, true /* blackout */);
return layer;
}
ALOGE("captureDisplay, prepareClientComposition-3");
// Normal path: fill in buffer-sourced settings from the current frame.
const State& s(getDrawingState());
layer.source.buffer.buffer = mBufferInfo.mBuffer;
layer.source.buffer.isOpaque = isOpaque(s);
layer.source.buffer.fence = mBufferInfo.mFence;
layer.source.buffer.textureName = mTextureName;
layer.source.buffer.usePremultipliedAlpha = getPremultipledAlpha();
layer.source.buffer.isY410BT2020 = isHdrY410();
// Derive max luminance (nits) from the buffer's HDR metadata; when both
// SMPTE 2086 and CTA-861.3 are present, use the more conservative value.
bool hasSmpte2086 = mBufferInfo.mHdrMetadata.validTypes & HdrMetadata::SMPTE2086;
bool hasCta861_3 = mBufferInfo.mHdrMetadata.validTypes & HdrMetadata::CTA861_3;
float maxLuminance = 0.f;
if (hasSmpte2086 && hasCta861_3) {
maxLuminance = std::min(mBufferInfo.mHdrMetadata.smpte2086.maxLuminance,
mBufferInfo.mHdrMetadata.cta8613.maxContentLightLevel);
} else if (hasSmpte2086) {
maxLuminance = mBufferInfo.mHdrMetadata.smpte2086.maxLuminance;
} else if (hasCta861_3) {
maxLuminance = mBufferInfo.mHdrMetadata.cta8613.maxContentLightLevel;
} else {
// No metadata: fall back to a default only for HDR transfer functions
// (PQ/HLG), keyed off the dataspace.
switch (layer.sourceDataspace & HAL_DATASPACE_TRANSFER_MASK) {
case HAL_DATASPACE_TRANSFER_ST2084:
case HAL_DATASPACE_TRANSFER_HLG:
// Behavior-match previous releases for HDR content
maxLuminance = defaultMaxLuminance;
break;
}
}
layer.source.buffer.maxLuminanceNits = maxLuminance;
layer.frameNumber = mCurrentFrameNumber;
layer.bufferId = mBufferInfo.mBuffer ? mBufferInfo.mBuffer->getId() : 0;
const bool useFiltering =
targetSettings.needsFiltering || mNeedsFiltering || bufferNeedsFiltering();
// Query the texture matrix given our current filtering mode.
float textureMatrix[16];
getDrawingTransformMatrix(useFiltering, textureMatrix);
if (getTransformToDisplayInverse()) {
/*
* the code below applies the primary display's inverse transform to
* the texture transform
*/
uint32_t transform = DisplayDevice::getPrimaryDisplayRotationFlags();
mat4 tr = inverseOrientation(transform);
/**
* TODO(b/36727915): This is basically a hack.
*
* Ensure that regardless of the parent transformation,
* this buffer is always transformed from native display
* orientation to display orientation. For example, in the case
* of a camera where the buffer remains in native orientation,
* we want the pixels to always be upright.
*/
sp<Layer> p = mDrawingParent.promote();
if (p != nullptr) {
const auto parentTransform = p->getTransform();
tr = tr * inverseOrientation(parentTransform.getOrientation());
}
// and finally apply it to the original texture matrix
const mat4 texTransform(mat4(static_cast<const float*>(textureMatrix)) * tr);
memcpy(textureMatrix, texTransform.asArray(), sizeof(textureMatrix));
}
// Map the layer's display bounds onto the buffer to get scale/translate
// factors for the texture transform.
const Rect win{getBounds()};
float bufferWidth = getBufferSize(s).getWidth();
float bufferHeight = getBufferSize(s).getHeight();
// BufferStateLayers can have a "buffer size" of [0, 0, -1, -1] when no display frame has
// been set and there is no parent layer bounds. In that case, the scale is meaningless so
// ignore them.
if (!getBufferSize(s).isValid()) {
bufferWidth = float(win.right) - float(win.left);
bufferHeight = float(win.bottom) - float(win.top);
}
const float scaleHeight = (float(win.bottom) - float(win.top)) / bufferHeight;
const float scaleWidth = (float(win.right) - float(win.left)) / bufferWidth;
const float translateY = float(win.top) / bufferHeight;
const float translateX = float(win.left) / bufferWidth;
// Flip y-coordinates because GLConsumer expects OpenGL convention.
mat4 tr = mat4::translate(vec4(.5, .5, 0, 1)) * mat4::scale(vec4(1, -1, 1, 1)) *
mat4::translate(vec4(-.5, -.5, 0, 1)) *
mat4::translate(vec4(translateX, translateY, 0, 1)) *
mat4::scale(vec4(scaleWidth, scaleHeight, 1.0, 1.0));
layer.source.buffer.useTextureFiltering = useFiltering;
// Compose the crop/flip transform with the drawing transform matrix.
layer.source.buffer.textureTransform = mat4(static_cast<const float*>(textureMatrix)) * tr;
return layer;
}
通过添加 log 调试发现,执行进入了上面代码中的 blackOutLayer 分支,调用 prepareClearClientComposition 生成了黑色图层。
通过log可以知道:blackOutLayer = 1, bufferCanBeUsedAsHwTexture=1
那么问题就出在blackOutLayer ,继续分析:isProtected()=0, !targetSettings.supportsProtectedContent = 1, isSecure()=1, !targetSettings.isSecure=1
接下来排查isSecure() 和 targetSettings.isSecure的来龙去脉。
isSecure()如下:
targetSettings.isSecure如下:
从 dump 信息来看,display 和 layer 都是 secure 的,所以需要将 mAllowSecureLayers 设置为 true,才能截取 secure layer 的图片。
- 解决方案
截屏的时候,将mCaptureSecureLayers设置为true,问题解决。