v4l2_codec2: Adapt VideoPixelFormat code to Android standards.
This CL reworks the VideoPixelFormat class that was branched
from Chrome:
- Removed all Chrome-specific references from comments.
- Removed specific numbering as we don't need compatibility with Chrome.
- Removed deprecated formats.
- Adhered to the Android coding style.
- Made use of Android logging mechanisms.
- Made use of Android assert macros.
Bug: 155138142
Test: arc.VideoEncodeAccel.h264_192p_i420_vm
Change-Id: I8387af0b427ebc990ec6374432f18ad732894569
diff --git a/common/FormatConverter.cpp b/common/FormatConverter.cpp
index 7e9c2e5..d694bd1 100644
--- a/common/FormatConverter.cpp
+++ b/common/FormatConverter.cpp
@@ -29,9 +29,8 @@
namespace {
// The constant expression of mapping the pixel format conversion pair (src, dst) to a unique
// integer.
-constexpr int convertMap(media::VideoPixelFormat src, media::VideoPixelFormat dst) {
- return static_cast<int>(src) *
- (static_cast<int>(media::VideoPixelFormat::PIXEL_FORMAT_MAX) + 1) +
+constexpr int convertMap(VideoPixelFormat src, VideoPixelFormat dst) {
+    // NOTE(review): this assumes VideoPixelFormat::UNKNOWN is the largest enumerator (it
+    // replaced the old PIXEL_FORMAT_MAX sentinel) - confirm, or add a static_assert, so the
+    // (src, dst) -> integer mapping stays collision-free if the enum is reordered.
+    return static_cast<int>(src) * (static_cast<int>(VideoPixelFormat::UNKNOWN) + 1) +
           static_cast<int>(dst);
}
@@ -93,12 +92,11 @@
}
// static
-std::unique_ptr<FormatConverter> FormatConverter::Create(media::VideoPixelFormat outFormat,
+std::unique_ptr<FormatConverter> FormatConverter::Create(VideoPixelFormat outFormat,
const ui::Size& visibleSize,
uint32_t inputCount,
const ui::Size& codedSize) {
- if (outFormat != media::VideoPixelFormat::PIXEL_FORMAT_I420 &&
- outFormat != media::VideoPixelFormat::PIXEL_FORMAT_NV12) {
+ if (outFormat != VideoPixelFormat::I420 && outFormat != VideoPixelFormat::NV12) {
ALOGE("Unsupported output format: %d", static_cast<int32_t>(outFormat));
return nullptr;
}
@@ -111,11 +109,10 @@
return converter;
}
-c2_status_t FormatConverter::initialize(media::VideoPixelFormat outFormat,
- const ui::Size& visibleSize, uint32_t inputCount,
- const ui::Size& codedSize) {
+c2_status_t FormatConverter::initialize(VideoPixelFormat outFormat, const ui::Size& visibleSize,
+ uint32_t inputCount, const ui::Size& codedSize) {
ALOGV("initialize(out_format=%s, visible_size=%dx%d, input_count=%u, coded_size=%dx%d)",
- media::VideoPixelFormatToString(outFormat).c_str(), visibleSize.width, visibleSize.height,
+ videoPixelFormatToString(outFormat).c_str(), visibleSize.width, visibleSize.height,
inputCount, codedSize.width, codedSize.height);
std::shared_ptr<C2BlockPool> pool;
@@ -126,7 +123,7 @@
}
HalPixelFormat halFormat;
- if (outFormat == media::VideoPixelFormat::PIXEL_FORMAT_I420) {
+ if (outFormat == VideoPixelFormat::I420) {
// Android HAL format doesn't have I420, we use YV12 instead and swap U and V data while
// conversion to perform I420.
halFormat = HalPixelFormat::YV12;
@@ -201,7 +198,7 @@
const int dstStrideV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc; // only for I420
const int dstStrideUV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc; // only for NV12
- media::VideoPixelFormat inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_UNKNOWN;
+ VideoPixelFormat inputFormat = VideoPixelFormat::UNKNOWN;
*status = C2_OK;
if (inputLayout.type == C2PlanarLayout::TYPE_YUV) {
const uint8_t* srcY = inputView.data()[C2PlanarLayout::PLANE_Y];
@@ -211,10 +208,9 @@
const int srcStrideU = inputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;
const int srcStrideV = inputLayout.planes[C2PlanarLayout::PLANE_V].rowInc;
if (inputLayout.rootPlanes == 3) {
- inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_YV12;
+ inputFormat = VideoPixelFormat::YV12;
} else if (inputLayout.rootPlanes == 2) {
- inputFormat = (srcV > srcU) ? media::VideoPixelFormat::PIXEL_FORMAT_NV12
- : media::VideoPixelFormat::PIXEL_FORMAT_NV21;
+ inputFormat = (srcV > srcU) ? VideoPixelFormat::NV12 : VideoPixelFormat::NV21;
}
if (inputFormat == mOutFormat) {
@@ -224,32 +220,28 @@
}
switch (convertMap(inputFormat, mOutFormat)) {
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_YV12,
- media::VideoPixelFormat::PIXEL_FORMAT_I420):
+ case convertMap(VideoPixelFormat::YV12, VideoPixelFormat::I420):
libyuv::I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY,
dstU, dstStrideU, dstV, dstStrideV, mVisibleSize.width,
mVisibleSize.height);
break;
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_YV12,
- media::VideoPixelFormat::PIXEL_FORMAT_NV12):
+ case convertMap(VideoPixelFormat::YV12, VideoPixelFormat::NV12):
libyuv::I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY,
dstStrideY, dstUV, dstStrideUV, mVisibleSize.width,
mVisibleSize.height);
break;
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV12,
- media::VideoPixelFormat::PIXEL_FORMAT_I420):
+ case convertMap(VideoPixelFormat::NV12, VideoPixelFormat::I420):
libyuv::NV12ToI420(srcY, srcStrideY, srcU, srcStrideU, dstY, dstStrideY, dstU,
dstStrideU, dstV, dstStrideV, mVisibleSize.width,
mVisibleSize.height);
break;
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV21,
- media::VideoPixelFormat::PIXEL_FORMAT_I420):
+ case convertMap(VideoPixelFormat::NV21, VideoPixelFormat::I420):
libyuv::NV21ToI420(srcY, srcStrideY, srcV, srcStrideV, dstY, dstStrideY, dstU,
dstStrideU, dstV, dstStrideV, mVisibleSize.width,
mVisibleSize.height);
break;
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV21,
- media::VideoPixelFormat::PIXEL_FORMAT_NV12):
+ case convertMap(VideoPixelFormat::NV21, VideoPixelFormat::NV12):
+            ALOGV("%s(): Converting NV21 -> NV12", __func__);
libyuv::CopyPlane(srcY, srcStrideY, dstY, dstStrideY, mVisibleSize.width,
mVisibleSize.height);
copyPlaneByPixel(srcU, srcStrideU, 2, dstUV, dstStrideUV, 2, mVisibleSize.width / 2,
@@ -259,28 +251,26 @@
break;
default:
ALOGE("Unsupported pixel format conversion from %s to %s",
- media::VideoPixelFormatToString(inputFormat).c_str(),
- media::VideoPixelFormatToString(mOutFormat).c_str());
+ videoPixelFormatToString(inputFormat).c_str(),
+ videoPixelFormatToString(mOutFormat).c_str());
*status = C2_CORRUPTED;
return inputBlock; // This is actually redundant and should not be used.
}
} else if (inputLayout.type == C2PlanarLayout::TYPE_RGB) {
// There is only RGBA_8888 specified in C2AllocationGralloc::map(), no BGRA_8888. Maybe
// BGRA_8888 is not used now?
- inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_ABGR;
+ inputFormat = VideoPixelFormat::ABGR;
const uint8_t* srcRGB = (idMap) ? idMap->addr() : inputView.data()[C2PlanarLayout::PLANE_R];
const int srcStrideRGB =
(idMap) ? idMap->rowInc() : inputLayout.planes[C2PlanarLayout::PLANE_R].rowInc;
switch (convertMap(inputFormat, mOutFormat)) {
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_ABGR,
- media::VideoPixelFormat::PIXEL_FORMAT_I420):
+ case convertMap(VideoPixelFormat::ABGR, VideoPixelFormat::I420):
libyuv::ABGRToI420(srcRGB, srcStrideRGB, dstY, dstStrideY, dstU, dstStrideU, dstV,
dstStrideV, mVisibleSize.width, mVisibleSize.height);
break;
- case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_ABGR,
- media::VideoPixelFormat::PIXEL_FORMAT_NV12): {
+ case convertMap(VideoPixelFormat::ABGR, VideoPixelFormat::NV12): {
// There is no libyuv function to convert ABGR to NV12. Therefore, we first convert to
// I420 on dst-Y plane and temporary U/V plane. Then we copy U and V pixels from
// temporary planes to dst-UV interleavedly.
@@ -295,8 +285,8 @@
}
default:
ALOGE("Unsupported pixel format conversion from %s to %s",
- media::VideoPixelFormatToString(inputFormat).c_str(),
- media::VideoPixelFormatToString(mOutFormat).c_str());
+ videoPixelFormatToString(inputFormat).c_str(),
+ videoPixelFormatToString(mOutFormat).c_str());
*status = C2_CORRUPTED;
return inputBlock; // This is actually redundant and should not be used.
}
@@ -307,7 +297,7 @@
}
ALOGV("convertBlock(frame_index=%" PRIu64 ", format=%s)", frameIndex,
- media::VideoPixelFormatToString(inputFormat).c_str());
+ videoPixelFormatToString(inputFormat).c_str());
entry->mAssociatedFrameIndex = frameIndex;
mAvailableQueue.pop();
return outputBlock->share(C2Rect(mVisibleSize.width, mVisibleSize.height), C2Fence());
diff --git a/common/Fourcc.cpp b/common/Fourcc.cpp
index 71120f8..f7d3efd 100644
--- a/common/Fourcc.cpp
+++ b/common/Fourcc.cpp
@@ -36,128 +36,128 @@
case MM21:
return Fourcc(static_cast<Value>(fourcc));
}
- ALOGE("Unmapped fourcc: %s", media::FourccToString(fourcc).c_str());
+ ALOGE("Unmapped fourcc: %s", fourccToString(fourcc).c_str());
return std::nullopt;
}
// static
-std::optional<Fourcc> Fourcc::fromVideoPixelFormat(media::VideoPixelFormat pixelFormat,
+std::optional<Fourcc> Fourcc::fromVideoPixelFormat(VideoPixelFormat pixelFormat,
bool singlePlanar) {
if (singlePlanar) {
switch (pixelFormat) {
- case media::PIXEL_FORMAT_ARGB:
+ case VideoPixelFormat::ARGB:
return Fourcc(AR24);
- case media::PIXEL_FORMAT_ABGR:
+ case VideoPixelFormat::ABGR:
return Fourcc(AB24);
- case media::PIXEL_FORMAT_XRGB:
+ case VideoPixelFormat::XRGB:
return Fourcc(XR24);
- case media::PIXEL_FORMAT_XBGR:
+ case VideoPixelFormat::XBGR:
return Fourcc(XB24);
- case media::PIXEL_FORMAT_BGRA:
+ case VideoPixelFormat::BGRA:
return Fourcc(RGB4);
- case media::PIXEL_FORMAT_I420:
+ case VideoPixelFormat::I420:
return Fourcc(YU12);
- case media::PIXEL_FORMAT_YV12:
+ case VideoPixelFormat::YV12:
return Fourcc(YV12);
- case media::PIXEL_FORMAT_YUY2:
+ case VideoPixelFormat::YUY2:
return Fourcc(YUYV);
- case media::PIXEL_FORMAT_NV12:
+ case VideoPixelFormat::NV12:
return Fourcc(NV12);
- case media::PIXEL_FORMAT_NV21:
+ case VideoPixelFormat::NV21:
return Fourcc(NV21);
- case media::PIXEL_FORMAT_I422:
- case media::PIXEL_FORMAT_I420A:
- case media::PIXEL_FORMAT_I444:
- case media::PIXEL_FORMAT_RGB24:
- case media::PIXEL_FORMAT_MJPEG:
- case media::PIXEL_FORMAT_YUV420P9:
- case media::PIXEL_FORMAT_YUV420P10:
- case media::PIXEL_FORMAT_YUV422P9:
- case media::PIXEL_FORMAT_YUV422P10:
- case media::PIXEL_FORMAT_YUV444P9:
- case media::PIXEL_FORMAT_YUV444P10:
- case media::PIXEL_FORMAT_YUV420P12:
- case media::PIXEL_FORMAT_YUV422P12:
- case media::PIXEL_FORMAT_YUV444P12:
- case media::PIXEL_FORMAT_Y16:
- case media::PIXEL_FORMAT_P016LE:
- case media::PIXEL_FORMAT_XR30:
- case media::PIXEL_FORMAT_XB30:
- case media::PIXEL_FORMAT_UNKNOWN:
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::P016LE:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ case VideoPixelFormat::UNKNOWN:
break;
}
} else {
switch (pixelFormat) {
- case media::PIXEL_FORMAT_I420:
+ case VideoPixelFormat::I420:
return Fourcc(YM12);
- case media::PIXEL_FORMAT_YV12:
+ case VideoPixelFormat::YV12:
return Fourcc(YM21);
- case media::PIXEL_FORMAT_NV12:
+ case VideoPixelFormat::NV12:
return Fourcc(NM12);
- case media::PIXEL_FORMAT_I422:
+ case VideoPixelFormat::I422:
return Fourcc(YM16);
- case media::PIXEL_FORMAT_NV21:
+ case VideoPixelFormat::NV21:
return Fourcc(NM21);
- case media::PIXEL_FORMAT_I420A:
- case media::PIXEL_FORMAT_I444:
- case media::PIXEL_FORMAT_YUY2:
- case media::PIXEL_FORMAT_ARGB:
- case media::PIXEL_FORMAT_XRGB:
- case media::PIXEL_FORMAT_RGB24:
- case media::PIXEL_FORMAT_MJPEG:
- case media::PIXEL_FORMAT_YUV420P9:
- case media::PIXEL_FORMAT_YUV420P10:
- case media::PIXEL_FORMAT_YUV422P9:
- case media::PIXEL_FORMAT_YUV422P10:
- case media::PIXEL_FORMAT_YUV444P9:
- case media::PIXEL_FORMAT_YUV444P10:
- case media::PIXEL_FORMAT_YUV420P12:
- case media::PIXEL_FORMAT_YUV422P12:
- case media::PIXEL_FORMAT_YUV444P12:
- case media::PIXEL_FORMAT_Y16:
- case media::PIXEL_FORMAT_ABGR:
- case media::PIXEL_FORMAT_XBGR:
- case media::PIXEL_FORMAT_P016LE:
- case media::PIXEL_FORMAT_XR30:
- case media::PIXEL_FORMAT_XB30:
- case media::PIXEL_FORMAT_BGRA:
- case media::PIXEL_FORMAT_UNKNOWN:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::P016LE:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ case VideoPixelFormat::BGRA:
+ case VideoPixelFormat::UNKNOWN:
break;
}
}
- ALOGE("Unmapped %s for %s", VideoPixelFormatToString(pixelFormat).c_str(),
+ ALOGE("Unmapped %s for %s", videoPixelFormatToString(pixelFormat).c_str(),
singlePlanar ? "single-planar" : "multi-planar");
return std::nullopt;
}
-media::VideoPixelFormat Fourcc::toVideoPixelFormat() const {
+VideoPixelFormat Fourcc::toVideoPixelFormat() const {
switch (mValue) {
case AR24:
- return media::PIXEL_FORMAT_ARGB;
+ return VideoPixelFormat::ARGB;
case AB24:
- return media::PIXEL_FORMAT_ABGR;
+ return VideoPixelFormat::ABGR;
case XR24:
- return media::PIXEL_FORMAT_XRGB;
+ return VideoPixelFormat::XRGB;
case XB24:
- return media::PIXEL_FORMAT_XBGR;
+ return VideoPixelFormat::XBGR;
case RGB4:
- return media::PIXEL_FORMAT_BGRA;
+ return VideoPixelFormat::BGRA;
case YU12:
case YM12:
- return media::PIXEL_FORMAT_I420;
+ return VideoPixelFormat::I420;
case YV12:
case YM21:
- return media::PIXEL_FORMAT_YV12;
+ return VideoPixelFormat::YV12;
case YUYV:
- return media::PIXEL_FORMAT_YUY2;
+ return VideoPixelFormat::YUY2;
case NV12:
case NM12:
- return media::PIXEL_FORMAT_NV12;
+ return VideoPixelFormat::NV12;
case NV21:
case NM21:
- return media::PIXEL_FORMAT_NV21;
+ return VideoPixelFormat::NV21;
case YM16:
- return media::PIXEL_FORMAT_I422;
+ return VideoPixelFormat::I422;
// V4L2_PIX_FMT_MT21C is only used for MT8173 hardware video decoder output
// and should be converted by MT8173 image processor for compositor to
// render. Since it is an intermediate format for video decoder,
@@ -170,11 +170,11 @@
// similar to V4L2_PIX_FMT_MT21C but is not compressed ; thus it can also
// be mapped to PIXEL_FORMAT_NV12.
case MM21:
- return media::PIXEL_FORMAT_NV12;
+ return VideoPixelFormat::NV12;
}
ALOGE("Unmapped Fourcc: %s", toString().c_str());
- return media::PIXEL_FORMAT_UNKNOWN;
+ return VideoPixelFormat::UNKNOWN;
}
// static
@@ -247,7 +247,7 @@
}
std::string Fourcc::toString() const {
- return media::FourccToString(static_cast<uint32_t>(mValue));
+ return fourccToString(static_cast<uint32_t>(mValue));
}
static_assert(Fourcc::AR24 == V4L2_PIX_FMT_ABGR32, "Mismatch Fourcc");
diff --git a/common/V4L2Device.cpp b/common/V4L2Device.cpp
index cd07f22..a31d82b 100644
--- a/common/V4L2Device.cpp
+++ b/common/V4L2Device.cpp
@@ -1101,7 +1101,7 @@
std::string path = getDevicePathFor(type, v4l2PixFmt);
if (path.empty()) {
- ALOGE("No devices supporting %s for type: %u", media::FourccToString(v4l2PixFmt).c_str(),
+ ALOGE("No devices supporting %s for type: %u", fourccToString(v4l2PixFmt).c_str(),
static_cast<uint32_t>(type));
return false;
}
@@ -1356,7 +1356,7 @@
}
break;
default:
- ALOGE("Unhandled pixelformat %s", media::FourccToString(pixFmt).c_str());
+ ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
return {};
}
@@ -1442,7 +1442,7 @@
ui::Size V4L2Device::allocatedSizeFromV4L2Format(const struct v4l2_format& format) {
ui::Size codedSize;
ui::Size visibleSize;
- media::VideoPixelFormat frameFormat = media::PIXEL_FORMAT_UNKNOWN;
+ VideoPixelFormat frameFormat = VideoPixelFormat::UNKNOWN;
size_t bytesPerLine = 0;
// Total bytes in the frame.
size_t sizeimage = 0;
@@ -1458,7 +1458,7 @@
const uint32_t pixFmt = format.fmt.pix_mp.pixelformat;
const auto frameFourcc = Fourcc::fromV4L2PixFmt(pixFmt);
if (!frameFourcc) {
- ALOGE("Unsupported format %s", media::FourccToString(pixFmt).c_str());
+ ALOGE("Unsupported format %s", fourccToString(pixFmt).c_str());
return codedSize;
}
frameFormat = frameFourcc->toVideoPixelFormat();
@@ -1470,10 +1470,10 @@
const uint32_t fourcc = format.fmt.pix.pixelformat;
const auto frameFourcc = Fourcc::fromV4L2PixFmt(fourcc);
if (!frameFourcc) {
- ALOGE("Unsupported format %s", media::FourccToString(fourcc).c_str());
+ ALOGE("Unsupported format %s", fourccToString(fourcc).c_str());
return codedSize;
}
- frameFormat = frameFourcc ? frameFourcc->toVideoPixelFormat() : media::PIXEL_FORMAT_UNKNOWN;
+ frameFormat = frameFourcc ? frameFourcc->toVideoPixelFormat() : VideoPixelFormat::UNKNOWN;
}
// V4L2 does not provide per-plane bytesperline (bpl) when different components are sharing one
@@ -1485,12 +1485,12 @@
// elsewhere to calculate coded height.
// We need bits per pixel for one component only to calculate the coded width from bytesperline.
- int planeHorizBitsPerPixel = media::PlaneHorizontalBitsPerPixel(frameFormat, 0);
+ int planeHorizBitsPerPixel = planeHorizontalBitsPerPixel(frameFormat, 0);
// Adding up bpp for each component will give us total bpp for all components.
int totalBpp = 0;
- for (size_t i = 0; i < media::NumPlanes(frameFormat); ++i)
- totalBpp += media::PlaneBitsPerPixel(frameFormat, i);
+ for (size_t i = 0; i < numPlanes(frameFormat); ++i)
+ totalBpp += planeBitsPerPixel(frameFormat, i);
if (sizeimage == 0 || bytesPerLine == 0 || planeHorizBitsPerPixel == 0 || totalBpp == 0 ||
(bytesPerLine * 8) % planeHorizBitsPerPixel != 0) {
@@ -1509,8 +1509,8 @@
// Sanity checks. Calculated coded size has to contain given visible size and fulfill buffer
// byte size requirements.
- ALOG_ASSERT(media::Rect(codedSize).Contains(media::Rect(visibleSize)));
- ALOG_ASSERT(sizeimage <= media::AllocationSize(frameFormat, codedSize));
+ ALOG_ASSERT(Rect(codedSize).Contains(Rect(visibleSize)));
+ ALOG_ASSERT(sizeimage <= allocationSize(frameFormat, codedSize));
return codedSize;
}
@@ -1555,7 +1555,7 @@
// single-planar
const struct v4l2_pix_format& pix = format.fmt.pix;
s << ", width_height: " << toString(ui::Size(pix.width, pix.height))
- << ", pixelformat: " << media::FourccToString(pix.pixelformat) << ", field: " << pix.field
+ << ", pixelformat: " << fourccToString(pix.pixelformat) << ", field: " << pix.field
<< ", bytesperline: " << pix.bytesperline << ", sizeimage: " << pix.sizeimage;
} else if (V4L2_TYPE_IS_MULTIPLANAR(format.type)) {
const struct v4l2_pix_format_mplane& pixMp = format.fmt.pix_mp;
@@ -1563,8 +1563,7 @@
// integer, which is not what we want. Casting pix_mp.num_planes unsigned int solves the
// issue.
s << ", width_height: " << toString(ui::Size(pixMp.width, pixMp.height))
- << ", pixelformat: " << media::FourccToString(pixMp.pixelformat)
- << ", field: " << pixMp.field
+ << ", pixelformat: " << fourccToString(pixMp.pixelformat) << ", field: " << pixMp.field
<< ", num_planes: " << static_cast<unsigned int>(pixMp.num_planes);
for (size_t i = 0; i < pixMp.num_planes; ++i) {
const struct v4l2_plane_pix_format& plane_fmt = pixMp.plane_fmt[i];
@@ -1624,20 +1623,20 @@
const auto videoFourcc = Fourcc::fromV4L2PixFmt(pixFmt);
if (!videoFourcc) {
ALOGE("Failed to convert pixel format to VideoPixelFormat: %s",
- media::FourccToString(pixFmt).c_str());
+ fourccToString(pixFmt).c_str());
return std::nullopt;
}
- const media::VideoPixelFormat videoFormat = videoFourcc->toVideoPixelFormat();
+ const VideoPixelFormat videoFormat = videoFourcc->toVideoPixelFormat();
const size_t numBuffers = pixMp.num_planes;
- const size_t numColorPlanes = media::NumPlanes(videoFormat);
+ const size_t numColorPlanes = numPlanes(videoFormat);
if (numColorPlanes == 0) {
ALOGE("Unsupported video format for NumPlanes(): %s",
- VideoPixelFormatToString(videoFormat).c_str());
+ videoPixelFormatToString(videoFormat).c_str());
return std::nullopt;
}
if (numBuffers > numColorPlanes) {
ALOGE("pix_mp.num_planes: %zu should not be larger than NumPlanes(%s): %zu", numBuffers,
- VideoPixelFormatToString(videoFormat).c_str(), numColorPlanes);
+ videoPixelFormatToString(videoFormat).c_str(), numColorPlanes);
return std::nullopt;
}
// Reserve capacity in advance to prevent unnecessary vector reallocation.
@@ -1680,7 +1679,7 @@
}
default:
ALOGE("Cannot derive stride for each plane for pixel format %s",
- media::FourccToString(pixFmt).c_str());
+ fourccToString(pixFmt).c_str());
return std::nullopt;
}
}
@@ -1693,7 +1692,7 @@
size_t V4L2Device::getNumPlanesOfV4L2PixFmt(uint32_t pixFmt) {
std::optional<Fourcc> fourcc = Fourcc::fromV4L2PixFmt(pixFmt);
if (fourcc && fourcc->isMultiPlanar()) {
- return media::NumPlanes(fourcc->toVideoPixelFormat());
+ return numPlanes(fourcc->toVideoPixelFormat());
}
return 1u;
}
@@ -1728,13 +1727,13 @@
maxResolution->set(1920, 1088);
ALOGE("GetSupportedResolution failed to get maximum resolution for fourcc %s, "
"fall back to %s",
- media::FourccToString(pixelFormat).c_str(), toString(*maxResolution).c_str());
+ fourccToString(pixelFormat).c_str(), toString(*maxResolution).c_str());
}
if (isEmpty(*minResolution)) {
minResolution->set(16, 16);
ALOGE("GetSupportedResolution failed to get minimum resolution for fourcc %s, "
"fall back to %s",
- media::FourccToString(pixelFormat).c_str(), toString(*minResolution).c_str());
+ fourccToString(pixelFormat).c_str(), toString(*minResolution).c_str());
}
}
diff --git a/common/VideoPixelFormat.cpp b/common/VideoPixelFormat.cpp
index ec75873..f175c26 100644
--- a/common/VideoPixelFormat.cpp
+++ b/common/VideoPixelFormat.cpp
@@ -6,382 +6,366 @@
#include <v4l2_codec2/common/VideoPixelFormat.h>
-#include "base/bits.h"
-#include "base/logging.h"
-#include "base/stl_util.h"
-#include "base/strings/stringprintf.h"
+#include <sstream>
+
+#include <base/bits.h>
+#include <utils/Log.h>
-namespace media {
+namespace android {
namespace {
enum {
- kMaxPlanes = 4,
- kYPlane = 0,
- kARGBPlane = kYPlane,
- kUPlane = 1,
- kUVPlane = kUPlane,
- kVPlane = 2,
- kAPlane = 3,
- };
-
+ kMaxPlanes = 4,
+ kYPlane = 0,
+ kARGBPlane = kYPlane,
+ kUPlane = 1,
+ kUVPlane = kUPlane,
+ kVPlane = 2,
+ kAPlane = 3,
+};
}
-std::string VideoPixelFormatToString(VideoPixelFormat format) {
- switch (format) {
- case PIXEL_FORMAT_UNKNOWN:
- return "PIXEL_FORMAT_UNKNOWN";
- case PIXEL_FORMAT_I420:
- return "PIXEL_FORMAT_I420";
- case PIXEL_FORMAT_YV12:
- return "PIXEL_FORMAT_YV12";
- case PIXEL_FORMAT_I422:
- return "PIXEL_FORMAT_I422";
- case PIXEL_FORMAT_I420A:
- return "PIXEL_FORMAT_I420A";
- case PIXEL_FORMAT_I444:
- return "PIXEL_FORMAT_I444";
- case PIXEL_FORMAT_NV12:
- return "PIXEL_FORMAT_NV12";
- case PIXEL_FORMAT_NV21:
- return "PIXEL_FORMAT_NV21";
- case PIXEL_FORMAT_YUY2:
- return "PIXEL_FORMAT_YUY2";
- case PIXEL_FORMAT_ARGB:
- return "PIXEL_FORMAT_ARGB";
- case PIXEL_FORMAT_XRGB:
- return "PIXEL_FORMAT_XRGB";
- case PIXEL_FORMAT_RGB24:
- return "PIXEL_FORMAT_RGB24";
- case PIXEL_FORMAT_MJPEG:
- return "PIXEL_FORMAT_MJPEG";
- case PIXEL_FORMAT_YUV420P9:
- return "PIXEL_FORMAT_YUV420P9";
- case PIXEL_FORMAT_YUV420P10:
- return "PIXEL_FORMAT_YUV420P10";
- case PIXEL_FORMAT_YUV422P9:
- return "PIXEL_FORMAT_YUV422P9";
- case PIXEL_FORMAT_YUV422P10:
- return "PIXEL_FORMAT_YUV422P10";
- case PIXEL_FORMAT_YUV444P9:
- return "PIXEL_FORMAT_YUV444P9";
- case PIXEL_FORMAT_YUV444P10:
- return "PIXEL_FORMAT_YUV444P10";
- case PIXEL_FORMAT_YUV420P12:
- return "PIXEL_FORMAT_YUV420P12";
- case PIXEL_FORMAT_YUV422P12:
- return "PIXEL_FORMAT_YUV422P12";
- case PIXEL_FORMAT_YUV444P12:
- return "PIXEL_FORMAT_YUV444P12";
- case PIXEL_FORMAT_Y16:
- return "PIXEL_FORMAT_Y16";
- case PIXEL_FORMAT_ABGR:
- return "PIXEL_FORMAT_ABGR";
- case PIXEL_FORMAT_XBGR:
- return "PIXEL_FORMAT_XBGR";
- case PIXEL_FORMAT_P016LE:
- return "PIXEL_FORMAT_P016LE";
- case PIXEL_FORMAT_XR30:
- return "PIXEL_FORMAT_XR30";
- case PIXEL_FORMAT_XB30:
- return "PIXEL_FORMAT_XB30";
- case PIXEL_FORMAT_BGRA:
- return "PIXEL_FORMAT_BGRA";
- }
- NOTREACHED() << "Invalid VideoPixelFormat provided: " << format;
- return "";
+std::string videoPixelFormatToString(VideoPixelFormat format) {
+ switch (format) {
+ case VideoPixelFormat::I420:
+ return "I420";
+ case VideoPixelFormat::YV12:
+ return "YV12";
+ case VideoPixelFormat::I422:
+ return "I422";
+ case VideoPixelFormat::I420A:
+ return "I420A";
+ case VideoPixelFormat::I444:
+ return "I444";
+ case VideoPixelFormat::NV12:
+ return "NV12";
+ case VideoPixelFormat::NV21:
+ return "NV21";
+ case VideoPixelFormat::YUY2:
+ return "YUY2";
+ case VideoPixelFormat::ARGB:
+ return "ARGB";
+ case VideoPixelFormat::XRGB:
+ return "XRGB";
+ case VideoPixelFormat::RGB24:
+ return "RGB24";
+ case VideoPixelFormat::MJPEG:
+ return "MJPEG";
+ case VideoPixelFormat::YUV420P9:
+ return "YUV420P9";
+ case VideoPixelFormat::YUV420P10:
+ return "YUV420P10";
+ case VideoPixelFormat::YUV422P9:
+ return "YUV422P9";
+ case VideoPixelFormat::YUV422P10:
+ return "YUV422P10";
+ case VideoPixelFormat::YUV444P9:
+ return "YUV444P9";
+ case VideoPixelFormat::YUV444P10:
+ return "YUV444P10";
+ case VideoPixelFormat::YUV420P12:
+ return "YUV420P12";
+ case VideoPixelFormat::YUV422P12:
+ return "YUV422P12";
+ case VideoPixelFormat::YUV444P12:
+ return "YUV444P12";
+ case VideoPixelFormat::Y16:
+ return "Y16";
+ case VideoPixelFormat::ABGR:
+ return "ABGR";
+ case VideoPixelFormat::XBGR:
+ return "XBGR";
+ case VideoPixelFormat::P016LE:
+ return "P016LE";
+ case VideoPixelFormat::XR30:
+ return "XR30";
+ case VideoPixelFormat::XB30:
+ return "XB30";
+ case VideoPixelFormat::BGRA:
+ return "BGRA";
+        case VideoPixelFormat::UNKNOWN:
+            return "UNKNOWN";
+    }
+    // Out-of-range values: flowing off the end of a non-void function is UB.
+    ALOGE("Invalid pixel format: %d", static_cast<int>(format));
+    return "";
+}
-std::string FourccToString(uint32_t fourcc) {
- std::string result = "0000";
- for (size_t i = 0; i < 4; ++i, fourcc >>= 8) {
- const char c = static_cast<char>(fourcc & 0xFF);
- if (c <= 0x1f || c >= 0x7f)
- return base::StringPrintf("0x%x", fourcc);
- result[i] = c;
- }
- return result;
+std::string fourccToString(uint32_t fourcc) {
+    std::string result = "0000";
+    for (size_t i = 0; i < 4; ++i, fourcc >>= 8) {
+        const char c = static_cast<char>(fourcc & 0xFF);
+        if (c <= 0x1f || c >= 0x7f) {
+            // Not printable; return the remaining value as hex instead. A temporary
+            // std::stringstream expression has no .str() through operator<<'s ostream&
+            // return, so use a named stream.
+            std::ostringstream s;
+            s << "0x" << std::hex << fourcc;
+            return s.str();
+        }
+        result[i] = c;
+    }
+    return result;
-size_t BitDepth(VideoPixelFormat format) {
- switch (format) {
- case PIXEL_FORMAT_UNKNOWN:
- NOTREACHED();
- FALLTHROUGH;
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_I420A:
- case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_RGB24:
- case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_ABGR:
- case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_BGRA:
- return 8;
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV444P9:
- return 9;
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_XR30:
- case PIXEL_FORMAT_XB30:
- return 10;
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_YUV422P12:
- case PIXEL_FORMAT_YUV444P12:
- return 12;
- case PIXEL_FORMAT_Y16:
- case PIXEL_FORMAT_P016LE:
- return 16;
- }
- NOTREACHED();
- return 0;
+size_t bitDepth(VideoPixelFormat format) {
+ switch (format) {
+ case VideoPixelFormat::I420:
+ case VideoPixelFormat::YV12:
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::NV12:
+ case VideoPixelFormat::NV21:
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::BGRA:
+ return 8;
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV444P9:
+ return 9;
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ return 10;
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ return 12;
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::P016LE:
+ return 16;
+        case VideoPixelFormat::UNKNOWN:
+            ALOGE("Invalid pixel format");
+            return 0;
+    }
+    // Out-of-range values: flowing off the end of a non-void function is UB.
+    ALOGE("Invalid pixel format: %d", static_cast<int>(format));
+    return 0;
+}
// If it is required to allocate aligned to multiple-of-two size overall for the
// frame of pixel |format|.
static bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
- switch (format) {
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_RGB24:
- case PIXEL_FORMAT_Y16:
- case PIXEL_FORMAT_ABGR:
- case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_XR30:
- case PIXEL_FORMAT_XB30:
- case PIXEL_FORMAT_BGRA:
- return false;
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV444P9:
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_YUV422P12:
- case PIXEL_FORMAT_YUV444P12:
- case PIXEL_FORMAT_I420A:
- case PIXEL_FORMAT_P016LE:
- return true;
- case PIXEL_FORMAT_UNKNOWN:
- break;
- }
- NOTREACHED() << "Unsupported video frame format: " << format;
- return false;
-}
-
-size_t NumPlanes(VideoPixelFormat format) {
- switch (format) {
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_BGRA:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_RGB24:
- case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_Y16:
- case PIXEL_FORMAT_ABGR:
- case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_XR30:
- case PIXEL_FORMAT_XB30:
- return 1;
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_P016LE:
- return 2;
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV444P9:
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_YUV422P12:
- case PIXEL_FORMAT_YUV444P12:
- return 3;
- case PIXEL_FORMAT_I420A:
- return 4;
- case PIXEL_FORMAT_UNKNOWN:
- // Note: PIXEL_FORMAT_UNKNOWN is used for end-of-stream frame.
- // Set its NumPlanes() to zero to avoid NOTREACHED().
- return 0;
- }
- NOTREACHED() << "Unsupported video frame format: " << format;
- return 0;
-}
-
-size_t AllocationSize(VideoPixelFormat format,
- const android::ui::Size& coded_size) {
- size_t total = 0;
- for (size_t i = 0; i < NumPlanes(format); ++i) {
- android::ui::Size plane_size = PlaneSize(format, i, coded_size);
- total += (plane_size.width * plane_size.height);
- }
-
- return total;
-}
-
-android::ui::Size PlaneSize(VideoPixelFormat format,
- size_t plane,
- const android::ui::Size& coded_size) {
- DCHECK(IsValidPlane(plane, format));
-
- int width = coded_size.width;
- int height = coded_size.height;
- if (RequiresEvenSizeAllocation(format)) {
- // Align to multiple-of-two size overall. This ensures that non-subsampled
- // planes can be addressed by pixel with the same scaling as the subsampled
- // planes.
- width = base::bits::Align(width, 2);
- height = base::bits::Align(height, 2);
- }
-
- const android::ui::Size subsample = SampleSize(format, plane);
- DCHECK(width % subsample.width == 0);
- DCHECK(height % subsample.height == 0);
- return android::ui::Size(BytesPerElement(format, plane) * width / subsample.width,
- height / subsample.height);
-}
-
-int PlaneHorizontalBitsPerPixel(VideoPixelFormat format,
- size_t plane) {
- DCHECK(IsValidPlane(plane, format));
- const int bits_per_element = 8 * BytesPerElement(format, plane);
- const int horiz_pixels_per_element = SampleSize(format, plane).width;
- DCHECK_EQ(bits_per_element % horiz_pixels_per_element, 0);
- return bits_per_element / horiz_pixels_per_element;
-}
-
-int PlaneBitsPerPixel(VideoPixelFormat format, size_t plane) {
- DCHECK(IsValidPlane(plane, format));
- return PlaneHorizontalBitsPerPixel(format, plane) /
- SampleSize(format, plane).height;
-}
-
-int BytesPerElement(VideoPixelFormat format, size_t plane) {
- DCHECK(IsValidPlane(format, plane));
- switch (format) {
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_BGRA:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_ABGR:
- case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_XR30:
- case PIXEL_FORMAT_XB30:
- return 4;
- case PIXEL_FORMAT_RGB24:
- return 3;
- case PIXEL_FORMAT_Y16:
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV444P9:
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_YUV422P12:
- case PIXEL_FORMAT_YUV444P12:
- case PIXEL_FORMAT_P016LE:
- return 2;
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21: {
- static const int bytes_per_element[] = {1, 2};
- DCHECK_LT(plane, base::size(bytes_per_element));
- return bytes_per_element[plane];
+ switch (format) {
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ case VideoPixelFormat::BGRA:
+ return false;
+ case VideoPixelFormat::NV12:
+ case VideoPixelFormat::NV21:
+ case VideoPixelFormat::I420:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::YV12:
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::P016LE:
+ return true;
+ case VideoPixelFormat::UNKNOWN:
+ ALOGE("Invalid pixel format");
+ return false;
}
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_I420A:
- case PIXEL_FORMAT_I444:
- return 1;
- case PIXEL_FORMAT_MJPEG:
- return 0;
- case PIXEL_FORMAT_UNKNOWN:
- break;
- }
- NOTREACHED();
- return 0;
}
-bool IsValidPlane(VideoPixelFormat format, size_t plane) {
- DCHECK_LE(NumPlanes(format), static_cast<size_t>(kMaxPlanes));
- return plane < NumPlanes(format);
+size_t numPlanes(VideoPixelFormat format) {
+ switch (format) {
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::BGRA:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ return 1;
+ case VideoPixelFormat::NV12:
+ case VideoPixelFormat::NV21:
+ case VideoPixelFormat::P016LE:
+ return 2;
+ case VideoPixelFormat::I420:
+ case VideoPixelFormat::YV12:
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ return 3;
+ case VideoPixelFormat::I420A:
+ return 4;
+ case VideoPixelFormat::UNKNOWN:
+ // Note: VideoPixelFormat::UNKNOWN is used for end-of-stream frame.
+ return 0;
+ }
+}
+
+size_t allocationSize(VideoPixelFormat format, const android::ui::Size& coded_size) {
+ size_t total = 0;
+ for (size_t i = 0; i < numPlanes(format); ++i) {
+ android::ui::Size plane_size = planeSize(format, i, coded_size);
+ total += (plane_size.width * plane_size.height);
+ }
+
+ return total;
+}
+
+android::ui::Size planeSize(VideoPixelFormat format, size_t plane,
+ const android::ui::Size& coded_size) {
+    ALOG_ASSERT(isValidPlane(format, plane));
+
+ int width = coded_size.width;
+ int height = coded_size.height;
+ if (RequiresEvenSizeAllocation(format)) {
+ // Align to multiple-of-two size overall. This ensures that non-subsampled
+ // planes can be addressed by pixel with the same scaling as the subsampled
+ // planes.
+ width = base::bits::Align(width, 2);
+ height = base::bits::Align(height, 2);
+ }
+
+ const android::ui::Size subsample = SampleSize(format, plane);
+ ALOG_ASSERT(width % subsample.width == 0);
+ ALOG_ASSERT(height % subsample.height == 0);
+ return android::ui::Size(bytesPerElement(format, plane) * width / subsample.width,
+ height / subsample.height);
+}
+
+int planeHorizontalBitsPerPixel(VideoPixelFormat format, size_t plane) {
+    ALOG_ASSERT(isValidPlane(format, plane));
+ const int bitsPerElement = 8 * bytesPerElement(format, plane);
+ const int horizPixelsPerElement = SampleSize(format, plane).width;
+ ALOG_ASSERT(bitsPerElement % horizPixelsPerElement == 0);
+ return bitsPerElement / horizPixelsPerElement;
+}
+
+int planeBitsPerPixel(VideoPixelFormat format, size_t plane) {
+    ALOG_ASSERT(isValidPlane(format, plane));
+ return planeHorizontalBitsPerPixel(format, plane) / SampleSize(format, plane).height;
+}
+
+int bytesPerElement(VideoPixelFormat format, size_t plane) {
+ ALOG_ASSERT(isValidPlane(format, plane));
+ switch (format) {
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::BGRA:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ return 4;
+ case VideoPixelFormat::RGB24:
+ return 3;
+ case VideoPixelFormat::Y16:
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::YUV422P12:
+ case VideoPixelFormat::YUV444P12:
+ case VideoPixelFormat::P016LE:
+ return 2;
+ case VideoPixelFormat::NV12:
+ case VideoPixelFormat::NV21: {
+ static const int bytes_per_element[] = {1, 2};
+ ALOG_ASSERT(plane < base::size(bytes_per_element));
+ return bytes_per_element[plane];
+ }
+ case VideoPixelFormat::YV12:
+ case VideoPixelFormat::I420:
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::I444:
+ return 1;
+ case VideoPixelFormat::MJPEG:
+ return 0;
+ case VideoPixelFormat::UNKNOWN:
+ ALOGE("Invalid pixel format");
+ return 0;
+ }
+}
+
+bool isValidPlane(VideoPixelFormat format, size_t plane) {
+ ALOG_ASSERT(numPlanes(format) <= static_cast<size_t>(kMaxPlanes));
+ return plane < numPlanes(format);
}
android::ui::Size SampleSize(VideoPixelFormat format, size_t plane) {
- DCHECK(IsValidPlane(format, plane));
+ ALOG_ASSERT(isValidPlane(format, plane));
- switch (plane) {
+ switch (plane) {
case kYPlane: // and kARGBPlane:
case kAPlane:
- return android::ui::Size(1, 1);
+ return android::ui::Size(1, 1);
case kUPlane: // and kUVPlane:
case kVPlane:
- switch (format) {
- case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_YUV444P9:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_YUV444P12:
- case PIXEL_FORMAT_Y16:
- return android::ui::Size(1, 1);
+ switch (format) {
+ case VideoPixelFormat::I444:
+ case VideoPixelFormat::YUV444P9:
+ case VideoPixelFormat::YUV444P10:
+ case VideoPixelFormat::YUV444P12:
+ case VideoPixelFormat::Y16:
+ return android::ui::Size(1, 1);
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV422P12:
- return android::ui::Size(2, 1);
+ case VideoPixelFormat::I422:
+ case VideoPixelFormat::YUV422P9:
+ case VideoPixelFormat::YUV422P10:
+ case VideoPixelFormat::YUV422P12:
+ return android::ui::Size(2, 1);
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_I420A:
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_P016LE:
- return android::ui::Size(2, 2);
+ case VideoPixelFormat::YV12:
+ case VideoPixelFormat::I420:
+ case VideoPixelFormat::I420A:
+ case VideoPixelFormat::NV12:
+ case VideoPixelFormat::NV21:
+ case VideoPixelFormat::YUV420P9:
+ case VideoPixelFormat::YUV420P10:
+ case VideoPixelFormat::YUV420P12:
+ case VideoPixelFormat::P016LE:
+ return android::ui::Size(2, 2);
- case PIXEL_FORMAT_UNKNOWN:
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_RGB24:
- case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_ABGR:
- case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_XR30:
- case PIXEL_FORMAT_XB30:
- case PIXEL_FORMAT_BGRA:
- break;
- }
- }
- NOTREACHED();
- return android::ui::Size();
+ case VideoPixelFormat::UNKNOWN:
+ case VideoPixelFormat::YUY2:
+ case VideoPixelFormat::ARGB:
+ case VideoPixelFormat::XRGB:
+ case VideoPixelFormat::RGB24:
+ case VideoPixelFormat::MJPEG:
+ case VideoPixelFormat::ABGR:
+ case VideoPixelFormat::XBGR:
+ case VideoPixelFormat::XR30:
+ case VideoPixelFormat::XB30:
+ case VideoPixelFormat::BGRA:
+ ALOGE("Invalid pixel format");
+ }
+ }
+
+ return android::ui::Size();
}
-} // namespace media
-
+} // namespace android
diff --git a/common/include/v4l2_codec2/common/Common.h b/common/include/v4l2_codec2/common/Common.h
index d18e06b..0775af1 100644
--- a/common/include/v4l2_codec2/common/Common.h
+++ b/common/include/v4l2_codec2/common/Common.h
@@ -27,7 +27,7 @@
// A video frame's layout, containing pixel format, size and layout of individual planes.
struct VideoFrameLayout {
- media::VideoPixelFormat mFormat = media::PIXEL_FORMAT_UNKNOWN;
+ VideoPixelFormat mFormat = VideoPixelFormat::UNKNOWN;
android::ui::Size mCodedSize;
std::vector<VideoFramePlane> mPlanes;
bool mMultiPlanar = false;
diff --git a/common/include/v4l2_codec2/common/EncodeHelpers.h b/common/include/v4l2_codec2/common/EncodeHelpers.h
index 180e1bc..bfbdd05 100644
--- a/common/include/v4l2_codec2/common/EncodeHelpers.h
+++ b/common/include/v4l2_codec2/common/EncodeHelpers.h
@@ -27,7 +27,7 @@
DMABUF = 1,
};
- media::VideoPixelFormat mInputFormat;
+ VideoPixelFormat mInputFormat;
ui::Size mInputVisibleSize;
C2Config::profile_t mOutputProfile;
uint32_t mInitialBitrate;
diff --git a/common/include/v4l2_codec2/common/FormatConverter.h b/common/include/v4l2_codec2/common/FormatConverter.h
index de9d21a..bc3f85a 100644
--- a/common/include/v4l2_codec2/common/FormatConverter.h
+++ b/common/include/v4l2_codec2/common/FormatConverter.h
@@ -51,7 +51,7 @@
// Create FormatConverter instance and initialize it, nullptr will be returned on
// initialization error.
- static std::unique_ptr<FormatConverter> Create(media::VideoPixelFormat outFormat,
+ static std::unique_ptr<FormatConverter> Create(VideoPixelFormat outFormat,
const ui::Size& visibleSize, uint32_t inputCount,
const ui::Size& codedSize);
@@ -93,7 +93,7 @@
// Initialize foramt converter. It will pre-allocate a set of graphic blocks as |codedSize| and
// |outFormat|. This function should be called prior to other functions.
- c2_status_t initialize(media::VideoPixelFormat outFormat, const ui::Size& visibleSize,
+ c2_status_t initialize(VideoPixelFormat outFormat, const ui::Size& visibleSize,
uint32_t inputCount, const ui::Size& codedSize);
// The array of block entries.
@@ -106,7 +106,7 @@
std::unique_ptr<uint8_t[]> mTempPlaneU;
std::unique_ptr<uint8_t[]> mTempPlaneV;
- media::VideoPixelFormat mOutFormat = media::VideoPixelFormat::PIXEL_FORMAT_UNKNOWN;
+ VideoPixelFormat mOutFormat = VideoPixelFormat::UNKNOWN;
ui::Size mVisibleSize;
};
diff --git a/common/include/v4l2_codec2/common/Fourcc.h b/common/include/v4l2_codec2/common/Fourcc.h
index 85da0b1..a0f5fc4 100644
--- a/common/include/v4l2_codec2/common/Fourcc.h
+++ b/common/include/v4l2_codec2/common/Fourcc.h
@@ -124,7 +124,7 @@
// Converts a VideoPixelFormat to Fourcc. Returns nullopt for invalid input. Note that a
// VideoPixelFormat may have two Fourcc counterparts. Caller has to specify if it is for
// single-planar or multi-planar format.
- static std::optional<Fourcc> fromVideoPixelFormat(media::VideoPixelFormat pixelFormat,
+ static std::optional<Fourcc> fromVideoPixelFormat(VideoPixelFormat pixelFormat,
bool singlePlanar = true);
// Converts a V4L2PixFmt to Fourcc. Returns nullopt for invalid input.
static std::optional<Fourcc> fromV4L2PixFmt(uint32_t v4l2PixFmt);
@@ -132,7 +132,7 @@
// Value getters:
// Returns the VideoPixelFormat counterpart of the value. Returns PIXEL_FORMAT_UNKNOWN if no
// mapping is found.
- media::VideoPixelFormat toVideoPixelFormat() const;
+ VideoPixelFormat toVideoPixelFormat() const;
// Returns the V4L2PixFmt counterpart of the value. Returns 0 if no mapping is found.
uint32_t toV4L2PixFmt() const;
diff --git a/common/include/v4l2_codec2/common/V4L2Device.h b/common/include/v4l2_codec2/common/V4L2Device.h
index fd197a2..b4c909c 100644
--- a/common/include/v4l2_codec2/common/V4L2Device.h
+++ b/common/include/v4l2_codec2/common/V4L2Device.h
@@ -24,7 +24,6 @@
#include <ui/Size.h>
#include <v4l2_codec2/common/Common.h>
#include <v4l2_codec2/common/V4L2DevicePoller.h>
-#include <v4l2_codec2/common/VideoPixelFormat.h>
#include <v4l2_codec2/common/VideoTypes.h>
namespace android {
diff --git a/common/include/v4l2_codec2/common/VideoPixelFormat.h b/common/include/v4l2_codec2/common/VideoPixelFormat.h
index 99e604a..2cfe910 100644
--- a/common/include/v4l2_codec2/common/VideoPixelFormat.h
+++ b/common/include/v4l2_codec2/common/VideoPixelFormat.h
@@ -4,126 +4,87 @@
// Note: ported from Chromium commit head: 3b7ce92816e2
// Note: only necessary functions are ported from video_types.h
-#ifndef VIDEO_PIXEL_FORMAT_H_
-#define VIDEO_PIXEL_FORMAT_H_
+#ifndef ANDROID_V4L2_CODEC2_COMMON_VIDEO_PIXEL_FORMAT_H
+#define ANDROID_V4L2_CODEC2_COMMON_VIDEO_PIXEL_FORMAT_H
#include <string>
#include "ui/Size.h"
-namespace media {
+namespace android {
// Pixel formats roughly based on FOURCC labels, see:
// http://www.fourcc.org/rgb.php and http://www.fourcc.org/yuv.php
-// Logged to UMA, so never reuse values. Leave gaps if necessary.
-// Ordered as planar, semi-planar, YUV-packed, and RGB formats.
-// When a VideoFrame is backed by native textures, VideoPixelFormat describes
-// how those textures should be sampled and combined to produce the final
-// pixels.
-enum VideoPixelFormat {
- PIXEL_FORMAT_UNKNOWN = 0, // Unknown or unspecified format value.
- PIXEL_FORMAT_I420 =
- 1, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
-
- // Note: Chrome does not actually support YVU compositing, so you probably
- // don't actually want to use this. See http://crbug.com/784627.
- PIXEL_FORMAT_YV12 = 2, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
-
- PIXEL_FORMAT_I422 = 3, // 16bpp YUV planar 1x1 Y, 2x1 UV samples.
- PIXEL_FORMAT_I420A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 UV, 1x1 A samples.
- PIXEL_FORMAT_I444 = 5, // 24bpp YUV planar, no subsampling.
- PIXEL_FORMAT_NV12 =
- 6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
- PIXEL_FORMAT_NV21 =
- 7, // 12bpp with Y plane followed by a 2x2 interleaved VU plane.
- /* PIXEL_FORMAT_UYVY = 8, Deprecated */
- PIXEL_FORMAT_YUY2 =
- 9, // 16bpp interleaved 1x1 Y, 2x1 U, 1x1 Y, 2x1 V samples.
- PIXEL_FORMAT_ARGB = 10, // 32bpp BGRA (byte-order), 1 plane.
- PIXEL_FORMAT_XRGB = 11, // 24bpp BGRX (byte-order), 1 plane.
- PIXEL_FORMAT_RGB24 = 12, // 24bpp BGR (byte-order), 1 plane.
-
- /* PIXEL_FORMAT_RGB32 = 13, Deprecated */
- PIXEL_FORMAT_MJPEG = 14, // MJPEG compressed.
- /* PIXEL_FORMAT_MT21 = 15, Deprecated */
-
- // The P* in the formats below designates the number of bits per pixel
- // component. I.e. P9 is 9-bits per pixel component, P10 is 10-bits per pixel
- // component, etc.
- PIXEL_FORMAT_YUV420P9 = 16,
- PIXEL_FORMAT_YUV420P10 = 17,
- PIXEL_FORMAT_YUV422P9 = 18,
- PIXEL_FORMAT_YUV422P10 = 19,
- PIXEL_FORMAT_YUV444P9 = 20,
- PIXEL_FORMAT_YUV444P10 = 21,
- PIXEL_FORMAT_YUV420P12 = 22,
- PIXEL_FORMAT_YUV422P12 = 23,
- PIXEL_FORMAT_YUV444P12 = 24,
-
- /* PIXEL_FORMAT_Y8 = 25, Deprecated */
- PIXEL_FORMAT_Y16 = 26, // single 16bpp plane.
-
- PIXEL_FORMAT_ABGR = 27, // 32bpp RGBA (byte-order), 1 plane.
- PIXEL_FORMAT_XBGR = 28, // 24bpp RGBX (byte-order), 1 plane.
-
- PIXEL_FORMAT_P016LE = 29, // 24bpp NV12, 16 bits per channel
-
- PIXEL_FORMAT_XR30 =
- 30, // 32bpp BGRX, 10 bits per channel, 2 bits ignored, 1 plane
- PIXEL_FORMAT_XB30 =
- 31, // 32bpp RGBX, 10 bits per channel, 2 bits ignored, 1 plane
-
- PIXEL_FORMAT_BGRA = 32, // 32bpp ARGB (byte-order), 1 plane.
-
- // Please update UMA histogram enumeration when adding new formats here.
- PIXEL_FORMAT_MAX =
- PIXEL_FORMAT_BGRA, // Must always be equal to largest entry logged.
+enum class VideoPixelFormat {
+ I420, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
+ YV12, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
+ I422, // 16bpp YUV planar 1x1 Y, 2x1 UV samples.
+ I420A, // 20bpp YUVA planar 1x1 Y, 2x2 UV, 1x1 A samples.
+ I444, // 24bpp YUV planar, no subsampling.
+ NV12, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
+ NV21, // 12bpp with Y plane followed by a 2x2 interleaved VU plane.
+ YUY2, // 16bpp interleaved 1x1 Y, 2x1 U, 1x1 Y, 2x1 V samples.
+ ARGB, // 32bpp BGRA (byte-order), 1 plane.
+ XRGB, // 24bpp BGRX (byte-order), 1 plane.
+ RGB24, // 24bpp BGR (byte-order), 1 plane.
+ MJPEG, // MJPEG compressed.
+ Y16, // single 16bpp plane.
+ ABGR, // 32bpp RGBA (byte-order), 1 plane.
+ XBGR, // 24bpp RGBX (byte-order), 1 plane.
+ P016LE, // 24bpp NV12, 16 bits per channel
+ XR30, // 32bpp BGRX, 10 bits per channel, 2 bits ignored, 1 plane
+ XB30, // 32bpp RGBX, 10 bits per channel, 2 bits ignored, 1 plane
+ BGRA, // 32bpp ARGB (byte-order), 1 plane.
+ // The P* in the formats below designates the number of bits per pixel component. I.e. P9 is
+ // 9-bits per pixel component, P10 is 10-bits per pixel component, etc.
+ YUV420P9,
+ YUV420P10,
+ YUV422P9,
+ YUV422P10,
+ YUV444P9,
+ YUV444P10,
+ YUV420P12,
+ YUV422P12,
+ YUV444P12,
+ UNKNOWN, // Unknown or unspecified format value.
};
// Returns the name of a Format as a string.
-std::string VideoPixelFormatToString(VideoPixelFormat format);
+std::string videoPixelFormatToString(VideoPixelFormat format);
-// Returns human readable fourcc string.
-// If any of the four characters is non-printable, it outputs
-// "0x<32-bit integer in hex>", e.g. FourccToString(0x66616b00) returns
-// "0x66616b00".
-std::string FourccToString(uint32_t fourcc);
+// Returns human readable fourcc string. If any of the four characters is non-printable, it outputs
+// "0x<32-bit integer in hex>", e.g. FourccToString(0x66616b00) returns "0x66616b00".
+std::string fourccToString(uint32_t fourcc);
// Returns the number of significant bits per channel.
-size_t BitDepth(VideoPixelFormat format);
+size_t bitDepth(VideoPixelFormat format);
// Returns the number of planes for the |format|.
-size_t NumPlanes(VideoPixelFormat format);
+size_t numPlanes(VideoPixelFormat format);
-// Returns the required allocation size for a (tightly packed) frame of the
-// given coded size and format.
-size_t AllocationSize(VideoPixelFormat format, const android::ui::Size& coded_size);
+// Returns required allocation size for a (tightly packed) frame of the given coded size and format.
+size_t allocationSize(VideoPixelFormat format, const android::ui::Size& coded_size);
-// Returns the plane Size (in bytes) for a plane of the given coded size
-// and format.
-android::ui::Size PlaneSize(VideoPixelFormat format,
- size_t plane,
- const android::ui::Size& coded_size);
+// Returns the plane Size (in bytes) for a plane of the given coded size and format.
+android::ui::Size planeSize(VideoPixelFormat format, size_t plane,
+ const android::ui::Size& coded_size);
// Returns horizontal bits per pixel for given |plane| and |format|.
-int PlaneHorizontalBitsPerPixel(VideoPixelFormat format, size_t plane);
+int planeHorizontalBitsPerPixel(VideoPixelFormat format, size_t plane);
// Returns bits per pixel for given |plane| and |format|.
-int PlaneBitsPerPixel(VideoPixelFormat format, size_t plane);
+int planeBitsPerPixel(VideoPixelFormat format, size_t plane);
// Returns the number of bytes per element for given |plane| and |format|.
-int BytesPerElement(VideoPixelFormat format, size_t plane);
+int bytesPerElement(VideoPixelFormat format, size_t plane);
// Returns true if |plane| is a valid plane index for the given |format|.
-bool IsValidPlane(size_t plane, VideoPixelFormat format);
-
-// Returns true if |plane| is a valid plane index for the given |format|.
-bool IsValidPlane(VideoPixelFormat format, size_t plane);
+bool isValidPlane(VideoPixelFormat format, size_t plane);
// Returns the pixel size of each subsample for a given |plane| and |format|.
-// E.g. 2x2 for the U-plane in PIXEL_FORMAT_I420.
+// E.g. 2x2 for the U-plane in I420.
android::ui::Size SampleSize(VideoPixelFormat format, size_t plane);
-} // namespace media
+} // namespace android
-#endif // VIDEO_PIXEL_FORMAT_H_
+#endif // ANDROID_V4L2_CODEC2_COMMON_VIDEO_PIXEL_FORMAT_H