v4l2_codec2: Adapt VideoPixelFormat code to Android standards.

This CL reworks the VideoPixelFormat class that was branched
from Chrome:
- Removed all Chrome-specific references from comments.
- Removed the explicit enum numbering, since compatibility with Chrome is not needed.
- Removed deprecated formats.
- Adhered to the Android coding style (see the before/after sketch below).
- Switched to Android logging mechanisms.
- Switched to Android assert macros.

Bug: 155138142
Test: arc.VideoEncodeAccel.h264_192p_i420_vm
Change-Id: I8387af0b427ebc990ec6374432f18ad732894569
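
A minimal before/after sketch of the renaming described above; the names are
taken from the diff below, while the surrounding statements are illustrative
only:

    // Before (Chrome-style names in the media namespace):
    media::VideoPixelFormat format = media::VideoPixelFormat::PIXEL_FORMAT_NV12;
    ALOGV("format=%s", media::VideoPixelFormatToString(format).c_str());

    // After (Android-style names):
    VideoPixelFormat format = VideoPixelFormat::NV12;
    ALOGV("format=%s", videoPixelFormatToString(format).c_str());
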
diff --git a/common/FormatConverter.cpp b/common/FormatConverter.cpp
index 7e9c2e5..d694bd1 100644
--- a/common/FormatConverter.cpp
+++ b/common/FormatConverter.cpp
@@ -29,9 +29,8 @@
 namespace {
 // Constant expression that maps a pixel format conversion pair (src, dst) to a unique
 // integer.
-constexpr int convertMap(media::VideoPixelFormat src, media::VideoPixelFormat dst) {
-    return static_cast<int>(src) *
-                   (static_cast<int>(media::VideoPixelFormat::PIXEL_FORMAT_MAX) + 1) +
+constexpr int convertMap(VideoPixelFormat src, VideoPixelFormat dst) {
+    return static_cast<int>(src) * (static_cast<int>(VideoPixelFormat::UNKNOWN) + 1) +
            static_cast<int>(dst);
 }
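The uniqueness of this mapping relies on the multiplier being larger than every
enumerator value. A standalone sketch of the idea, assuming UNKNOWN stays the
highest enumerator in the reworked enum (the reduced Format enum below is a
hypothetical stand-in for VideoPixelFormat):

    // Reduced stand-in for VideoPixelFormat; only the relative order matters.
    enum class Format : int { I420, NV12, NV21, YV12, ABGR, UNKNOWN };

    // Every dst satisfies dst <= UNKNOWN < UNKNOWN + 1, so each src selects a
    // distinct "row" of the integer space and no two (src, dst) pairs collide.
    constexpr int convertMapSketch(Format src, Format dst) {
        return static_cast<int>(src) * (static_cast<int>(Format::UNKNOWN) + 1) +
               static_cast<int>(dst);
    }

    static_assert(convertMapSketch(Format::NV21, Format::NV12) !=
                          convertMapSketch(Format::NV12, Format::NV21),
                  "(src, dst) pairs map to distinct case labels");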
 
@@ -93,12 +92,11 @@
 }
 
 // static
-std::unique_ptr<FormatConverter> FormatConverter::Create(media::VideoPixelFormat outFormat,
+std::unique_ptr<FormatConverter> FormatConverter::Create(VideoPixelFormat outFormat,
                                                          const ui::Size& visibleSize,
                                                          uint32_t inputCount,
                                                          const ui::Size& codedSize) {
-    if (outFormat != media::VideoPixelFormat::PIXEL_FORMAT_I420 &&
-        outFormat != media::VideoPixelFormat::PIXEL_FORMAT_NV12) {
+    if (outFormat != VideoPixelFormat::I420 && outFormat != VideoPixelFormat::NV12) {
         ALOGE("Unsupported output format: %d", static_cast<int32_t>(outFormat));
         return nullptr;
     }
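A hypothetical call site for this factory, inferred only from the signature
above; the sizes and input count are placeholder values, not values taken from
this CL:

    ui::Size visibleSize(320, 240);
    ui::Size codedSize(320, 240);
    std::unique_ptr<FormatConverter> converter = FormatConverter::Create(
            VideoPixelFormat::NV12, visibleSize, /*inputCount=*/16, codedSize);
    if (converter == nullptr) {
        ALOGE("Failed to create FormatConverter");  // hypothetical error handling
    }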
@@ -111,11 +109,10 @@
     return converter;
 }
 
-c2_status_t FormatConverter::initialize(media::VideoPixelFormat outFormat,
-                                        const ui::Size& visibleSize, uint32_t inputCount,
-                                        const ui::Size& codedSize) {
+c2_status_t FormatConverter::initialize(VideoPixelFormat outFormat, const ui::Size& visibleSize,
+                                        uint32_t inputCount, const ui::Size& codedSize) {
     ALOGV("initialize(out_format=%s, visible_size=%dx%d, input_count=%u, coded_size=%dx%d)",
-          media::VideoPixelFormatToString(outFormat).c_str(), visibleSize.width, visibleSize.height,
+          videoPixelFormatToString(outFormat).c_str(), visibleSize.width, visibleSize.height,
           inputCount, codedSize.width, codedSize.height);
 
     std::shared_ptr<C2BlockPool> pool;
@@ -126,7 +123,7 @@
     }
 
     HalPixelFormat halFormat;
-    if (outFormat == media::VideoPixelFormat::PIXEL_FORMAT_I420) {
+    if (outFormat == VideoPixelFormat::I420) {
         // The Android HAL formats don't include I420, so we use YV12 instead and swap the U and
         // V planes during conversion to produce I420.
         halFormat = HalPixelFormat::YV12;
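The swap works because I420 and YV12 differ only in the order of their chroma
planes. A self-contained sketch of the plane offsets for an unpadded frame (the
struct and helpers below are hypothetical, not part of this file):

    #include <cstddef>

    struct ChromaOffsets {
        std::size_t uOffset;
        std::size_t vOffset;
    };

    // I420 places U before V after the Y plane; YV12 places V before U. Writing
    // I420 data into a YV12-allocated buffer therefore only swaps the chroma
    // destinations, which is what the conversion code does.
    constexpr ChromaOffsets i420Offsets(std::size_t width, std::size_t height) {
        return {width * height, width * height + (width * height) / 4};
    }
    constexpr ChromaOffsets yv12Offsets(std::size_t width, std::size_t height) {
        return {width * height + (width * height) / 4, width * height};
    }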
@@ -201,7 +198,7 @@
     const int dstStrideV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;   // only for I420
     const int dstStrideUV = outputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;  // only for NV12
 
-    media::VideoPixelFormat inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_UNKNOWN;
+    VideoPixelFormat inputFormat = VideoPixelFormat::UNKNOWN;
     *status = C2_OK;
     if (inputLayout.type == C2PlanarLayout::TYPE_YUV) {
         const uint8_t* srcY = inputView.data()[C2PlanarLayout::PLANE_Y];
@@ -211,10 +208,9 @@
         const int srcStrideU = inputLayout.planes[C2PlanarLayout::PLANE_U].rowInc;
         const int srcStrideV = inputLayout.planes[C2PlanarLayout::PLANE_V].rowInc;
         if (inputLayout.rootPlanes == 3) {
-            inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_YV12;
+            inputFormat = VideoPixelFormat::YV12;
         } else if (inputLayout.rootPlanes == 2) {
-            inputFormat = (srcV > srcU) ? media::VideoPixelFormat::PIXEL_FORMAT_NV12
-                                        : media::VideoPixelFormat::PIXEL_FORMAT_NV21;
+            inputFormat = (srcV > srcU) ? VideoPixelFormat::NV12 : VideoPixelFormat::NV21;
         }
 
         if (inputFormat == mOutFormat) {
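The NV12/NV21 distinction above works because, for a semi-planar buffer, the
mapped U and V pointers address the same interleaved chroma plane one byte
apart. A hedged sketch of that rule (the enum and function below are
hypothetical):

    #include <cstdint>

    enum class SemiPlanarLayout { NV12, NV21 };

    // NV12 interleaves chroma as CbCr (U first), NV21 as CrCb (V first), so the
    // pointer that comes second in memory identifies the layout: srcV > srcU
    // means NV12, srcU > srcV means NV21.
    inline SemiPlanarLayout detectSemiPlanarLayout(const uint8_t* srcU, const uint8_t* srcV) {
        return (srcV > srcU) ? SemiPlanarLayout::NV12 : SemiPlanarLayout::NV21;
    }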
@@ -224,32 +220,28 @@
         }
 
         switch (convertMap(inputFormat, mOutFormat)) {
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_YV12,
-                        media::VideoPixelFormat::PIXEL_FORMAT_I420):
+        case convertMap(VideoPixelFormat::YV12, VideoPixelFormat::I420):
             libyuv::I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY,
                              dstU, dstStrideU, dstV, dstStrideV, mVisibleSize.width,
                              mVisibleSize.height);
             break;
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_YV12,
-                        media::VideoPixelFormat::PIXEL_FORMAT_NV12):
+        case convertMap(VideoPixelFormat::YV12, VideoPixelFormat::NV12):
             libyuv::I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY,
                                dstStrideY, dstUV, dstStrideUV, mVisibleSize.width,
                                mVisibleSize.height);
             break;
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV12,
-                        media::VideoPixelFormat::PIXEL_FORMAT_I420):
+        case convertMap(VideoPixelFormat::NV12, VideoPixelFormat::I420):
             libyuv::NV12ToI420(srcY, srcStrideY, srcU, srcStrideU, dstY, dstStrideY, dstU,
                                dstStrideU, dstV, dstStrideV, mVisibleSize.width,
                                mVisibleSize.height);
             break;
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV21,
-                        media::VideoPixelFormat::PIXEL_FORMAT_I420):
+        case convertMap(VideoPixelFormat::NV21, VideoPixelFormat::I420):
             libyuv::NV21ToI420(srcY, srcStrideY, srcV, srcStrideV, dstY, dstStrideY, dstU,
                                dstStrideU, dstV, dstStrideV, mVisibleSize.width,
                                mVisibleSize.height);
             break;
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_NV21,
-                        media::VideoPixelFormat::PIXEL_FORMAT_NV12):
+        case convertMap(VideoPixelFormat::NV21, VideoPixelFormat::NV12):
+            ALOGV("%s(): Converting NV21 -> NV12", __func__);
             libyuv::CopyPlane(srcY, srcStrideY, dstY, dstStrideY, mVisibleSize.width,
                               mVisibleSize.height);
             copyPlaneByPixel(srcU, srcStrideU, 2, dstUV, dstStrideUV, 2, mVisibleSize.width / 2,
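For context, a minimal sketch of what a pixel-by-pixel plane copy such as
copyPlaneByPixel() has to do; the real helper is defined elsewhere in this file
and may differ in its details:

    #include <cstdint>

    // Copies one sample at a time so that source and destination can use
    // different pixel strides, e.g. picking every second byte out of an
    // interleaved NV21 chroma plane and writing it into the NV12 UV plane.
    static void copyPlaneByPixelSketch(const uint8_t* src, int srcStride, int srcPixelStride,
                                       uint8_t* dst, int dstStride, int dstPixelStride, int width,
                                       int height) {
        for (int row = 0; row < height; ++row) {
            for (int col = 0; col < width; ++col) {
                dst[row * dstStride + col * dstPixelStride] =
                        src[row * srcStride + col * srcPixelStride];
            }
        }
    }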
@@ -259,28 +251,26 @@
             break;
         default:
             ALOGE("Unsupported pixel format conversion from %s to %s",
-                  media::VideoPixelFormatToString(inputFormat).c_str(),
-                  media::VideoPixelFormatToString(mOutFormat).c_str());
+                  videoPixelFormatToString(inputFormat).c_str(),
+                  videoPixelFormatToString(mOutFormat).c_str());
             *status = C2_CORRUPTED;
             return inputBlock;  // This is actually redundant and should not be used.
         }
     } else if (inputLayout.type == C2PlanarLayout::TYPE_RGB) {
         // C2AllocationGralloc::map() only specifies RGBA_8888, not BGRA_8888; presumably
         // BGRA_8888 is not used here.
-        inputFormat = media::VideoPixelFormat::PIXEL_FORMAT_ABGR;
+        inputFormat = VideoPixelFormat::ABGR;
 
         const uint8_t* srcRGB = (idMap) ? idMap->addr() : inputView.data()[C2PlanarLayout::PLANE_R];
         const int srcStrideRGB =
                 (idMap) ? idMap->rowInc() : inputLayout.planes[C2PlanarLayout::PLANE_R].rowInc;
 
         switch (convertMap(inputFormat, mOutFormat)) {
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_ABGR,
-                        media::VideoPixelFormat::PIXEL_FORMAT_I420):
+        case convertMap(VideoPixelFormat::ABGR, VideoPixelFormat::I420):
             libyuv::ABGRToI420(srcRGB, srcStrideRGB, dstY, dstStrideY, dstU, dstStrideU, dstV,
                                dstStrideV, mVisibleSize.width, mVisibleSize.height);
             break;
-        case convertMap(media::VideoPixelFormat::PIXEL_FORMAT_ABGR,
-                        media::VideoPixelFormat::PIXEL_FORMAT_NV12): {
+        case convertMap(VideoPixelFormat::ABGR, VideoPixelFormat::NV12): {
             // There is no libyuv function that converts ABGR directly to NV12. Therefore, we first
             // convert to I420, writing Y to the dst-Y plane and U/V to temporary planes. Then we
             // copy the U and V pixels from the temporary planes to dst-UV in interleaved order.
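A hedged sketch of the two-step path described in the comment above, not the
code in this file: libyuv::ABGRToI420 is the call already used in this diff,
while libyuv::MergeUVPlane is shown only as one possible way to interleave the
temporary planes. All parameters are stand-ins for the locals used above, with
tmpU/tmpV being hypothetical (width / 2) x (height / 2) scratch planes:

    #include <libyuv.h>

    static void abgrToNV12Sketch(const uint8_t* srcRGB, int srcStrideRGB, uint8_t* dstY,
                                 int dstStrideY, uint8_t* dstUV, int dstStrideUV, uint8_t* tmpU,
                                 uint8_t* tmpV, int width, int height) {
        // Step 1: ABGR -> I420, writing Y straight to its destination and U/V to
        // the temporary planes.
        const int tmpStride = width / 2;
        libyuv::ABGRToI420(srcRGB, srcStrideRGB, dstY, dstStrideY, tmpU, tmpStride, tmpV,
                           tmpStride, width, height);
        // Step 2: interleave the temporary U and V planes into the NV12 UV plane.
        libyuv::MergeUVPlane(tmpU, tmpStride, tmpV, tmpStride, dstUV, dstStrideUV, width / 2,
                             height / 2);
    }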
@@ -295,8 +285,8 @@
         }
         default:
             ALOGE("Unsupported pixel format conversion from %s to %s",
-                  media::VideoPixelFormatToString(inputFormat).c_str(),
-                  media::VideoPixelFormatToString(mOutFormat).c_str());
+                  videoPixelFormatToString(inputFormat).c_str(),
+                  videoPixelFormatToString(mOutFormat).c_str());
             *status = C2_CORRUPTED;
             return inputBlock;  // This is actually redundant and should not be used.
         }
@@ -307,7 +297,7 @@
     }
 
     ALOGV("convertBlock(frame_index=%" PRIu64 ", format=%s)", frameIndex,
-          media::VideoPixelFormatToString(inputFormat).c_str());
+          videoPixelFormatToString(inputFormat).c_str());
     entry->mAssociatedFrameIndex = frameIndex;
     mAvailableQueue.pop();
     return outputBlock->share(C2Rect(mVisibleSize.width, mVisibleSize.height), C2Fence());