/*
 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderMPEG2.h"
#include "VideoDecoderTrace.h"
#include <string.h>

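// Upper bound on the coded picture size accepted by this decoder
// (1088 is 1080 rounded up to a 16-pixel macroblock boundary).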
#define MAX_PICTURE_WIDTH_MPEG2  1920
#define MAX_PICTURE_HEIGHT_MPEG2 1088

VideoDecoderMPEG2::VideoDecoderMPEG2(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_MPEG2),
      mBufferIDs(NULL),
      mNumBufferIDs(0) {
    // do nothing
}

VideoDecoderMPEG2::~VideoDecoderMPEG2() {
    stop();
}

Decode_Status VideoDecoderMPEG2::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        return DECODE_SUCCESS;
    }

    vbp_data_mpeg2 *data = NULL;
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            true, // config flag
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data->frame_width > MAX_PICTURE_WIDTH_MPEG2 ||
            data->codec_data->frame_height > MAX_PICTURE_HEIGHT_MPEG2) {
        return DECODE_INVALID_DATA;
    }

    status = startVA(data);
    return status;
}

void VideoDecoderMPEG2::stop(void) {
    if (mBufferIDs) {
        delete [] mBufferIDs;
        mBufferIDs = NULL;
    }
    mNumBufferIDs = 0;

    VideoDecoderBase::stop();
}

Decode_Status VideoDecoderMPEG2::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_mpeg2 *data = NULL;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;

    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }

#ifdef DUMP_INPUT_BUFFER
    if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
        DumpInputBuffer(buffer, "mpeg2");
    }
#endif

    buffer->ext = NULL;
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false, // config flag
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (data->codec_data->frame_width > MAX_PICTURE_WIDTH_MPEG2 ||
            data->codec_data->frame_height > MAX_PICTURE_HEIGHT_MPEG2) {
        return DECODE_INVALID_DATA;
    }

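    // VA may not have been started in start() if no config data was supplied;
    // bring it up now using the parsed stream information.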
    if (!mVAStarted) {
        status = startVA(data);
        CHECK_STATUS("startVA");
    }

    if (mSizeChanged && !useGraphicbuffer) {
        // Some containers report an incorrect width/height;
        // send a format change to OMX so it can update the crop info.
        mSizeChanged = false;
        ITRACE("Video size changed during startVA");
        return DECODE_FORMAT_CHANGE;
    }

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data->frame_width ||
            mVideoFormatInfo.height != (uint32_t)data->codec_data->frame_height) &&
            (data->codec_data->frame_width > 0) && (data->codec_data->frame_height > 0)) {
        // update encoded image size
        ITRACE("Video size changed from %dx%d to %dx%d",
                mVideoFormatInfo.width, mVideoFormatInfo.height,
                data->codec_data->frame_width, data->codec_data->frame_height);
        if (useGraphicbuffer && mStoreMetaData) {
            pthread_mutex_lock(&mFormatLock);
        }
        mVideoFormatInfo.width = data->codec_data->frame_width;
        mVideoFormatInfo.height = data->codec_data->frame_height;
        bool needFlush = false;
        if (useGraphicbuffer) {
            if (mStoreMetaData) {
                needFlush = true;

                mVideoFormatInfo.valid = false;
                pthread_mutex_unlock(&mFormatLock);
            } else {
                needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                        || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight);
            }
        }

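        // When a flush is needed, finish or drop the outstanding surfaces before
        // reporting the format change; otherwise just record the pending size change.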
        if (needFlush) {
            if (mStoreMetaData) {
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
            } else {
                flushSurfaceBuffers();
            }
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        } else {
            mSizeChanged = true;
        }

        setRenderRect();
    } else {
        if (useGraphicbuffer && mStoreMetaData) {
            mVideoFormatInfo.valid = true;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

    status = decodeFrame(buffer, data);
    CHECK_STATUS("decodeFrame");

    return status;
}

void VideoDecoderMPEG2::flush(void) {
    VideoDecoderBase::flush();
}

Decode_Status VideoDecoderMPEG2::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mpeg2 *data) {
    Decode_Status status;
    // check whether any slice was parsed; the buffer may contain configuration data only
    if (data->num_pictures == 0 || data->pic_data == NULL) {
        WTRACE("Number of pictures is 0, buffer contains configuration data only?");
        return DECODE_SUCCESS;
    }

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");

    // set referenceFrame to true if the decoded frame is an I/P frame, false otherwise
    int frameType = data->codec_data->frame_type;
    mAcquiredBuffer->referenceFrame = (frameType == MPEG2_PICTURE_TYPE_I || frameType == MPEG2_PICTURE_TYPE_P);

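    // More than one parsed picture in the buffer is treated as field coding:
    // the first field's picture structure selects the scan format; otherwise
    // the buffer holds a single frame picture.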
    if (data->num_pictures > 1) {
        if (data->pic_data[0].pic_parms->picture_coding_extension.bits.picture_structure == MPEG2_PIC_STRUCT_TOP) {
            mAcquiredBuffer->renderBuffer.scanFormat = VA_TOP_FIELD;
        } else {
            mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD;
        }
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
    mAcquiredBuffer->renderBuffer.flag = 0;
    if (buffer->flag & WANT_DECODE_ONLY) {
        mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
    }
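    // A pending size change is reported downstream by tagging this output buffer.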
    if (mSizeChanged) {
        mSizeChanged = false;
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
    }

    for (uint32_t index = 0; index < data->num_pictures; index++) {
        status = decodePicture(data, index);
        if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
            return status;
        }
    }

    // if the sample is successfully decoded, call outputSurfaceBuffer();
    // otherwise call releaseSurfaceBuffer();
    status = outputSurfaceBuffer();
    return status;
}

Decode_Status VideoDecoderMPEG2::decodePicture(vbp_data_mpeg2 *data, int picIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;

    vbp_picture_data_mpeg2 *picData = &(data->pic_data[picIndex]);
    VAPictureParameterBufferMPEG2 *picParam = picData->pic_parms;

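    // One picture parameter buffer and one IQ matrix buffer, plus a slice
    // parameter buffer and a slice data buffer for each slice.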
    status = allocateVABufferIDs(picData->num_slices * 2 + 2);
    CHECK_STATUS("allocateVABufferIDs");

    // set up the reference pictures in the picture parameter buffer
    status = setReference(picParam);
    CHECK_STATUS("setReference");

    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");
    // set mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding
    mDecodingFrame = true;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferMPEG2),
            1,
            picParam,
            &mBufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferMPEG2),
            1,
            data->iq_matrix_buffer,
            &mBufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
    bufferIDCount++;

    for (uint32_t i = 0; i < picData->num_slices; i++) {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceParameterBufferType,
                sizeof(VASliceParameterBufferMPEG2),
                1,
                &(picData->slice_data[i].slice_param),
                &mBufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
        bufferIDCount++;

        // slice data buffer pointer
        // Note that this is the original data buffer pointer;
        // the offset to the actual slice data is provided in
        // slice_data_offset in VASliceParameterBufferMPEG2.
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceDataBufferType,
                picData->slice_data[i].slice_size, // size
                1, // num_elements
                picData->slice_data[i].buffer_addr + picData->slice_data[i].slice_offset,
                &mBufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceDataBuffer");
        bufferIDCount++;
    }

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            mBufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    vaStatus = vaEndPicture(mVADisplay, mVAContext);
    mDecodingFrame = false;
    CHECK_VA_STATUS("vaEndPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG2::setReference(VAPictureParameterBufferMPEG2 *picParam) {
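    // MPEG-2 reference selection: I pictures use no references, P pictures use
    // the most recent reference picture, and B pictures need both a forward and
    // a backward reference.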
    switch (picParam->picture_coding_type) {
        case MPEG2_PICTURE_TYPE_I:
            picParam->forward_reference_picture = VA_INVALID_SURFACE;
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MPEG2_PICTURE_TYPE_P:
            if (mLastReference != NULL) {
                picParam->forward_reference_picture = mLastReference->renderBuffer.surface;
            } else {
                VTRACE("%s: no reference frame, but keep decoding", __FUNCTION__);
                picParam->forward_reference_picture = VA_INVALID_SURFACE;
            }
            picParam->backward_reference_picture = VA_INVALID_SURFACE;
            break;
        case MPEG2_PICTURE_TYPE_B:
            if (mLastReference == NULL || mForwardReference == NULL) {
                return DECODE_NO_REFERENCE;
            } else {
                picParam->forward_reference_picture = mForwardReference->renderBuffer.surface;
                picParam->backward_reference_picture = mLastReference->renderBuffer.surface;
            }
            break;
        default:
            // should never reach here
            return DECODE_PARSER_FAIL;
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderMPEG2::startVA(vbp_data_mpeg2 *data) {
    updateFormatInfo(data);

    VAProfile vaProfile;

    // profile_and_level_indication is an 8-bit field:
    // | x | x x x | x x x x |
    //      profile   level
    // profile: 101  - simple
    //          100  - main
    // level:   1010 - low
    //          1000 - main
    //          0100 - high
    //          0110 - high 1440
    if ((data->codec_data->profile_and_level_indication & 0x70) == 0x50) {
        vaProfile = VAProfileMPEG2Simple;
    } else {
        vaProfile = VAProfileMPEG2Main;
    }

    return VideoDecoderBase::setupVA(MPEG2_SURFACE_NUMBER, vaProfile);
}

Decode_Status VideoDecoderMPEG2::allocateVABufferIDs(int32_t number) {
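    // Keep the current buffer ID array if it is already larger than the
    // requested count; otherwise reallocate it at the new size.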
    if (mNumBufferIDs > number) {
        return DECODE_SUCCESS;
    }
    if (mBufferIDs) {
        delete [] mBufferIDs;
    }
    mBufferIDs = NULL;
    mNumBufferIDs = 0;
    mBufferIDs = new VABufferID [number];
    if (mBufferIDs == NULL) {
        return DECODE_MEMORY_FAIL;
    }
    mNumBufferIDs = number;
    return DECODE_SUCCESS;
}

void VideoDecoderMPEG2::updateFormatInfo(vbp_data_mpeg2 *data) {
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
            mVideoFormatInfo.width, mVideoFormatInfo.height,
            data->codec_data->frame_width,
            data->codec_data->frame_height);

    mVideoFormatInfo.cropBottom = (data->codec_data->frame_height > mVideoFormatInfo.height) ?
            (data->codec_data->frame_height - mVideoFormatInfo.height) : 0;
    mVideoFormatInfo.cropRight = (data->codec_data->frame_width > mVideoFormatInfo.width) ?
            (data->codec_data->frame_width - mVideoFormatInfo.width) : 0;

    if ((mVideoFormatInfo.width != (uint32_t)data->codec_data->frame_width ||
            mVideoFormatInfo.height != (uint32_t)data->codec_data->frame_height) &&
            (data->codec_data->frame_width > 0) && (data->codec_data->frame_height > 0)) {
        // update encoded image size
        mVideoFormatInfo.width = data->codec_data->frame_width;
        mVideoFormatInfo.height = data->codec_data->frame_height;
        mSizeChanged = true;
        ITRACE("Video size changed.");
    }

    // video_range has a default value of 0; Y then ranges from 16 to 235.
    mVideoFormatInfo.videoRange = data->codec_data->video_range;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; leave it at 0 so no color space flag is set
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }

    mVideoFormatInfo.aspectX = data->codec_data->par_width;
    mVideoFormatInfo.aspectY = data->codec_data->par_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.valid = true;

    setRenderRect();
}

Decode_Status VideoDecoderMPEG2::checkHardwareCapability() {
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay,
            VAProfileMPEG2Main,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
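    // Coarse capability check: compare the clip's pixel count against the product
    // of the driver-reported maximum width and height.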
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supported resolution %d x %d is smaller than the clip resolution %d x %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
    return DECODE_SUCCESS;
}