/*
 * Copyright (c) 2007-2008 Intel Corporation. All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/* Authors: Yuan, Shengquan */

/*
 * This is a real program to show how VAAPI decoding works.
 * It decodes a simple H264 clip which is hardcoded into the array h264_16x16.
 */

#include <stdio.h>
#include <stdlib.h>
#include <va/va.h>
#include <va/va_android.h>

/* Data dump of a 16x16 H264 video clip */
static unsigned char h264_16x16[] = {
    0x00,0x00,0x00,0x01,0x67,0x64,0x00,0x0a,0xac,0xd9,0x5e,0x84,0x00,0x00,0x03,0x00,
    0x04,0x00,0x00,0x03,0x00,0xca,0x3c,0x48,0x96,0x58,0x00,0x00,0x00,0x01,0x68,0xeb,
    0xe3,0xcb,0x22,0xc0,0x00,0x00,0x01,0x65,0x88,0x84,0x00,0x57,0x87,0xc6,0xb2,0xd9,
    0xe6,0x75,0xbe,0x70,0x28,0x14,0xc0,0xfb,0x61,0xa2,0x42,0xd8,0xca,0x67,0xa2,0xd0,
    0x3e,0x14,0x61,0x6d,0x80,0xc0,0x2b,0x11,0x9d,0x92,0x47,0x73,0x04,0x87,0xe3,0xb0,
    0x8b,0x82,0xc9,0xf0,0x8f,0xaf,0x35,0x28,0x6b,0x11,0xdf,0x9c,0xe9,0xa2,0xc5,0xb0,
    0x8e,0xed,0xda,0x0e,0x3c,0x1c,0xb4,0xc0,0x00,0x00,0x00,0x00,
};

#define CHECK_VASTATUS(va_status,func)                                        \
    if (va_status != VA_STATUS_SUCCESS) {                                     \
        fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \
        exit(1);                                                              \
    } else                                                                    \
        printf("%s:%s (%d) returns okay\n", __func__, func, __LINE__)

/* Parameter buffers are hardcoded here without a bitstream parser helper;
 * the values below describe the SPS/PPS/slice of the clip above. */
static VAPictureParameterBufferH264 pic_param = {
    CurrPic: {VA_INVALID_SURFACE, 0, 0x8, 0, 0},
    ReferenceFrames: {[0 ... 15] = {VA_INVALID_SURFACE, 0, 0, 0, 0}},
    picture_width_in_mbs_minus1: 0,
    picture_height_in_mbs_minus1: 0,
    bit_depth_luma_minus8: 0,
    bit_depth_chroma_minus8: 0,
    num_ref_frames: 0,
    seq_fields: {value: 81},
    num_slice_groups_minus1: 0,
    slice_group_map_type: 0,
    slice_group_change_rate_minus1: 0,
    pic_init_qp_minus26: -3,
    pic_init_qs_minus26: 0,
    chroma_qp_index_offset: -2,
    second_chroma_qp_index_offset: -2,
    pic_fields: {value: 0x01b},
    frame_num: 0,
    num_ref_idx_l0_default_active_minus1: 0,
    num_ref_idx_l1_default_active_minus1: 0
};

static VAIQMatrixBufferH264 iq_matrix = {
    ScalingList4x4: {[0 ... 5] = {[0 ... 15] = 16}},
    ScalingList8x8: {{[0 ... 63] = 16}, {[0 ... 63] = 16}}
};

static VASliceParameterBufferH264 slice_param = {
    slice_data_size: 69,
    slice_data_offset: 0,
    slice_data_flag: 0,
    slice_data_bit_offset: 64,
    first_mb_in_slice: 0,
    slice_type: 2,
    direct_spatial_mv_pred_flag: 0,
    num_ref_idx_l0_active_minus1: 0,
    num_ref_idx_l1_active_minus1: 0,
    cabac_init_idc: 0,
    slice_qp_delta: 5,
    disable_deblocking_filter_idc: 0,
    slice_alpha_c0_offset_div2: 0,
    slice_beta_offset_div2: 0,
    RefPicList0: {[0 ... 31] = {VA_INVALID_SURFACE, 0, 0, 0, 0}},
    RefPicList1: {[0 ... 31] = {VA_INVALID_SURFACE, 0, 0, 0, 0}},
    luma_log2_weight_denom: 5,
    chroma_log2_weight_denom: 5,
    luma_weight_l0_flag: 0,
    chroma_weight_l0_flag: 0,
    luma_weight_l1_flag: 0,
    chroma_weight_l1_flag: 0,
};

#define CLIP_WIDTH  16
#define CLIP_HEIGHT 16

int main(int argc, char **argv)
{
    VAEntrypoint entrypoints[5];
    int num_entrypoints, vld_entrypoint;
    VAConfigAttrib attrib;
    VAConfigID config_id;
    VASurfaceID surface_id;
    VAContextID context_id;
    VABufferID pic_param_buf, iqmatrix_buf, slice_param_buf, slice_data_buf;
    int major_ver, minor_ver;
    VADisplay va_dpy;
    VAStatus va_status;
    int android_display = 0;
    VAImage surface_image;
    unsigned char *surface_p = NULL, *Y_start = NULL, *U_start = NULL;
    int Y_pitch = 0, U_pitch = 0, row, size = 0;
    FILE *decyuv_fp = NULL;

    decyuv_fp = fopen("/sdcard/vadec.nv12", "w+");
    if (decyuv_fp == NULL)
        printf("Open YUV file /sdcard/vadec.nv12 failed\n");

    va_dpy = vaGetDisplay(&android_display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    CHECK_VASTATUS(va_status, "vaInitialize");

    va_status = vaQueryConfigEntrypoints(va_dpy, VAProfileH264High,
                                         entrypoints, &num_entrypoints);
    CHECK_VASTATUS(va_status, "vaQueryConfigEntrypoints");

    for (vld_entrypoint = 0; vld_entrypoint < num_entrypoints; vld_entrypoint++) {
        if (entrypoints[vld_entrypoint] == VAEntrypointVLD)
            break;
    }
    if (vld_entrypoint == num_entrypoints) {
        /* did not find the VLD entry point */
        printf("VAEntrypointVLD is not supported, exit\n");
        exit(1);
    }

    attrib.type = VAConfigAttribRTFormat;
    vaGetConfigAttributes(va_dpy, VAProfileH264High, VAEntrypointVLD, &attrib, 1);
    if ((attrib.value & VA_RT_FORMAT_YUV420) == 0) {
        /* did not find the desired YUV420 RT format */
        printf("VA_RT_FORMAT_YUV420 is not supported, exit\n");
        exit(1);
    }

    va_status = vaCreateConfig(va_dpy, VAProfileH264High, VAEntrypointVLD,
                               &attrib, 1, &config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig");

    va_status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_YUV420,
                                 CLIP_WIDTH, CLIP_HEIGHT,
                                 &surface_id, 1, NULL, 0);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");

    /* Create a context for this decode pipe */
    va_status = vaCreateContext(va_dpy, config_id, CLIP_WIDTH, CLIP_HEIGHT,
                                VA_PROGRESSIVE, &surface_id, 1, &context_id);
    CHECK_VASTATUS(va_status, "vaCreateContext");

    pic_param.CurrPic.picture_id = surface_id;
    va_status = vaCreateBuffer(va_dpy, context_id, VAPictureParameterBufferType,
                               sizeof(VAPictureParameterBufferH264), 1,
                               &pic_param, &pic_param_buf);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    va_status = vaCreateBuffer(va_dpy, context_id, VAIQMatrixBufferType,
                               sizeof(VAIQMatrixBufferH264), 1,
                               &iq_matrix, &iqmatrix_buf);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    va_status = vaCreateBuffer(va_dpy, context_id, VASliceParameterBufferType,
                               sizeof(VASliceParameterBufferH264), 1,
                               &slice_param, &slice_param_buf);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    /* The slice NAL (start code included) begins at offset 0x24 of the clip
     * and is 69 bytes long, matching slice_param.slice_data_size. */
    va_status = vaCreateBuffer(va_dpy, context_id, VASliceDataBufferType,
                               69, 1, h264_16x16 + 0x24, &slice_data_buf);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    va_status = vaBeginPicture(va_dpy, context_id, surface_id);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &pic_param_buf, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &iqmatrix_buf, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &slice_param_buf, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &slice_data_buf, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaEndPicture(va_dpy, context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    va_status = vaSyncSurface(va_dpy, surface_id);
    CHECK_VASTATUS(va_status, "vaSyncSurface");

    /* Map the decoded surface so its NV12 data can be dumped to the file */
    va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
    CHECK_VASTATUS(va_status, "vaDeriveImage");

    va_status = vaMapBuffer(va_dpy, surface_image.buf, (void **)&surface_p);
    CHECK_VASTATUS(va_status, "vaMapBuffer");

    Y_start = surface_p;
    Y_pitch = surface_image.pitches[0];
    switch (surface_image.format.fourcc) {
    case VA_FOURCC_NV12:
        U_start = (unsigned char *)surface_p + surface_image.offsets[1];
        U_pitch = surface_image.pitches[1];
        break;
    default:
        printf("Unhandled fourcc, exit\n");
        exit(1);
    }
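    /*
     * The original listing is truncated at the Y-plane copy, so the rest of
     * main() below is a sketch of the usual ending rather than the original
     * code: write the decoded NV12 data (Y plane, then the interleaved UV
     * plane) to decyuv_fp, then unmap and release the VA-API objects.
     */
    if (decyuv_fp) {
        /* copy Y plane row by row to skip the surface pitch padding */
        for (row = 0; row < CLIP_HEIGHT; row++) {
            fwrite(Y_start + row * Y_pitch, 1, CLIP_WIDTH, decyuv_fp);
            size += CLIP_WIDTH;
        }
        /* copy the interleaved UV plane (half height for 4:2:0) */
        for (row = 0; row < CLIP_HEIGHT / 2; row++) {
            fwrite(U_start + row * U_pitch, 1, CLIP_WIDTH, decyuv_fp);
            size += CLIP_WIDTH;
        }
        printf("Dumped %d bytes of NV12 data to /sdcard/vadec.nv12\n", size);
        fclose(decyuv_fp);
    }

    vaUnmapBuffer(va_dpy, surface_image.buf);
    vaDestroyImage(va_dpy, surface_image.image_id);
    vaDestroySurfaces(va_dpy, &surface_id, 1);
    vaDestroyContext(va_dpy, context_id);
    vaDestroyConfig(va_dpy, config_id);
    vaTerminate(va_dpy);

    return 0;
}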