author     Zhao Yakui <yakui.zhao@intel.com>       2014-04-23 15:41:25 +0800
committer  Xiang, Haihao <haihao.xiang@intel.com>  2014-09-09 01:27:26 +0800
commit     49cd7ddd99562fcec55299990fac303c8a97c2ad (patch)
tree       cef188bd219f7507cb5f57fff260a8dcc963b933
parent     14f13cae75f05b751f1c030568b7c837c53e654f (diff)
test/encode/avcenc: Follow the idea in h264encode to generalize avcenc test case
The following is generalized:
a. the switch between encoding order and displaying order
b. Reference list0/1
c. It can insert more IDR frames instead of only one IDR frame.

Signed-off-by: Zhao Yakui <yakui.zhao@intel.com>
(cherry picked from commit 615c2a0f3210ec567fde5b0bac60c3a17cff3aed)

Conflicts:
	test/encode/avcenc.c
-rw-r--r--   test/encode/avcenc.c   432
1 file changed, 306 insertions, 126 deletions
diff --git a/test/encode/avcenc.c b/test/encode/avcenc.c
index 23782e6..60b2a77 100644
--- a/test/encode/avcenc.c
+++ b/test/encode/avcenc.c
@@ -63,6 +63,8 @@
#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
+#define FRAME_IDR 7
+
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
@@ -86,7 +88,6 @@ static unsigned char *newImageBuffer = 0;
static int qp_value = 26;
static int intra_period = 30;
-static int pb_period = 5;
static int frame_bit_rate = -1;
static int frame_rate = 30;
static int ip_period = 1;
@@ -238,11 +239,27 @@ static void destory_encode_pipe()
#define SID_REFERENCE_PICTURE_L1 3
#define SID_RECON_PICTURE 4
#define SID_NUMBER SID_RECON_PICTURE + 1
+
+#define SURFACE_NUM 16 /* 16 surfaces for reference */
+
static VASurfaceID surface_ids[SID_NUMBER];
+static VASurfaceID ref_surface[SURFACE_NUM];
+
+static unsigned long long current_frame_display = 0;
+static unsigned long long current_IDR_display = 0;
+
+static VAPictureH264 CurrentCurrPic;
+
+#define current_slot (current_frame_display % SURFACE_NUM)
static int frame_number;
-static int enc_frame_number;
+static unsigned long long enc_frame_number;
+static int current_frame_type;
+static int current_frame_num;
+static unsigned int current_poc;
+static unsigned int num_ref_frames = 2;
+static unsigned int numShortTerm = 0;
/***************************************************/
static void *
@@ -261,13 +278,25 @@ static void alloc_encode_resource(FILE *yuv_fp)
// Create surface
va_status = vaCreateSurfaces(
- va_dpy,
- VA_RT_FORMAT_YUV420, picture_width, picture_height,
- &surface_ids[0], SID_NUMBER,
- NULL, 0
- );
+ va_dpy,
+ VA_RT_FORMAT_YUV420, picture_width, picture_height,
+ surface_ids, SID_NUMBER,
+ NULL, 0
+ );
+
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
+ // Create surface
+ va_status = vaCreateSurfaces(
+ va_dpy,
+ VA_RT_FORMAT_YUV420, picture_width, picture_height,
+ ref_surface, SURFACE_NUM,
+ NULL, 0
+ );
+
+ CHECK_VASTATUS(va_status, "vaCreateSurfaces");
+
+
newImageBuffer = (unsigned char *)malloc(frame_size);
/* firstly upload YUV data to SID_INPUT_PICTURE_1 */
@@ -286,13 +315,15 @@ static void release_encode_resource()
free(newImageBuffer);
// Release all the surfaces resource
- vaDestroySurfaces(va_dpy, &surface_ids[0], SID_NUMBER);
+ vaDestroySurfaces(va_dpy, surface_ids, SID_NUMBER);
+ // Release all the reference surfaces
+ vaDestroySurfaces(va_dpy, ref_surface, SURFACE_NUM);
}
static void avcenc_update_sei_param(int frame_num)
{
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
- unsigned int length_in_bits, offset_in_bytes;
+ unsigned int length_in_bits;
unsigned char *packed_sei_buffer = NULL;
VAStatus va_status;
@@ -306,7 +337,6 @@ static void avcenc_update_sei_param(int frame_num)
0,
&packed_sei_buffer);
- offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderH264_SEI;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
@@ -328,23 +358,151 @@ static void avcenc_update_sei_param(int frame_num)
return;
}
-static void avcenc_update_picture_parameter(int slice_type, int frame_num, int display_num, int is_idr)
+#define partition(ref, field, key, ascending) \
+ while (i <= j) { \
+ if (ascending) { \
+ while (ref[i].field < key) \
+ i++; \
+ while (ref[j].field > key) \
+ j--; \
+ } else { \
+ while (ref[i].field > key) \
+ i++; \
+ while (ref[j].field < key) \
+ j--; \
+ } \
+ if (i <= j) { \
+ tmp = ref[i]; \
+ ref[i] = ref[j]; \
+ ref[j] = tmp; \
+ i++; \
+ j--; \
+ } \
+ } \
+
+static void sort_one(VAPictureH264 ref[], int left, int right,
+ int ascending, int frame_idx)
+{
+ int i = left, j = right;
+ unsigned int key;
+ VAPictureH264 tmp;
+
+ if (frame_idx) {
+ key = ref[(left + right) / 2].frame_idx;
+ partition(ref, frame_idx, key, ascending);
+ } else {
+ key = ref[(left + right) / 2].TopFieldOrderCnt;
+ partition(ref, TopFieldOrderCnt, (signed int)key, ascending);
+ }
+
+ /* recursion */
+ if (left < j)
+ sort_one(ref, left, j, ascending, frame_idx);
+
+ if (i < right)
+ sort_one(ref, i, right, ascending, frame_idx);
+}
+
+static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key, unsigned int frame_idx,
+ int partition_ascending, int list0_ascending, int list1_ascending)
+{
+ int i = left, j = right;
+ VAPictureH264 tmp;
+
+ if (frame_idx) {
+ partition(ref, frame_idx, key, partition_ascending);
+ } else {
+ partition(ref, TopFieldOrderCnt, (signed int)key, partition_ascending);
+ }
+
+ sort_one(ref, left, i-1, list0_ascending, frame_idx);
+ sort_one(ref, j+1, right, list1_ascending, frame_idx);
+}
+
+static int update_RefPicList()
+{
+
+ if (current_frame_type == SLICE_TYPE_P) {
+ memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_one(RefPicList0, 0, numShortTerm-1, 0, 1);
+ }
+
+ if (current_frame_type == SLICE_TYPE_B) {
+ memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_two(RefPicList0, 0, numShortTerm-1, current_poc, 0,
+ 1, 0, 1);
+
+ memcpy(RefPicList1, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
+ sort_two(RefPicList1, 0, numShortTerm-1, current_poc, 0,
+ 0, 1, 0);
+ }
+
+ return 0;
+}
+
+static int calc_poc(int pic_order_cnt_lsb)
+{
+ static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
+ int prevPicOrderCntMsb, prevPicOrderCntLsb;
+ int PicOrderCntMsb, TopFieldOrderCnt;
+
+ if (current_frame_type == FRAME_IDR)
+ prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
+ else {
+ prevPicOrderCntMsb = PicOrderCntMsb_ref;
+ prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
+ }
+
+ if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
+ ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
+ PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
+ else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
+ ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
+ PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
+ else
+ PicOrderCntMsb = prevPicOrderCntMsb;
+
+ TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
+
+ if (current_frame_type != SLICE_TYPE_B) {
+ PicOrderCntMsb_ref = PicOrderCntMsb;
+ pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
+ }
+
+ return TopFieldOrderCnt;
+}
+
+static void avcenc_update_picture_parameter(int slice_type, int is_idr)
{
VAEncPictureParameterBufferH264 *pic_param;
VAStatus va_status;
// Picture level
pic_param = &avcenc_context.pic_param;
- pic_param->CurrPic.picture_id = surface_ids[SID_RECON_PICTURE];
- pic_param->CurrPic.TopFieldOrderCnt = display_num * 2;
- pic_param->ReferenceFrames[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L0];
- pic_param->ReferenceFrames[1].picture_id = surface_ids[SID_REFERENCE_PICTURE_L1];
- pic_param->ReferenceFrames[2].picture_id = VA_INVALID_ID;
+
+ pic_param->CurrPic.picture_id = ref_surface[current_slot];
+ pic_param->CurrPic.frame_idx = current_frame_num;
+ pic_param->CurrPic.flags = 0;
+
+ pic_param->CurrPic.TopFieldOrderCnt = current_poc;
+ pic_param->CurrPic.BottomFieldOrderCnt = pic_param->CurrPic.TopFieldOrderCnt;
+
assert(avcenc_context.codedbuf_buf_id != VA_INVALID_ID);
pic_param->coded_buf = avcenc_context.codedbuf_buf_id;
- pic_param->frame_num = frame_num;
+ pic_param->frame_num = current_frame_num;
pic_param->pic_fields.bits.idr_pic_flag = !!is_idr;
pic_param->pic_fields.bits.reference_pic_flag = (slice_type != SLICE_TYPE_B);
+ CurrentCurrPic = pic_param->CurrPic;
+
+ if (slice_type == SLICE_TYPE_P || slice_type == SLICE_TYPE_B)
+ memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
+
+ if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
+ pic_param->ReferenceFrames[0] = RefPicList0[0];
+ }
+ if (slice_type == SLICE_TYPE_B) {
+ pic_param->ReferenceFrames[1] = RefPicList1[0];
+ }
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
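
The new calc_poc() carries the POC type 0 MSB across pic_order_cnt_lsb wrap-arounds, following the same derivation used in h264encode.c. Below is a minimal standalone sketch of that wrap handling, assuming MaxPicOrderCntLsb = 256 (in avcenc.c this constant comes from the SPS fields and is not part of this hunk); unlike the test case, the sketch updates its stored reference values for every frame, not only for non-B frames:

/* Standalone sketch of the pic_order_cnt type 0 wrap handling used by
 * calc_poc().  MaxPicOrderCntLsb = 256 is an assumption here; avcenc.c
 * derives it from log2_max_pic_order_cnt_lsb_minus4 in the SPS.
 * Simplified: avcenc.c only updates the stored reference values for
 * non-B frames. */
#include <stdio.h>

#define MaxPicOrderCntLsb 256

static int prev_msb = 0, prev_lsb = 0;

static int poc_from_lsb(int lsb, int is_idr)
{
    int msb;

    if (is_idr)
        prev_msb = prev_lsb = 0;

    /* Carry the MSB when the LSB wraps around in either direction */
    if (lsb < prev_lsb && (prev_lsb - lsb) >= MaxPicOrderCntLsb / 2)
        msb = prev_msb + MaxPicOrderCntLsb;
    else if (lsb > prev_lsb && (lsb - prev_lsb) > MaxPicOrderCntLsb / 2)
        msb = prev_msb - MaxPicOrderCntLsb;
    else
        msb = prev_msb;

    prev_msb = msb;
    prev_lsb = lsb;

    return msb + lsb;   /* TopFieldOrderCnt */
}

int main(void)
{
    /* LSB sequence 0, 128, 254, 2: the last value wraps, so the MSB
     * advances by 256 and the POC stays monotonically increasing
     * (0, 128, 254, 258). */
    int lsbs[] = { 0, 128, 254, 2 };
    unsigned i;

    for (i = 0; i < sizeof(lsbs) / sizeof(lsbs[0]); i++)
        printf("lsb=%3d -> poc=%d\n", lsbs[i], poc_from_lsb(lsbs[i], i == 0));

    return 0;
}
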
@@ -352,6 +510,7 @@ static void avcenc_update_picture_parameter(int slice_type, int frame_num, int d
sizeof(*pic_param), 1, pic_param,
&avcenc_context.pic_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
+
}
#ifndef VA_FOURCC_I420
@@ -453,21 +612,13 @@ static void avcenc_update_slice_parameter(int slice_type)
/* FIXME: fill other fields */
if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
- int j;
- slice_param->RefPicList0[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L0];
- for (j = 1; j < 32; j++) {
- slice_param->RefPicList0[j].picture_id = VA_INVALID_SURFACE;
- slice_param->RefPicList0[j].flags = VA_PICTURE_H264_INVALID;
- }
+ memset(slice_param->RefPicList0, 0xFF, 32 * sizeof(VAPictureH264));
+ slice_param->RefPicList0[0] = RefPicList0[0];
}
if ((slice_type == SLICE_TYPE_B)) {
- int j;
- slice_param->RefPicList1[0].picture_id = surface_ids[SID_REFERENCE_PICTURE_L1];
- for (j = 1; j < 32; j++) {
- slice_param->RefPicList1[j].picture_id = VA_INVALID_SURFACE;
- slice_param->RefPicList1[j].flags = VA_PICTURE_H264_INVALID;
- }
+ memset(slice_param->RefPicList1, 0xFF, 32 * sizeof(VAPictureH264));
+ slice_param->RefPicList1[0] = RefPicList1[0];
}
va_status = vaCreateBuffer(va_dpy,
@@ -508,6 +659,30 @@ static void avcenc_update_slice_parameter(int slice_type)
avcenc_context.num_slices = i;
}
+static int update_ReferenceFrames(void)
+{
+ int i;
+
+ /* B-frame is not used for reference */
+ if (current_frame_type == SLICE_TYPE_B)
+ return 0;
+
+ CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ numShortTerm++;
+ if (numShortTerm > num_ref_frames)
+ numShortTerm = num_ref_frames;
+ for (i=numShortTerm-1; i>0; i--)
+ ReferenceFrames[i] = ReferenceFrames[i-1];
+ ReferenceFrames[0] = CurrentCurrPic;
+
+ if (current_frame_type != SLICE_TYPE_B)
+ current_frame_num++;
+ if (current_frame_num > MaxFrameNum)
+ current_frame_num = 0;
+
+ return 0;
+}
+
static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice_type, int is_idr)
{
VAStatus va_status;
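
update_ReferenceFrames() maintains the short-term reference list as a sliding window: after each non-B frame the new reconstructed picture is pushed in at index 0, older entries shift down, and the window is capped at num_ref_frames. A minimal sketch of the same window behaviour, using plain frame numbers in place of VAPictureH264 entries (the names below are illustrative, not taken from avcenc.c):

/* Sliding reference window sketch: newest entry at index 0, capped at
 * NUM_REF_FRAMES, mirroring what update_ReferenceFrames() does with
 * VAPictureH264 entries.  Frame numbers stand in for reconstructed
 * pictures. */
#include <stdio.h>

#define NUM_REF_FRAMES 2

static int ref_window[NUM_REF_FRAMES];
static unsigned num_short_term = 0;

static void push_reference(int frame_num)
{
    int i;

    if (num_short_term < NUM_REF_FRAMES)
        num_short_term++;

    /* shift older references down, dropping the oldest when full */
    for (i = (int)num_short_term - 1; i > 0; i--)
        ref_window[i] = ref_window[i - 1];
    ref_window[0] = frame_num;
}

int main(void)
{
    int f;

    /* After pushing frames 0, 1, 2, 3 the window holds {3, 2}:
     * only the two most recent non-B pictures remain. */
    for (f = 0; f < 4; f++)
        push_reference(f);

    for (f = 0; f < (int)num_short_term; f++)
        printf("ref_window[%d] = %d\n", f, ref_window[f]);

    return 0;
}
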
@@ -526,7 +701,7 @@ static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice
else
avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
- if (frame_num == 0) {
+ if (is_idr) {
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
unsigned int length_in_bits, offset_in_bytes;
unsigned char *packed_seq_buffer = NULL, *packed_pic_buffer = NULL;
@@ -613,9 +788,6 @@ static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice
vaUnmapBuffer(va_dpy, avcenc_context.misc_parameter_hrd_buf_id);
- /* slice parameter */
- avcenc_update_slice_parameter(slice_type);
-
return 0;
}
@@ -691,29 +863,10 @@ static int avcenc_destroy_buffers(VABufferID *va_buffers, unsigned int num_va_bu
return 0;
}
-static void end_picture(int slice_type, int next_is_bpic)
+static void end_picture()
{
- VABufferID tempID;
-
- /* Prepare for next picture */
- tempID = surface_ids[SID_RECON_PICTURE];
-
- if (slice_type != SLICE_TYPE_B) {
- if (next_is_bpic) {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L1];
- surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
- } else {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
- surface_ids[SID_REFERENCE_PICTURE_L0] = tempID;
- }
- } else {
- if (!next_is_bpic) {
- surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
- surface_ids[SID_REFERENCE_PICTURE_L0] = surface_ids[SID_REFERENCE_PICTURE_L1];
- surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
- }
- }
+ update_ReferenceFrames();
avcenc_destroy_buffers(&avcenc_context.seq_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_seq_header_param_buf_id, 1);
@@ -1299,6 +1452,68 @@ store_coded_buffer(FILE *avc_fp, int slice_type)
return 0;
}
+/*
+ * This is adapted from h264encode.c, with some simplifications.
+ * For example: when a frame is encoded as an I-frame in a scenario with
+ * P/B frames, it is treated as an IDR frame (key frame) and a new GOP is
+ * started. If the clip is encoded as all I-frames, only the first frame
+ * is treated as IDR and the remaining frames are plain I-frames.
+ *
+ */
+
+static void encoding2display_order(
+ unsigned long long encoding_order,int gop_size,
+ int ip_period,
+ unsigned long long *displaying_order,
+ int *frame_type)
+{
+ int encoding_order_gop = 0;
+
+ /* When ip_period is 0, all are I/IDR frames */
+ if (ip_period == 0) { /* all are I/IDR frames */
+ if (encoding_order == 0)
+ *frame_type = FRAME_IDR;
+ else
+ *frame_type = SLICE_TYPE_I;
+
+ *displaying_order = encoding_order;
+ return;
+ }
+
+ /* new sequence like
+ * IDR PPPPP IDRPPPPP
+ * IDR (PBB)(PBB)(PBB)(PBB) IDR (PBB)(PBB)(PBB)(PBB)
+ */
+ encoding_order_gop = encoding_order % gop_size;
+
+ if (encoding_order_gop == 0) { /* the first frame */
+ *frame_type = FRAME_IDR;
+ *displaying_order = encoding_order;
+ } else {
+ int gop_delta;
+
+ gop_delta = 1;
+
+ if ((ip_period != 1) && ((gop_size - 1) % ip_period)) {
+ int ipb_size;
+ ipb_size = (gop_size - 1) / ip_period * ip_period + 1;
+ if (encoding_order_gop >= ipb_size) {
+ gop_delta = ipb_size;
+ ip_period = gop_size - ipb_size;
+ }
+ }
+
+ if (((encoding_order_gop - gop_delta) % ip_period) == 0) { /* P frames */
+ *frame_type = SLICE_TYPE_P;
+ *displaying_order = encoding_order + ip_period - 1;
+ } else {
+ *frame_type = SLICE_TYPE_B;
+ *displaying_order = encoding_order - 1;
+ }
+ }
+}
+
+
static void
encode_picture(FILE *yuv_fp, FILE *avc_fp,
int frame_num, int display_num,
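
encoding2display_order() does the actual reordering: given an encoding-order index, the GOP size (intra_period) and ip_period, it returns the display order and the frame type. The sketch below, adapted from the hunk above (slightly condensed), prints the mapping for the mode=2 defaults, intra_period = 30 and ip_period = 3, so the IDR (PBB)(PBB)... pattern from the comment becomes visible:

/* Driver sketch for encoding2display_order(): prints the encoding-order ->
 * display-order / frame-type mapping for intra_period = 30, ip_period = 3.
 * The defines match avcenc.c; the function body is condensed from the
 * hunk above. */
#include <stdio.h>

#define SLICE_TYPE_P 0
#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
#define FRAME_IDR    7

static void encoding2display_order(
    unsigned long long encoding_order, int gop_size,
    int ip_period,
    unsigned long long *displaying_order,
    int *frame_type)
{
    int encoding_order_gop;

    if (ip_period == 0) {                       /* all I/IDR frames */
        *frame_type = (encoding_order == 0) ? FRAME_IDR : SLICE_TYPE_I;
        *displaying_order = encoding_order;
        return;
    }

    encoding_order_gop = encoding_order % gop_size;

    if (encoding_order_gop == 0) {              /* first frame of a GOP */
        *frame_type = FRAME_IDR;
        *displaying_order = encoding_order;
    } else {
        int gop_delta = 1;

        if ((ip_period != 1) && ((gop_size - 1) % ip_period)) {
            int ipb_size = (gop_size - 1) / ip_period * ip_period + 1;
            if (encoding_order_gop >= ipb_size) {
                gop_delta = ipb_size;
                ip_period = gop_size - ipb_size;
            }
        }

        if (((encoding_order_gop - gop_delta) % ip_period) == 0) {
            *frame_type = SLICE_TYPE_P;
            *displaying_order = encoding_order + ip_period - 1;
        } else {
            *frame_type = SLICE_TYPE_B;
            *displaying_order = encoding_order - 1;
        }
    }
}

int main(void)
{
    static const char *names[] = { "P", "B", "I" };
    unsigned long long enc, disp;
    int type;

    /* Expected start of the sequence:
     * enc 0 -> IDR disp 0, enc 1 -> P disp 3, enc 2 -> B disp 1,
     * enc 3 -> B disp 2,   enc 4 -> P disp 6, enc 5 -> B disp 4, ... */
    for (enc = 0; enc < 10; enc++) {
        encoding2display_order(enc, 30, 3, &disp, &type);
        printf("enc %2llu -> disp %2llu  %s\n", enc, disp,
               type == FRAME_IDR ? "IDR" : names[type]);
    }

    return 0;
}
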
@@ -1352,8 +1567,14 @@ encode_picture(FILE *yuv_fp, FILE *avc_fp,
&avcenc_context.codedbuf_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
+ /* Update the RefPicList */
+ update_RefPicList();
+
/* picture parameter set */
- avcenc_update_picture_parameter(slice_type, frame_num, display_num, is_idr);
+ avcenc_update_picture_parameter(slice_type, is_idr);
+
+ /* slice parameter */
+ avcenc_update_slice_parameter(slice_type);
if (avcenc_context.rate_control_method == VA_RC_CBR)
avcenc_update_sei_param(frame_num);
@@ -1366,27 +1587,6 @@ encode_picture(FILE *yuv_fp, FILE *avc_fp,
end_picture(slice_type, next_is_bpic);
}
-static void encode_pb_pictures(FILE *yuv_fp, FILE *avc_fp, int f, int nbframes, int next_f)
-{
- int i;
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number, f + nbframes,
- 0,
- SLICE_TYPE_P, 1, f);
-
- for( i = 0; i < nbframes - 1; i++) {
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number + 1, f + i,
- 0,
- SLICE_TYPE_B, 1, f + i + 1);
- }
-
- encode_picture(yuv_fp, avc_fp,
- enc_frame_number + 1, f + nbframes - 1,
- 0,
- SLICE_TYPE_B, 0, next_f);
-}
-
static void show_help()
{
printf("Usage: avnenc <width> <height> <input_yuvfile> <output_avcfile> [qp=qpvalue|fb=framebitrate] [mode=0(I frames only)/1(I and P frames)/2(I, P and B frames)\n");
@@ -1469,6 +1669,8 @@ static void avcenc_context_pic_param_init(VAEncPictureParameterBufferH264 *pic_p
pic_param->pic_fields.bits.transform_8x8_mode_flag = 1;
pic_param->pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+ memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
}
static void avcenc_context_sei_init()
@@ -1549,8 +1751,7 @@ int main(int argc, char *argv[])
int f;
FILE *yuv_fp;
FILE *avc_fp;
- long file_size;
- int i_frame_only=0,i_p_frame_only=1;
+ off_t file_size;
int mode_value;
struct timeval tpstart,tpend;
float timeuse;
@@ -1589,18 +1790,12 @@ int main(int argc, char *argv[])
if (argc == 7) {
sscanf(argv[6], "mode=%d", &mode_value);
if ( mode_value == 0 ) {
- i_frame_only = 1;
- i_p_frame_only = 0;
ip_period = 0;
}
else if ( mode_value == 1) {
- i_frame_only = 0;
- i_p_frame_only = 1;
ip_period = 1;
}
else if ( mode_value == 2 ) {
- i_frame_only = 0;
- i_p_frame_only = 0;
/* Hack mechanism before adding the parameter of B-frame number */
ip_period = 3;
}
@@ -1640,49 +1835,34 @@ int main(int argc, char *argv[])
alloc_encode_resource(yuv_fp);
enc_frame_number = 0;
- for ( f = 0; f < frame_number; ) { //picture level loop
- static int const frame_type_pattern[][2] = { {SLICE_TYPE_I,1},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,3}, {SLICE_TYPE_P,3},{SLICE_TYPE_P,3},
- {SLICE_TYPE_P,2} };
-
- if ( i_frame_only ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0, f+1);
- f++;
- enc_frame_number++;
- } else if ( i_p_frame_only ) {
- if ( (f % intra_period) == 0 ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0, f+1);
- f++;
- enc_frame_number++;
- } else {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_P, 0, f+1);
- f++;
- enc_frame_number++;
- }
- } else { // follow the i,p,b pattern
- static int fcurrent = 0;
- int fnext;
-
- fcurrent = fcurrent % (sizeof(frame_type_pattern)/sizeof(int[2]));
- fnext = (fcurrent+1) % (sizeof(frame_type_pattern)/sizeof(int[2]));
-
- if ( frame_type_pattern[fcurrent][0] == SLICE_TYPE_I ) {
- encode_picture(yuv_fp, avc_fp,enc_frame_number, f, f==0, SLICE_TYPE_I, 0,
- f+frame_type_pattern[fnext][1]);
- f++;
- enc_frame_number++;
- } else {
- encode_pb_pictures(yuv_fp, avc_fp, f, frame_type_pattern[fcurrent][1]-1,
- f + frame_type_pattern[fcurrent][1] + frame_type_pattern[fnext][1] -1 );
- f += frame_type_pattern[fcurrent][1];
- enc_frame_number++;
- }
-
- fcurrent++;
+ for ( f = 0; f < frame_number; f++) { //picture level loop
+ unsigned long long next_frame_display;
+ int next_frame_type;
+
+ enc_frame_number = f;
+
+ encoding2display_order(enc_frame_number, intra_period, ip_period,
+ &current_frame_display, &current_frame_type);
+
+ encoding2display_order(enc_frame_number + 1, intra_period, ip_period,
+ &next_frame_display, &next_frame_type);
+
+ if (current_frame_type == FRAME_IDR) {
+ numShortTerm = 0;
+ current_frame_num = 0;
+ current_IDR_display = current_frame_display;
}
- printf("\r %d/%d ...", f+1, frame_number);
+
+ /* use the simple mechanism to calc the POC */
+ current_poc = (current_frame_display - current_IDR_display) * 2;
+
+ encode_picture(yuv_fp, avc_fp, frame_number, current_frame_display,
+ (current_frame_type == FRAME_IDR) ? 1 : 0,
+ (current_frame_type == FRAME_IDR) ? SLICE_TYPE_I : current_frame_type,
+ (next_frame_type == SLICE_TYPE_B) ? 1 : 0,
+ next_frame_display);
+
+ printf("\r %d/%d ...", f, frame_number);
fflush(stdout);
}