X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=h264encode.cpp;h=c233f39b738101140589182d9e738dedb467f861;hb=64b8430160caae390a7e408a6c7d9629dc08e923;hp=35f34130ec014e7ec493d891336f81123a082e0e;hpb=021c4779665c734dcde1f15a6d9b38a37820bb38;p=nageru

diff --git a/h264encode.cpp b/h264encode.cpp
index 35f3413..c233f39 100644
--- a/h264encode.cpp
+++ b/h264encode.cpp
@@ -131,8 +131,6 @@ static int rc_default_modes[] = {
     VA_RC_VCM,
     VA_RC_NONE,
 };
-static unsigned long long current_frame_display = 0;
-static unsigned long long current_IDR_display = 0;
 static unsigned int current_frame_num = 0;
 
 static int misc_priv_type = 0;
@@ -661,9 +659,9 @@ static int build_packed_slice_buffer(unsigned char **header_buffer)
 #define FRAME_I 2
 #define FRAME_IDR 7
 void encoding2display_order(
-    unsigned long long encoding_order, int intra_period,
+    int encoding_order, int intra_period,
     int intra_idr_period, int ip_period,
-    unsigned long long *displaying_order,
+    int *displaying_order,
     int *frame_type, int *pts_lag)
 {
     int encoding_order_gop = 0;
@@ -1208,12 +1206,12 @@ static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key,
     sort_one(ref, j+1, right, list1_ascending, frame_idx);
 }
 
-static int update_ReferenceFrames(int frame_type)
+static void update_ReferenceFrames(int frame_type)
 {
     int i;
 
     if (frame_type == FRAME_B)
-        return 0;
+        return;
 
     CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
     numShortTerm++;
@@ -1226,8 +1224,6 @@ static void update_ReferenceFrames(int frame_type)
 
     current_frame_num++;
     if (current_frame_num > MaxFrameNum)
         current_frame_num = 0;
-
-    return 0;
 }
 
@@ -1366,16 +1362,16 @@ static int calc_poc(int pic_order_cnt_lsb, int frame_type)
     return TopFieldOrderCnt;
 }
 
-static int render_picture(int frame_type)
+static int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
 {
     VABufferID pic_param_buf;
     VAStatus va_status;
     int i = 0;
 
-    pic_param.CurrPic.picture_id = gl_surfaces[current_frame_display % SURFACE_NUM].ref_surface;
+    pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
     pic_param.CurrPic.frame_idx = current_frame_num;
     pic_param.CurrPic.flags = 0;
-    pic_param.CurrPic.TopFieldOrderCnt = calc_poc((current_frame_display - current_IDR_display) % MaxPicOrderCntLsb, frame_type);
+    pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
     pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
     CurrentCurrPic = pic_param.CurrPic;
 
@@ -1390,7 +1386,7 @@ static int render_picture(int frame_type)
     pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
     pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
     pic_param.frame_num = current_frame_num;
-    pic_param.coded_buf = gl_surfaces[current_frame_display % SURFACE_NUM].coded_buf;
+    pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
     pic_param.last_picture = false; // FIXME
     pic_param.pic_init_qp = initial_qp;
 
@@ -1511,7 +1507,7 @@ static void render_packedslice()
     free(packedslice_buffer);
 }
 
-static int render_slice(int encoding_frame_num, int frame_type)
+static int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
 {
     VABufferID slice_param_buf;
     VAStatus va_status;
@@ -1554,7 +1550,7 @@ static int render_slice(int encoding_frame_num, int frame_type)
     slice_param.slice_alpha_c0_offset_div2 = 0;
     slice_param.slice_beta_offset_div2 = 0;
     slice_param.direct_spatial_mv_pred_flag = 1;
-    slice_param.pic_order_cnt_lsb = (current_frame_display - current_IDR_display) % MaxPicOrderCntLsb;
+    slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
 
 
     if (h264_packedheader &&
@@ -1899,26 +1895,29 @@ void H264Encoder::end_frame(RefCountedGLsync fence, int64_t pts, const vector
 			unique_lock<mutex> lock(frame_queue_mutex);
-			frame_queue_nonempty.wait(lock, [this]{ return copy_thread_should_quit || pending_video_frames.count(current_frame_display) != 0; });
+			frame_queue_nonempty.wait(lock, [this, display_frame_num]{
+				return copy_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
+			});
 			if (copy_thread_should_quit) {
 				return;
 			} else {
-				frame = move(pending_video_frames[current_frame_display]);
-				pending_video_frames.erase(current_frame_display);
+				frame = move(pending_video_frames[display_frame_num]);
+				pending_video_frames.erase(display_frame_num);
 			}
 		}
@@ -1932,11 +1931,12 @@ void H264Encoder::copy_thread_func()
 		}
 		last_dts = dts;
 
-		encode_frame(frame, encoding_frame_num, frame_type, frame.pts, dts);
+		encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts);
 	}
 }
 
-void H264Encoder::encode_frame(H264Encoder::PendingFrame frame, int encoding_frame_num, int frame_type, int64_t pts, int64_t dts)
+void H264Encoder::encode_frame(H264Encoder::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
+                               int frame_type, int64_t pts, int64_t dts)
 {
 	// Wait for the GPU to be done with the frame.
 	glClientWaitSync(frame.fence.get(), 0, 0);
@@ -1945,7 +1945,7 @@ void H264Encoder::encode_frame(H264Encoder::PendingFrame frame, int encoding_fra
 	frame.input_frames.clear();
 
 	// Unmap the image.
-	GLSurface *surf = &gl_surfaces[current_frame_display % SURFACE_NUM];
+	GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
 	eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
 	eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
 	VAStatus va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
@@ -1961,16 +1961,16 @@ void H264Encoder::encode_frame(H264Encoder::PendingFrame frame, int encoding_fra
 
 	if (frame_type == FRAME_IDR) {
 		render_sequence();
-		render_picture(frame_type);
+		render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
 		if (h264_packedheader) {
 			render_packedsequence();
 			render_packedpicture();
 		}
 	} else {
 		//render_sequence();
-		render_picture(frame_type);
+		render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
 	}
-	render_slice(encoding_frame_num, frame_type);
+	render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);
 
 	va_status = vaEndPicture(va_dpy, context_id);
 	CHECK_VASTATUS(va_status, "vaEndPicture");
@@ -1978,7 +1978,7 @@ void H264Encoder::encode_frame(H264Encoder::PendingFrame frame, int encoding_fra
 	// so now the data is done encoding (well, async job kicked off)...
 	// we send that to the storage thread
 	storage_task tmp;
-	tmp.display_order = current_frame_display;
+	tmp.display_order = display_frame_num;
 	tmp.frame_type = frame_type;
 	tmp.pts = pts;
 	tmp.dts = dts;
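
The hunks above all serve one refactoring: the globals current_frame_display and current_IDR_display are dropped, and the display frame number plus the display number of the GOP's first (IDR) frame are instead passed explicitly into render_picture(), render_slice() and H264Encoder::encode_frame(). Below is a minimal standalone sketch of the picture-order-count arithmetic those two parameters feed; it is illustrative only, the *_sketch names are hypothetical, and MaxPicOrderCntLsb is assumed to be 1 << 8 rather than taken from the patch.

#include <cstdio>

// Assumed value for illustration; the real MaxPicOrderCntLsb is derived from the SPS.
static const int MaxPicOrderCntLsb_sketch = 1 << 8;

// Same expression the patch threads through render_picture() and render_slice():
// the POC lsb is the distance from the GOP's first display frame, wrapped at MaxPicOrderCntLsb.
static int pic_order_cnt_lsb_sketch(int display_frame_num, int gop_start_display_frame_num)
{
	return (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb_sketch;
}

int main()
{
	// e.g. display frame 130 in a GOP whose IDR frame was display frame 120 -> prints 10.
	printf("%d\n", pic_order_cnt_lsb_sketch(130, 120));
	return 0;
}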