// Coded-bitstream buffer grows in 4 kB steps when the output overflows.
#define BITSTREAM_ALLOCATE_STEPPING 4096
#define SURFACE_NUM 16 /* 16 surfaces for source YUV */
+#define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
+#define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
// NOTE(review): 2<<16 is 2^17 and 2<<8 is 2^9 (not 2^16 / 2^8). Presumably these
// must agree with the log2_max_frame_num / log2_max_pic_order_cnt_lsb values
// signalled in the SPS — confirm against where the SPS is filled in.
static constexpr unsigned int MaxFrameNum = (2<<16);
static constexpr unsigned int MaxPicOrderCntLsb = (2<<8);
// VA-API parameter structures, reused across frames.
VAEncPictureParameterBufferH264 pic_param;
VAEncSliceParameterBufferH264 slice_param;
VAPictureH264 CurrentCurrPic;
- VAPictureH264 ReferenceFrames[16], RefPicList0_P[32], RefPicList0_B[32], RefPicList1_B[32];
+ VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
// Static quality settings.
static constexpr unsigned int frame_bitrate = 15000000 / 60; // Doesn't really matter; only initial_qp does.
// Fill pic_param's reference-frame (DPB) list: the first numShortTerm entries
// come from ReferenceFrames, and the remaining slots of the fixed-size array
// are marked invalid so the driver ignores them. Note that SURFACE_NUM and
// MAX_NUM_REF1 are both 16, so the bound change below is semantic (correct
// constant), not behavioral.
CurrentCurrPic = pic_param.CurrPic;
memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
- for (i = numShortTerm; i < SURFACE_NUM; i++) {
+ for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
}
// P-slice path: the low 16 bits of h264_maxref give the number of active
// list-0 references. Copy that many entries, then invalidate the tail of the
// fixed-size (MAX_NUM_REF2 == 32) RefPicList0 array.
int refpiclist0_max = h264_maxref & 0xffff;
memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
- for (i = refpiclist0_max; i < 32; i++) {
+ for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
// B-slice path: the high 16 bits of h264_maxref give the list-1 reference
// count. List 0 for B-frames reuses refpiclist0_max (computed above) — that is
// intentional, not a copy-paste error; only the source array differs
// (RefPicList0_B vs. RefPicList0_P).
int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
- for (i = refpiclist0_max; i < 32; i++) {
+ for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
// List 1 uses its own count; tail entries invalidated the same way.
memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
- for (i = refpiclist1_max; i < 32; i++) {
+ for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
}
pending_audio_frames.erase(it);
}
// Allocate a fresh frame for this batch of audio samples.
// avcodec_alloc_frame() is the deprecated pre-2.x API; av_frame_alloc() is the
// modern replacement.
- AVFrame *frame = avcodec_alloc_frame();
+ AVFrame *frame = av_frame_alloc();
// NOTE(review): assumes `audio` holds interleaved stereo samples, hence /2 for
// per-channel sample count — confirm against the producer of `audio`.
frame->nb_samples = audio.size() / 2;
frame->format = AV_SAMPLE_FMT_S32;
frame->channel_layout = AV_CH_LAYOUT_STEREO;
httpd->add_packet(pkt, audio_pts + global_delay, audio_pts + global_delay);
}
// TODO: Delayed frames.
// BUGFIX: av_frame_unref() only drops the frame's data buffer references; the
// AVFrame struct itself (from av_frame_alloc()) would be leaked every
// iteration. av_frame_free() both unrefs and frees, and nulls the pointer —
// it is the correct counterpart to av_frame_alloc().
- avcodec_free_frame(&frame);
+ av_frame_free(&frame);
av_free_packet(&pkt);
if (audio_pts == task.pts) break;
}
{
// Producer side: hand the finished task to the storage thread under the
// queue mutex, then wake any waiter.
unique_lock<mutex> lock(storage_task_queue_mutex);
storage_task_queue.push(move(task));
// The status transition is removed here (see '-' line below); presumably the
// slot is now marked SRC_SURFACE_IN_ENCODING at the point where it is
// claimed, rather than at enqueue time.
- srcsurface_status[task.display_order % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
storage_task_queue_changed.notify_all();
}
{
// Wait until this frame slot is done encoding.
unique_lock<mutex> lock(storage_task_queue_mutex);
// Emit a diagnostic before blocking, so encoder-induced rendering stalls are
// visible in the logs instead of silent.
+ if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
+ fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
+ current_storage_frame % SURFACE_NUM, current_storage_frame);
+ }
// Block until the slot is free again (or we are told to quit). The predicate
// re-reads shared state under the lock, as condition_variable::wait requires.
storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
// Claim the slot while still holding the mutex, so no other producer can
// grab it between the wait and the status write.
+ srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
// Checked after the wait: the predicate may have returned due to shutdown,
// in which case the slot may not actually be free.
if (storage_thread_should_quit) return false;
}