#define BITSTREAM_ALLOCATE_STEPPING 4096
#define SURFACE_NUM 16 /* 16 surfaces for source YUV */
+#define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM.
+#define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM.
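+// (They match the fixed array sizes of ReferenceFrames[16] in
+// VAEncPictureParameterBufferH264 and RefPicList0/1[32] in
+// VAEncSliceParameterBufferH264.)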
static constexpr unsigned int MaxFrameNum = (2<<16);
static constexpr unsigned int MaxPicOrderCntLsb = (2<<8);
QSurface *surface;
AVCodecContext *context_audio;
+ AVFrame *audio_frame = nullptr;
HTTPD *httpd;
Display *x11_display = nullptr;
VAEncPictureParameterBufferH264 pic_param;
VAEncSliceParameterBufferH264 slice_param;
VAPictureH264 CurrentCurrPic;
- VAPictureH264 ReferenceFrames[16], RefPicList0_P[32], RefPicList0_B[32], RefPicList1_B[32];
+ VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
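+ // ReferenceFrames mirrors the decoded picture buffer as the driver sees it;
+ // the RefPicList arrays are the per-slice reference lists built from it.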
// Static quality settings.
static constexpr unsigned int frame_bitrate = 15000000 / 60; // Doesn't really matter; only initial_qp does.
CurrentCurrPic = pic_param.CurrPic;
memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
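+ // Pad the rest of the fixed-size array with explicitly invalid entries,
+ // so that the driver does not read stale data; the reference lists below
+ // are padded the same way.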
- for (i = numShortTerm; i < SURFACE_NUM; i++) {
+ for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
}
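+ // h264_maxref packs two limits, presumably as queried via
+ // VAConfigAttribEncMaxRefFrames: the low 16 bits hold the maximum
+ // list 0 length, the high 16 bits the maximum list 1 length.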
int refpiclist0_max = h264_maxref & 0xffff;
memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
- for (i = refpiclist0_max; i < 32; i++) {
+ for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
- for (i = refpiclist0_max; i < 32; i++) {
+ for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
- for (i = refpiclist1_max; i < 32; i++) {
+ for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
}
pending_audio_frames.erase(it);
}
- AVFrame *frame = av_frame_alloc();
- frame->nb_samples = audio.size() / 2;
- frame->format = AV_SAMPLE_FMT_S32;
- frame->channel_layout = AV_CH_LAYOUT_STEREO;
+ audio_frame->nb_samples = audio.size() / 2;
+ audio_frame->format = AV_SAMPLE_FMT_S32;
+ audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
unique_ptr<int32_t[]> int_samples(new int32_t[audio.size()]);
- int ret = avcodec_fill_audio_frame(frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), audio.size() * sizeof(int32_t), 1);
+ int ret = avcodec_fill_audio_frame(audio_frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), audio.size() * sizeof(int32_t), 1);
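+ // Note: avcodec_fill_audio_frame() only points the frame's data planes at
+ // int_samples; the actual float-to-int conversion into that buffer happens
+ // in the loop below.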
if (ret < 0) {
fprintf(stderr, "avcodec_fill_audio_frame() failed with %d\n", ret);
exit(1);
}
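+ // Convert the float samples to signed 32-bit, clamping to full scale.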
- for (int i = 0; i < frame->nb_samples * 2; ++i) {
+ for (int i = 0; i < audio_frame->nb_samples * 2; ++i) {
if (audio[i] >= 1.0f) {
int_samples[i] = 2147483647;
} else if (audio[i] <= -1.0f) {
pkt.data = nullptr;
pkt.size = 0;
int got_output;
- avcodec_encode_audio2(context_audio, &pkt, frame, &got_output);
+ avcodec_encode_audio2(context_audio, &pkt, audio_frame, &got_output);
if (got_output) {
pkt.stream_index = 1;
httpd->add_packet(pkt, audio_pts + global_delay, audio_pts + global_delay);
}
// TODO: Delayed frames.
- av_frame_unref(frame);
+ av_frame_unref(audio_frame);
av_free_packet(&pkt);
if (audio_pts == task.pts) break;
}
-
-#if 0
- printf("\r "); /* return back to startpoint */
- switch (encode_order % 4) {
- case 0:
- printf("|");
- break;
- case 1:
- printf("/");
- break;
- case 2:
- printf("-");
- break;
- case 3:
- printf("\\");
- break;
- }
- printf("%08lld", encode_order);
-#endif
}
{
unique_lock<mutex> lock(storage_task_queue_mutex);
storage_task_queue.push(move(task));
- srcsurface_status[task.display_order % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
storage_task_queue_changed.notify_all();
}
fprintf(stderr, "Could not open codec\n");
exit(1);
}
+ audio_frame = av_frame_alloc();
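+ // Allocated once and reused for every audio frame; av_frame_unref()
+ // resets it between encodes, and the destructor frees it.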
frame_width = width;
frame_height = height;
H264EncoderImpl::~H264EncoderImpl()
{
shutdown();
+ av_frame_free(&audio_frame);
+
+ // TODO: Destroy context.
}
bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
// Wait until this frame slot is done encoding.
unique_lock<mutex> lock(storage_task_queue_mutex);
+ if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
+ fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
+ current_storage_frame % SURFACE_NUM, current_storage_frame);
+ }
storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
+ srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
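+ // Mark the slot as in use already here, not when the storage task is
+ // queued, so that a later begin_frame() for the same slot cannot see it
+ // as free before this frame has actually been submitted for encoding.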
if (storage_thread_should_quit) return false;
}