#include "flags.h"
#include "input_mapping.h"
#include "metrics.h"
+#include "mjpeg_encoder.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_gl_sync.h"
#include "resampling_queue.h"
#include "timebase.h"
#include "timecode_renderer.h"
#include "v210_converter.h"
+#include "va_display_with_cleanup.h"
#include "video_encoder.h"
#undef Status
display_chain->finalize();
video_encoder.reset(new VideoEncoder(resource_pool.get(), h264_encoder_surface, global_flags.va_display, global_flags.width, global_flags.height, &httpd, global_disk_space_estimator));
+ mjpeg_encoder.reset(new MJPEGEncoder(&httpd, global_flags.va_display));
// Must be instantiated after VideoEncoder has initialized global_flags.use_zerocopy.
theme.reset(new Theme(global_flags.theme_filename, global_flags.theme_dirs, resource_pool.get(), num_cards));
Mixer::~Mixer()
{
+ mjpeg_encoder->stop();
httpd.stop();
BMUSBCapture::stop_bm_thread();
for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
- {
- unique_lock<mutex> lock(card_mutex);
- cards[card_index].should_quit = true; // Unblock thread.
- cards[card_index].new_frames_changed.notify_all();
- }
cards[card_index].capture->stop_dequeue_thread();
if (cards[card_index].output) {
cards[card_index].output->end_output();
}
}
+// Map a flat card index to a DeviceSpec. Indices below num_cards are
+// physical capture cards; indices at or above num_cards are renumbered
+// from zero and reported as FFmpeg video inputs.
+// NOTE(review): the card loops elsewhere iterate over
+// num_cards + num_video_inputs + num_html_inputs, so HTML/CEF inputs
+// also fall into the FFMPEG_VIDEO_INPUT branch here — confirm that is
+// intended for all callers.
+DeviceSpec card_index_to_device(unsigned card_index, unsigned num_cards)
+{
+ if (card_index >= num_cards) {
+ return DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
+ } else {
+ return DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
+ }
+}
+
} // namespace
void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format)
{
- DeviceSpec device;
- if (card_index >= num_cards) {
- device = DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
- } else {
- device = DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
- }
+ DeviceSpec device = card_index_to_device(card_index, num_cards);
CaptureCard *card = &cards[card_index];
++card->metric_input_received_frames;
new_frame.upload_func = upload_func;
new_frame.dropped_frames = dropped_frames;
new_frame.received_timestamp = video_frame.received_timestamp; // Ignore the audio timestamp.
+ new_frame.video_format = video_format;
+ new_frame.y_offset = y_offset;
+ new_frame.cbcr_offset = cbcr_offset;
card->new_frames.push_back(move(new_frame));
card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
card->may_have_dropped_last_frame = false;
}
}
- BasicStats basic_stats(/*verbose=*/true);
+ BasicStats basic_stats(/*verbose=*/true, /*use_opengl=*/true);
int stats_dropped_frames = 0;
while (!should_quit) {
handle_hotplugged_cards();
for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
+ DeviceSpec device = card_index_to_device(card_index, num_cards);
if (card_index == master_card_index || !has_new_frame[card_index]) {
continue;
}
++new_frames[card_index].dropped_frames;
}
if (new_frames[card_index].dropped_frames > 0) {
- printf("Card %u dropped %d frames before this\n",
- card_index, int(new_frames[card_index].dropped_frames));
+ printf("%s dropped %d frames before this\n",
+ spec_to_string(device).c_str(), int(new_frames[card_index].dropped_frames));
}
}
new_frame->upload_func();
new_frame->upload_func = nullptr;
}
+
+ // There are situations where we could possibly want to
+ // include FFmpeg inputs (CEF inputs are unlikely),
+ // but they're not necessarily in 4:2:2 Y'CbCr, so it would
+ // require more functionality in the JPEG encoder.
+ if (card_index < num_cards) {
+ mjpeg_encoder->upload_frame(pts_int, card_index, new_frame->frame, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset);
+ }
}
int64_t frame_duration = output_frame_info.frame_duration;