bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
RefCountedGLsync end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames);
void shutdown();
+ void open_output_file(const std::string &filename);
+ void close_output_file();
private:
struct storage_task {
void storage_task_thread();
void encode_audio(const vector<float> &audio,
int64_t audio_pts,
- AVCodecContext *ctx,
- HTTPD::PacketDestination destination);
+ AVCodecContext *ctx);
void storage_task_enqueue(storage_task task);
void save_codeddata(storage_task task);
int render_packedsequence();
int frame_height;
int frame_width_mbaligned;
int frame_height_mbaligned;
+
+ unique_ptr<Mux> file_mux; // To local disk.
};
// Supposedly vaRenderPicture() is supposed to destroy the buffer implicitly,
pkt.flags = 0;
}
//pkt.duration = 1;
- httpd->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay(),
- global_flags.uncompressed_video_to_http ? HTTPD::DESTINATION_FILE_ONLY : HTTPD::DESTINATION_FILE_AND_HTTP);
+ if (file_mux) {
+ file_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
+ }
+ if (!global_flags.uncompressed_video_to_http) {
+ httpd->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
+ }
}
// Encode and add all audio frames up to and including the pts of this video frame.
for ( ;; ) {
pending_audio_frames.erase(it);
}
- encode_audio(audio, audio_pts, context_audio, HTTPD::DESTINATION_FILE_AND_HTTP);
+ encode_audio(audio, audio_pts, context_audio);
if (audio_pts == task.pts) break;
}
void H264EncoderImpl::encode_audio(
const vector<float> &audio,
int64_t audio_pts,
- AVCodecContext *ctx,
- HTTPD::PacketDestination destination)
+ AVCodecContext *ctx)
{
audio_frame->nb_samples = audio.size() / 2;
audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
if (got_output) {
pkt.stream_index = 1;
pkt.flags = AV_PKT_FLAG_KEY;
- httpd->add_packet(pkt, audio_pts + global_delay(), audio_pts + global_delay(), destination);
+ if (file_mux) {
+ file_mux->add_packet(pkt, audio_pts + global_delay(), audio_pts + global_delay());
+ }
+ httpd->add_packet(pkt, audio_pts + global_delay(), audio_pts + global_delay());
}
// TODO: Delayed frames.
av_frame_unref(audio_frame);
is_shutdown = true;
}
+// Open a local output file and attach a Mux that writes the H.264 (and audio)
+// streams to it. Any setup failure is fatal, matching the error-handling
+// style used elsewhere in this file (fprintf + exit).
+void H264EncoderImpl::open_output_file(const std::string &filename)
+{
+	AVFormatContext *avctx = avformat_alloc_context();
+	if (avctx == nullptr) {
+		fprintf(stderr, "%s: avformat_alloc_context() failed\n", filename.c_str());
+		exit(1);
+	}
+	// Deduce the container format from the filename extension; a NULL here
+	// would otherwise be dereferenced later when muxing starts.
+	avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
+	if (avctx->oformat == nullptr) {
+		fprintf(stderr, "%s: could not deduce output format from filename\n", filename.c_str());
+		exit(1);
+	}
+	assert(filename.size() < sizeof(avctx->filename) - 1);
+	strcpy(avctx->filename, filename.c_str());
+
+	// Use an explicit file: URL so avio does not try to interpret the
+	// filename as some other protocol.
+	string url = "file:" + filename;
+	int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
+	if (ret < 0) {
+		char tmp[AV_ERROR_MAX_STRING_SIZE];
+		fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
+		exit(1);
+	}
+
+	// Mux takes ownership of avctx. Only one file can be open at a time.
+	file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, TIMEBASE));
+}
+
+// Stop writing to the current output file, if any. Destroying the Mux is
+// assumed to finalize and close the underlying file — confirm against
+// Mux's destructor.
+void H264EncoderImpl::close_output_file()
+{
+	file_mux.reset();
+}
+
void H264EncoderImpl::encode_thread_func()
{
int64_t last_dts = -1;
pkt.size = frame_width * frame_height * 2;
pkt.stream_index = 0;
pkt.flags = AV_PKT_FLAG_KEY;
- httpd->add_packet(pkt, pts, pts, HTTPD::DESTINATION_HTTP_ONLY);
+ httpd->add_packet(pkt, pts, pts);
}
namespace {
impl->shutdown();
}
-// Real class.
+// Thin pimpl forwarder to H264EncoderImpl.
+void H264Encoder::open_output_file(const std::string &filename)
+{
+	impl->open_output_file(filename);
+}
+
+// Thin pimpl forwarder to H264EncoderImpl.
+void H264Encoder::close_output_file()
+{
+	impl->close_output_file();
+}
RefCountedGLsync end_frame(int64_t pts, const std::vector<RefCountedFrame> &input_frames);
void shutdown(); // Blocking.
+	// Only one output file can be open at any given time.
+ void open_output_file(const std::string &filename);
+ void close_output_file();
+
private:
std::unique_ptr<H264EncoderImpl> impl;
};
MHD_OPTION_END);
}
-void HTTPD::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, PacketDestination destination)
+// Fan the packet out to every connected HTTP stream. Local-file output no
+// longer goes through HTTPD; this patch moves it into H264EncoderImpl's
+// own Mux, so the destination parameter is gone.
+void HTTPD::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
{
unique_lock<mutex> lock(streams_mutex);
-	if (destination != DESTINATION_FILE_ONLY) {
-		for (Stream *stream : streams) {
-			stream->add_packet(pkt, pts, dts);
-		}
-	}
-	if (file_mux && destination != DESTINATION_HTTP_ONLY) {
-		file_mux->add_packet(pkt, pts, dts);
+	for (Stream *stream : streams) {
+		stream->add_packet(pkt, pts, dts);
}
}
-void HTTPD::open_output_file(const string &filename)
-{
- AVFormatContext *avctx = avformat_alloc_context();
- avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
- assert(filename.size() < sizeof(avctx->filename) - 1);
- strcpy(avctx->filename, filename.c_str());
-
- string url = "file:" + filename;
- int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
- if (ret < 0) {
- char tmp[AV_ERROR_MAX_STRING_SIZE];
- fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
- exit(1);
- }
-
- file_mux.reset(new Mux(avctx, width, height, Mux::CODEC_H264, TIMEBASE));
-}
-
-void HTTPD::close_output_file()
-{
- file_mux.reset();
-}
-
int HTTPD::answer_to_connection_thunk(void *cls, MHD_Connection *connection,
const char *url, const char *method,
const char *version, const char *upload_data,
#ifndef _HTTPD_H
#define _HTTPD_H
-// A class dealing with stream output, both to HTTP (thus the class name)
-// and to local output files. Since we generally have very few outputs
+// A class dealing with stream output to HTTP. Since we generally have very few outputs
// (end clients are not meant to connect directly to our stream; it should be
// transcoded by something else and then sent to a reflector), we don't need to
// care a lot about performance. Thus, we solve this by the simplest possible
class HTTPD {
public:
- enum PacketDestination {
- DESTINATION_FILE_ONLY,
- DESTINATION_HTTP_ONLY,
- DESTINATION_FILE_AND_HTTP
- };
-
HTTPD(int width, int height);
void start(int port);
- void add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, PacketDestination destination);
-
- // You can only have one going at the same time.
- void open_output_file(const std::string &filename);
- void close_output_file();
+ void add_packet(const AVPacket &pkt, int64_t pts, int64_t dts);
private:
static int answer_to_connection_thunk(void *cls, MHD_Connection *connection,
std::set<Stream *> streams; // Not owned.
int width, height;
- std::unique_ptr<Mux> file_mux; // To local disk.
};
#endif // !defined(_HTTPD_H)
limiter(OUTPUT_FREQUENCY),
compressor(OUTPUT_FREQUENCY)
{
- httpd.open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
httpd.start(9095);
CHECK(init_movit(MOVIT_SHADER_DIR, MOVIT_DEBUG_OFF));
display_chain->finalize();
h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+ h264_encoder->open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
// First try initializing the PCI devices, then USB, until we have the desired number of cards.
unsigned num_pci_devices = 0, num_usb_devices = 0;
if (should_cut.exchange(false)) { // Test and clear.
string filename = generate_local_dump_filename(frame);
printf("Starting new recording: %s\n", filename.c_str());
+ h264_encoder->close_output_file();
h264_encoder->shutdown();
- httpd.close_output_file();
- httpd.open_output_file(filename.c_str());
h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+ h264_encoder->open_output_file(filename.c_str());
}
#if 0