Set duration for all video frames.
index 42d854817b57c10c7dce162c9b149cceb62a8ed3..3134dd13e5eeb8cffbd14907113ae3cc76ac9517 100644
--- a/mixer.cpp
+++ b/mixer.cpp
@@ -139,7 +139,7 @@ void QueueLengthPolicy::update_policy(int queue_length)
 }
 
 Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
-       : httpd(WIDTH, HEIGHT),
+       : httpd(),
          num_cards(num_cards),
          mixer_surface(create_surface(format)),
          h264_encoder_surface(create_surface(format)),
@@ -148,9 +148,6 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
          limiter(OUTPUT_FREQUENCY),
          compressor(OUTPUT_FREQUENCY)
 {
-       httpd.open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
-       httpd.start(9095);
-
        CHECK(init_movit(MOVIT_SHADER_DIR, MOVIT_DEBUG_OFF));
        check_error();
 
@@ -178,6 +175,10 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
        display_chain->finalize();
 
        h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+       h264_encoder->open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
+
+       // Start listening for clients only once H264Encoder has written its header, if any.
+       httpd.start(9095);
 
        // First try initializing the PCI devices, then USB, until we have the desired number of cards.
        unsigned num_pci_devices = 0, num_usb_devices = 0;
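
The two constructor hunks above move ownership of the local dump file from HTTPD to H264Encoder, and defer httpd.start(9095) until the encoder exists. The point of the ordering is that a client connecting the instant the port opens must still receive any stream header the encoder wrote. A minimal sketch of that hazard, using hypothetical stand-in types (Encoder and StreamServer are illustrations, not Nageru's classes):

    #include <memory>
    #include <string>

    struct Encoder {
        std::string header = "sps+pps";  // stand-in for the real stream header
    };

    struct StreamServer {
        const std::string *header = nullptr;
        void start_listening() { /* accept clients; send *header to each first */ }
    };

    int main() {
        StreamServer server;
        auto encoder = std::make_unique<Encoder>();  // 1. encoder writes its header
        server.header = &encoder->header;            // 2. server learns where it is
        server.start_listening();                    // 3. only now accept clients
    }
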
@@ -543,18 +544,24 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
                check_error();
                glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo);
                check_error();
+
+               size_t field_y_start = y_offset + video_format.width * field_start_line;
+               size_t field_cbcr_start = cbcr_offset + cbcr_width * field_start_line * sizeof(uint16_t);
+
                if (global_flags.flush_pbos) {
-                       glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, 0, video_frame.size);
+                       glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, field_y_start, video_format.width * video_format.height);
+                       check_error();
+                       glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, field_cbcr_start, cbcr_width * video_format.height * sizeof(uint16_t));
                        check_error();
                }
 
                glBindTexture(GL_TEXTURE_2D, userdata->tex_cbcr[field]);
                check_error();
-               glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, cbcr_width, video_format.height, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(cbcr_offset + cbcr_width * field_start_line * sizeof(uint16_t)));
+               glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, cbcr_width, video_format.height, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(field_cbcr_start));
                check_error();
                glBindTexture(GL_TEXTURE_2D, userdata->tex_y[field]);
                check_error();
-               glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, video_format.width, video_format.height, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(y_offset + video_format.width * field_start_line));
+               glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, video_format.width, video_format.height, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(field_y_start));
                check_error();
                glBindTexture(GL_TEXTURE_2D, 0);
                check_error();
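
The hunk above hoists the two per-field byte offsets into named variables and then flushes only those mapped subranges instead of the whole buffer. Note that glFlushMappedBufferRange is only legal on a buffer mapped with GL_MAP_FLUSH_EXPLICIT_BIT, and its offset is relative to the start of the mapped range. A compressed sketch of the pattern (buffer sizes and plane layout are illustrative, not Nageru's actual mapping code):

    #include <epoxy/gl.h>
    #include <cstring>
    #include <vector>

    void upload_two_planes() {
        // Hypothetical plane layout: luma first, packed Cb/Cr after it.
        const size_t y_start = 0, y_size = 1280 * 720;
        const size_t cbcr_start = y_size, cbcr_size = y_size;
        const size_t total_size = y_size + cbcr_size;
        std::vector<char> y_plane(y_size), cbcr_plane(cbcr_size);

        GLuint pbo;
        glGenBuffers(1, &pbo);
        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo);
        glBufferData(GL_PIXEL_UNPACK_BUFFER, total_size, nullptr, GL_STREAM_DRAW);
        char *ptr = static_cast<char *>(glMapBufferRange(
                GL_PIXEL_UNPACK_BUFFER, 0, total_size,
                GL_MAP_WRITE_BIT | GL_MAP_FLUSH_EXPLICIT_BIT));
        memcpy(ptr + y_start, y_plane.data(), y_size);
        memcpy(ptr + cbcr_start, cbcr_plane.data(), cbcr_size);
        // Flush exactly the written subranges; offsets are relative to the mapping.
        glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, y_start, y_size);
        glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, cbcr_start, cbcr_size);
        glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER);
    }
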
@@ -669,9 +676,10 @@ void Mixer::thread_func()
                        }
                }
 
-               render_one_frame();
+               int64_t duration = new_frames[master_card_index].length;
+               render_one_frame(duration);
                ++frame;
-               pts_int += new_frames[master_card_index].length;
+               pts_int += duration;
 
                clock_gettime(CLOCK_MONOTONIC, &now);
                double elapsed = now.tv_sec - start.tv_sec +
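
The change above reads the master card's frame length once into duration, so the exact same value reaches both render_one_frame() (and from there the encoder) and the pts accumulator. A toy illustration of why per-frame durations keep pts monotonic across varying frame rates (the tick rate is an assumption for the example, not necessarily Nageru's TIMEBASE):

    #include <cstdint>
    #include <cstdio>

    int main() {
        const int64_t TIMEBASE = 120000;  // assumed tick rate for the example;
                                          // divisible by 60, 50, 30, 25 and 24
        const int64_t durations[] = {TIMEBASE / 60, TIMEBASE / 60, TIMEBASE / 50};
        int64_t pts_int = 0;
        for (int64_t duration : durations) {
            std::printf("frame at pts %lld, duration %lld\n",
                        (long long)pts_int, (long long)duration);
            pts_int += duration;  // next frame begins exactly where this one ends
        }
    }
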
@@ -686,10 +694,10 @@ void Mixer::thread_func()
                if (should_cut.exchange(false)) {  // Test and clear.
                        string filename = generate_local_dump_filename(frame);
                        printf("Starting new recording: %s\n", filename.c_str());
+                       h264_encoder->close_output_file();
                        h264_encoder->shutdown();
-                       httpd.close_output_file();
-                       httpd.open_output_file(filename.c_str());
                        h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+                       h264_encoder->open_output_file(filename.c_str());
                }
 
 #if 0
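
With the dump file now owned by the encoder, the cut sequence above closes the old file and drains the encoder before a fresh H264Encoder (and thus a fresh stream header) is pointed at the new file: close before shutdown so the old file ends with fully flushed frames, open after construction so the new file starts with the new header. A self-contained sketch of that ordering with hypothetical stand-ins (not Nageru's API or filenames):

    #include <cstdio>
    #include <memory>
    #include <string>

    struct FileEncoder {
        std::string file;
        void open_output_file(const char *fn) { file = fn; std::printf("header -> %s\n", fn); }
        void close_output_file() { std::printf("flush and close %s\n", file.c_str()); }
        void shutdown() { std::printf("drain in-flight frames\n"); }
    };

    int main() {
        auto enc = std::make_unique<FileEncoder>();
        enc->open_output_file("dump-old.ts");
        // ... frames are encoded ...
        enc->close_output_file();               // 1. finish the old dump
        enc->shutdown();                        // 2. drain the pipeline
        enc = std::make_unique<FileEncoder>();  // 3. fresh encoder, fresh header
        enc->open_output_file("dump-new.ts");   // 4. header lands in the new file
    }
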
@@ -767,7 +775,7 @@ void Mixer::schedule_audio_resampling_tasks(unsigned dropped_frames, int num_sam
        }
 }
 
-void Mixer::render_one_frame()
+void Mixer::render_one_frame(int64_t duration)
 {
        // Get the main chain from the theme, and set its state immediately.
        Theme::Chain theme_main_chain = theme->get_chain(0, pts(), WIDTH, HEIGHT, input_state);
@@ -798,7 +806,7 @@ void Mixer::render_one_frame()
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
 
        const int64_t av_delay = TIMEBASE / 10;  // Corresponds to the fixed delay in resampling_queue.h. TODO: Make less hard-coded.
-       RefCountedGLsync fence = h264_encoder->end_frame(pts_int + av_delay, theme_main_chain.input_frames);
+       RefCountedGLsync fence = h264_encoder->end_frame(pts_int + av_delay, duration, theme_main_chain.input_frames);
 
        // The live frame just shows the RGBA texture we just rendered.
        // It owns rgba_tex now.
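
end_frame() correspondingly grows a duration parameter between the pts and the input frame list. Downstream, a muxer would typically rescale that duration into the stream's own time base when writing the packet; a hedged sketch with FFmpeg's API (whether H264Encoder does exactly this is outside this diff):

    extern "C" {
    #include <libavformat/avformat.h>
    }

    // 'pts' and 'duration' are in 1/tb units, as in the mixer; the muxer
    // wants them in the stream's time base.
    void write_packet(AVFormatContext *mux, AVStream *stream, AVPacket *pkt,
                      int64_t pts, int64_t duration, int tb) {
        const AVRational src{1, tb};
        pkt->stream_index = stream->index;
        pkt->pts = av_rescale_q(pts, src, stream->time_base);
        pkt->dts = pkt->pts;  // assuming no B-frame reordering, for simplicity
        pkt->duration = av_rescale_q(duration, src, stream->time_base);
        av_interleaved_write_frame(mux, pkt);
    }
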
@@ -1198,3 +1206,5 @@ void Mixer::OutputChannel::set_frame_ready_callback(Mixer::new_frame_ready_callb
        new_frame_ready_callback = callback;
        has_new_frame_ready_callback = true;
 }
+
+mutex RefCountedGLsync::fence_lock;
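
The trailing addition defines storage for a class-static mutex, presumably serializing fence creation and deletion around the refcounted GLsync objects. A static data member declared in a header has no storage until exactly one translation unit defines it; mixer.cpp now provides that definition. The pattern in isolation (stand-in name, not the real class):

    #include <mutex>

    // Header side: declares the member; allocates no storage.
    struct RefCountedGLsyncSketch {
        static std::mutex fence_lock;
    };

    // Exactly one .cpp provides the definition that allocates the object:
    std::mutex RefCountedGLsyncSketch::fence_lock;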