}
int audio_stream_index = find_stream_index(format_ctx.get(), AVMEDIA_TYPE_AUDIO);
+ int subtitle_stream_index = find_stream_index(format_ctx.get(), AVMEDIA_TYPE_SUBTITLE);
+ has_last_subtitle = false;
// Open video decoder.
const AVCodecParameters *video_codecpar = format_ctx->streams[video_stream_index]->codecpar;
int64_t audio_pts;
bool error;
AVFrameWithDeleter frame = decode_frame(format_ctx.get(), video_codec_ctx.get(), audio_codec_ctx.get(),
- pathname, video_stream_index, audio_stream_index, audio_frame.get(), &audio_format, &audio_pts, &error);
+ pathname, video_stream_index, audio_stream_index, subtitle_stream_index, audio_frame.get(), &audio_format, &audio_pts, &error);
if (error) {
return false;
}
} // namespace
AVFrameWithDeleter FFmpegCapture::decode_frame(AVFormatContext *format_ctx, AVCodecContext *video_codec_ctx, AVCodecContext *audio_codec_ctx,
- const std::string &pathname, int video_stream_index, int audio_stream_index,
+ const std::string &pathname, int video_stream_index, int audio_stream_index, int subtitle_stream_index,
FrameAllocator::Frame *audio_frame, AudioFormat *audio_format, int64_t *audio_pts, bool *error)
{
*error = false;
*error = true;
return AVFrameWithDeleter(nullptr);
}
+ } else if (pkt.stream_index == subtitle_stream_index) {
+ last_subtitle = string(reinterpret_cast<const char *>(pkt.data), pkt.size);
+ has_last_subtitle = true;
}
} else {
eof = true; // Or error, but ignore that for the time being.
// changes parameters midway, which is allowed in some formats.
//
// You can get out the audio either as decoded or in raw form (Kaeru uses this).
+//
+// If there's a subtitle track, you can also get out the last subtitle at the
+// point of the frame. Note that once we get a video frame, we don't look for
+// subtitles anymore, so if a subtitle and a frame come at the same time, you
+// might not see the subtitle until the next frame.
#include <assert.h>
#include <stdint.h>
return current_frame_ycbcr_format;
}
+	// Returns the raw data of the last subtitle packet seen (no decoding is
+	// done; this is the bytes straight out of the demuxer). Only valid to
+	// call during the frame callback; check get_has_last_subtitle() first,
+	// since this returns an empty string both for "no subtitle yet" and for
+	// a genuinely empty subtitle payload.
+	std::string get_last_subtitle() const
+	{
+		return last_subtitle;
+	}
+
+	// Whether any subtitle packet has been seen on this input yet.
+	// Like get_last_subtitle(), only valid to call during the frame callback.
+	bool get_has_last_subtitle() const
+	{
+		return has_last_subtitle;
+	}
+
void set_dequeue_thread_callbacks(std::function<void()> init, std::function<void()> cleanup) override
{
dequeue_init_callback = init;
// Returns nullptr if no frame was decoded (e.g. EOF).
AVFrameWithDeleter decode_frame(AVFormatContext *format_ctx, AVCodecContext *video_codec_ctx, AVCodecContext *audio_codec_ctx,
- const std::string &pathname, int video_stream_index, int audio_stream_index,
+ const std::string &pathname, int video_stream_index, int audio_stream_index, int subtitle_stream_index,
bmusb::FrameAllocator::Frame *audio_frame, bmusb::AudioFormat *audio_format, int64_t *audio_pts, bool *error);
void convert_audio(const AVFrame *audio_avframe, bmusb::FrameAllocator::Frame *audio_frame, bmusb::AudioFormat *audio_format);
int64_t last_channel_layout;
int last_sample_rate;
+ // Subtitles (no decoding done, really).
+ bool has_last_subtitle = false;
+ std::string last_subtitle;
};
#endif // !defined(_FFMPEG_CAPTURE_H)
card->last_timecode = timecode;
PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)video_frame.userdata;
+ if (card->type == CardType::FFMPEG_INPUT) {
+ FFmpegCapture *ffmpeg_capture = static_cast<FFmpegCapture *>(card->capture.get());
+ userdata->has_last_subtitle = ffmpeg_capture->get_has_last_subtitle();
+ userdata->last_subtitle = ffmpeg_capture->get_last_subtitle();
+ }
size_t cbcr_width, cbcr_height, cbcr_offset, y_offset;
size_t expected_length = video_format.stride * (video_format.height + video_format.extra_lines_top + video_format.extra_lines_bottom);
GLuint last_v210_width[2]; // PixelFormat_10BitYCbCr.
bool last_interlaced, last_has_signal, last_is_connected;
unsigned last_frame_rate_nom, last_frame_rate_den;
+ bool has_last_subtitle = false;
+ std::string last_subtitle;
};
private:
unsigned last_width[MAX_VIDEO_CARDS], last_height[MAX_VIDEO_CARDS];
bool last_interlaced[MAX_VIDEO_CARDS], last_has_signal[MAX_VIDEO_CARDS], last_is_connected[MAX_VIDEO_CARDS];
unsigned last_frame_rate_nom[MAX_VIDEO_CARDS], last_frame_rate_den[MAX_VIDEO_CARDS];
+ bool has_last_subtitle[MAX_VIDEO_CARDS];
+ std::string last_subtitle[MAX_VIDEO_CARDS];
};
InputStateInfo::InputStateInfo(const InputState &input_state)
last_is_connected[signal_num] = userdata->last_is_connected;
last_frame_rate_nom[signal_num] = userdata->last_frame_rate_nom;
last_frame_rate_den[signal_num] = userdata->last_frame_rate_den;
+ has_last_subtitle[signal_num] = userdata->has_last_subtitle;
+ last_subtitle[signal_num] = userdata->last_subtitle;
}
}
return 1;
}
+// Lua binding: InputStateInfo:get_last_subtitle(signal_num).
+// Pushes the last subtitle seen on the mapped signal as a Lua string,
+// or nil if no subtitle has been seen yet on that signal.
+int InputStateInfo_get_last_subtitle(lua_State* L)
+{
+	assert(lua_gettop(L) == 2);
+	InputStateInfo *input_state_info = get_input_state_info(L, 1);
+	Theme *theme = get_theme_updata(L);
+	int signal_num = theme->map_signal(luaL_checknumber(L, 2));
+	if (!input_state_info->has_last_subtitle[signal_num]) {
+		lua_pushnil(L);
+	} else {
+		// The subtitle is raw packet data and may contain embedded NUL
+		// bytes, so push it with an explicit length; lua_pushstring()
+		// would truncate at the first NUL.
+		const std::string &sub = input_state_info->last_subtitle[signal_num];
+		lua_pushlstring(L, sub.data(), sub.size());
+	}
+	return 1;
+}
+
int Effect_set_float(lua_State *L)
{
assert(lua_gettop(L) == 3);
{ "get_is_connected", InputStateInfo_get_is_connected },
{ "get_frame_rate_nom", InputStateInfo_get_frame_rate_nom },
{ "get_frame_rate_den", InputStateInfo_get_frame_rate_den },
+ { "get_last_subtitle", InputStateInfo_get_last_subtitle },
{ NULL, NULL }
};