我想使用 Gstreamer 用 C 程序播放视频并从 SRT 文件添加字幕。
我是 gstreamer 的新手,我以某种方式弄清楚了如何让它在命令行中工作:
gst-launch filesrc location=video.srt ! subparse ! \
overlay. filesrc location=video.ogv ! oggdemux name=demux \
demux. ! queue ! vorbisdec ! audioconvert ! autoaudiosink \
demux. ! queue ! theoradec ! ffmpegcolorspace ! subtitleoverlay name=overlay ! autovideosink;
问题是我可以从 C 程序播放视频,但我不明白如何添加字幕。
/*
 * Play an Ogg (Theora video + Vorbis audio) file, optionally rendering
 * subtitles from an SRT file on top of the video.
 *
 * Usage: player <Ogg/Vorbis filename> [Srt filename]
 *
 * Equivalent gst-launch pipeline (with subtitles):
 *   filesrc ! oggdemux name=demux
 *     demux. ! queue ! vorbisdec ! audioconvert ! autoaudiosink
 *     demux. ! queue ! theoradec ! ffmpegcolorspace ! subtitleoverlay ! autovideosink
 *   filesrc location=file.srt ! subparse ! subtitleoverlay (subtitle pad)
 *
 * Relies on bus_call() and on_pad_added() defined elsewhere in this file.
 * Returns 0 on success, -1 on bad arguments or element-creation failure.
 */
int main (int argc, char *argv[]) {
  GMainLoop *loop;
  GstElement *pipeline, *source, *demuxer, *audioDecoder, *videoDecoder, *audioConv, *videoConv, *videosink,
  *audiosink, *audioQueue, *videoQueue;
  /* Subtitle branch elements; only created when an SRT file is given. */
  GstElement *subSource = NULL, *subParser = NULL, *subOverlay = NULL;
  GstBus *bus;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* BUG FIX: the original condition was `argc < 2 && argc > 3`, which is
   * always false, so invalid invocations were never rejected.  Use `||`. */
  if (argc < 2 || argc > 3) {
    g_printerr ("Usage: %s <Ogg/Vorbis filename> [Srt filename]\n", argv[0]);
    return -1;
  }

  pipeline = gst_pipeline_new ("audiovideo-player");
  source = gst_element_factory_make ("filesrc", "file-source");
  demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
  audioQueue = gst_element_factory_make ("queue", "audio-queue");
  videoQueue = gst_element_factory_make ("queue", "video-queue");
  audioDecoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
  videoDecoder = gst_element_factory_make ("theoradec", "theora-decoder");
  audioConv = gst_element_factory_make ("audioconvert", "audio-converter");
  videoConv = gst_element_factory_make ("ffmpegcolorspace", "video-converter");
  videosink = gst_element_factory_make ("autovideosink", "video-output");
  audiosink = gst_element_factory_make ("autoaudiosink", "audio-output");
  if (!pipeline || !source || !demuxer || !audioDecoder || !audioConv || !videoDecoder || !videoConv || !audioQueue
      || !videoQueue || !audiosink || !videosink) {
    g_printerr ("One element could not be created. Exiting.\n");
    exit(-1);
  }

  /* Create the subtitle branch only when an SRT file was supplied, so the
   * plain two-argument invocation behaves exactly as before. */
  if (argc == 3) {
    subSource  = gst_element_factory_make ("filesrc", "subtitle-source");
    subParser  = gst_element_factory_make ("subparse", "subtitle-parser");
    subOverlay = gst_element_factory_make ("subtitleoverlay", "subtitle-overlay");
    if (!subSource || !subParser || !subOverlay) {
      g_printerr ("One subtitle element could not be created. Exiting.\n");
      exit(-1);
    }
    g_object_set (G_OBJECT (subSource), "location", argv[2], NULL);
  }

  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  gst_bin_add_many (GST_BIN (pipeline),
      source, demuxer,
      audioQueue, videoQueue, audioDecoder, videoDecoder,
      videoConv, audioConv, videosink, audiosink, NULL);
  if (subOverlay) {
    gst_bin_add_many (GST_BIN (pipeline), subSource, subParser, subOverlay, NULL);
  }

  /* Static links; oggdemux pads only appear at runtime and are handled in
   * on_pad_added().  Check the results instead of silently ignoring them. */
  if (!gst_element_link (source, demuxer)) {
    g_printerr ("Could not link source to demuxer. Exiting.\n");
    exit(-1);
  }
  if (subOverlay) {
    /* video: queue ! theoradec ! ffmpegcolorspace ! subtitleoverlay ! sink,
     * text:  filesrc ! subparse ! subtitleoverlay (subtitle pad is chosen
     * automatically because subparse outputs text caps). */
    if (!gst_element_link_many (videoQueue, videoDecoder, videoConv, subOverlay, videosink, NULL)
        || !gst_element_link (subSource, subParser)
        || !gst_element_link (subParser, subOverlay)) {
      g_printerr ("Could not link video/subtitle branch. Exiting.\n");
      exit(-1);
    }
  } else {
    if (!gst_element_link_many (videoQueue, videoDecoder, videoConv, videosink, NULL)) {
      g_printerr ("Could not link video branch. Exiting.\n");
      exit(-1);
    }
  }
  if (!gst_element_link_many (audioQueue, audioDecoder, audioConv, audiosink, NULL)) {
    g_printerr ("Could not link audio branch. Exiting.\n");
    exit(-1);
  }

  /* NOTE(review): connecting the same callback twice with different user
   * data means BOTH handlers fire for EVERY new demuxer pad; on_pad_added
   * (defined elsewhere) must check the pad's caps before linking, or audio
   * pads may be linked to the video queue and vice versa — confirm. */
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), videoQueue);
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), audioQueue);

  g_print ("Lecture de : %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("En cours...\n");
  g_main_loop_run (loop);

  /* Tear down: stop the pipeline before dropping the last reference. */
  g_print ("Arret de la lecture\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Suppression du pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}