| /* GStreamer |
| * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com> |
| * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>) |
| * Copyright (C) <2009-2010> STEricsson <benjamin.gaignard@stericsson.com> |
| * |
| * This library is free software; you can redistribute it and/or |
| * modify it under the terms of the GNU Library General Public |
| * License as published by the Free Software Foundation; either |
| * version 2 of the License, or (at your option) any later version. |
| * |
| * This library is distributed in the hope that it will be useful, |
| * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| * Library General Public License for more details. |
| * |
| * You should have received a copy of the GNU Library General Public |
| * License along with this library; if not, write to the |
| * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, |
| * Boston, MA 02110-1301, USA. |
| */ |
| /* Element-Checklist-Version: 5 */ |
| |
| /** |
| * SECTION:element-avidemux |
| * |
| * Demuxes an .avi file into raw or compressed audio and/or video streams. |
| * |
| * This element supports both push and pull-based scheduling, depending on the |
| * capabilities of the upstream elements. |
| * |
| * <refsect2> |
| * <title>Example launch line</title> |
| * |[ |
 * gst-launch-1.0 filesrc location=test.avi ! avidemux name=demux demux.audio_0 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_0 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
| * ]| Play (parse and decode) an .avi file and try to output it to |
| * an automatically detected soundcard and videosink. If the AVI file contains |
| * compressed audio or video data, this will only work if you have the |
| * right decoder elements/plugins installed. |
| * </refsect2> |
| */ |
| |
| #ifdef HAVE_CONFIG_H |
| #include "config.h" |
| #endif |
| |
| #include <string.h> |
| #include <stdio.h> |
| |
| #include "gst/riff/riff-media.h" |
| #include "gstavidemux.h" |
| #include "avi-ids.h" |
| #include <gst/gst-i18n-plugin.h> |
| #include <gst/base/gstadapter.h> |
| #include <gst/tag/tag.h> |
| |
| #define DIV_ROUND_UP(s,v) (((s) + ((v)-1)) / (v)) |
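/* e.g. DIV_ROUND_UP (10, 4) == 3: ten units need three blocks of four */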
| |
| #define GST_AVI_KEYFRAME (1 << 0) |
| #define ENTRY_IS_KEYFRAME(e) ((e)->flags == GST_AVI_KEYFRAME) |
| #define ENTRY_SET_KEYFRAME(e) ((e)->flags = GST_AVI_KEYFRAME) |
| #define ENTRY_UNSET_KEYFRAME(e) ((e)->flags = 0) |
| |
| |
| GST_DEBUG_CATEGORY_STATIC (avidemux_debug); |
| #define GST_CAT_DEFAULT avidemux_debug |
| |
| static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink", |
| GST_PAD_SINK, |
| GST_PAD_ALWAYS, |
| GST_STATIC_CAPS ("video/x-msvideo") |
| ); |
| |
| #ifndef GST_DISABLE_GST_DEBUG |
| static const char *const snap_types[2][2] = { |
| {"any", "after"}, |
| {"before", "nearest"}, |
| }; |
| #endif |
| |
| static void gst_avi_demux_finalize (GObject * object); |
| |
| static void gst_avi_demux_reset (GstAviDemux * avi); |
| |
| #if 0 |
| static const GstEventMask *gst_avi_demux_get_event_mask (GstPad * pad); |
| #endif |
| static gboolean gst_avi_demux_handle_src_event (GstPad * pad, |
| GstObject * parent, GstEvent * event); |
| static gboolean gst_avi_demux_handle_sink_event (GstPad * pad, |
| GstObject * parent, GstEvent * event); |
| static gboolean gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event); |
| |
| #if 0 |
| static const GstFormat *gst_avi_demux_get_src_formats (GstPad * pad); |
| #endif |
| static gboolean gst_avi_demux_handle_src_query (GstPad * pad, |
| GstObject * parent, GstQuery * query); |
| static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format, |
| gint64 src_value, GstFormat * dest_format, gint64 * dest_value); |
| |
| static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment, |
| GstSeekFlags flags); |
| static gboolean gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad, |
| GstEvent * event); |
| static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, |
| GstEvent * event); |
| static void gst_avi_demux_loop (GstPad * pad); |
| static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad, |
| GstObject * parent); |
| static gboolean gst_avi_demux_sink_activate_mode (GstPad * sinkpad, |
| GstObject * parent, GstPadMode mode, gboolean active); |
| static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstObject * parent, |
| GstBuffer * buf); |
| #if 0 |
| static void gst_avi_demux_set_index (GstElement * element, GstIndex * index); |
| static GstIndex *gst_avi_demux_get_index (GstElement * element); |
| #endif |
| static GstStateChangeReturn gst_avi_demux_change_state (GstElement * element, |
| GstStateChange transition); |
| static void gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi); |
| static void gst_avi_demux_get_buffer_info (GstAviDemux * avi, |
| GstAviStream * stream, guint entry_n, GstClockTime * timestamp, |
| GstClockTime * ts_end, guint64 * offset, guint64 * offset_end); |
| |
| static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf); |
| static void gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf); |
| |
| static void parse_tag_value (GstAviDemux * avi, GstTagList * taglist, |
| const gchar * type, guint8 * ptr, guint tsize); |
| |
| /* GObject methods */ |
| |
| #define gst_avi_demux_parent_class parent_class |
| G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT); |
| |
| static void |
| gst_avi_demux_class_init (GstAviDemuxClass * klass) |
| { |
| GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); |
| GObjectClass *gobject_class = (GObjectClass *) klass; |
| GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl, *subpicsrctempl; |
| GstCaps *audcaps, *vidcaps, *subcaps, *subpiccaps; |
| |
| GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux", |
| 0, "Demuxer for AVI streams"); |
| |
| gobject_class->finalize = gst_avi_demux_finalize; |
| |
| gstelement_class->change_state = |
| GST_DEBUG_FUNCPTR (gst_avi_demux_change_state); |
| #if 0 |
| gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index); |
| gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index); |
| #endif |
| |
| audcaps = gst_riff_create_audio_template_caps (); |
| gst_caps_append (audcaps, gst_caps_new_empty_simple ("audio/x-avi-unknown")); |
| audiosrctempl = gst_pad_template_new ("audio_%u", |
| GST_PAD_SRC, GST_PAD_SOMETIMES, audcaps); |
| |
| vidcaps = gst_riff_create_video_template_caps (); |
| gst_caps_append (vidcaps, gst_riff_create_iavs_template_caps ()); |
| gst_caps_append (vidcaps, gst_caps_new_empty_simple ("video/x-avi-unknown")); |
| videosrctempl = gst_pad_template_new ("video_%u", |
| GST_PAD_SRC, GST_PAD_SOMETIMES, vidcaps); |
| |
| subcaps = gst_caps_new_empty_simple ("application/x-subtitle-avi"); |
| subsrctempl = gst_pad_template_new ("subtitle_%u", |
| GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps); |
| subpiccaps = gst_caps_new_empty_simple ("subpicture/x-xsub"); |
| subpicsrctempl = gst_pad_template_new ("subpicture_%u", |
| GST_PAD_SRC, GST_PAD_SOMETIMES, subpiccaps); |
| gst_element_class_add_pad_template (gstelement_class, audiosrctempl); |
| gst_element_class_add_pad_template (gstelement_class, videosrctempl); |
| gst_element_class_add_pad_template (gstelement_class, subsrctempl); |
| gst_element_class_add_pad_template (gstelement_class, subpicsrctempl); |
| gst_element_class_add_static_pad_template (gstelement_class, &sink_templ); |
| |
| gst_caps_unref (audcaps); |
| gst_caps_unref (vidcaps); |
| gst_caps_unref (subcaps); |
| gst_caps_unref (subpiccaps); |
| |
| gst_element_class_set_static_metadata (gstelement_class, "Avi demuxer", |
| "Codec/Demuxer", |
| "Demultiplex an avi file into audio and video", |
| "Erik Walthinsen <omega@cse.ogi.edu>, " |
| "Wim Taymans <wim.taymans@chello.be>, " |
| "Thijs Vermeir <thijsvermeir@gmail.com>"); |
| } |
| |
| static void |
| gst_avi_demux_init (GstAviDemux * avi) |
| { |
| avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink"); |
| gst_pad_set_activate_function (avi->sinkpad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate)); |
| gst_pad_set_activatemode_function (avi->sinkpad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_mode)); |
| gst_pad_set_chain_function (avi->sinkpad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_chain)); |
| gst_pad_set_event_function (avi->sinkpad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_handle_sink_event)); |
| gst_element_add_pad (GST_ELEMENT_CAST (avi), avi->sinkpad); |
| |
| avi->adapter = gst_adapter_new (); |
| avi->flowcombiner = gst_flow_combiner_new (); |
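  /* the flow combiner aggregates the flow returns of all exposed source pads,
   * so e.g. a single not-linked pad does not stop the whole demuxer */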
| |
| gst_avi_demux_reset (avi); |
| |
| GST_OBJECT_FLAG_SET (avi, GST_ELEMENT_FLAG_INDEXABLE); |
| } |
| |
| static void |
| gst_avi_demux_finalize (GObject * object) |
| { |
| GstAviDemux *avi = GST_AVI_DEMUX (object); |
| |
| GST_DEBUG ("AVI: finalize"); |
| |
| g_object_unref (avi->adapter); |
| gst_flow_combiner_free (avi->flowcombiner); |
| |
| G_OBJECT_CLASS (parent_class)->finalize (object); |
| } |
| |
| static void |
| gst_avi_demux_reset_stream (GstAviDemux * avi, GstAviStream * stream) |
| { |
| g_free (stream->strh); |
| g_free (stream->strf.data); |
| g_free (stream->name); |
| g_free (stream->index); |
| g_free (stream->indexes); |
| if (stream->initdata) |
| gst_buffer_unref (stream->initdata); |
| if (stream->extradata) |
| gst_buffer_unref (stream->extradata); |
| if (stream->rgb8_palette) |
| gst_buffer_unref (stream->rgb8_palette); |
| if (stream->pad) { |
| if (stream->exposed) { |
| gst_pad_set_active (stream->pad, FALSE); |
| gst_element_remove_pad (GST_ELEMENT_CAST (avi), stream->pad); |
| gst_flow_combiner_remove_pad (avi->flowcombiner, stream->pad); |
| } else |
| gst_object_unref (stream->pad); |
| } |
| if (stream->taglist) { |
| gst_tag_list_unref (stream->taglist); |
| stream->taglist = NULL; |
| } |
| memset (stream, 0, sizeof (GstAviStream)); |
| } |
| |
| static void |
| gst_avi_demux_reset (GstAviDemux * avi) |
| { |
| gint i; |
| |
| GST_DEBUG ("AVI: reset"); |
| |
| for (i = 0; i < avi->num_streams; i++) |
| gst_avi_demux_reset_stream (avi, &avi->stream[i]); |
| |
| avi->header_state = GST_AVI_DEMUX_HEADER_TAG_LIST; |
| avi->num_streams = 0; |
| avi->num_v_streams = 0; |
| avi->num_a_streams = 0; |
| avi->num_t_streams = 0; |
| avi->num_sp_streams = 0; |
| avi->main_stream = -1; |
| |
| avi->have_group_id = FALSE; |
| avi->group_id = G_MAXUINT; |
| |
| avi->state = GST_AVI_DEMUX_START; |
| avi->offset = 0; |
| avi->building_index = FALSE; |
| |
| avi->index_offset = 0; |
| g_free (avi->avih); |
| avi->avih = NULL; |
| |
| #if 0 |
| if (avi->element_index) |
| gst_object_unref (avi->element_index); |
| avi->element_index = NULL; |
| #endif |
| |
| if (avi->seg_event) { |
| gst_event_unref (avi->seg_event); |
| avi->seg_event = NULL; |
| } |
| if (avi->seek_event) { |
| gst_event_unref (avi->seek_event); |
| avi->seek_event = NULL; |
| } |
| |
| if (avi->globaltags) |
| gst_tag_list_unref (avi->globaltags); |
| avi->globaltags = NULL; |
| |
| avi->got_tags = TRUE; /* we always want to push global tags */ |
| avi->have_eos = FALSE; |
| avi->seekable = TRUE; |
| |
| gst_adapter_clear (avi->adapter); |
| |
| gst_segment_init (&avi->segment, GST_FORMAT_TIME); |
| avi->segment_seqnum = 0; |
| } |
| |
| |
| /* GstElement methods */ |
| |
| #if 0 |
| static const GstFormat * |
| gst_avi_demux_get_src_formats (GstPad * pad) |
| { |
| GstAviStream *stream = gst_pad_get_element_private (pad); |
| |
| static const GstFormat src_a_formats[] = { |
| GST_FORMAT_TIME, |
| GST_FORMAT_BYTES, |
| GST_FORMAT_DEFAULT, |
| 0 |
| }; |
| static const GstFormat src_v_formats[] = { |
| GST_FORMAT_TIME, |
| GST_FORMAT_DEFAULT, |
| 0 |
| }; |
| |
| return (stream->strh->type == GST_RIFF_FCC_auds ? |
| src_a_formats : src_v_formats); |
| } |
| #endif |
| |
| /* assumes stream->strf.auds->av_bps != 0 */ |
| static inline GstClockTime |
| avi_stream_convert_bytes_to_time_unchecked (GstAviStream * stream, |
| guint64 bytes) |
| { |
| return gst_util_uint64_scale_int (bytes, GST_SECOND, |
| stream->strf.auds->av_bps); |
| } |
| |
| static inline guint64 |
| avi_stream_convert_time_to_bytes_unchecked (GstAviStream * stream, |
| GstClockTime time) |
| { |
| return gst_util_uint64_scale_int (time, stream->strf.auds->av_bps, |
| GST_SECOND); |
| } |
| |
| /* assumes stream->strh->rate != 0 */ |
| static inline GstClockTime |
| avi_stream_convert_frames_to_time_unchecked (GstAviStream * stream, |
| guint64 frames) |
| { |
| return gst_util_uint64_scale (frames, stream->strh->scale * GST_SECOND, |
| stream->strh->rate); |
| } |
| |
| static inline guint64 |
| avi_stream_convert_time_to_frames_unchecked (GstAviStream * stream, |
| GstClockTime time) |
| { |
| return gst_util_uint64_scale (time, stream->strh->rate, |
| stream->strh->scale * GST_SECOND); |
| } |
| |
| static gboolean |
| gst_avi_demux_src_convert (GstPad * pad, |
| GstFormat src_format, |
| gint64 src_value, GstFormat * dest_format, gint64 * dest_value) |
| { |
| GstAviStream *stream = gst_pad_get_element_private (pad); |
| gboolean res = TRUE; |
| |
| GST_LOG_OBJECT (pad, |
| "Received src_format:%s, src_value:%" G_GUINT64_FORMAT |
| ", dest_format:%s", gst_format_get_name (src_format), src_value, |
| gst_format_get_name (*dest_format)); |
| |
| if (G_UNLIKELY (src_format == *dest_format)) { |
| *dest_value = src_value; |
| goto done; |
| } |
| if (G_UNLIKELY (!stream->strh || !stream->strf.data)) { |
| res = FALSE; |
| goto done; |
| } |
| if (G_UNLIKELY (stream->strh->type == GST_RIFF_FCC_vids && |
| (src_format == GST_FORMAT_BYTES |
| || *dest_format == GST_FORMAT_BYTES))) { |
| res = FALSE; |
| goto done; |
| } |
| |
| switch (src_format) { |
| case GST_FORMAT_TIME: |
| switch (*dest_format) { |
| case GST_FORMAT_BYTES: |
| *dest_value = gst_util_uint64_scale_int (src_value, |
| stream->strf.auds->av_bps, GST_SECOND); |
| break; |
| case GST_FORMAT_DEFAULT: |
| *dest_value = |
| gst_util_uint64_scale_round (src_value, stream->strh->rate, |
| stream->strh->scale * GST_SECOND); |
| break; |
| default: |
| res = FALSE; |
| break; |
| } |
| break; |
| case GST_FORMAT_BYTES: |
| switch (*dest_format) { |
| case GST_FORMAT_TIME: |
| if (stream->strf.auds->av_bps != 0) { |
| *dest_value = avi_stream_convert_bytes_to_time_unchecked (stream, |
| src_value); |
| } else |
| res = FALSE; |
| break; |
| default: |
| res = FALSE; |
| break; |
| } |
| break; |
| case GST_FORMAT_DEFAULT: |
| switch (*dest_format) { |
| case GST_FORMAT_TIME: |
| *dest_value = |
| avi_stream_convert_frames_to_time_unchecked (stream, src_value); |
| break; |
| default: |
| res = FALSE; |
| break; |
| } |
| break; |
| default: |
| res = FALSE; |
| } |
| |
| done: |
| GST_LOG_OBJECT (pad, |
| "Returning res:%d dest_format:%s dest_value:%" G_GUINT64_FORMAT, res, |
| gst_format_get_name (*dest_format), *dest_value); |
| return res; |
| } |
| |
| static gboolean |
| gst_avi_demux_handle_src_query (GstPad * pad, GstObject * parent, |
| GstQuery * query) |
| { |
| gboolean res = TRUE; |
| GstAviDemux *avi = GST_AVI_DEMUX (parent); |
| |
| GstAviStream *stream = gst_pad_get_element_private (pad); |
| |
| if (!stream->strh || !stream->strf.data) |
| return gst_pad_query_default (pad, parent, query); |
| |
| switch (GST_QUERY_TYPE (query)) { |
| case GST_QUERY_POSITION:{ |
| gint64 pos = 0; |
| |
| GST_DEBUG ("pos query for stream %u: frames %u, bytes %u", |
| stream->num, stream->current_entry, stream->current_total); |
| |
| /* FIXME, this looks clumsy */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| if (stream->is_vbr) { |
| /* VBR */ |
| pos = avi_stream_convert_frames_to_time_unchecked (stream, |
| stream->current_entry); |
| GST_DEBUG_OBJECT (avi, "VBR convert frame %u, time %" |
| GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos)); |
| } else if (stream->strf.auds->av_bps != 0) { |
| /* CBR */ |
| pos = avi_stream_convert_bytes_to_time_unchecked (stream, |
| stream->current_total); |
| GST_DEBUG_OBJECT (avi, |
| "CBR convert bytes %u, time %" GST_TIME_FORMAT, |
| stream->current_total, GST_TIME_ARGS (pos)); |
| } else if (stream->idx_n != 0 && stream->total_bytes != 0) { |
| /* calculate timestamps based on percentage of length */ |
| guint64 xlen = avi->avih->us_frame * |
| avi->avih->tot_frames * GST_USECOND; |
| |
| pos = gst_util_uint64_scale (xlen, stream->current_total, |
| stream->total_bytes); |
| GST_DEBUG_OBJECT (avi, |
| "CBR perc convert bytes %u, time %" GST_TIME_FORMAT, |
| stream->current_total, GST_TIME_ARGS (pos)); |
| } else { |
| /* we don't know */ |
| res = FALSE; |
| } |
| } else { |
| if (stream->strh->rate != 0) { |
| pos = gst_util_uint64_scale ((guint64) stream->current_entry * |
| stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate); |
| } else { |
| pos = stream->current_entry * avi->avih->us_frame * GST_USECOND; |
| } |
| } |
| if (res) { |
| GST_DEBUG ("pos query : %" GST_TIME_FORMAT, GST_TIME_ARGS (pos)); |
| gst_query_set_position (query, GST_FORMAT_TIME, pos); |
| } else |
| GST_WARNING ("pos query failed"); |
| break; |
| } |
| case GST_QUERY_DURATION: |
| { |
| GstFormat fmt; |
| GstClockTime duration; |
| |
| /* only act on audio or video streams */ |
| if (stream->strh->type != GST_RIFF_FCC_auds && |
| stream->strh->type != GST_RIFF_FCC_vids && |
| stream->strh->type != GST_RIFF_FCC_iavs) { |
| res = FALSE; |
| break; |
| } |
| |
| /* take stream duration, fall back to avih duration */ |
| if ((duration = stream->duration) == -1) |
| if ((duration = stream->hdr_duration) == -1) |
| duration = avi->duration; |
| |
| gst_query_parse_duration (query, &fmt, NULL); |
| |
| switch (fmt) { |
| case GST_FORMAT_TIME: |
| gst_query_set_duration (query, fmt, duration); |
| break; |
| case GST_FORMAT_DEFAULT: |
| { |
| gint64 dur; |
| GST_DEBUG_OBJECT (query, "total frames is %" G_GUINT32_FORMAT, |
| stream->idx_n); |
| |
| if (stream->idx_n > 0) |
| gst_query_set_duration (query, fmt, stream->idx_n); |
| else if (gst_pad_query_convert (pad, GST_FORMAT_TIME, |
| duration, fmt, &dur)) |
| gst_query_set_duration (query, fmt, dur); |
| break; |
| } |
| default: |
| res = FALSE; |
| break; |
| } |
| break; |
| } |
| case GST_QUERY_SEEKING:{ |
| GstFormat fmt; |
| |
| gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL); |
| if (fmt == GST_FORMAT_TIME) { |
| gboolean seekable = TRUE; |
| |
| if (avi->streaming) { |
| seekable = avi->seekable; |
| } |
| |
| gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, |
| 0, stream->duration); |
| res = TRUE; |
| } |
| break; |
| } |
| case GST_QUERY_CONVERT:{ |
| GstFormat src_fmt, dest_fmt; |
| gint64 src_val, dest_val; |
| |
| gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); |
| if ((res = gst_avi_demux_src_convert (pad, src_fmt, src_val, &dest_fmt, |
| &dest_val))) |
| gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); |
| else |
| res = gst_pad_query_default (pad, parent, query); |
| break; |
| } |
| case GST_QUERY_SEGMENT: |
| { |
| GstFormat format; |
| gint64 start, stop; |
| |
| format = avi->segment.format; |
| |
| start = |
| gst_segment_to_stream_time (&avi->segment, format, |
| avi->segment.start); |
| if ((stop = avi->segment.stop) == -1) |
| stop = avi->segment.duration; |
| else |
| stop = gst_segment_to_stream_time (&avi->segment, format, stop); |
| |
| gst_query_set_segment (query, avi->segment.rate, format, start, stop); |
| res = TRUE; |
| break; |
| } |
| default: |
| res = gst_pad_query_default (pad, parent, query); |
| break; |
| } |
| |
| return res; |
| } |
| |
| #if 0 |
| static const GstEventMask * |
| gst_avi_demux_get_event_mask (GstPad * pad) |
| { |
| static const GstEventMask masks[] = { |
| {GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT}, |
| {0,} |
| }; |
| |
| return masks; |
| } |
| #endif |
| |
| #if 0 |
| static guint64 |
| gst_avi_demux_seek_streams (GstAviDemux * avi, guint64 offset, gboolean before) |
| { |
| GstAviStream *stream; |
| GstIndexEntry *entry; |
| gint i; |
| gint64 val, min = offset; |
| |
| for (i = 0; i < avi->num_streams; i++) { |
| stream = &avi->stream[i]; |
| |
| entry = gst_index_get_assoc_entry (avi->element_index, stream->index_id, |
| before ? GST_INDEX_LOOKUP_BEFORE : GST_INDEX_LOOKUP_AFTER, |
| GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, offset); |
| |
| if (before) { |
| if (entry) { |
| gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val); |
| GST_DEBUG_OBJECT (avi, "stream %d, previous entry at %" |
| G_GUINT64_FORMAT, i, val); |
| if (val < min) |
| min = val; |
| } |
| continue; |
| } |
| |
| if (!entry) { |
| GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i); |
| stream->current_entry = 0; |
| stream->current_total = 0; |
| continue; |
| } |
| |
| gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val); |
| GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT, |
| i, val); |
| |
| gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &val); |
| stream->current_total = val; |
| gst_index_entry_assoc_map (entry, GST_FORMAT_DEFAULT, &val); |
| stream->current_entry = val; |
| } |
| |
| return min; |
| } |
| #endif |
| |
| static gint |
| gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry, |
| guint64 * offset) |
| { |
| if (entry->offset < *offset) |
| return -1; |
| else if (entry->offset > *offset) |
| return 1; |
| return 0; |
| } |
| |
| static guint64 |
| gst_avi_demux_seek_streams_index (GstAviDemux * avi, guint64 offset, |
| gboolean before) |
| { |
| GstAviStream *stream; |
| GstAviIndexEntry *entry; |
| gint i; |
| gint64 val, min = offset; |
| guint index = 0; |
| |
| for (i = 0; i < avi->num_streams; i++) { |
| stream = &avi->stream[i]; |
| |
| /* compensate for chunk header */ |
| offset += 8; |
| entry = |
| gst_util_array_binary_search (stream->index, stream->idx_n, |
| sizeof (GstAviIndexEntry), |
| (GCompareDataFunc) gst_avi_demux_index_entry_offset_search, |
| before ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER, &offset, NULL); |
| offset -= 8; |
| |
| if (entry) |
| index = entry - stream->index; |
| |
| if (before) { |
| if (entry) { |
| val = stream->index[index].offset; |
| GST_DEBUG_OBJECT (avi, |
| "stream %d, previous entry at %" G_GUINT64_FORMAT, i, val); |
| if (val < min) |
| min = val; |
| } |
| continue; |
| } |
| |
| if (!entry) { |
| GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i); |
| stream->current_entry = 0; |
| stream->current_total = 0; |
| continue; |
| } |
| |
| val = stream->index[index].offset - 8; |
| GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT, i, |
| val); |
| |
| stream->current_total = stream->index[index].total; |
| stream->current_entry = index; |
| } |
| |
| return min; |
| } |
| |
| #define GST_AVI_SEEK_PUSH_DISPLACE (4 * GST_SECOND) |
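/* slack presumably applied when handling a push-mode (time-based) seek so
 * that we land a little before the requested position */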
| |
| static gboolean |
| gst_avi_demux_handle_sink_event (GstPad * pad, GstObject * parent, |
| GstEvent * event) |
| { |
| gboolean res = TRUE; |
| GstAviDemux *avi = GST_AVI_DEMUX (parent); |
| |
| GST_DEBUG_OBJECT (avi, |
| "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event); |
| |
| switch (GST_EVENT_TYPE (event)) { |
| case GST_EVENT_SEGMENT: |
| { |
| gint64 boffset, offset = 0; |
| GstSegment segment; |
| GstEvent *segment_event; |
| |
| /* some debug output */ |
| gst_event_copy_segment (event, &segment); |
| GST_DEBUG_OBJECT (avi, "received newsegment %" GST_SEGMENT_FORMAT, |
| &segment); |
| |
| /* chain will send initial newsegment after pads have been added */ |
| if (avi->state != GST_AVI_DEMUX_MOVI) { |
| GST_DEBUG_OBJECT (avi, "still starting, eating event"); |
| goto exit; |
| } |
| |
| /* we only expect a BYTE segment, e.g. following a seek */ |
| if (segment.format != GST_FORMAT_BYTES) { |
| GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring"); |
| goto exit; |
| } |
| |
| if (avi->have_index) { |
| GstAviIndexEntry *entry; |
| guint i = 0, index = 0, k = 0; |
| GstAviStream *stream; |
| |
| /* compensate chunk header, stored index offset points after header */ |
| boffset = segment.start + 8; |
| /* find which stream we're on */ |
| do { |
| stream = &avi->stream[i]; |
| |
| /* find the index for start bytes offset */ |
| entry = gst_util_array_binary_search (stream->index, |
| stream->idx_n, sizeof (GstAviIndexEntry), |
| (GCompareDataFunc) gst_avi_demux_index_entry_offset_search, |
| GST_SEARCH_MODE_AFTER, &boffset, NULL); |
| |
| if (entry == NULL) |
| continue; |
| index = entry - stream->index; |
| |
| /* we are on the stream with a chunk start offset closest to start */ |
| if (!offset || stream->index[index].offset < offset) { |
| offset = stream->index[index].offset; |
| k = i; |
| } |
| /* exact match needs no further searching */ |
| if (stream->index[index].offset == boffset) |
| break; |
| } while (++i < avi->num_streams); |
| boffset -= 8; |
| offset -= 8; |
| stream = &avi->stream[k]; |
| |
| /* so we have no idea what is to come, or where we are */ |
| if (!offset) { |
| GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS"); |
| goto eos; |
| } |
| |
| /* get the ts corresponding to start offset bytes for the stream */ |
| gst_avi_demux_get_buffer_info (avi, stream, index, |
| (GstClockTime *) & segment.time, NULL, NULL, NULL); |
| #if 0 |
| } else if (avi->element_index) { |
| GstIndexEntry *entry; |
| |
| /* Let's check if we have an index entry for this position */ |
| entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id, |
| GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE, |
| GST_FORMAT_BYTES, segment.start); |
| |
| /* we can not go where we have not yet been before ... */ |
| if (!entry) { |
| GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS"); |
| goto eos; |
| } |
| |
| gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, |
| (gint64 *) & segment.time); |
| gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset); |
| #endif |
| } else { |
| GST_WARNING_OBJECT (avi, "no index data, forcing EOS"); |
| goto eos; |
| } |
| |
| segment.format = GST_FORMAT_TIME; |
| segment.start = segment.time; |
| segment.stop = GST_CLOCK_TIME_NONE; |
| segment.position = segment.start; |
| |
| /* rescue duration */ |
| segment.duration = avi->segment.duration; |
| |
| /* set up segment and send downstream */ |
| gst_segment_copy_into (&segment, &avi->segment); |
| |
| GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment); |
| avi->segment_seqnum = gst_event_get_seqnum (event); |
| segment_event = gst_event_new_segment (&segment); |
| gst_event_set_seqnum (segment_event, gst_event_get_seqnum (event)); |
| gst_avi_demux_push_event (avi, segment_event); |
| |
| GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT, |
| boffset); |
| |
| /* adjust state for streaming thread accordingly */ |
| if (avi->have_index) |
| gst_avi_demux_seek_streams_index (avi, offset, FALSE); |
| #if 0 |
| else |
| gst_avi_demux_seek_streams (avi, offset, FALSE); |
| #endif |
| |
| /* set up streaming thread */ |
| g_assert (offset >= boffset); |
| avi->offset = boffset; |
| avi->todrop = offset - boffset; |
| |
| exit: |
| gst_event_unref (event); |
| res = TRUE; |
| break; |
| eos: |
| /* set up for EOS */ |
| avi->have_eos = TRUE; |
| goto exit; |
| } |
| case GST_EVENT_EOS: |
| { |
| if (avi->state != GST_AVI_DEMUX_MOVI) { |
| gst_event_unref (event); |
| GST_ELEMENT_ERROR (avi, STREAM, DEMUX, |
| (NULL), ("got eos and didn't receive a complete header object")); |
| } else if (!gst_avi_demux_push_event (avi, event)) { |
| GST_ELEMENT_ERROR (avi, STREAM, DEMUX, |
| (NULL), ("got eos but no streams (yet)")); |
| } |
| break; |
| } |
| case GST_EVENT_FLUSH_STOP: |
| { |
| gint i; |
| |
| gst_adapter_clear (avi->adapter); |
| avi->have_eos = FALSE; |
| for (i = 0; i < avi->num_streams; i++) { |
| avi->stream[i].discont = TRUE; |
| } |
| /* fall through to default case so that the event gets passed downstream */ |
| } |
| default: |
| res = gst_pad_event_default (pad, parent, event); |
| break; |
| } |
| |
| return res; |
| } |
| |
| static gboolean |
| gst_avi_demux_handle_src_event (GstPad * pad, GstObject * parent, |
| GstEvent * event) |
| { |
| gboolean res = TRUE; |
| GstAviDemux *avi = GST_AVI_DEMUX (parent); |
| |
| GST_DEBUG_OBJECT (avi, |
| "have event type %s: %p on src pad", GST_EVENT_TYPE_NAME (event), event); |
| |
| switch (GST_EVENT_TYPE (event)) { |
| case GST_EVENT_SEEK: |
| if (!avi->streaming) { |
| res = gst_avi_demux_handle_seek (avi, pad, event); |
| } else { |
| res = gst_avi_demux_handle_seek_push (avi, pad, event); |
| } |
| gst_event_unref (event); |
| break; |
| default: |
| res = gst_pad_event_default (pad, parent, event); |
| break; |
| } |
| |
| return res; |
| } |
| |
| /* streaming helper (push) */ |
| |
| /* |
| * gst_avi_demux_peek_chunk_info: |
| * @avi: Avi object |
 * @tag: holder for the chunk tag
 * @size: holder for the chunk size
| * |
| * Peek next chunk info (tag and size) |
| * |
 * Returns: TRUE when the chunk info (tag and size) could be peeked
| */ |
| static gboolean |
| gst_avi_demux_peek_chunk_info (GstAviDemux * avi, guint32 * tag, guint32 * size) |
| { |
| const guint8 *data = NULL; |
| |
| if (gst_adapter_available (avi->adapter) < 8) |
| return FALSE; |
| |
| data = gst_adapter_map (avi->adapter, 8); |
| *tag = GST_READ_UINT32_LE (data); |
| *size = GST_READ_UINT32_LE (data + 4); |
| gst_adapter_unmap (avi->adapter); |
| |
| return TRUE; |
| } |
| |
| /* |
| * gst_avi_demux_peek_chunk: |
| * @avi: Avi object |
 * @tag: holder for the chunk tag
 * @size: holder for the chunk size
| * |
| * Peek enough data for one full chunk |
| * |
 * Returns: %TRUE when a whole chunk (header and payload) is available
| */ |
| static gboolean |
| gst_avi_demux_peek_chunk (GstAviDemux * avi, guint32 * tag, guint32 * size) |
| { |
| guint32 peek_size = 0; |
| gint available; |
| |
| if (!gst_avi_demux_peek_chunk_info (avi, tag, size)) |
| goto peek_failed; |
| |
| /* size 0 -> empty data buffer would surprise most callers, |
| * large size -> do not bother trying to squeeze that into adapter, |
| * so we throw poor man's exception, which can be caught if caller really |
| * wants to handle 0 size chunk */ |
| if (!(*size) || (*size) >= (1 << 30)) |
| goto strange_size; |
| |
  /* RIFF chunk payloads are padded to even sizes, so wait for the padded size */
  peek_size = (*size + 1) & ~1;
| available = gst_adapter_available (avi->adapter); |
| |
| GST_DEBUG_OBJECT (avi, |
| "Need to peek chunk of %d bytes to read chunk %" GST_FOURCC_FORMAT |
| ", %d bytes available", *size, GST_FOURCC_ARGS (*tag), available); |
| |
| if (available < (8 + peek_size)) |
| goto need_more; |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| peek_failed: |
| { |
| GST_INFO_OBJECT (avi, "Failed to peek"); |
| return FALSE; |
| } |
| strange_size: |
| { |
| GST_INFO_OBJECT (avi, |
| "Invalid/unexpected chunk size %d for tag %" GST_FOURCC_FORMAT, *size, |
| GST_FOURCC_ARGS (*tag)); |
| /* chain should give up */ |
| avi->abort_buffering = TRUE; |
| return FALSE; |
| } |
| need_more: |
| { |
| GST_INFO_OBJECT (avi, "need more %d < %" G_GUINT32_FORMAT, |
| available, 8 + peek_size); |
| return FALSE; |
| } |
| } |
| |
| /* AVI init */ |
| |
| /* |
| * gst_avi_demux_parse_file_header: |
| * @element: caller element (used for errors/debug). |
| * @buf: input data to be used for parsing. |
| * |
| * "Open" a RIFF/AVI file. The buffer should be at least 12 |
| * bytes long. Takes ownership of @buf. |
| * |
| * Returns: TRUE if the file is a RIFF/AVI file, FALSE otherwise. |
| * Throws an error, caller should error out (fatal). |
| */ |
| static gboolean |
| gst_avi_demux_parse_file_header (GstElement * element, GstBuffer * buf) |
| { |
| guint32 doctype; |
| GstClockTime stamp; |
| |
| stamp = gst_util_get_timestamp (); |
| |
| /* riff_parse posts an error */ |
| if (!gst_riff_parse_file_header (element, buf, &doctype)) |
| return FALSE; |
| |
| if (doctype != GST_RIFF_RIFF_AVI) |
| goto not_avi; |
| |
| stamp = gst_util_get_timestamp () - stamp; |
| GST_DEBUG_OBJECT (element, "header parsing took %" GST_TIME_FORMAT, |
| GST_TIME_ARGS (stamp)); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| not_avi: |
| { |
| GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL), |
| ("File is not an AVI file: 0x%" G_GINT32_MODIFIER "x", doctype)); |
| return FALSE; |
| } |
| } |
| |
| /* |
| * Read AVI file tag when streaming |
| */ |
| static GstFlowReturn |
| gst_avi_demux_stream_init_push (GstAviDemux * avi) |
| { |
| if (gst_adapter_available (avi->adapter) >= 12) { |
| GstBuffer *tmp; |
| |
| tmp = gst_adapter_take_buffer (avi->adapter, 12); |
| |
| GST_DEBUG ("Parsing avi header"); |
| if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), tmp)) { |
| return GST_FLOW_ERROR; |
| } |
| GST_DEBUG ("header ok"); |
| avi->offset += 12; |
| |
| avi->state = GST_AVI_DEMUX_HEADER; |
| } |
| return GST_FLOW_OK; |
| } |
| |
| /* |
| * Read AVI file tag |
| */ |
| static GstFlowReturn |
| gst_avi_demux_stream_init_pull (GstAviDemux * avi) |
| { |
| GstFlowReturn res; |
| GstBuffer *buf = NULL; |
| |
| res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf); |
| if (res != GST_FLOW_OK) |
| return res; |
| else if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), buf)) |
| goto wrong_header; |
| |
| avi->offset += 12; |
| |
| return GST_FLOW_OK; |
| |
| /* ERRORS */ |
| wrong_header: |
| { |
| GST_DEBUG_OBJECT (avi, "error parsing file header"); |
| return GST_FLOW_ERROR; |
| } |
| } |
| |
| /* AVI header handling */ |
| /* |
| * gst_avi_demux_parse_avih: |
| * @avi: caller element (used for errors/debug). |
| * @buf: input data to be used for parsing. |
| * @avih: pointer to structure (filled in by function) containing |
| * stream information (such as flags, number of streams, etc.). |
| * |
| * Read 'avih' header. Discards buffer after use. |
| * |
| * Returns: TRUE on success, FALSE otherwise. Throws an error if |
| * the header is invalid. The caller should error out |
| * (fatal). |
| */ |
| static gboolean |
| gst_avi_demux_parse_avih (GstAviDemux * avi, |
| GstBuffer * buf, gst_riff_avih ** _avih) |
| { |
| gst_riff_avih *avih; |
| gsize size; |
| |
| if (buf == NULL) |
| goto no_buffer; |
| |
| size = gst_buffer_get_size (buf); |
| if (size < sizeof (gst_riff_avih)) |
| goto avih_too_small; |
| |
| avih = g_malloc (size); |
| gst_buffer_extract (buf, 0, avih, size); |
| |
| #if (G_BYTE_ORDER == G_BIG_ENDIAN) |
| avih->us_frame = GUINT32_FROM_LE (avih->us_frame); |
| avih->max_bps = GUINT32_FROM_LE (avih->max_bps); |
| avih->pad_gran = GUINT32_FROM_LE (avih->pad_gran); |
| avih->flags = GUINT32_FROM_LE (avih->flags); |
| avih->tot_frames = GUINT32_FROM_LE (avih->tot_frames); |
| avih->init_frames = GUINT32_FROM_LE (avih->init_frames); |
| avih->streams = GUINT32_FROM_LE (avih->streams); |
| avih->bufsize = GUINT32_FROM_LE (avih->bufsize); |
| avih->width = GUINT32_FROM_LE (avih->width); |
| avih->height = GUINT32_FROM_LE (avih->height); |
| avih->scale = GUINT32_FROM_LE (avih->scale); |
| avih->rate = GUINT32_FROM_LE (avih->rate); |
| avih->start = GUINT32_FROM_LE (avih->start); |
| avih->length = GUINT32_FROM_LE (avih->length); |
| #endif |
| |
| /* debug stuff */ |
| GST_INFO_OBJECT (avi, "avih tag found:"); |
| GST_INFO_OBJECT (avi, " us_frame %u", avih->us_frame); |
| GST_INFO_OBJECT (avi, " max_bps %u", avih->max_bps); |
| GST_INFO_OBJECT (avi, " pad_gran %u", avih->pad_gran); |
| GST_INFO_OBJECT (avi, " flags 0x%08x", avih->flags); |
| GST_INFO_OBJECT (avi, " tot_frames %u", avih->tot_frames); |
| GST_INFO_OBJECT (avi, " init_frames %u", avih->init_frames); |
| GST_INFO_OBJECT (avi, " streams %u", avih->streams); |
| GST_INFO_OBJECT (avi, " bufsize %u", avih->bufsize); |
| GST_INFO_OBJECT (avi, " width %u", avih->width); |
| GST_INFO_OBJECT (avi, " height %u", avih->height); |
| GST_INFO_OBJECT (avi, " scale %u", avih->scale); |
| GST_INFO_OBJECT (avi, " rate %u", avih->rate); |
| GST_INFO_OBJECT (avi, " start %u", avih->start); |
| GST_INFO_OBJECT (avi, " length %u", avih->length); |
| |
| *_avih = avih; |
| gst_buffer_unref (buf); |
| |
  /* us_frame is microseconds per frame; multiply by 1000 to get nanoseconds */
  if (avih->us_frame != 0 && avih->tot_frames != 0)
| avi->duration = |
| (guint64) avih->us_frame * (guint64) avih->tot_frames * 1000; |
| else |
| avi->duration = GST_CLOCK_TIME_NONE; |
| |
| GST_INFO_OBJECT (avi, " header duration %" GST_TIME_FORMAT, |
| GST_TIME_ARGS (avi->duration)); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| no_buffer: |
| { |
| GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No buffer")); |
| return FALSE; |
| } |
| avih_too_small: |
| { |
| GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), |
| ("Too small avih (%" G_GSIZE_FORMAT " available, %d needed)", |
| size, (int) sizeof (gst_riff_avih))); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| } |
| |
| /* |
| * gst_avi_demux_parse_superindex: |
| * @avi: caller element (used for debugging/errors). |
| * @buf: input data to use for parsing. |
| * @locations: locations in the file (byte-offsets) that contain |
| * the actual indexes (see get_avi_demux_parse_subindex()). |
| * The array ends with GST_BUFFER_OFFSET_NONE. |
| * |
| * Reads superindex (openDML-2 spec stuff) from the provided data. |
| * |
| * Returns: TRUE on success, FALSE otherwise. Indexes should be skipped |
| * on error, but they are not fatal. |
| */ |
| static gboolean |
| gst_avi_demux_parse_superindex (GstAviDemux * avi, |
| GstBuffer * buf, guint64 ** _indexes) |
| { |
| GstMapInfo map; |
| guint8 *data; |
| guint16 bpe = 16; |
| guint32 num, i; |
| guint64 *indexes; |
| gsize size; |
| |
| *_indexes = NULL; |
| |
| if (buf) { |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| data = map.data; |
| size = map.size; |
| } else { |
| data = NULL; |
| size = 0; |
| } |
| |
| if (size < 24) |
| goto too_small; |
| |
| /* check type of index. The opendml2 specs state that |
| * there should be 4 dwords per array entry. Type can be |
| * either frame or field (and we don't care). */ |
| if (GST_READ_UINT16_LE (data) != 4 || |
| (data[2] & 0xfe) != 0x0 || data[3] != 0x0) { |
| GST_WARNING_OBJECT (avi, |
| "Superindex for stream has unexpected " |
| "size_entry %d (bytes) or flags 0x%02x/0x%02x", |
| GST_READ_UINT16_LE (data), data[2], data[3]); |
| bpe = GST_READ_UINT16_LE (data) * 4; |
| } |
| num = GST_READ_UINT32_LE (&data[4]); |
| |
| GST_DEBUG_OBJECT (avi, "got %d indexes", num); |
| |
| /* this can't work out well ... */ |
| if (num > G_MAXUINT32 >> 1 || bpe < 8) { |
| goto invalid_params; |
| } |
| |
| indexes = g_new (guint64, num + 1); |
| for (i = 0; i < num; i++) { |
| if (size < 24 + bpe * (i + 1)) |
| break; |
| indexes[i] = GST_READ_UINT64_LE (&data[24 + bpe * i]); |
| GST_DEBUG_OBJECT (avi, "index %d at %" G_GUINT64_FORMAT, i, indexes[i]); |
| } |
| indexes[i] = GST_BUFFER_OFFSET_NONE; |
| *_indexes = indexes; |
| |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| too_small: |
| { |
| GST_ERROR_OBJECT (avi, |
| "Not enough data to parse superindex (%" G_GSIZE_FORMAT |
| " available, 24 needed)", size); |
| if (buf) { |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| } |
| return FALSE; |
| } |
| invalid_params: |
| { |
| GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)", |
| num, bpe); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| } |
| |
| /* add an entry to the index of a stream. @num should be an estimate of the |
| * total amount of index entries for all streams and is used to dynamically |
| * allocate memory for the index entries. */ |
| static inline gboolean |
| gst_avi_demux_add_index (GstAviDemux * avi, GstAviStream * stream, |
| guint num, GstAviIndexEntry * entry) |
| { |
| /* ensure index memory */ |
| if (G_UNLIKELY (stream->idx_n >= stream->idx_max)) { |
| guint idx_max = stream->idx_max; |
| GstAviIndexEntry *new_idx; |
| |
| /* we need to make some more room */ |
| if (idx_max == 0) { |
| /* initial size guess, assume each stream has an equal amount of entries, |
| * overshoot with at least 8K */ |
| idx_max = (num / avi->num_streams) + (8192 / sizeof (GstAviIndexEntry)); |
| } else { |
| idx_max += 8192 / sizeof (GstAviIndexEntry); |
| GST_DEBUG_OBJECT (avi, "expanded index from %u to %u", |
| stream->idx_max, idx_max); |
| } |
| new_idx = g_try_renew (GstAviIndexEntry, stream->index, idx_max); |
| /* out of memory, if this fails stream->index is untouched. */ |
| if (G_UNLIKELY (!new_idx)) |
| return FALSE; |
| /* use new index */ |
| stream->index = new_idx; |
| stream->idx_max = idx_max; |
| } |
| |
| /* update entry total and stream stats. The entry total can be converted to |
| * the timestamp of the entry easily. */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| gint blockalign; |
| |
| if (stream->is_vbr) { |
| entry->total = stream->total_blocks; |
| } else { |
| entry->total = stream->total_bytes; |
| } |
| blockalign = stream->strf.auds->blockalign; |
| if (blockalign > 0) |
| stream->total_blocks += DIV_ROUND_UP (entry->size, blockalign); |
| else |
| stream->total_blocks++; |
| } else { |
| if (stream->is_vbr) { |
| entry->total = stream->idx_n; |
| } else { |
| entry->total = stream->total_bytes; |
| } |
| } |
| stream->total_bytes += entry->size; |
| if (ENTRY_IS_KEYFRAME (entry)) |
| stream->n_keyframes++; |
| |
| /* and add */ |
| GST_LOG_OBJECT (avi, |
| "Adding stream %u, index entry %d, kf %d, size %u " |
| ", offset %" G_GUINT64_FORMAT ", total %" G_GUINT64_FORMAT, stream->num, |
| stream->idx_n, ENTRY_IS_KEYFRAME (entry), entry->size, entry->offset, |
| entry->total); |
| stream->index[stream->idx_n++] = *entry; |
| |
| return TRUE; |
| } |
| |
| /* given @entry_n in @stream, calculate info such as timestamps and |
| * offsets for the entry. */ |
| static void |
| gst_avi_demux_get_buffer_info (GstAviDemux * avi, GstAviStream * stream, |
| guint entry_n, GstClockTime * timestamp, GstClockTime * ts_end, |
| guint64 * offset, guint64 * offset_end) |
| { |
| GstAviIndexEntry *entry; |
| |
| entry = &stream->index[entry_n]; |
| |
| if (stream->is_vbr) { |
| /* VBR stream next timestamp */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| if (timestamp) |
| *timestamp = |
| avi_stream_convert_frames_to_time_unchecked (stream, entry->total); |
| if (ts_end) { |
| gint size = 1; |
| if (G_LIKELY (entry_n + 1 < stream->idx_n)) |
| size = stream->index[entry_n + 1].total - entry->total; |
| *ts_end = avi_stream_convert_frames_to_time_unchecked (stream, |
| entry->total + size); |
| } |
| } else { |
| if (timestamp) |
| *timestamp = |
| avi_stream_convert_frames_to_time_unchecked (stream, entry_n); |
| if (ts_end) |
| *ts_end = avi_stream_convert_frames_to_time_unchecked (stream, |
| entry_n + 1); |
| } |
| } else if (stream->strh->type == GST_RIFF_FCC_auds) { |
| /* constant rate stream */ |
| if (timestamp) |
| *timestamp = |
| avi_stream_convert_bytes_to_time_unchecked (stream, entry->total); |
| if (ts_end) |
| *ts_end = avi_stream_convert_bytes_to_time_unchecked (stream, |
| entry->total + entry->size); |
| } |
| if (stream->strh->type == GST_RIFF_FCC_vids) { |
| /* video offsets are the frame number */ |
| if (offset) |
| *offset = entry_n; |
| if (offset_end) |
| *offset_end = entry_n + 1; |
| } else { |
| /* no offsets for audio */ |
| if (offset) |
| *offset = -1; |
| if (offset_end) |
| *offset_end = -1; |
| } |
| } |
| |
| /* collect and debug stats about the indexes for all streams. |
| * This method is also responsible for filling in the stream duration |
| * as measured by the amount of index entries. |
| * |
| * Returns TRUE if the index is not empty, else FALSE */ |
| static gboolean |
| gst_avi_demux_do_index_stats (GstAviDemux * avi) |
| { |
| guint total_idx = 0; |
| guint i; |
| #ifndef GST_DISABLE_GST_DEBUG |
| guint total_max = 0; |
| #endif |
| |
| /* get stream stats now */ |
| for (i = 0; i < avi->num_streams; i++) { |
| GstAviStream *stream; |
| |
| if (G_UNLIKELY (!(stream = &avi->stream[i]))) |
| continue; |
| if (G_UNLIKELY (!stream->strh)) |
| continue; |
| if (G_UNLIKELY (!stream->index || stream->idx_n == 0)) |
| continue; |
| |
    /* we are interested in the end_ts of the last entry, which is the total
     * duration of this stream */
| gst_avi_demux_get_buffer_info (avi, stream, stream->idx_n - 1, |
| NULL, &stream->idx_duration, NULL, NULL); |
| |
| total_idx += stream->idx_n; |
| #ifndef GST_DISABLE_GST_DEBUG |
| total_max += stream->idx_max; |
| #endif |
| GST_INFO_OBJECT (avi, "Stream %d, dur %" GST_TIME_FORMAT ", %6u entries, " |
| "%5u keyframes, entry size = %2u, total size = %10u, allocated %10u", |
| i, GST_TIME_ARGS (stream->idx_duration), stream->idx_n, |
| stream->n_keyframes, (guint) sizeof (GstAviIndexEntry), |
| (guint) (stream->idx_n * sizeof (GstAviIndexEntry)), |
| (guint) (stream->idx_max * sizeof (GstAviIndexEntry))); |
| |
| /* knowing all that we do, that also includes avg bitrate */ |
| if (!stream->taglist) { |
| stream->taglist = gst_tag_list_new_empty (); |
| } |
| if (stream->total_bytes && stream->idx_duration) |
| gst_tag_list_add (stream->taglist, GST_TAG_MERGE_REPLACE, |
| GST_TAG_BITRATE, |
| (guint) gst_util_uint64_scale (stream->total_bytes * 8, |
| GST_SECOND, stream->idx_duration), NULL); |
| } |
| total_idx *= sizeof (GstAviIndexEntry); |
| #ifndef GST_DISABLE_GST_DEBUG |
| total_max *= sizeof (GstAviIndexEntry); |
| #endif |
| GST_INFO_OBJECT (avi, "%u bytes for index vs %u ideally, %u wasted", |
| total_max, total_idx, total_max - total_idx); |
| |
| if (total_idx == 0) { |
| GST_WARNING_OBJECT (avi, "Index is empty !"); |
| return FALSE; |
| } |
| return TRUE; |
| } |
| |
| /* |
| * gst_avi_demux_parse_subindex: |
| * @avi: Avi object |
 * @stream: stream context.
 * @buf: input data to use for parsing; ownership is taken, may be NULL.
 *
 * Reads a subindex (openDML-2 spec stuff) from the provided data and adds
 * the parsed entries to @stream's index.
| * The buffer should contain a GST_RIFF_TAG_ix?? chunk. |
| * |
| * Returns: TRUE on success, FALSE otherwise. Errors are fatal, we |
| * throw an error, caller should bail out asap. |
| */ |
| static gboolean |
| gst_avi_demux_parse_subindex (GstAviDemux * avi, GstAviStream * stream, |
| GstBuffer * buf) |
| { |
| GstMapInfo map; |
| guint8 *data; |
| guint16 bpe; |
| guint32 num, i; |
| guint64 baseoff; |
| |
| if (buf == NULL) |
| return TRUE; |
| |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| data = map.data; |
| |
| /* check size */ |
| if (map.size < 24) |
| goto too_small; |
| |
| /* We don't support index-data yet */ |
| if (data[3] & 0x80) |
| goto not_implemented; |
| |
| /* check type of index. The opendml2 specs state that |
| * there should be 4 dwords per array entry. Type can be |
| * either frame or field (and we don't care). */ |
| bpe = (data[2] & 0x01) ? 12 : 8; |
| if (GST_READ_UINT16_LE (data) != bpe / 4 || |
| (data[2] & 0xfe) != 0x0 || data[3] != 0x1) { |
| GST_WARNING_OBJECT (avi, |
| "Superindex for stream %d has unexpected " |
| "size_entry %d (bytes) or flags 0x%02x/0x%02x", |
| stream->num, GST_READ_UINT16_LE (data), data[2], data[3]); |
| bpe = GST_READ_UINT16_LE (data) * 4; |
| } |
| num = GST_READ_UINT32_LE (&data[4]); |
| baseoff = GST_READ_UINT64_LE (&data[12]); |
| |
| /* If there's nothing, just return ! */ |
| if (num == 0) |
| goto empty_index; |
| |
| GST_INFO_OBJECT (avi, "Parsing subindex, nr_entries = %6d", num); |
| |
| for (i = 0; i < num; i++) { |
| GstAviIndexEntry entry; |
| |
| if (map.size < 24 + bpe * (i + 1)) |
| break; |
| |
    /* fill in offset and size. The size field carries the keyframe flag in
     * its upper bit */
| entry.offset = baseoff + GST_READ_UINT32_LE (&data[24 + bpe * i]); |
| entry.size = GST_READ_UINT32_LE (&data[24 + bpe * i + 4]); |
| /* handle flags */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| /* all audio frames are keyframes */ |
| ENTRY_SET_KEYFRAME (&entry); |
| } else { |
| /* else read flags */ |
| entry.flags = (entry.size & 0x80000000) ? 0 : GST_AVI_KEYFRAME; |
| } |
| entry.size &= ~0x80000000; |
| |
| /* and add */ |
| if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry))) |
| goto out_of_mem; |
| } |
| done: |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| too_small: |
| { |
| GST_ERROR_OBJECT (avi, |
| "Not enough data to parse subindex (%" G_GSIZE_FORMAT |
| " available, 24 needed)", map.size); |
| goto done; /* continue */ |
| } |
| not_implemented: |
| { |
| GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL), |
| ("Subindex-is-data is not implemented")); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| empty_index: |
| { |
| GST_DEBUG_OBJECT (avi, "the index is empty"); |
| goto done; /* continue */ |
| } |
| out_of_mem: |
| { |
| GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL), |
| ("Cannot allocate memory for %u*%u=%u bytes", |
| (guint) sizeof (GstAviIndexEntry), num, |
| (guint) sizeof (GstAviIndexEntry) * num)); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| } |
| |
| /* |
| * Create and push a flushing seek event upstream |
| */ |
| static gboolean |
| perform_seek_to_offset (GstAviDemux * demux, guint64 offset, guint32 seqnum) |
| { |
| GstEvent *event; |
| gboolean res = 0; |
| |
| GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset); |
| |
| event = |
| gst_event_new_seek (1.0, GST_FORMAT_BYTES, |
| GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset, |
| GST_SEEK_TYPE_NONE, -1); |
| gst_event_set_seqnum (event, seqnum); |
| res = gst_pad_push_event (demux->sinkpad, event); |
| |
| if (res) |
| demux->offset = offset; |
| return res; |
| } |
| |
| /* |
| * Read AVI index when streaming |
| */ |
| static gboolean |
| gst_avi_demux_read_subindexes_push (GstAviDemux * avi) |
| { |
| guint32 tag = 0, size; |
| GstBuffer *buf = NULL; |
| guint odml_stream; |
| |
| GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams); |
| |
| if (avi->odml_subidxs[avi->odml_subidx] != avi->offset) |
| return FALSE; |
| |
| if (!gst_avi_demux_peek_chunk (avi, &tag, &size)) |
| return TRUE; |
| |
| /* this is the ODML chunk we expect */ |
| odml_stream = avi->odml_stream; |
| |
  /* some muxers write ##ix instead of the official ix##, accept both */
  if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + odml_stream / 10,
| '0' + odml_stream % 10)) && |
| (tag != GST_MAKE_FOURCC ('0' + odml_stream / 10, |
| '0' + odml_stream % 10, 'i', 'x'))) { |
| GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")", |
| GST_FOURCC_ARGS (tag)); |
| return FALSE; |
| } |
| |
| avi->offset += 8 + GST_ROUND_UP_2 (size); |
| /* flush chunk header so we get just the 'size' payload data */ |
| gst_adapter_flush (avi->adapter, 8); |
| buf = gst_adapter_take_buffer (avi->adapter, size); |
| |
| if (!gst_avi_demux_parse_subindex (avi, &avi->stream[odml_stream], buf)) |
| return FALSE; |
| |
| /* we parsed the index, go to next subindex */ |
| avi->odml_subidx++; |
| |
| if (avi->odml_subidxs[avi->odml_subidx] == GST_BUFFER_OFFSET_NONE) { |
| /* we reached the end of the indexes for this stream, move to the next |
| * stream to handle the first index */ |
| avi->odml_stream++; |
| avi->odml_subidx = 0; |
| |
| if (avi->odml_stream < avi->num_streams) { |
| /* there are more indexes */ |
| avi->odml_subidxs = avi->stream[avi->odml_stream].indexes; |
| } else { |
| /* we're done, get stream stats now */ |
| avi->have_index = gst_avi_demux_do_index_stats (avi); |
| |
| return TRUE; |
| } |
| } |
| |
| /* seek to next index */ |
| return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx], |
| avi->segment_seqnum); |
| } |
| |
| /* |
| * Read AVI index |
| */ |
| static void |
| gst_avi_demux_read_subindexes_pull (GstAviDemux * avi) |
| { |
| guint32 tag; |
| GstBuffer *buf; |
| gint i, n; |
| |
| GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams); |
| |
| for (n = 0; n < avi->num_streams; n++) { |
| GstAviStream *stream = &avi->stream[n]; |
| |
| if (stream->indexes == NULL) |
| continue; |
| |
| for (i = 0; stream->indexes[i] != GST_BUFFER_OFFSET_NONE; i++) { |
| if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi), avi->sinkpad, |
| &stream->indexes[i], &tag, &buf) != GST_FLOW_OK) |
| continue; |
| else if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + stream->num / 10, |
| '0' + stream->num % 10)) && |
| (tag != GST_MAKE_FOURCC ('0' + stream->num / 10, |
| '0' + stream->num % 10, 'i', 'x'))) { |
| /* Some ODML files (created by god knows what muxer) have a ##ix format |
| * instead of the 'official' ix##. They are still valid though. */ |
| GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")", |
| GST_FOURCC_ARGS (tag)); |
| gst_buffer_unref (buf); |
| continue; |
| } |
| |
| if (!gst_avi_demux_parse_subindex (avi, stream, buf)) |
| continue; |
| } |
| |
| g_free (stream->indexes); |
| stream->indexes = NULL; |
| } |
| /* get stream stats now */ |
| avi->have_index = gst_avi_demux_do_index_stats (avi); |
| } |
| |
| /* |
| * gst_avi_demux_riff_parse_vprp: |
| * @element: caller element (used for debugging/error). |
| * @buf: input data to be used for parsing, stripped from header. |
| * @vprp: a pointer (returned by this function) to a filled-in vprp |
| * structure. Caller should free it. |
| * |
 * Parses a video stream's vprp. This function takes ownership of @buf.
| * |
| * Returns: TRUE if parsing succeeded, otherwise FALSE. The stream |
| * should be skipped on error, but it is not fatal. |
| */ |
| static gboolean |
| gst_avi_demux_riff_parse_vprp (GstElement * element, |
| GstBuffer * buf, gst_riff_vprp ** _vprp) |
| { |
| gst_riff_vprp *vprp; |
| gint k; |
| gsize size; |
| |
| g_return_val_if_fail (buf != NULL, FALSE); |
| g_return_val_if_fail (_vprp != NULL, FALSE); |
| |
| size = gst_buffer_get_size (buf); |
| |
| if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info)) |
| goto too_small; |
| |
| vprp = g_malloc (size); |
| gst_buffer_extract (buf, 0, vprp, size); |
| |
| #if (G_BYTE_ORDER == G_BIG_ENDIAN) |
| vprp->format_token = GUINT32_FROM_LE (vprp->format_token); |
| vprp->standard = GUINT32_FROM_LE (vprp->standard); |
| vprp->vert_rate = GUINT32_FROM_LE (vprp->vert_rate); |
| vprp->hor_t_total = GUINT32_FROM_LE (vprp->hor_t_total); |
| vprp->vert_lines = GUINT32_FROM_LE (vprp->vert_lines); |
| vprp->aspect = GUINT32_FROM_LE (vprp->aspect); |
| vprp->width = GUINT32_FROM_LE (vprp->width); |
| vprp->height = GUINT32_FROM_LE (vprp->height); |
| vprp->fields = GUINT32_FROM_LE (vprp->fields); |
| #endif |
| |
| /* size checking */ |
| /* calculate fields based on size */ |
| k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields; |
| if (vprp->fields > k) { |
| GST_WARNING_OBJECT (element, |
| "vprp header indicated %d fields, only %d available", vprp->fields, k); |
| vprp->fields = k; |
| } |
| if (vprp->fields > GST_RIFF_VPRP_VIDEO_FIELDS) { |
| GST_WARNING_OBJECT (element, |
| "vprp header indicated %d fields, at most %d supported", vprp->fields, |
| GST_RIFF_VPRP_VIDEO_FIELDS); |
| vprp->fields = GST_RIFF_VPRP_VIDEO_FIELDS; |
| } |
| #if (G_BYTE_ORDER == G_BIG_ENDIAN) |
| for (k = 0; k < vprp->fields; k++) { |
| gst_riff_vprp_video_field_desc *fd; |
| |
| fd = &vprp->field_info[k]; |
| fd->compressed_bm_height = GUINT32_FROM_LE (fd->compressed_bm_height); |
| fd->compressed_bm_width = GUINT32_FROM_LE (fd->compressed_bm_width); |
| fd->valid_bm_height = GUINT32_FROM_LE (fd->valid_bm_height); |
| fd->valid_bm_width = GUINT16_FROM_LE (fd->valid_bm_width); |
| fd->valid_bm_x_offset = GUINT16_FROM_LE (fd->valid_bm_x_offset); |
| fd->valid_bm_y_offset = GUINT32_FROM_LE (fd->valid_bm_y_offset); |
| fd->video_x_t_offset = GUINT32_FROM_LE (fd->video_x_t_offset); |
| fd->video_y_start = GUINT32_FROM_LE (fd->video_y_start); |
| } |
| #endif |
| |
| /* debug */ |
| GST_INFO_OBJECT (element, "vprp tag found in context vids:"); |
| GST_INFO_OBJECT (element, " format_token %d", vprp->format_token); |
| GST_INFO_OBJECT (element, " standard %d", vprp->standard); |
| GST_INFO_OBJECT (element, " vert_rate %d", vprp->vert_rate); |
| GST_INFO_OBJECT (element, " hor_t_total %d", vprp->hor_t_total); |
| GST_INFO_OBJECT (element, " vert_lines %d", vprp->vert_lines); |
| GST_INFO_OBJECT (element, " aspect %d:%d", vprp->aspect >> 16, |
| vprp->aspect & 0xffff); |
| GST_INFO_OBJECT (element, " width %d", vprp->width); |
| GST_INFO_OBJECT (element, " height %d", vprp->height); |
| GST_INFO_OBJECT (element, " fields %d", vprp->fields); |
| for (k = 0; k < vprp->fields; k++) { |
| gst_riff_vprp_video_field_desc *fd; |
| |
| fd = &(vprp->field_info[k]); |
| GST_INFO_OBJECT (element, " field %u description:", k); |
| GST_INFO_OBJECT (element, " compressed_bm_height %d", |
| fd->compressed_bm_height); |
| GST_INFO_OBJECT (element, " compressed_bm_width %d", |
| fd->compressed_bm_width); |
| GST_INFO_OBJECT (element, " valid_bm_height %d", |
| fd->valid_bm_height); |
| GST_INFO_OBJECT (element, " valid_bm_width %d", fd->valid_bm_width); |
| GST_INFO_OBJECT (element, " valid_bm_x_offset %d", |
| fd->valid_bm_x_offset); |
| GST_INFO_OBJECT (element, " valid_bm_y_offset %d", |
| fd->valid_bm_y_offset); |
| GST_INFO_OBJECT (element, " video_x_t_offset %d", |
| fd->video_x_t_offset); |
| GST_INFO_OBJECT (element, " video_y_start %d", fd->video_y_start); |
| } |
| |
| gst_buffer_unref (buf); |
| |
| *_vprp = vprp; |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| too_small: |
| { |
| GST_ERROR_OBJECT (element, |
| "Too small vprp (%" G_GSIZE_FORMAT " available, at least %d needed)", |
| size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info)); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| } |
| |
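| /* add pads for all streams that have index entries (or for all streams |
|  * when @force is TRUE); streams without any index entry are reset and |
|  * not exposed */ |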
| static void |
| gst_avi_demux_expose_streams (GstAviDemux * avi, gboolean force) |
| { |
| guint i; |
| |
| GST_DEBUG_OBJECT (avi, "force : %d", force); |
| |
| for (i = 0; i < avi->num_streams; i++) { |
| GstAviStream *stream = &avi->stream[i]; |
| |
| if (force || stream->idx_n != 0) { |
| GST_LOG_OBJECT (avi, "Adding pad %s", GST_PAD_NAME (stream->pad)); |
| gst_element_add_pad ((GstElement *) avi, stream->pad); |
| gst_flow_combiner_add_pad (avi->flowcombiner, stream->pad); |
| |
| #if 0 |
| if (avi->element_index) |
| gst_index_get_writer_id (avi->element_index, |
| GST_OBJECT_CAST (stream->pad), &stream->index_id); |
| #endif |
| |
| stream->exposed = TRUE; |
| if (avi->main_stream == -1) |
| avi->main_stream = i; |
| } else { |
| GST_WARNING_OBJECT (avi, "Stream #%d doesn't have any entry, removing it", |
| i); |
| gst_avi_demux_reset_stream (avi, stream); |
| } |
| } |
| } |
| |
| /* buf contains LIST chunk data, and will be padded to even size, |
| * since some buggy files do not account for the padding of chunks |
| * within a LIST in the size of the LIST */ |
| static inline void |
| gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf) |
| { |
| gsize size; |
| |
| size = gst_buffer_get_size (*buf); |
| |
| if (G_UNLIKELY (size & 1)) { |
| GstBuffer *obuf; |
| GstMapInfo map; |
| |
| GST_DEBUG_OBJECT (avi, "rounding up dubious list size %" G_GSIZE_FORMAT, |
| size); |
| obuf = gst_buffer_new_and_alloc (size + 1); |
| |
| gst_buffer_map (obuf, &map, GST_MAP_WRITE); |
| gst_buffer_extract (*buf, 0, map.data, size); |
| /* assume 0 padding, at least makes outcome deterministic */ |
| map.data[size] = 0; |
| gst_buffer_unmap (obuf, &map); |
| gst_buffer_replace (buf, obuf); |
| } |
| } |
| |
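| /* massage the caps coming from the riff helpers before they are exposed: |
|  * mark raw video (adding a default pixel-aspect-ratio and extracting a |
|  * palette when present) and replace bogus byte-stream codec_data on h264 |
|  * caps with an explicit byte-stream/au stream-format */ |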
| static GstCaps * |
| gst_avi_demux_check_caps (GstAviDemux * avi, GstAviStream * stream, |
| GstCaps * caps) |
| { |
| GstStructure *s; |
| const GValue *val; |
| GstBuffer *buf; |
| |
| caps = gst_caps_make_writable (caps); |
| |
| s = gst_caps_get_structure (caps, 0); |
| if (gst_structure_has_name (s, "video/x-raw")) { |
| stream->is_raw = TRUE; |
| stream->alignment = 32; |
| if (!gst_structure_has_field (s, "pixel-aspect-ratio")) |
| gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, |
| 1, 1, NULL); |
| if (gst_structure_has_field_typed (s, "palette_data", GST_TYPE_BUFFER)) { |
| gst_structure_get (s, "palette_data", GST_TYPE_BUFFER, |
| &stream->rgb8_palette, NULL); |
| gst_structure_remove_field (s, "palette_data"); |
| return caps; |
| } |
| } else if (!gst_structure_has_name (s, "video/x-h264")) { |
| return caps; |
| } |
| |
| GST_DEBUG_OBJECT (avi, "checking caps %" GST_PTR_FORMAT, caps); |
| |
| /* some muxers put invalid bytestream stuff in h264 extra data */ |
| val = gst_structure_get_value (s, "codec_data"); |
| if (val && (buf = gst_value_get_buffer (val))) { |
| guint8 *data; |
| gint size; |
| GstMapInfo map; |
| |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| data = map.data; |
| size = map.size; |
| if (size >= 4) { |
| guint32 h = GST_READ_UINT32_BE (data); |
| gst_buffer_unmap (buf, &map); |
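| /* a codec_data buffer starting with 0x00000001 is an Annex B start |
|  * code, not avcC configuration data */ |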
| if (h == 0x01) { |
| /* can hardly be valid AVC codec data */ |
| GST_DEBUG_OBJECT (avi, |
| "discarding invalid codec_data containing byte-stream"); |
| /* so do not pretend to downstream that it is packetized avc */ |
| gst_structure_remove_field (s, "codec_data"); |
| /* ... but rather properly parsed bytestream */ |
| gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream", |
| "alignment", G_TYPE_STRING, "au", NULL); |
| } |
| } else { |
| gst_buffer_unmap (buf, &map); |
| } |
| } |
| |
| return caps; |
| } |
| |
| /* |
| * gst_avi_demux_parse_stream: |
| * @avi: calling element (used for debugging/errors). |
| * @buf: input buffer used to parse the stream. |
| * |
| * Parses all subchunks in a strl chunk (which defines a single |
| * stream). Discards the buffer after use. This function will |
| * increment the stream counter internally. |
| * |
| * Returns: whether the stream was identified successfully. |
| *          Errors are not fatal; a FALSE return only indicates that |
| *          the stream was skipped. |
| */ |
| static gboolean |
| gst_avi_demux_parse_stream (GstAviDemux * avi, GstBuffer * buf) |
| { |
| GstAviStream *stream; |
| GstElementClass *klass; |
| GstPadTemplate *templ; |
| GstBuffer *sub = NULL; |
| guint offset = 4; |
| guint32 tag = 0; |
| gchar *codec_name = NULL, *padname = NULL; |
| const gchar *tag_name; |
| GstCaps *caps = NULL; |
| GstPad *pad; |
| GstElement *element; |
| gboolean got_strh = FALSE, got_strf = FALSE, got_vprp = FALSE; |
| gst_riff_vprp *vprp = NULL; |
| GstEvent *event; |
| gchar *stream_id; |
| GstMapInfo map; |
| gboolean sparse = FALSE; |
| |
| element = GST_ELEMENT_CAST (avi); |
| |
| GST_DEBUG_OBJECT (avi, "Parsing stream"); |
| |
| gst_avi_demux_roundup_list (avi, &buf); |
| |
| if (avi->num_streams >= GST_AVI_DEMUX_MAX_STREAMS) { |
| GST_WARNING_OBJECT (avi, |
| "maximum no of streams (%d) exceeded, ignoring stream", |
| GST_AVI_DEMUX_MAX_STREAMS); |
| gst_buffer_unref (buf); |
| /* not a fatal error, let's say */ |
| return TRUE; |
| } |
| |
| stream = &avi->stream[avi->num_streams]; |
| |
| /* initial settings */ |
| stream->idx_duration = GST_CLOCK_TIME_NONE; |
| stream->hdr_duration = GST_CLOCK_TIME_NONE; |
| stream->duration = GST_CLOCK_TIME_NONE; |
| |
| while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) { |
| /* sub can be NULL if the chunk is empty */ |
| if (sub == NULL) { |
| GST_DEBUG_OBJECT (avi, "ignoring empty chunk %" GST_FOURCC_FORMAT, |
| GST_FOURCC_ARGS (tag)); |
| continue; |
| } |
| switch (tag) { |
| case GST_RIFF_TAG_strh: |
| { |
| gst_riff_strh *strh; |
| |
| if (got_strh) { |
| GST_WARNING_OBJECT (avi, "Ignoring additional strh chunk"); |
| break; |
| } |
| if (!gst_riff_parse_strh (element, sub, &stream->strh)) { |
| /* ownership given away */ |
| sub = NULL; |
| GST_WARNING_OBJECT (avi, "Failed to parse strh chunk"); |
| goto fail; |
| } |
| sub = NULL; |
| strh = stream->strh; |
| /* sanity check; stream header frame rate matches global header |
| * frame duration */ |
| if (stream->strh->type == GST_RIFF_FCC_vids) { |
| GstClockTime s_dur; |
| GstClockTime h_dur = avi->avih->us_frame * GST_USECOND; |
| |
| s_dur = gst_util_uint64_scale (GST_SECOND, strh->scale, strh->rate); |
| GST_DEBUG_OBJECT (avi, "verifying stream framerate %d/%d, " |
| "frame duration = %d ms", strh->rate, strh->scale, |
| (gint) (s_dur / GST_MSECOND)); |
| if (h_dur > (10 * GST_MSECOND) && (s_dur > 10 * h_dur)) { |
| strh->rate = GST_SECOND / GST_USECOND; |
| strh->scale = h_dur / GST_USECOND; |
| GST_DEBUG_OBJECT (avi, "correcting stream framerate to %d/%d", |
| strh->rate, strh->scale); |
| } |
| } |
| /* determine duration as indicated by header */ |
| stream->hdr_duration = gst_util_uint64_scale ((guint64) strh->length * |
| strh->scale, GST_SECOND, (guint64) strh->rate); |
| GST_INFO ("Stream duration according to header: %" GST_TIME_FORMAT, |
| GST_TIME_ARGS (stream->hdr_duration)); |
| if (stream->hdr_duration == 0) |
| stream->hdr_duration = GST_CLOCK_TIME_NONE; |
| |
| got_strh = TRUE; |
| break; |
| } |
| case GST_RIFF_TAG_strf: |
| { |
| gboolean res = FALSE; |
| |
| if (got_strf) { |
| GST_WARNING_OBJECT (avi, "Ignoring additional strf chunk"); |
| break; |
| } |
| if (!got_strh) { |
| GST_ERROR_OBJECT (avi, "Found strf chunk before strh chunk"); |
| goto fail; |
| } |
| switch (stream->strh->type) { |
| case GST_RIFF_FCC_vids: |
| stream->is_vbr = TRUE; |
| res = gst_riff_parse_strf_vids (element, sub, |
| &stream->strf.vids, &stream->extradata); |
| sub = NULL; |
| GST_DEBUG_OBJECT (element, "marking video as VBR, res %d", res); |
| break; |
| case GST_RIFF_FCC_auds: |
| res = |
| gst_riff_parse_strf_auds (element, sub, &stream->strf.auds, |
| &stream->extradata); |
| sub = NULL; |
| if (!res) |
| break; |
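| /* heuristic: audio with no fixed sample size, a scale larger than 1 |
|  * and a blockalign other than 1 is treated as VBR, i.e. each chunk is |
|  * assumed to carry whole compressed frames */ |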
| stream->is_vbr = (stream->strh->samplesize == 0) |
| && stream->strh->scale > 1 |
| && stream->strf.auds->blockalign != 1; |
| GST_DEBUG_OBJECT (element, "marking audio as VBR:%d, res %d", |
| stream->is_vbr, res); |
| /* we need these or we have no way to come up with timestamps */ |
| if ((!stream->is_vbr && !stream->strf.auds->av_bps) || |
| (stream->is_vbr && (!stream->strh->scale || |
| !stream->strh->rate))) { |
| GST_WARNING_OBJECT (element, |
| "invalid audio header, ignoring stream"); |
| goto fail; |
| } |
| /* some more sanity checks */ |
| if (stream->is_vbr) { |
| if (stream->strf.auds->blockalign <= 4) { |
| /* that would mean (too) many frames per chunk, |
| * so not likely set as expected */ |
| GST_DEBUG_OBJECT (element, |
| "suspicious blockalign %d for VBR audio; " |
| "overriding to 1 frame per chunk", |
| stream->strf.auds->blockalign); |
| /* this should top any likely value */ |
| stream->strf.auds->blockalign = (1 << 12); |
| } |
| } |
| break; |
| case GST_RIFF_FCC_iavs: |
| stream->is_vbr = TRUE; |
| res = gst_riff_parse_strf_iavs (element, sub, |
| &stream->strf.iavs, &stream->extradata); |
| sub = NULL; |
| GST_DEBUG_OBJECT (element, "marking iavs as VBR, res %d", res); |
| break; |
| case GST_RIFF_FCC_txts: |
| /* nothing to parse here */ |
| stream->is_vbr = (stream->strh->samplesize == 0) |
| && (stream->strh->scale > 1); |
| res = TRUE; |
| break; |
| default: |
| GST_ERROR_OBJECT (avi, |
| "Don´t know how to handle stream type %" GST_FOURCC_FORMAT, |
| GST_FOURCC_ARGS (stream->strh->type)); |
| break; |
| } |
| if (sub) { |
| gst_buffer_unref (sub); |
| sub = NULL; |
| } |
| if (!res) |
| goto fail; |
| got_strf = TRUE; |
| break; |
| } |
| case GST_RIFF_TAG_vprp: |
| { |
| if (got_vprp) { |
| GST_WARNING_OBJECT (avi, "Ignoring additional vprp chunk"); |
| break; |
| } |
| if (!got_strh) { |
| GST_ERROR_OBJECT (avi, "Found vprp chunk before strh chunk"); |
| goto fail; |
| } |
| if (!got_strf) { |
| GST_ERROR_OBJECT (avi, "Found vprp chunk before strf chunk"); |
| goto fail; |
| } |
| |
| if (!gst_avi_demux_riff_parse_vprp (element, sub, &vprp)) { |
| GST_WARNING_OBJECT (avi, "Failed to parse vprp chunk"); |
| /* not considered fatal */ |
| g_free (vprp); |
| vprp = NULL; |
| } else |
| got_vprp = TRUE; |
| sub = NULL; |
| break; |
| } |
| case GST_RIFF_TAG_strd: |
| if (stream->initdata) |
| gst_buffer_unref (stream->initdata); |
| stream->initdata = sub; |
| if (sub != NULL) { |
| gst_avi_demux_parse_strd (avi, sub); |
| sub = NULL; |
| } |
| break; |
| case GST_RIFF_TAG_strn: |
| g_free (stream->name); |
| |
| gst_buffer_map (sub, &map, GST_MAP_READ); |
| |
| if (avi->globaltags == NULL) |
| avi->globaltags = gst_tag_list_new_empty (); |
| parse_tag_value (avi, avi->globaltags, GST_TAG_TITLE, |
| map.data, map.size); |
| |
| if (gst_tag_list_get_string (avi->globaltags, GST_TAG_TITLE, |
| &stream->name)) |
| GST_DEBUG_OBJECT (avi, "stream name: %s", stream->name); |
| |
| gst_buffer_unmap (sub, &map); |
| gst_buffer_unref (sub); |
| sub = NULL; |
| break; |
| case GST_RIFF_IDIT: |
| gst_avi_demux_parse_idit (avi, sub); |
| break; |
| default: |
| if (tag == GST_MAKE_FOURCC ('i', 'n', 'd', 'x') || |
| tag == GST_MAKE_FOURCC ('i', 'x', '0' + avi->num_streams / 10, |
| '0' + avi->num_streams % 10)) { |
| g_free (stream->indexes); |
| gst_avi_demux_parse_superindex (avi, sub, &stream->indexes); |
| stream->superindex = TRUE; |
| sub = NULL; |
| break; |
| } |
| GST_WARNING_OBJECT (avi, |
| "Unknown stream header tag %" GST_FOURCC_FORMAT ", ignoring", |
| GST_FOURCC_ARGS (tag)); |
| /* Only get buffer for debugging if the memdump is needed */ |
| if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) { |
| GstMapInfo map; |
| |
| gst_buffer_map (sub, &map, GST_MAP_READ); |
| GST_MEMDUMP_OBJECT (avi, "Unknown stream header tag", map.data, |
| map.size); |
| gst_buffer_unmap (sub, &map); |
| } |
| /* fall-through */ |
| case GST_RIFF_TAG_JUNQ: |
| case GST_RIFF_TAG_JUNK: |
| break; |
| } |
| if (sub != NULL) { |
| gst_buffer_unref (sub); |
| sub = NULL; |
| } |
| } |
| |
| if (!got_strh) { |
| GST_WARNING_OBJECT (avi, "Failed to find strh chunk"); |
| goto fail; |
| } |
| |
| if (!got_strf) { |
| GST_WARNING_OBJECT (avi, "Failed to find strf chunk"); |
| goto fail; |
| } |
| |
| /* get class to figure out the template */ |
| klass = GST_ELEMENT_GET_CLASS (avi); |
| |
| /* we now have all info, let's set up a pad and a caps and be done */ |
| /* create stream name + pad */ |
| switch (stream->strh->type) { |
| case GST_RIFF_FCC_vids:{ |
| guint32 fourcc; |
| |
| fourcc = (stream->strf.vids->compression) ? |
| stream->strf.vids->compression : stream->strh->fcc_handler; |
| caps = gst_riff_create_video_caps (fourcc, stream->strh, |
| stream->strf.vids, stream->extradata, stream->initdata, &codec_name); |
| |
| /* DXSB is XSUB, and it is placed inside a vids */ |
| if (!caps || (fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'B') && |
| fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'A'))) { |
| padname = g_strdup_printf ("video_%u", avi->num_v_streams); |
| templ = gst_element_class_get_pad_template (klass, "video_%u"); |
| if (!caps) { |
| caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc", |
| G_TYPE_INT, fourcc, NULL); |
| } else if (got_vprp && vprp) { |
| guint32 aspect_n, aspect_d; |
| gint n, d; |
| |
| aspect_n = vprp->aspect >> 16; |
| aspect_d = vprp->aspect & 0xffff; |
| /* calculate the pixel aspect ratio using w/h and aspect ratio */ |
| n = aspect_n * stream->strf.vids->height; |
| d = aspect_d * stream->strf.vids->width; |
| if (n && d) |
| gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION, |
| n, d, NULL); |
| } |
| caps = gst_avi_demux_check_caps (avi, stream, caps); |
| tag_name = GST_TAG_VIDEO_CODEC; |
| avi->num_v_streams++; |
| } else { |
| padname = g_strdup_printf ("subpicture_%u", avi->num_sp_streams); |
| templ = gst_element_class_get_pad_template (klass, "subpicture_%u"); |
| tag_name = NULL; |
| avi->num_sp_streams++; |
| sparse = TRUE; |
| } |
| break; |
| } |
| case GST_RIFF_FCC_auds:{ |
| /* FIXME: Do something with the channel reorder map */ |
| padname = g_strdup_printf ("audio_%u", avi->num_a_streams); |
| templ = gst_element_class_get_pad_template (klass, "audio_%u"); |
| caps = gst_riff_create_audio_caps (stream->strf.auds->format, |
| stream->strh, stream->strf.auds, stream->extradata, |
| stream->initdata, &codec_name, NULL); |
| if (!caps) { |
| caps = gst_caps_new_simple ("audio/x-avi-unknown", "codec_id", |
| G_TYPE_INT, stream->strf.auds->format, NULL); |
| } |
| tag_name = GST_TAG_AUDIO_CODEC; |
| avi->num_a_streams++; |
| break; |
| } |
| case GST_RIFF_FCC_iavs:{ |
| guint32 fourcc = stream->strh->fcc_handler; |
| |
| padname = g_strdup_printf ("video_%u", avi->num_v_streams); |
| templ = gst_element_class_get_pad_template (klass, "video_%u"); |
| caps = gst_riff_create_iavs_caps (fourcc, stream->strh, |
| stream->strf.iavs, stream->extradata, stream->initdata, &codec_name); |
| if (!caps) { |
| caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc", |
| G_TYPE_INT, fourcc, NULL); |
| } |
| tag_name = GST_TAG_VIDEO_CODEC; |
| avi->num_v_streams++; |
| break; |
| } |
| case GST_RIFF_FCC_txts:{ |
| padname = g_strdup_printf ("subtitle_%u", avi->num_t_streams); |
| templ = gst_element_class_get_pad_template (klass, "subtitle_%u"); |
| caps = gst_caps_new_empty_simple ("application/x-subtitle-avi"); |
| tag_name = NULL; |
| avi->num_t_streams++; |
| sparse = TRUE; |
| break; |
| } |
| default: |
| g_return_val_if_reached (FALSE); |
| } |
| |
| /* no caps means no stream */ |
| if (!caps) { |
| GST_ERROR_OBJECT (element, "Did not find caps for stream %s", padname); |
| goto fail; |
| } |
| |
| GST_DEBUG_OBJECT (element, "codec-name=%s", codec_name ? codec_name : "NULL"); |
| GST_DEBUG_OBJECT (element, "caps=%" GST_PTR_FORMAT, caps); |
| |
| /* set proper settings and add it */ |
| if (stream->pad) |
| gst_object_unref (stream->pad); |
| pad = stream->pad = gst_pad_new_from_template (templ, padname); |
| g_free (padname); |
| |
| gst_pad_use_fixed_caps (pad); |
| #if 0 |
| gst_pad_set_formats_function (pad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_formats)); |
| gst_pad_set_event_mask_function (pad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_get_event_mask)); |
| #endif |
| gst_pad_set_event_function (pad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_event)); |
| gst_pad_set_query_function (pad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_query)); |
| #if 0 |
| gst_pad_set_convert_function (pad, |
| GST_DEBUG_FUNCPTR (gst_avi_demux_src_convert)); |
| #endif |
| |
| stream->num = avi->num_streams; |
| |
| stream->start_entry = 0; |
| stream->step_entry = 0; |
| stream->stop_entry = 0; |
| |
| stream->current_entry = -1; |
| stream->current_total = 0; |
| |
| stream->discont = TRUE; |
| |
| stream->total_bytes = 0; |
| stream->total_blocks = 0; |
| stream->n_keyframes = 0; |
| |
| stream->idx_n = 0; |
| stream->idx_max = 0; |
| |
| gst_pad_set_element_private (pad, stream); |
| avi->num_streams++; |
| |
| gst_pad_set_active (pad, TRUE); |
| stream_id = |
| gst_pad_create_stream_id_printf (pad, GST_ELEMENT_CAST (avi), "%03u", |
| avi->num_streams); |
| |
| event = gst_pad_get_sticky_event (avi->sinkpad, GST_EVENT_STREAM_START, 0); |
| if (event) { |
| if (gst_event_parse_group_id (event, &avi->group_id)) |
| avi->have_group_id = TRUE; |
| else |
| avi->have_group_id = FALSE; |
| gst_event_unref (event); |
| } else if (!avi->have_group_id) { |
| avi->have_group_id = TRUE; |
| avi->group_id = gst_util_group_id_next (); |
| } |
| |
| event = gst_event_new_stream_start (stream_id); |
| if (avi->have_group_id) |
| gst_event_set_group_id (event, avi->group_id); |
| if (sparse) |
| gst_event_set_stream_flags (event, GST_STREAM_FLAG_SPARSE); |
| |
| gst_pad_push_event (pad, event); |
| g_free (stream_id); |
| gst_pad_set_caps (pad, caps); |
| gst_caps_unref (caps); |
| |
| /* make tags */ |
| if (codec_name && tag_name) { |
| if (!stream->taglist) |
| stream->taglist = gst_tag_list_new_empty (); |
| |
| avi->got_tags = TRUE; |
| |
| gst_tag_list_add (stream->taglist, GST_TAG_MERGE_APPEND, tag_name, |
| codec_name, NULL); |
| } |
| |
| g_free (vprp); |
| g_free (codec_name); |
| gst_buffer_unref (buf); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| fail: |
| { |
| /* unref any mem that may be in use */ |
| if (buf) |
| gst_buffer_unref (buf); |
| if (sub) |
| gst_buffer_unref (sub); |
| g_free (vprp); |
| g_free (codec_name); |
| gst_avi_demux_reset_stream (avi, stream); |
| avi->num_streams++; |
| return FALSE; |
| } |
| } |
| |
| /* |
| * gst_avi_demux_parse_odml: |
| * @avi: calling element (used for debug/error). |
| * @buf: input buffer to be used for parsing. |
| * |
| * Read an openDML-2.0 extension header. Fills in the total frame count |
| * in the avi demuxer object when reading succeeds. |
| */ |
| static void |
| gst_avi_demux_parse_odml (GstAviDemux * avi, GstBuffer * buf) |
| { |
| guint32 tag = 0; |
| guint offset = 4; |
| GstBuffer *sub = NULL; |
| |
| while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag, |
| &sub)) { |
| switch (tag) { |
| case GST_RIFF_TAG_dmlh:{ |
| gst_riff_dmlh dmlh, *_dmlh; |
| GstMapInfo map; |
| |
| /* sub == NULL is possible and means an empty buffer */ |
| if (sub == NULL) |
| goto next; |
| |
| gst_buffer_map (sub, &map, GST_MAP_READ); |
| |
| /* check size */ |
| if (map.size < sizeof (gst_riff_dmlh)) { |
| GST_ERROR_OBJECT (avi, |
| "DMLH entry is too small (%" G_GSIZE_FORMAT " bytes, %d needed)", |
| map.size, (int) sizeof (gst_riff_dmlh)); |
| gst_buffer_unmap (sub, &map); |
| goto next; |
| } |
| _dmlh = (gst_riff_dmlh *) map.data; |
| dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes); |
| gst_buffer_unmap (sub, &map); |
| |
| GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u", |
| dmlh.totalframes); |
| |
| avi->avih->tot_frames = dmlh.totalframes; |
| goto next; |
| } |
| |
| default: |
| GST_WARNING_OBJECT (avi, |
| "Unknown tag %" GST_FOURCC_FORMAT " in ODML header", |
| GST_FOURCC_ARGS (tag)); |
| /* Only get buffer for debugging if the memdump is needed */ |
| if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) { |
| GstMapInfo map; |
| |
| gst_buffer_map (sub, &map, GST_MAP_READ); |
| GST_MEMDUMP_OBJECT (avi, "Unknown ODML tag", map.data, map.size); |
| gst_buffer_unmap (sub, &map); |
| } |
| /* fall-through */ |
| case GST_RIFF_TAG_JUNQ: |
| case GST_RIFF_TAG_JUNK: |
| next: |
| /* skip and move to next chunk */ |
| if (sub) { |
| gst_buffer_unref (sub); |
| sub = NULL; |
| } |
| break; |
| } |
| } |
| if (buf) |
| gst_buffer_unref (buf); |
| } |
| |
| /* Index helper */ |
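| /* returns the number of index entries for @stream, i.e. one past the |
|  * last valid entry */ |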
| static guint |
| gst_avi_demux_index_last (GstAviDemux * avi, GstAviStream * stream) |
| { |
| return stream->idx_n; |
| } |
| |
| /* find a previous entry in the index with the given flags */ |
| static guint |
| gst_avi_demux_index_prev (GstAviDemux * avi, GstAviStream * stream, |
| guint last, gboolean keyframe) |
| { |
| GstAviIndexEntry *entry; |
| guint i; |
| |
| for (i = last; i > 0; i--) { |
| entry = &stream->index[i - 1]; |
| if (!keyframe || ENTRY_IS_KEYFRAME (entry)) { |
| return i - 1; |
| } |
| } |
| return 0; |
| } |
| |
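| /* find the next entry in the index with the given flags, or the last |
|  * entry when there is no such entry */ |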
| static guint |
| gst_avi_demux_index_next (GstAviDemux * avi, GstAviStream * stream, |
| guint last, gboolean keyframe) |
| { |
| GstAviIndexEntry *entry; |
| gint i; |
| |
| for (i = last + 1; i < stream->idx_n; i++) { |
| entry = &stream->index[i]; |
| if (!keyframe || ENTRY_IS_KEYFRAME (entry)) { |
| return i; |
| } |
| } |
| return stream->idx_n - 1; |
| } |
| |
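| /* compare callback for gst_util_array_binary_search(): matches an index |
|  * entry's accumulated total (bytes or frames) against the search target */ |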
| static guint |
| gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total) |
| { |
| if (entry->total < *total) |
| return -1; |
| else if (entry->total > *total) |
| return 1; |
| return 0; |
| } |
| |
| /* |
| * gst_avi_demux_index_for_time: |
| * @avi: Avi object |
| * @stream: the stream |
| * @time: a time position |
| * @next: whether to look for the entry at or after @time (TRUE) or at |
| *        or before it (FALSE) |
| * |
| * Finds the index entry whose time is less than or equal to (or, when |
| * @next is TRUE, greater than or equal to) the requested time. Tries to |
| * avoid a binary search when the time can be converted to an index |
| * position directly (for example for video frames with a fixed duration). |
| * |
| * Returns: the found position in the index. |
| */ |
| static guint |
| gst_avi_demux_index_for_time (GstAviDemux * avi, |
| GstAviStream * stream, guint64 time, gboolean next) |
| { |
| guint index = -1; |
| guint64 total; |
| |
| GST_LOG_OBJECT (avi, "search time:%" GST_TIME_FORMAT, GST_TIME_ARGS (time)); |
| |
| /* easy (and common) cases */ |
| if (time == 0 || stream->idx_n == 0) |
| return 0; |
| if (time >= stream->idx_duration) |
| return stream->idx_n - 1; |
| |
| /* figure out where we need to go. For that we convert the time to an |
| * index entry or we convert it to a total and then do a binary search. */ |
| if (stream->is_vbr) { |
| /* VBR stream next timestamp */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| total = avi_stream_convert_time_to_frames_unchecked (stream, time); |
| } else { |
| index = avi_stream_convert_time_to_frames_unchecked (stream, time); |
| /* this entry typically undershoots the target time, |
| * so check a bit more if next needed */ |
| if (next && index != -1) { |
| GstClockTime itime = |
| avi_stream_convert_frames_to_time_unchecked (stream, index); |
| if (itime < time && index + 1 < stream->idx_n) |
| index++; |
| } |
| } |
| } else if (stream->strh->type == GST_RIFF_FCC_auds) { |
| /* constant rate stream */ |
| total = avi_stream_convert_time_to_bytes_unchecked (stream, time); |
| } else |
| return -1; |
| |
| if (index == -1) { |
| GstAviIndexEntry *entry; |
| |
| /* no index, find index with binary search on total */ |
| GST_LOG_OBJECT (avi, "binary search for entry with total %" |
| G_GUINT64_FORMAT, total); |
| |
| entry = gst_util_array_binary_search (stream->index, |
| stream->idx_n, sizeof (GstAviIndexEntry), |
| (GCompareDataFunc) gst_avi_demux_index_entry_search, |
| next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, &total, NULL); |
| |
| if (entry == NULL) { |
| GST_LOG_OBJECT (avi, "not found, assume index 0"); |
| index = 0; |
| } else { |
| index = entry - stream->index; |
| GST_LOG_OBJECT (avi, "found at %u", index); |
| } |
| } else { |
| GST_LOG_OBJECT (avi, "converted time to index %u", index); |
| } |
| |
| return index; |
| } |
| |
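| /* map a chunk id (e.g. '01wb') to the stream it belongs to, or return |
|  * NULL when the stream number is out of range or has no parsed strh */ |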
| static inline GstAviStream * |
| gst_avi_demux_stream_for_id (GstAviDemux * avi, guint32 id) |
| { |
| guint stream_nr; |
| GstAviStream *stream; |
| |
| /* get the stream for this entry */ |
| stream_nr = CHUNKID_TO_STREAMNR (id); |
| if (G_UNLIKELY (stream_nr >= avi->num_streams)) { |
| GST_WARNING_OBJECT (avi, |
| "invalid stream nr %d (0x%08x, %" GST_FOURCC_FORMAT ")", stream_nr, id, |
| GST_FOURCC_ARGS (id)); |
| return NULL; |
| } |
| stream = &avi->stream[stream_nr]; |
| if (G_UNLIKELY (!stream->strh)) { |
| GST_WARNING_OBJECT (avi, "Unhandled stream %d, skipping", stream_nr); |
| return NULL; |
| } |
| return stream; |
| } |
| |
| /* |
| * gst_avi_demux_parse_index: |
| * @avi: calling element (used for debugging/errors). |
| * @buf: buffer containing the full index. |
| * |
| * Read index entries from the provided buffer. |
| * The buffer should contain a GST_RIFF_TAG_idx1 chunk. |
| */ |
| static gboolean |
| gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf) |
| { |
| GstMapInfo map; |
| guint i, num, n; |
| gst_riff_index_entry *index; |
| GstClockTime stamp; |
| GstAviStream *stream; |
| GstAviIndexEntry entry; |
| guint32 id; |
| |
| if (!buf) |
| return FALSE; |
| |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| |
| stamp = gst_util_get_timestamp (); |
| |
| /* see how many items in the index */ |
| num = map.size / sizeof (gst_riff_index_entry); |
| if (num == 0) |
| goto empty_list; |
| |
| GST_INFO_OBJECT (avi, "Parsing index, nr_entries = %6d", num); |
| |
| index = (gst_riff_index_entry *) map.data; |
| |
| /* figure out if the index is 0 based or relative to the MOVI start */ |
| entry.offset = GST_READ_UINT32_LE (&index[0].offset); |
| if (entry.offset < avi->offset) { |
| avi->index_offset = avi->offset + 8; |
| GST_DEBUG ("index_offset = %" G_GUINT64_FORMAT, avi->index_offset); |
| } else { |
| avi->index_offset = 0; |
| GST_DEBUG ("index is 0 based"); |
| } |
| |
| for (i = 0, n = 0; i < num; i++) { |
| id = GST_READ_UINT32_LE (&index[i].id); |
| entry.offset = GST_READ_UINT32_LE (&index[i].offset); |
| |
| /* some sanity checks */ |
| if (G_UNLIKELY (id == GST_RIFF_rec || id == 0 || |
| (entry.offset == 0 && n > 0))) |
| continue; |
| |
| /* get the stream for this entry */ |
| stream = gst_avi_demux_stream_for_id (avi, id); |
| if (G_UNLIKELY (!stream)) |
| continue; |
| |
| /* handle offset and size */ |
| entry.offset += avi->index_offset + 8; |
| entry.size = GST_READ_UINT32_LE (&index[i].size); |
| |
| /* handle flags */ |
| if (stream->strh->type == GST_RIFF_FCC_auds) { |
| /* all audio frames are keyframes */ |
| ENTRY_SET_KEYFRAME (&entry); |
| } else if (stream->strh->type == GST_RIFF_FCC_vids && |
| stream->strf.vids->compression == GST_RIFF_DXSB) { |
| /* all xsub frames are keyframes */ |
| ENTRY_SET_KEYFRAME (&entry); |
| } else { |
| guint32 flags; |
| /* else read flags */ |
| flags = GST_READ_UINT32_LE (&index[i].flags); |
| if (flags & GST_RIFF_IF_KEYFRAME) { |
| ENTRY_SET_KEYFRAME (&entry); |
| } else { |
| ENTRY_UNSET_KEYFRAME (&entry); |
| } |
| } |
| |
| /* and add */ |
| if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry))) |
| goto out_of_mem; |
| |
| n++; |
| } |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| |
| /* get stream stats now */ |
| avi->have_index = gst_avi_demux_do_index_stats (avi); |
| |
| stamp = gst_util_get_timestamp () - stamp; |
| GST_DEBUG_OBJECT (avi, "index parsing took %" GST_TIME_FORMAT, |
| GST_TIME_ARGS (stamp)); |
| |
| return TRUE; |
| |
| /* ERRORS */ |
| empty_list: |
| { |
| GST_DEBUG_OBJECT (avi, "empty index"); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| out_of_mem: |
| { |
| GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL), |
| ("Cannot allocate memory for %u*%u=%u bytes", |
| (guint) sizeof (GstAviIndexEntry), num, |
| (guint) sizeof (GstAviIndexEntry) * num)); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return FALSE; |
| } |
| } |
| |
| /* |
| * gst_avi_demux_stream_index: |
| * @avi: avi demuxer object. |
| * |
| * Seeks to the index and reads it. |
| */ |
| static void |
| gst_avi_demux_stream_index (GstAviDemux * avi) |
| { |
| GstFlowReturn res; |
| guint64 offset = avi->offset; |
| GstBuffer *buf = NULL; |
| guint32 tag; |
| guint32 size; |
| GstMapInfo map; |
| |
| GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset); |
| |
| /* get chunk information */ |
| res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf); |
| if (res != GST_FLOW_OK) |
| goto pull_failed; |
| |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| if (map.size < 8) |
| goto too_small; |
| |
| /* check tag first before blindly trying to read 'size' bytes */ |
| tag = GST_READ_UINT32_LE (map.data); |
| size = GST_READ_UINT32_LE (map.data + 4); |
| if (tag == GST_RIFF_TAG_LIST) { |
| /* this is the movi tag */ |
| GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT, |
| (8 + GST_ROUND_UP_2 (size))); |
| offset += 8 + GST_ROUND_UP_2 (size); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| |
| buf = NULL; |
| res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf); |
| if (res != GST_FLOW_OK) |
| goto pull_failed; |
| |
| gst_buffer_map (buf, &map, GST_MAP_READ); |
| if (map.size < 8) |
| goto too_small; |
| |
| tag = GST_READ_UINT32_LE (map.data); |
| size = GST_READ_UINT32_LE (map.data + 4); |
| } |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| |
| if (tag != GST_RIFF_TAG_idx1) |
| goto no_index; |
| if (!size) |
| goto zero_index; |
| |
| GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset); |
| |
| /* read chunk, advance offset */ |
| if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi), |
| avi->sinkpad, &offset, &tag, &buf) != GST_FLOW_OK) |
| return; |
| |
| GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %" |
| GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag)); |
| |
| gst_avi_demux_parse_index (avi, buf); |
| |
| #ifndef GST_DISABLE_GST_DEBUG |
| /* debug our indexes */ |
| { |
| gint i; |
| GstAviStream *stream; |
| |
| for (i = 0; i < avi->num_streams; i++) { |
| stream = &avi->stream[i]; |
| GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes", |
| i, stream->idx_n, stream->total_bytes); |
| } |
| } |
| #endif |
| return; |
| |
| /* ERRORS */ |
| pull_failed: |
| { |
| GST_DEBUG_OBJECT (avi, |
| "pull range failed: pos=%" G_GUINT64_FORMAT " size=8", offset); |
| return; |
| } |
| too_small: |
| { |
| GST_DEBUG_OBJECT (avi, "Buffer is too small"); |
| gst_buffer_unmap (buf, &map); |
| gst_buffer_unref (buf); |
| return; |
| } |
| no_index: |
| { |
| GST_WARNING_OBJECT (avi, |
| "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT, |
| GST_FOURCC_ARGS (tag)); |
| return; |
| } |
| zero_index: |
| { |
| GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk"); |
| return; |
| } |
| } |
| |
| /* |
| * gst_avi_demux_stream_index_push: |
| * @avi: avi demuxer object. |
| * |
| * Read the index in push mode, taking the data from the adapter. |
| */ |
| static void |
| gst_avi_demux_stream_index_push (GstAviDemux * avi) |
| { |
| guint64 offset = avi->idx1_offset; |
| GstBuffer *buf; |
| guint32 tag; |
| guint32 size; |
| |
| GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset); |
| |
| /* get chunk information */ |
| if (!gst_avi_demux_peek_chunk (avi, &tag, &size)) |
| return; |
| |
| /* check tag first before blindly trying to read 'size' bytes */ |
| if (tag == GST_RIFF_TAG_LIST) { |
| /* this is the movi tag */ |
| GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT, |
| (8 + GST_ROUND_UP_2 (size))); |
| avi->idx1_offset = offset + 8 + GST_ROUND_UP_2 (size); |
| /* issue seek to allow chain function to handle it and return! */ |
| perform_seek_to_offset (avi, avi->idx1_offset, avi->segment_seqnum); |
| return; |
| } |
| |
| if (tag != GST_RIFF_TAG_idx1) |
| goto no_index; |
| |
| GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset); |
| |
| /* flush chunk header */ |
| gst_adapter_flush (avi->adapter, 8); |
| /* read chunk payload */ |
| buf = gst_adapter_take_buffer (avi->adapter, size); |
| if (!buf) |
| goto pull_failed; |
| /* advance offset */ |
| offset += 8 + GST_ROUND_UP_2 (size); |
| |
| GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %" |
| GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag)); |
| |
| avi->offset = avi->first_movi_offset; |
| gst_avi_demux_parse_index (avi, buf); |
| |
| #ifndef GST_DISABLE_GST_DEBUG |
| /* debug our indexes */ |
| { |
| gint i; |
| GstAviStream *stream; |
| |
| for (i = 0; i < avi->num_streams; i++) { |
| stream = &avi->stream[i]; |
| GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes", |
| i, stream->idx_n, stream->total_bytes); |
| } |
| } |
| #endif |
| return; |
| |
| /* ERRORS */ |
| pull_failed: |
| { |
| GST_DEBUG_OBJECT (avi, |
| "taking data from adapter failed: pos=%" G_GUINT64_FORMAT " size=%u", |
| offset, size); |
| return; |
| } |
| no_index: |
| { |
| GST_WARNING_OBJECT (avi, |
| |