blob: ceef7bfdd6020e47758f70f483ab3c6bbf382ac3 [file] [log] [blame]
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2003> David A. Schleef <ds@schleef.org>
* Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
* Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
* Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
* Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
* Copyright (C) <2013> Intel Corporation
* Copyright (C) <2014> Centricular Ltd
* Copyright (C) <2015> YouView TV Ltd.
* Copyright (C) <2016> British Broadcasting Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-qtdemux
*
* Demuxes a .mov file into raw or compressed audio and/or video streams.
*
* This element supports both push and pull-based scheduling, depending on the
* capabilities of the upstream elements.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch-1.0 filesrc location=test.mov ! qtdemux name=demux demux.audio_0 ! queue ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_0 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
* ]| Play (parse and decode) a .mov file and try to output it to
* an automatically detected soundcard and videosink. If the MOV file contains
* compressed audio or video data, this will only work if you have the
* right decoder elements/plugins installed.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gst/gst-i18n-plugin.h"
#include <glib/gprintf.h>
#include <gst/tag/tag.h>
#include <gst/audio/audio.h>
#include <gst/video/video.h>
#include <gst/riff/riff.h>
#include <gst/pbutils/pbutils.h>
#include "qtatomparser.h"
#include "qtdemux_types.h"
#include "qtdemux_dump.h"
#include "fourcc.h"
#include "descriptors.h"
#include "qtdemux_lang.h"
#include "qtdemux.h"
#include "qtpalette.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <gst/math-compat.h>
#ifdef HAVE_ZLIB
# include <zlib.h>
#endif
/* max. size considered 'sane' for non-mdat atoms */
#define QTDEMUX_MAX_ATOM_SIZE (25*1024*1024)
/* if the sample index is larger than this, something is likely wrong */
#define QTDEMUX_MAX_SAMPLE_INDEX_SIZE (200*1024*1024)
/* For converting qt creation times to unix epoch times */
#define QTDEMUX_SECONDS_PER_DAY (60 * 60 * 24)
#define QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970 17
#define QTDEMUX_SECONDS_FROM_1904_TO_1970 (((1970 - 1904) * (guint64) 365 + \
QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970) * QTDEMUX_SECONDS_PER_DAY)
#define QTDEMUX_TREE_NODE_FOURCC(n) (QT_FOURCC(((guint8 *) (n)->data) + 4))
#define STREAM_IS_EOS(s) (s->time_position == GST_CLOCK_TIME_NONE)
#define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
GST_DEBUG_CATEGORY (qtdemux_debug);
#define GST_CAT_DEFAULT qtdemux_debug
typedef struct _QtDemuxSegment QtDemuxSegment;
typedef struct _QtDemuxSample QtDemuxSample;
typedef struct _QtDemuxCencSampleSetInfo QtDemuxCencSampleSetInfo;
/* One parsed sample-table entry. Timing fields are kept in the track's
 * timescale units until converted with the QTSTREAMTIME_TO_GSTTIME macros. */
struct _QtDemuxSample
{
  guint32 size;                 /* sample size in bytes */
  gint32 pts_offset;            /* Add this value to timestamp to get the pts */
  guint64 offset;               /* byte offset of the sample data in the file */
  guint64 timestamp;            /* DTS In mov time */
  guint32 duration;             /* In mov time */
  gboolean keyframe;            /* TRUE when this packet is a keyframe */
};
/* Macros for converting to/from timescale */
#define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
#define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
#define QTTIME_TO_GSTTIME(qtdemux, value) (gst_util_uint64_scale((value), GST_SECOND, (qtdemux)->timescale))
#define GSTTIME_TO_QTTIME(qtdemux, value) (gst_util_uint64_scale((value), (qtdemux)->timescale, GST_SECOND))
/* timestamp is the DTS */
#define QTSAMPLE_DTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp))
/* timestamp + offset + cslg_shift is the outgoing PTS */
#define QTSAMPLE_PTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (stream)->cslg_shift + (sample)->pts_offset))
/* timestamp + offset is the PTS used for internal seek calculations */
#define QTSAMPLE_PTS_NO_CSLG(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (sample)->pts_offset))
/* timestamp + duration - dts is the duration */
#define QTSAMPLE_DUR_DTS(stream, sample, dts) (QTSTREAMTIME_TO_GSTTIME ((stream), (sample)->timestamp + (sample)->duration) - (dts))
#define QTSAMPLE_KEYFRAME(stream,sample) ((stream)->all_keyframe || (sample)->keyframe)
/*
* Quicktime has tracks and segments. A track is a continuous piece of
* multimedia content. The track is not always played from start to finish but
* instead, pieces of the track are 'cut out' and played in sequence. This is
* what the segments do.
*
* Inside the track we have keyframes (K) and delta frames. The track has its
* own timing, which starts from 0 and extends to end. The position in the track
* is called the media_time.
*
* The segments now describe the pieces that should be played from this track
* and are basically tuples of media_time/duration/rate entries. We can have
* multiple segments and they are all played after one another. An example:
*
* segment 1: media_time: 1 second, duration: 1 second, rate 1
* segment 2: media_time: 3 second, duration: 2 second, rate 2
*
* To correctly play back this track, one must play: 1 second of media starting
* from media_time 1 followed by 2 seconds of media starting from media_time 3
* at a rate of 2.
*
* Each of the segments will be played at a specific time, the first segment at
* time 0, the second one after the duration of the first one, etc.. Note that
* the time in resulting playback is not identical to the media_time of the
* track anymore.
*
* Visually, assuming the track has 4 second of media_time:
*
* (a) (b) (c) (d)
* .-----------------------------------------------------------.
* track: | K.....K.........K........K.......K.......K...........K... |
* '-----------------------------------------------------------'
* 0 1 2 3 4
* .------------^ ^ .----------^ ^
* / .-------------' / .------------------'
* / / .-----' /
* .--------------. .--------------.
* | segment 1 | | segment 2 |
* '--------------' '--------------'
*
* The challenge here is to cut out the right pieces of the track for each of
* the playback segments. This fortunately can easily be done with the SEGMENT
* events of GStreamer.
*
* For playback of segment 1, we need to provide the decoder with the keyframe
* (a), in the above figure, but we must instruct it only to output the decoded
* data between second 1 and 2. We do this with a SEGMENT event for 1 to 2, time
* position set to the time of the segment: 0.
*
* We then proceed to push data from keyframe (a) to frame (b). The decoder
* decodes but clips all before media_time 1.
*
* After finishing a segment, we push out a new SEGMENT event with the clipping
* boundaries of the new data.
*
* This is a good usecase for the GStreamer accumulated SEGMENT events.
*/
/* Describes one playback segment of a track: which slice of track media
 * time is played, where it lands on the global timeline and at what rate
 * (see the long explanation above). */
struct _QtDemuxSegment
{
  /* global time and duration, all gst time */
  GstClockTime time;
  GstClockTime stop_time;
  GstClockTime duration;
  /* media time of trak, all gst time */
  GstClockTime media_start;
  GstClockTime media_stop;
  gdouble rate;
  /* Media start time in trak timescale units */
  guint32 trak_media_start;
};
#define QTSEGMENT_IS_EMPTY(s) ((s)->media_start == GST_CLOCK_TIME_NONE)
/* Used with fragmented MP4 files (mfra atom): one random-access point,
 * pairing a timestamp with the file offset of its moof fragment. */
typedef struct
{
  GstClockTime ts;
  guint64 moof_offset;
} QtDemuxRandomAccessEntry;
/* Parsed form of one stsd (sample description) entry. A stream may carry
 * several of these; the active one is selected through
 * cur_stsd_entry_index (see the CUR_STREAM macro below). */
typedef struct _QtDemuxStreamStsdEntry
{
  GstCaps *caps;
  guint32 fourcc;
  gboolean sparse;

  /* video info */
  gint width;
  gint height;
  gint par_w;
  gint par_h;
  /* Numerator/denominator framerate */
  gint fps_n;
  gint fps_d;
  GstVideoColorimetry colorimetry;
  guint16 bits_per_sample;
  guint16 color_table_id;
  GstMemory *rgb8_palette;
  guint interlace_mode;
  guint field_order;

  /* audio info */
  gdouble rate;
  gint n_channels;
  guint samples_per_packet;
  guint samples_per_frame;
  guint bytes_per_packet;
  guint bytes_per_sample;
  guint bytes_per_frame;
  guint compression;

  /* if we use chunks or samples */
  gboolean sampled;
  guint padding;
} QtDemuxStreamStsdEntry;
#define CUR_STREAM(s) (&((s)->stsd_entries[(s)->cur_stsd_entry_index]))
/* Per-track state: source pad, sample and segment tables, byte readers for
 * lazily-parsed stbl sub-atoms, fragment (moof/trex) defaults and content
 * protection info for one trak. */
struct _QtDemuxStream
{
  GstPad *pad;

  QtDemuxStreamStsdEntry *stsd_entries;
  guint stsd_entries_length;
  guint cur_stsd_entry_index;

  /* stream type */
  guint32 subtype;

  gboolean new_caps;            /* If TRUE, caps need to be generated (by
                                 * calling _configure_stream()) This happens
                                 * for MSS and fragmented streams */

  gboolean new_stream;          /* signals that a stream_start is required */
  gboolean on_keyframe;         /* if this stream last pushed buffer was a
                                 * keyframe. This is important to identify
                                 * where to stop pushing buffers after a
                                 * segment stop time */

  /* if the stream has a redirect URI in its headers, we store it here */
  gchar *redirect_uri;

  /* track id */
  guint track_id;

  /* duration/scale */
  guint64 duration;             /* in timescale units */
  guint32 timescale;

  /* language */
  gchar lang_id[4];             /* ISO 639-2T language code */

  /* our samples */
  guint32 n_samples;
  QtDemuxSample *samples;
  gboolean all_keyframe;        /* TRUE when all samples are keyframes (no stss) */
  guint32 first_duration;       /* duration in timescale of first sample, used for figuring out
                                   the framerate */
  guint32 n_samples_moof;       /* sample count in a moof */
  guint64 duration_moof;        /* duration in timescale of a moof, used for figuring out
                                 * the framerate of fragmented format stream */
  guint64 duration_last_moof;   /* duration in timescale of the previous moof */

  guint32 offset_in_sample;     /* Offset in the current sample, used for
                                 * streams which have got exceedingly big
                                 * sample size (such as 24s of raw audio).
                                 * Only used when max_buffer_size is non-NULL */
  guint32 max_buffer_size;      /* Maximum allowed size for output buffers.
                                 * Currently only set for raw audio streams*/

  /* video info */
  /* aspect ratio */
  gint display_width;
  gint display_height;

  /* allocation */
  gboolean use_allocator;
  GstAllocator *allocator;
  GstAllocationParams params;
  gsize alignment;

  /* when a discontinuity is pending */
  gboolean discont;

  /* list of buffers to push first */
  GSList *buffers;

  /* if we need to clip this buffer. This is only needed for uncompressed
   * data */
  gboolean need_clip;

  /* buffer needs some custom processing, e.g. subtitles */
  gboolean need_process;

  /* current position */
  guint32 segment_index;
  guint32 sample_index;
  GstClockTime time_position;   /* in gst time */
  guint64 accumulated_base;

  /* the Gst segment we are processing out, used for clipping */
  GstSegment segment;

  /* quicktime segments */
  guint32 n_segments;
  QtDemuxSegment *segments;
  gboolean dummy_segment;
  guint32 from_sample;
  guint32 to_sample;

  gboolean sent_eos;
  GstTagList *stream_tags;
  gboolean send_global_tags;

  GstEvent *pending_event;

  /* byte readers positioned inside the raw stbl sub-atoms, used for lazy
   * (incremental) sample table parsing */
  GstByteReader stco;
  GstByteReader stsz;
  GstByteReader stsc;
  GstByteReader stts;
  GstByteReader stss;
  GstByteReader stps;
  GstByteReader ctts;

  gboolean chunks_are_samples;  /* TRUE means treat chunks as samples */
  gint64 stbl_index;            /* highest sample index parsed so far, -1 if none */
  /* stco */
  guint co_size;
  GstByteReader co_chunk;
  guint32 first_chunk;
  guint32 current_chunk;
  guint32 last_chunk;
  guint32 samples_per_chunk;
  guint32 stsd_sample_description_id;
  guint32 stco_sample_index;
  /* stsz */
  guint32 sample_size;          /* 0 means variable sizes are stored in stsz */
  /* stsc */
  guint32 stsc_index;
  guint32 n_samples_per_chunk;
  guint32 stsc_chunk_index;
  guint32 stsc_sample_index;
  guint64 chunk_offset;
  /* stts */
  guint32 stts_index;
  guint32 stts_samples;
  guint32 n_sample_times;
  guint32 stts_sample_index;
  guint64 stts_time;
  guint32 stts_duration;
  /* stss */
  gboolean stss_present;
  guint32 n_sample_syncs;
  guint32 stss_index;
  /* stps */
  gboolean stps_present;
  guint32 n_sample_partial_syncs;
  guint32 stps_index;
  QtDemuxRandomAccessEntry *ra_entries;
  guint n_ra_entries;

  const QtDemuxRandomAccessEntry *pending_seek;

  /* ctts */
  gboolean ctts_present;
  guint32 n_composition_times;
  guint32 ctts_index;
  guint32 ctts_sample_index;
  guint32 ctts_count;
  gint32 ctts_soffset;

  /* cslg */
  guint32 cslg_shift;

  /* fragmented */
  gboolean parsed_trex;
  guint32 def_sample_description_index; /* index is 1-based */
  guint32 def_sample_duration;
  guint32 def_sample_size;
  guint32 def_sample_flags;

  gboolean disabled;

  /* stereoscopic video streams */
  GstVideoMultiviewMode multiview_mode;
  GstVideoMultiviewFlags multiview_flags;

  /* protected streams */
  gboolean protected;
  guint32 protection_scheme_type;
  guint32 protection_scheme_version;
  gpointer protection_scheme_info;      /* specific to the protection scheme */
  GQueue protection_scheme_event_queue;
};
/* Contains properties and cryptographic info for a set of samples from a
 * track protected using Common Encryption (cenc) */
struct _QtDemuxCencSampleSetInfo
{
  /* default crypto properties applying to the whole sample set */
  GstStructure *default_properties;

  /* @crypto_info holds one GstStructure per sample */
  GPtrArray *crypto_info;
};
/* Return a human-readable name for @state, for use in debug output. */
static const gchar *
qt_demux_state_string (enum QtDemuxState state)
{
  const gchar *name;

  switch (state) {
    case QTDEMUX_STATE_INITIAL:
      name = "<INITIAL>";
      break;
    case QTDEMUX_STATE_HEADER:
      name = "<HEADER>";
      break;
    case QTDEMUX_STATE_MOVIE:
      name = "<MOVIE>";
      break;
    case QTDEMUX_STATE_BUFFER_MDAT:
      name = "<BUFFER_MDAT>";
      break;
    default:
      name = "<UNKNOWN>";
      break;
  }

  return name;
}
static GNode *qtdemux_tree_get_child_by_type (GNode * node, guint32 fourcc);
static GNode *qtdemux_tree_get_child_by_type_full (GNode * node,
guint32 fourcc, GstByteReader * parser);
static GNode *qtdemux_tree_get_sibling_by_type (GNode * node, guint32 fourcc);
static GNode *qtdemux_tree_get_sibling_by_type_full (GNode * node,
guint32 fourcc, GstByteReader * parser);
static GstFlowReturn qtdemux_add_fragmented_samples (GstQTDemux * qtdemux);
static GstStaticPadTemplate gst_qtdemux_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/quicktime; video/mj2; audio/x-m4a; "
"application/x-3gp")
);
static GstStaticPadTemplate gst_qtdemux_videosrc_template =
GST_STATIC_PAD_TEMPLATE ("video_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate gst_qtdemux_audiosrc_template =
GST_STATIC_PAD_TEMPLATE ("audio_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate gst_qtdemux_subsrc_template =
GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
#define gst_qtdemux_parent_class parent_class
G_DEFINE_TYPE (GstQTDemux, gst_qtdemux, GST_TYPE_ELEMENT);
static void gst_qtdemux_dispose (GObject * object);
static guint32
gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
GstClockTime media_time);
static guint32
gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
QtDemuxStream * str, gint64 media_offset);
#if 0
static void gst_qtdemux_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_qtdemux_get_index (GstElement * element);
#endif
static GstStateChangeReturn gst_qtdemux_change_state (GstElement * element,
GstStateChange transition);
static gboolean qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent);
static gboolean qtdemux_sink_activate_mode (GstPad * sinkpad,
GstObject * parent, GstPadMode mode, gboolean active);
static void gst_qtdemux_loop (GstPad * pad);
static GstFlowReturn gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent,
GstBuffer * inbuf);
static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event);
static gboolean gst_qtdemux_setcaps (GstQTDemux * qtdemux, GstCaps * caps);
static gboolean gst_qtdemux_configure_stream (GstQTDemux * qtdemux,
QtDemuxStream * stream);
static void gst_qtdemux_stream_check_and_change_stsd_index (GstQTDemux * demux,
QtDemuxStream * stream);
static GstFlowReturn gst_qtdemux_process_adapter (GstQTDemux * demux,
gboolean force);
static gboolean qtdemux_parse_moov (GstQTDemux * qtdemux,
const guint8 * buffer, guint length);
static gboolean qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node,
const guint8 * buffer, guint length);
static gboolean qtdemux_parse_tree (GstQTDemux * qtdemux);
static void qtdemux_parse_udta (GstQTDemux * qtdemux, GstTagList * taglist,
GNode * udta);
static void gst_qtdemux_handle_esds (GstQTDemux * qtdemux,
QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, GNode * esds,
GstTagList * list);
static GstCaps *qtdemux_video_caps (GstQTDemux * qtdemux,
QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
const guint8 * stsd_entry_data, gchar ** codec_name);
static GstCaps *qtdemux_audio_caps (GstQTDemux * qtdemux,
QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
const guint8 * data, int len, gchar ** codec_name);
static GstCaps *qtdemux_sub_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
QtDemuxStreamStsdEntry * entry, guint32 fourcc, const guint8 * data,
gchar ** codec_name);
static GstCaps *qtdemux_generic_caps (GstQTDemux * qtdemux,
QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
const guint8 * stsd_entry_data, gchar ** codec_name);
static gboolean qtdemux_parse_samples (GstQTDemux * qtdemux,
QtDemuxStream * stream, guint32 n);
static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
static void gst_qtdemux_stream_free (GstQTDemux * qtdemux,
QtDemuxStream * stream);
static void gst_qtdemux_stream_clear (GstQTDemux * qtdemux,
QtDemuxStream * stream);
static void gst_qtdemux_remove_stream (GstQTDemux * qtdemux, int index);
static GstFlowReturn qtdemux_prepare_streams (GstQTDemux * qtdemux);
static void qtdemux_do_allocation (GstQTDemux * qtdemux,
QtDemuxStream * stream);
static gboolean gst_qtdemux_activate_segment (GstQTDemux * qtdemux,
QtDemuxStream * stream, guint32 seg_idx, GstClockTime offset);
static gboolean gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux,
QtDemuxStream * stream, gint seg_idx, GstClockTime offset,
GstClockTime * _start, GstClockTime * _stop);
static void gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
QtDemuxStream * stream, gint segment_index, GstClockTime pos);
static gboolean qtdemux_pull_mfro_mfra (GstQTDemux * qtdemux);
static void check_update_duration (GstQTDemux * qtdemux, GstClockTime duration);
static gchar *qtdemux_uuid_bytes_to_string (gconstpointer uuid_bytes);
static GstStructure *qtdemux_get_cenc_sample_properties (GstQTDemux * qtdemux,
QtDemuxStream * stream, guint sample_index);
static void gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
const gchar * id);
static void qtdemux_gst_structure_free (GstStructure * gststructure);
/* Class init: install GObject/GstElement vfuncs, register the pad
 * templates and set the element metadata for the QuickTime demuxer. */
static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->dispose = gst_qtdemux_dispose;

  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qtdemux_change_state);
#if 0
  gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_qtdemux_set_index);
  gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
#endif

  /* make sure the musicbrainz tag names are known to the tag system */
  gst_tag_register_musicbrainz_tags ();

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_qtdemux_sink_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_qtdemux_videosrc_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_qtdemux_audiosrc_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_qtdemux_subsrc_template);
  gst_element_class_set_static_metadata (gstelement_class, "QuickTime demuxer",
      "Codec/Demuxer",
      "Demultiplex a QuickTime file into audio and video streams",
      "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");

  GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");

  /* initialize the riff library (we include gst/riff/riff.h above) */
  gst_riff_init ();
}
/* Instance init: create and configure the sink pad, then reset all
 * demuxer state fields to their defaults. */
static void
gst_qtdemux_init (GstQTDemux * qtdemux)
{
  /* sink pad handles both pull (activate/activatemode) and push
   * (chain/event) scheduling */
  qtdemux->sinkpad =
      gst_pad_new_from_static_template (&gst_qtdemux_sink_template, "sink");
  gst_pad_set_activate_function (qtdemux->sinkpad, qtdemux_sink_activate);
  gst_pad_set_activatemode_function (qtdemux->sinkpad,
      qtdemux_sink_activate_mode);
  gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
  gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
  gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);

  qtdemux->state = QTDEMUX_STATE_INITIAL;
  qtdemux->pullbased = FALSE;
  qtdemux->posted_redirect = FALSE;
  /* 16 bytes: enough for an atom header including an extended size field */
  qtdemux->neededbytes = 16;
  qtdemux->todrop = 0;
  qtdemux->adapter = gst_adapter_new ();
  qtdemux->offset = 0;
  /* -1 used as 'not seen yet' marker for offsets below */
  qtdemux->first_mdat = -1;
  qtdemux->got_moov = FALSE;
  qtdemux->mdatoffset = -1;
  qtdemux->mdatbuffer = NULL;
  qtdemux->restoredata_buffer = NULL;
  qtdemux->restoredata_offset = -1;
  qtdemux->fragment_start = -1;
  qtdemux->fragment_start_offset = -1;
  qtdemux->media_caps = NULL;
  qtdemux->exposed = FALSE;
  qtdemux->mss_mode = FALSE;
  qtdemux->pending_newsegment = NULL;
  qtdemux->upstream_format_is_time = FALSE;
  qtdemux->have_group_id = FALSE;
  qtdemux->group_id = G_MAXUINT;
  qtdemux->cenc_aux_info_offset = 0;
  qtdemux->cenc_aux_info_sizes = NULL;
  qtdemux->cenc_aux_sample_count = 0;
  qtdemux->protection_system_ids = NULL;
  g_queue_init (&qtdemux->protection_event_queue);
  gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
  /* global (stream-scope) tag list, pushed to all pads */
  qtdemux->tag_list = gst_tag_list_new_empty ();
  gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
  qtdemux->flowcombiner = gst_flow_combiner_new ();

  GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
}
/* GObject dispose: release the adapter, tag list, flow combiner, any
 * queued protection events and the cenc auxiliary size table, then chain
 * up to the parent class. */
static void
gst_qtdemux_dispose (GObject * object)
{
  GstQTDemux *qtdemux = GST_QTDEMUX (object);

  /* g_clear_object/g_clear_pointer reset the pointers to NULL, which keeps
   * these releases safe should dispose ever run a second time */
  g_clear_object (&qtdemux->adapter);
  gst_tag_list_unref (qtdemux->tag_list);
  gst_flow_combiner_free (qtdemux->flowcombiner);
  g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
      NULL);
  g_queue_clear (&qtdemux->protection_event_queue);
  g_clear_pointer (&qtdemux->cenc_aux_info_sizes, g_free);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* Post an element error telling the application the file contains no
 * streams we can play; the debug detail records whether a redirect
 * message was posted earlier. */
static void
gst_qtdemux_post_no_playable_stream_error (GstQTDemux * qtdemux)
{
  const gchar *details = qtdemux->posted_redirect
      ? "no known streams found, a redirect message has been posted"
      : "no known streams found";

  GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
      (_("This file contains no playable streams.")), ("%s", details));
}
/* Wrap @mem of @size bytes in a new GstBuffer. When @free_func is NULL the
 * memory is marked read-only and stays owned by the caller; otherwise the
 * buffer takes ownership and releases it with @free_func. */
static GstBuffer *
_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
{
  GstMemoryFlags flags = free_func ? 0 : GST_MEMORY_FLAG_READONLY;

  return gst_buffer_new_wrapped_full (flags, mem, size, 0, size, mem,
      free_func);
}
/* Pull @size bytes at @offset from upstream into @buf.
 *
 * A @size of 0 means "unknown": the 32-bit atom size field at @offset is
 * read first (via a recursive call) and used as the amount to pull.
 * Sizes above QTDEMUX_MAX_ATOM_SIZE are treated as bogus: once the moov
 * has been parsed this returns GST_FLOW_EOS (we already have the
 * interesting bits), otherwise an element error is posted and
 * GST_FLOW_ERROR returned. Short reads also map to GST_FLOW_EOS, since a
 * partial atom is useless to us.
 */
static GstFlowReturn
gst_qtdemux_pull_atom (GstQTDemux * qtdemux, guint64 offset, guint64 size,
    GstBuffer ** buf)
{
  GstFlowReturn flow;
  GstMapInfo map;
  gsize bsize;

  if (G_UNLIKELY (size == 0)) {
    GstFlowReturn ret;
    GstBuffer *tmp = NULL;

    /* read just the 32-bit size field of the atom at @offset */
    ret = gst_qtdemux_pull_atom (qtdemux, offset, sizeof (guint32), &tmp);
    if (ret != GST_FLOW_OK)
      return ret;

    gst_buffer_map (tmp, &map, GST_MAP_READ);
    size = QT_UINT32 (map.data);
    GST_DEBUG_OBJECT (qtdemux, "size 0x%08" G_GINT64_MODIFIER "x", size);

    gst_buffer_unmap (tmp, &map);
    gst_buffer_unref (tmp);
  }

  /* Sanity check: catch bogus sizes (fuzzed/broken files) */
  if (G_UNLIKELY (size > QTDEMUX_MAX_ATOM_SIZE)) {
    if (qtdemux->state != QTDEMUX_STATE_MOVIE && qtdemux->got_moov) {
      /* we're pulling header but already got most interesting bits,
       * so never mind the rest (e.g. tags) (that much) */
      GST_WARNING_OBJECT (qtdemux, "atom has bogus size %" G_GUINT64_FORMAT,
          size);
      return GST_FLOW_EOS;
    } else {
      GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
          (_("This file is invalid and cannot be played.")),
          ("atom has bogus size %" G_GUINT64_FORMAT, size));
      return GST_FLOW_ERROR;
    }
  }

  flow = gst_pad_pull_range (qtdemux->sinkpad, offset, size, buf);

  if (G_UNLIKELY (flow != GST_FLOW_OK))
    return flow;

  bsize = gst_buffer_get_size (*buf);
  /* Catch short reads - we don't want any partial atoms */
  if (G_UNLIKELY (bsize < size)) {
    GST_WARNING_OBJECT (qtdemux,
        "short read: %" G_GSIZE_FORMAT " < %" G_GUINT64_FORMAT, bsize, size);
    gst_buffer_unref (*buf);
    *buf = NULL;
    return GST_FLOW_EOS;
  }

  return flow;
}
#if 1
/* Convert @src_value on @pad between TIME and BYTES using the stream's
 * sample table (linear index searches parse further samples as needed).
 * Only implemented for video streams.
 *
 * Returns TRUE and sets @dest_value on success, FALSE otherwise.
 */
static gboolean
gst_qtdemux_src_convert (GstQTDemux * qtdemux, GstPad * pad,
    GstFormat src_format, gint64 src_value, GstFormat dest_format,
    gint64 * dest_value)
{
  gboolean res = TRUE;
  QtDemuxStream *stream = gst_pad_get_element_private (pad);
  gint32 index;

  /* the pad may not have a stream attached to it (any more); bail out
   * instead of dereferencing NULL below */
  if (stream == NULL || stream->subtype != FOURCC_vide) {
    res = FALSE;
    goto done;
  }

  switch (src_format) {
    case GST_FORMAT_TIME:
      switch (dest_format) {
        case GST_FORMAT_BYTES:{
          index = gst_qtdemux_find_index_linear (qtdemux, stream, src_value);
          if (-1 == index) {
            res = FALSE;
            goto done;
          }
          *dest_value = stream->samples[index].offset;
          GST_DEBUG_OBJECT (qtdemux, "Format Conversion Time->Offset :%"
              GST_TIME_FORMAT "->%" G_GUINT64_FORMAT,
              GST_TIME_ARGS (src_value), *dest_value);
          break;
        }
        default:
          res = FALSE;
          break;
      }
      break;
    case GST_FORMAT_BYTES:
      switch (dest_format) {
        case GST_FORMAT_TIME:{
          index =
              gst_qtdemux_find_index_for_given_media_offset_linear (qtdemux,
              stream, src_value);
          if (-1 == index) {
            res = FALSE;
            goto done;
          }
          *dest_value =
              QTSTREAMTIME_TO_GSTTIME (stream,
              stream->samples[index].timestamp);
          GST_DEBUG_OBJECT (qtdemux,
              "Format Conversion Offset->Time :%" G_GUINT64_FORMAT "->%"
              GST_TIME_FORMAT, src_value, GST_TIME_ARGS (*dest_value));
          break;
        }
        default:
          res = FALSE;
          break;
      }
      break;
    default:
      res = FALSE;
      break;
  }

done:
  return res;
}
#endif
/* Fetch the overall movie duration converted to GstClockTime.
 *
 * Returns TRUE and sets @duration when both a usable duration and a
 * non-zero timescale are known; otherwise leaves @duration as
 * GST_CLOCK_TIME_NONE and returns FALSE.
 */
static gboolean
gst_qtdemux_get_duration (GstQTDemux * qtdemux, GstClockTime * duration)
{
  gboolean res = FALSE;

  *duration = GST_CLOCK_TIME_NONE;

  /* 0 and G_MAXINT64 are 'unknown' markers; a zero timescale would make
   * the conversion below divide by zero */
  if (qtdemux->duration != 0 &&
      qtdemux->duration != G_MAXINT64 && qtdemux->timescale != 0) {
    *duration = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
    res = TRUE;
  }

  return res;
}
/* Source pad query handler. POSITION, DURATION, CONVERT, FORMATS, SEEKING
 * and SEGMENT are answered here (DURATION and SEEKING try upstream first);
 * everything else is forwarded to the default handler. */
static gboolean
gst_qtdemux_handle_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  gboolean res = FALSE;
  GstQTDemux *qtdemux = GST_QTDEMUX (parent);

  GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:{
      GstFormat fmt;

      gst_query_parse_position (query, &fmt, NULL);
      /* only answerable in TIME format and once we have a valid position */
      if (fmt == GST_FORMAT_TIME
          && GST_CLOCK_TIME_IS_VALID (qtdemux->segment.position)) {
        gst_query_set_position (query, GST_FORMAT_TIME,
            qtdemux->segment.position);
        res = TRUE;
      }
    }
      break;
    case GST_QUERY_DURATION:{
      GstFormat fmt;

      gst_query_parse_duration (query, &fmt, NULL);
      if (fmt == GST_FORMAT_TIME) {
        /* First try to query upstream */
        res = gst_pad_query_default (pad, parent, query);
        if (!res) {
          GstClockTime duration;
          if (gst_qtdemux_get_duration (qtdemux, &duration) && duration > 0) {
            gst_query_set_duration (query, GST_FORMAT_TIME, duration);
            res = TRUE;
          }
        }
      }
      break;
    }
    case GST_QUERY_CONVERT:{
      GstFormat src_fmt, dest_fmt;
      gint64 src_value, dest_value = 0;

      gst_query_parse_convert (query, &src_fmt, &src_value, &dest_fmt, NULL);
      res = gst_qtdemux_src_convert (qtdemux, pad,
          src_fmt, src_value, dest_fmt, &dest_value);
      if (res)
        gst_query_set_convert (query, src_fmt, src_value, dest_fmt, dest_value);
      break;
    }
    case GST_QUERY_FORMATS:
      gst_query_set_formats (query, 2, GST_FORMAT_TIME, GST_FORMAT_BYTES);
      res = TRUE;
      break;
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gboolean seekable;

      /* try upstream first */
      res = gst_pad_query_default (pad, parent, query);

      if (!res) {
        gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
        if (fmt == GST_FORMAT_TIME) {
          GstClockTime duration;

          gst_qtdemux_get_duration (qtdemux, &duration);
          /* pull mode is always seekable; in push mode it depends on
           * upstream being BYTE seekable */
          seekable = TRUE;
          if (!qtdemux->pullbased) {
            GstQuery *q;

            /* we might be able with help from upstream */
            seekable = FALSE;
            q = gst_query_new_seeking (GST_FORMAT_BYTES);
            if (gst_pad_peer_query (qtdemux->sinkpad, q)) {
              gst_query_parse_seeking (q, &fmt, &seekable, NULL, NULL);
              GST_LOG_OBJECT (qtdemux, "upstream BYTE seekable %d", seekable);
            }
            gst_query_unref (q);
          }
          gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0, duration);
          res = TRUE;
        }
      }
      break;
    }
    case GST_QUERY_SEGMENT:
    {
      GstFormat format;
      gint64 start, stop;

      format = qtdemux->segment.format;

      start =
          gst_segment_to_stream_time (&qtdemux->segment, format,
          qtdemux->segment.start);
      /* an unset stop falls back to the configured duration */
      if ((stop = qtdemux->segment.stop) == -1)
        stop = qtdemux->segment.duration;
      else
        stop = gst_segment_to_stream_time (&qtdemux->segment, format, stop);

      gst_query_set_segment (query, qtdemux->segment.rate, format, start, stop);
      res = TRUE;
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }

  return res;
}
/* Push this stream's pending tag events on its source pad: the per-stream
 * tags first (if non-empty), then the global tags while they are still
 * marked as due. No-op for streams without a pad. */
static void
gst_qtdemux_push_tags (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  if (G_UNLIKELY (stream->pad == NULL))
    return;

  GST_DEBUG_OBJECT (qtdemux, "Checking pad %s:%s for tags",
      GST_DEBUG_PAD_NAME (stream->pad));

  if (!gst_tag_list_is_empty (stream->stream_tags)) {
    GST_DEBUG_OBJECT (qtdemux, "Sending tags %" GST_PTR_FORMAT,
        stream->stream_tags);
    gst_pad_push_event (stream->pad,
        gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
  }

  if (G_UNLIKELY (stream->send_global_tags)) {
    GST_DEBUG_OBJECT (qtdemux, "Sending global tags %" GST_PTR_FORMAT,
        qtdemux->tag_list);
    gst_pad_push_event (stream->pad,
        gst_event_new_tag (gst_tag_list_ref (qtdemux->tag_list)));
    stream->send_global_tags = FALSE;
  }
}
/* Broadcast @event to every exposed source pad; consumes the caller's
 * reference. EOS is sent at most once per stream, and if EOS arrives while
 * no stream has a pad at all, a "no playable streams" error is posted. */
static void
gst_qtdemux_push_event (GstQTDemux * qtdemux, GstEvent * event)
{
  guint i;
  gboolean has_valid_stream = FALSE;
  GstEventType etype = GST_EVENT_TYPE (event);

  GST_DEBUG_OBJECT (qtdemux, "pushing %s event on all source pads",
      GST_EVENT_TYPE_NAME (event));

  for (i = 0; i < qtdemux->n_streams; i++) {
    QtDemuxStream *stream = qtdemux->streams[i];
    GstPad *pad = stream->pad;

    GST_DEBUG_OBJECT (qtdemux, "pushing on pad %i", i);

    if (pad == NULL)
      continue;

    has_valid_stream = TRUE;

    if (etype == GST_EVENT_EOS) {
      /* let's not send twice */
      if (stream->sent_eos)
        continue;
      stream->sent_eos = TRUE;
    }

    gst_pad_push_event (pad, gst_event_ref (event));
  }

  gst_event_unref (event);

  /* if it is EOS and there are no pads, post an error */
  if (!has_valid_stream && etype == GST_EVENT_EOS)
    gst_qtdemux_post_no_playable_stream_error (qtdemux);
}
/* From the streaming thread: forward a deferred newsegment event, if one
 * is queued, handing our reference over to gst_qtdemux_push_event(). */
static void
gst_qtdemux_push_pending_newsegment (GstQTDemux * qtdemux)
{
  GstEvent *event = qtdemux->pending_newsegment;

  if (event != NULL) {
    qtdemux->pending_newsegment = NULL;
    gst_qtdemux_push_event (qtdemux, event);
  }
}
/* Context for sample-table searches.
 * NOTE(review): appears unused in this part of the file -- find_func below
 * receives the target media time directly as a gint64 pointer. */
typedef struct
{
  guint64 media_time;
} FindData;
/* GCompareDataFunc for gst_util_array_binary_search(): orders sample @s1
 * by its composition time (timestamp + pts_offset, in track timescale
 * units) relative to the target *@media_time. */
static gint
find_func (QtDemuxSample * s1, gint64 * media_time, gpointer user_data)
{
  gint64 sample_time = (gint64) s1->timestamp + s1->pts_offset;

  if (sample_time > *media_time)
    return 1;
  else if (sample_time == *media_time)
    return 0;
  else
    return -1;
}
/* find the index of the sample that includes the data for @media_time using a
 * binary search. Only to be called in optimized cases of linear search below.
 *
 * Returns the index of the sample.
 */
static guint32
gst_qtdemux_find_index (GstQTDemux * qtdemux, QtDemuxStream * str,
    guint64 media_time)
{
  QtDemuxSample *match;
  guint64 mov_time;

  /* convert media_time to mov format */
  mov_time =
      gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);

  /* only the already-parsed part of the table (up to stbl_index) is
   * searchable here */
  match = gst_util_array_binary_search (str->samples, str->stbl_index + 1,
      sizeof (QtDemuxSample), (GCompareDataFunc) find_func,
      GST_SEARCH_MODE_BEFORE, &mov_time, NULL);

  return G_LIKELY (match) ? (guint32) (match - str->samples) : 0;
}
/* find the index of the sample that includes the data for @media_offset using a
 * linear search
 *
 * Returns the index of the sample, or -1 (as guint32) when no samples are
 * available or the sample table cannot be parsed further.
 */
static guint32
gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
    QtDemuxStream * str, gint64 media_offset)
{
  QtDemuxSample *result = str->samples;
  guint32 index = 0;

  /* nothing parsed yet: nothing to search */
  if (result == NULL || str->n_samples == 0)
    return -1;

  if (media_offset == result->offset)
    return index;

  result++;
  while (index < str->n_samples - 1) {
    /* the sample table is parsed lazily; ensure sample index+1 exists */
    if (!qtdemux_parse_samples (qtdemux, str, index + 1))
      goto parse_failed;

    if (media_offset < result->offset)
      break;

    index++;
    result++;
  }
  return index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
    return -1;
  }
}
/* find the index of the sample that includes the data for @media_time using a
 * linear search, and keeping in mind that not all samples may have been parsed
 * yet. If possible, it will delegate to binary search.
 *
 * @media_time is in GST_SECOND units. Returns the index of the sample, or
 * (guint32) -1 on error (no samples, or lazy sample parsing failed).
 */
static guint32
gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
    GstClockTime media_time)
{
  guint32 index = 0;
  guint64 mov_time;
  QtDemuxSample *sample;

  /* guard against streams with no samples; the sibling
   * gst_qtdemux_find_index_for_given_media_offset_linear() already has
   * this check and the first dereference below would otherwise crash */
  if (str->samples == NULL || str->n_samples == 0)
    return -1;

  /* convert media_time to mov format */
  mov_time =
      gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);

  /* fast path: requested time matches the very first sample */
  sample = str->samples;
  if (mov_time == sample->timestamp + sample->pts_offset)
    return index;

  /* use faster search if requested time in already parsed range */
  sample = str->samples + str->stbl_index;
  if (str->stbl_index >= 0 &&
      mov_time <= (sample->timestamp + sample->pts_offset))
    return gst_qtdemux_find_index (qtdemux, str, media_time);

  /* otherwise walk forward, parsing samples on demand, until the next
   * sample would start after the requested time */
  while (index < str->n_samples - 1) {
    if (!qtdemux_parse_samples (qtdemux, str, index + 1))
      goto parse_failed;

    sample = str->samples + index + 1;
    if (mov_time < (sample->timestamp + sample->pts_offset))
      break;

    index++;
  }
  return index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
    return -1;
  }
}
/* find the index of the keyframe needed to decode the sample at @index
 * of stream @str, or of a subsequent keyframe (depending on @next)
 *
 * Returns the index of the keyframe, str->n_samples when @index is out of
 * range, or (guint32) -1 when searching forward found no keyframe or lazy
 * sample parsing failed.
 */
static guint32
gst_qtdemux_find_keyframe (GstQTDemux * qtdemux, QtDemuxStream * str,
    guint32 index, gboolean next)
{
  guint32 new_index = index;

  /* out-of-range request: clamp to n_samples so the caller sees "past end" */
  if (index >= str->n_samples) {
    new_index = str->n_samples;
    goto beach;
  }

  /* all keyframes, return index */
  if (str->all_keyframe) {
    new_index = index;
    goto beach;
  }

  /* else search until we have a keyframe */
  while (new_index < str->n_samples) {
    /* only the forward walk can reach not-yet-parsed samples; a backward
     * walk stays inside the already parsed range */
    if (next && !qtdemux_parse_samples (qtdemux, str, new_index))
      goto parse_failed;

    if (str->samples[new_index].keyframe)
      break;

    /* walking backwards we cannot go below sample 0 */
    if (new_index == 0)
      break;

    if (next)
      new_index++;
    else
      new_index--;
  }

  /* forward search ran off the end: report "no next keyframe" as -1
   * (wraps to G_MAXUINT32) */
  if (new_index == str->n_samples) {
    GST_DEBUG_OBJECT (qtdemux, "no next keyframe");
    new_index = -1;
  }

beach:
  GST_DEBUG_OBJECT (qtdemux, "searching for keyframe index %s index %u "
      "gave %u", next ? "after" : "before", index, new_index);

  return new_index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", new_index);
    return -1;
  }
}
/* find the edit-list segment of @stream that contains @time_position
 *
 * Segments other than the last match when @time_position lies in
 * [time, stop_time); the last segment matches unconditionally, so a
 * position at or beyond the end still yields the final segment.
 * Returns the segment index, or (guint32) -1 when the stream has no
 * segments at all.
 */
static guint32
gst_qtdemux_find_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GstClockTime time_position)
{
  gint i;

  GST_LOG_OBJECT (stream->pad, "finding segment for %" GST_TIME_FORMAT,
      GST_TIME_ARGS (time_position));

  for (i = 0; i < stream->n_segments; i++) {
    QtDemuxSegment *segment = &stream->segments[i];

    GST_LOG_OBJECT (stream->pad,
        "looking at segment %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
        GST_TIME_ARGS (segment->time), GST_TIME_ARGS (segment->stop_time));

    /* the last segment always matches, stop_time included */
    if (i == stream->n_segments - 1)
      return i;

    if (segment->time <= time_position && time_position < segment->stop_time) {
      GST_LOG_OBJECT (stream->pad, "segment %d matches", i);
      return i;
    }
  }

  return (guint32) - 1;
}
/* Reposition @str at sample @index.
 *
 * A no-op when the stream is already there; otherwise the sample position
 * is updated, the intra-sample offset reset, and the stream flagged as
 * discontinuous so downstream gets a DISCONT on the next buffer. */
static void
gst_qtdemux_move_stream (GstQTDemux * qtdemux, QtDemuxStream * str,
    guint32 index)
{
  if (str->sample_index == index)
    return;                     /* already at the requested sample */

  GST_DEBUG_OBJECT (qtdemux, "moving to sample %u of %u", index,
      str->n_samples);

  str->sample_index = index;
  str->offset_in_sample = 0;
  /* remember where this playback run started from */
  str->from_sample = index;
  /* position jump implies a discontinuity */
  str->discont = TRUE;
}
/* Adjust a seek target so it lands on keyframes.
 *
 * For every stream (skipping sparse streams unless @use_sparse), the sample
 * at @desired_time is located and snapped to a keyframe: the following one
 * when @next is TRUE, else the preceding one.
 *
 * @key_time (may be NULL) receives the adjusted seek time in stream time;
 * @key_offset (may be NULL) receives the smallest file byte offset among the
 * chosen samples, or -1 if none was found.
 */
static void
gst_qtdemux_adjust_seek (GstQTDemux * qtdemux, gint64 desired_time,
    gboolean use_sparse, gboolean next, gint64 * key_time, gint64 * key_offset)
{
  guint64 min_offset;
  gint64 min_byte_offset = -1;
  gint n;

  /* start from the requested time; keyframe snapping below may move it */
  min_offset = desired_time;

  /* for each stream, find the index of the sample in the segment
   * and move back to the previous keyframe. */
  for (n = 0; n < qtdemux->n_streams; n++) {
    QtDemuxStream *str;
    guint32 index, kindex;
    guint32 seg_idx;
    GstClockTime media_start;
    GstClockTime media_time;
    GstClockTime seg_time;
    QtDemuxSegment *seg;
    gboolean empty_segment = FALSE;

    str = qtdemux->streams[n];

    /* sparse streams (e.g. subtitles) would snap the seek far away */
    if (CUR_STREAM (str)->sparse && !use_sparse)
      continue;

    seg_idx = gst_qtdemux_find_segment (qtdemux, str, desired_time);
    GST_DEBUG_OBJECT (qtdemux, "align segment %d", seg_idx);

    /* get segment and time in the segment */
    seg = &str->segments[seg_idx];
    seg_time = (desired_time - seg->time) * seg->rate;

    /* skip over empty edit-list segments; they carry no samples */
    while (QTSEGMENT_IS_EMPTY (seg)) {
      seg_time = 0;
      empty_segment = TRUE;
      GST_DEBUG_OBJECT (str->pad, "Segment %d is empty, moving to next one",
          seg_idx);
      seg_idx++;
      if (seg_idx == str->n_segments)
        break;
      seg = &str->segments[seg_idx];
    }

    if (seg_idx == str->n_segments) {
      /* FIXME track shouldn't have the last segment as empty, but if it
       * happens we better handle it */
      continue;
    }

    /* get the media time in the segment */
    media_start = seg->media_start + seg_time;

    /* get the index of the sample with media time */
    index = gst_qtdemux_find_index_linear (qtdemux, str, media_start);
    GST_DEBUG_OBJECT (qtdemux, "sample for %" GST_TIME_FORMAT " at %u"
        " at offset %" G_GUINT64_FORMAT " (empty segment: %d)",
        GST_TIME_ARGS (media_start), index, str->samples[index].offset,
        empty_segment);

    /* shift to next frame if we are looking for next keyframe */
    if (next && QTSAMPLE_PTS_NO_CSLG (str, &str->samples[index]) < media_start
        && index < str->stbl_index)
      index++;

    if (!empty_segment) {
      /* find previous keyframe */
      kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, next);

      /* we will settle for one before if none found after */
      if (next && kindex == -1)
        kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, FALSE);

      /* if the keyframe is at a different position, we need to update the
       * requested seek time */
      if (index != kindex) {
        index = kindex;

        /* get timestamp of keyframe */
        media_time = QTSAMPLE_PTS_NO_CSLG (str, &str->samples[kindex]);
        GST_DEBUG_OBJECT (qtdemux,
            "keyframe at %u with time %" GST_TIME_FORMAT " at offset %"
            G_GUINT64_FORMAT, kindex, GST_TIME_ARGS (media_time),
            str->samples[kindex].offset);

        /* keyframes in the segment get a chance to change the
         * desired_offset. keyframes out of the segment are
         * ignored. */
        if (media_time >= seg->media_start) {
          GstClockTime seg_time;

          /* this keyframe is inside the segment, convert back to
           * segment time */
          seg_time = (media_time - seg->media_start) + seg->time;
          /* keep the earliest (or latest, when seeking forward) snap
           * point across all streams */
          if ((!next && (seg_time < min_offset)) ||
              (next && (seg_time > min_offset)))
            min_offset = seg_time;
        }
      }
    }

    /* track the minimum byte position over all streams, used for the
     * push-mode BYTE seek upstream */
    if (min_byte_offset < 0 || str->samples[index].offset < min_byte_offset)
      min_byte_offset = str->samples[index].offset;
  }

  if (key_time)
    *key_time = min_offset;
  if (key_offset)
    *key_offset = min_byte_offset;
}
/* Convert the start/stop values of a seek to GST_FORMAT_TIME.
 *
 * Values whose seek type is GST_SEEK_TYPE_NONE are left untouched. On
 * success *format is set to GST_FORMAT_TIME and TRUE is returned; FALSE
 * means the conversion query failed and the values are unusable. */
static gboolean
gst_qtdemux_convert_seek (GstPad * pad, GstFormat * format,
    GstSeekType cur_type, gint64 * cur, GstSeekType stop_type, gint64 * stop)
{
  gboolean ok = TRUE;

  g_return_val_if_fail (format != NULL, FALSE);
  g_return_val_if_fail (cur != NULL, FALSE);
  g_return_val_if_fail (stop != NULL, FALSE);

  /* already in time format: nothing to convert */
  if (*format == GST_FORMAT_TIME)
    return TRUE;

  if (cur_type != GST_SEEK_TYPE_NONE)
    ok = gst_pad_query_convert (pad, *format, *cur, GST_FORMAT_TIME, cur);
  if (ok && stop_type != GST_SEEK_TYPE_NONE)
    ok = gst_pad_query_convert (pad, *format, *stop, GST_FORMAT_TIME, stop);
  if (ok)
    *format = GST_FORMAT_TIME;

  return ok;
}
/* perform seek in push based mode:
   find BYTE position to move to based on time and delegate to upstream

   Takes ownership decisions for @event only by re-using its parsed values;
   a new BYTE seek event is created and pushed upstream on the sink pad.
   Returns TRUE when upstream accepted the byte seek.
*/
static gboolean
gst_qtdemux_do_push_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType cur_type, stop_type;
  gint64 cur, stop, key_cur;
  gboolean res;
  gint64 byte_cur;
  gint64 original_stop;
  guint32 seqnum;

  GST_DEBUG_OBJECT (qtdemux, "doing push-based seek");

  gst_event_parse_seek (event, &rate, &format, &flags,
      &cur_type, &cur, &stop_type, &stop);
  seqnum = gst_event_get_seqnum (event);

  /* only forward streaming and seeking is possible */
  if (rate <= 0)
    goto unsupported_seek;

  /* convert to TIME if needed and possible */
  if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
          stop_type, &stop))
    goto no_format;

  /* Upstream seek in bytes will have undefined stop, but qtdemux stores
   * the original stop position to use when upstream pushes the new segment
   * for this seek */
  original_stop = stop;
  stop = -1;

  /* find reasonable corresponding BYTE position,
   * also try to mind about keyframes, since we can not go back a bit for them
   * later on */
  /* determining @next here based on SNAP_BEFORE/SNAP_AFTER should
   * mostly just work, but let's not yet boldly go there ... */
  gst_qtdemux_adjust_seek (qtdemux, cur, FALSE, FALSE, &key_cur, &byte_cur);

  if (byte_cur == -1)
    goto abort_seek;

  GST_DEBUG_OBJECT (qtdemux, "Pushing BYTE seek rate %g, "
      "start %" G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT, rate, byte_cur,
      stop);

  /* stash the time values so the upstream BYTE segment that follows can be
   * mapped back to the requested TIME segment (see sink event handler) */
  GST_OBJECT_LOCK (qtdemux);
  qtdemux->seek_offset = byte_cur;
  if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
    qtdemux->push_seek_start = cur;
  } else {
    qtdemux->push_seek_start = key_cur;
  }

  if (stop_type == GST_SEEK_TYPE_NONE) {
    qtdemux->push_seek_stop = qtdemux->segment.stop;
  } else {
    qtdemux->push_seek_stop = original_stop;
  }
  GST_OBJECT_UNLOCK (qtdemux);

  /* BYTE seek event */
  event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
      stop_type, stop);
  gst_event_set_seqnum (event, seqnum);
  res = gst_pad_push_event (qtdemux->sinkpad, event);

  return res;

  /* ERRORS */
abort_seek:
  {
    GST_DEBUG_OBJECT (qtdemux, "could not determine byte position to seek to, "
        "seek aborted.");
    return FALSE;
  }
unsupported_seek:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported seek, seek aborted.");
    return FALSE;
  }
no_format:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
/* perform the seek.
 *
 * We set all segment_indexes in the streams to unknown and
 * adjust the time_position to the desired position. this is enough
 * to trigger a segment switch in the streaming thread to start
 * streaming from the desired position.
 *
 * Keyframe seeking is a little more complicated when dealing with
 * segments. Ideally we want to move to the previous keyframe in
 * the segment but there might not be a keyframe in the segment. In
 * fact, none of the segments could contain a keyframe. We take a
 * practical approach: seek to the previous keyframe in the segment,
 * if there is none, seek to the beginning of the segment.
 *
 * Called with STREAM_LOCK
 */
static gboolean
gst_qtdemux_perform_seek (GstQTDemux * qtdemux, GstSegment * segment,
    guint32 seqnum, GstSeekFlags flags)
{
  gint64 desired_offset;
  gint n;

  desired_offset = segment->position;

  GST_DEBUG_OBJECT (qtdemux, "seeking to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (desired_offset));

  /* may not have enough fragmented info to do this adjustment,
   * and we can't scan (and probably should not) at this time with
   * possibly flushing upstream */
  if ((flags & GST_SEEK_FLAG_KEY_UNIT) && !qtdemux->fragmented) {
    gint64 min_offset;
    gboolean next, before, after;

    /* "! !" normalizes the flag bit to a 0/1 gboolean */
    before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
    after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
    next = after && !before;
    /* for reverse playback "next" is the opposite direction */
    if (segment->rate < 0)
      next = !next;

    gst_qtdemux_adjust_seek (qtdemux, desired_offset, TRUE, next, &min_offset,
        NULL);
    GST_DEBUG_OBJECT (qtdemux, "keyframe seek, align to %"
        GST_TIME_FORMAT, GST_TIME_ARGS (min_offset));
    desired_offset = min_offset;
  }

  /* and set all streams to the final position */
  gst_flow_combiner_reset (qtdemux->flowcombiner);
  qtdemux->segment_seqnum = seqnum;
  for (n = 0; n < qtdemux->n_streams; n++) {
    QtDemuxStream *stream = qtdemux->streams[n];

    stream->time_position = desired_offset;
    stream->accumulated_base = 0;
    /* -1 marks "unknown", which makes the streaming thread re-activate
     * the segment/sample for the new position */
    stream->sample_index = -1;
    stream->offset_in_sample = 0;
    stream->segment_index = -1;
    stream->sent_eos = FALSE;

    if (segment->flags & GST_SEEK_FLAG_FLUSH)
      gst_segment_init (&stream->segment, GST_FORMAT_TIME);
  }
  segment->position = desired_offset;
  segment->time = desired_offset;
  if (segment->rate >= 0) {
    segment->start = desired_offset;
    /* we stop at the end */
    if (segment->stop == -1)
      segment->stop = segment->duration;
  } else {
    segment->stop = desired_offset;
  }
  if (qtdemux->fragmented)
    qtdemux->fragmented_seek_pending = TRUE;

  return TRUE;
}
/* do a seek in pull based mode
 *
 * Stops streaming (flushing or pausing the task), configures the new
 * segment under the sink pad's STREAM_LOCK, delegates the actual
 * repositioning to gst_qtdemux_perform_seek(), then restarts the task.
 * @event may be NULL (internal re-seek without new parameters). Does NOT
 * take ownership of @event. */
static gboolean
gst_qtdemux_do_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType cur_type, stop_type;
  gint64 cur, stop;
  gboolean flush;
  gboolean update;
  GstSegment seeksegment;
  guint32 seqnum = GST_SEQNUM_INVALID;
  GstEvent *flush_event;
  gboolean ret;

  if (event) {
    GST_DEBUG_OBJECT (qtdemux, "doing seek with event");

    gst_event_parse_seek (event, &rate, &format, &flags,
        &cur_type, &cur, &stop_type, &stop);
    seqnum = gst_event_get_seqnum (event);

    /* we have to have a format as the segment format. Try to convert
     * if not. */
    if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
            stop_type, &stop))
      goto no_format;

    GST_DEBUG_OBJECT (qtdemux, "seek format %s", gst_format_get_name (format));
  } else {
    GST_DEBUG_OBJECT (qtdemux, "doing seek without event");
    /* rate/format/cur/stop remain unset but are only read when
     * event != NULL below */
    flags = 0;
  }

  flush = flags & GST_SEEK_FLAG_FLUSH;

  /* stop streaming, either by flushing or by pausing the task */
  if (flush) {
    flush_event = gst_event_new_flush_start ();
    if (seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (flush_event, seqnum);
    /* unlock upstream pull_range */
    gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
    /* make sure out loop function exits */
    gst_qtdemux_push_event (qtdemux, flush_event);
  } else {
    /* non flushing seek, pause the task */
    gst_pad_pause_task (qtdemux->sinkpad);
  }

  /* wait for streaming to finish */
  GST_PAD_STREAM_LOCK (qtdemux->sinkpad);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &qtdemux->segment, sizeof (GstSegment));

  if (event) {
    /* configure the segment with the seek variables */
    GST_DEBUG_OBJECT (qtdemux, "configuring seek");
    if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
            cur_type, cur, stop_type, stop, &update)) {
      ret = FALSE;
      GST_ERROR_OBJECT (qtdemux, "inconsistent seek values, doing nothing");
    } else {
      /* now do the seek */
      ret = gst_qtdemux_perform_seek (qtdemux, &seeksegment, seqnum, flags);
    }
  } else {
    /* now do the seek */
    ret = gst_qtdemux_perform_seek (qtdemux, &seeksegment, seqnum, flags);
  }

  /* prepare for streaming again */
  if (flush) {
    flush_event = gst_event_new_flush_stop (TRUE);
    if (seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (flush_event, seqnum);

    gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
    gst_qtdemux_push_event (qtdemux, flush_event);
  }

  /* commit the new segment */
  memcpy (&qtdemux->segment, &seeksegment, sizeof (GstSegment));

  if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    GstMessage *msg = gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
        qtdemux->segment.format, qtdemux->segment.position);
    if (seqnum != GST_SEQNUM_INVALID)
      gst_message_set_seqnum (msg, seqnum);
    gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
  }

  /* restart streaming, NEWSEGMENT will be sent from the streaming thread. */
  gst_pad_start_task (qtdemux->sinkpad, (GstTaskFunction) gst_qtdemux_loop,
      qtdemux->sinkpad, NULL);

  GST_PAD_STREAM_UNLOCK (qtdemux->sinkpad);

  return ret;

  /* ERRORS */
no_format:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
/* Make sure the complete sample table of every stream is parsed, so random
 * access (seeking) can use it. Returns FALSE when parsing any stream's
 * samples failed. */
static gboolean
qtdemux_ensure_index (GstQTDemux * qtdemux)
{
  guint i;

  GST_DEBUG_OBJECT (qtdemux, "collecting all metadata for all streams");

  /* Build complete index */
  for (i = 0; i < qtdemux->n_streams; i++) {
    QtDemuxStream *stream = qtdemux->streams[i];

    /* nothing to parse for an empty stream; also avoids the unsigned
     * underflow of n_samples - 1 when n_samples == 0 */
    if (stream->n_samples == 0)
      continue;

    if (!qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1))
      goto parse_error;
  }
  return TRUE;

  /* ERRORS */
parse_error:
  {
    GST_LOG_OBJECT (qtdemux,
        "Building complete index of stream %u for seeking failed!", i);
    return FALSE;
  }
}
/* Source pad event handler.
 *
 * SEEK events are de-duplicated by seqnum, the sample index is completed
 * first (non-fragmented files), and the seek is then dispatched to pull
 * seeking, upstream, or push-mode byte seeking. All other events go to the
 * default handler. Takes ownership of @event. */
static gboolean
gst_qtdemux_handle_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res = TRUE;
  GstQTDemux *qtdemux = GST_QTDEMUX (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
#ifndef GST_DISABLE_GST_DEBUG
      GstClockTime ts = gst_util_get_timestamp ();
#endif
      guint32 seqnum = gst_event_get_seqnum (event);

      /* a seek with the seqnum of the segment we already play is a
       * duplicate (e.g. sent to both audio and video pads) */
      if (seqnum == qtdemux->segment_seqnum) {
        GST_LOG_OBJECT (pad,
            "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
        gst_event_unref (event);
        return TRUE;
      }

      if (qtdemux->upstream_format_is_time && qtdemux->fragmented) {
        /* seek should be handled by upstream, we might need to re-download fragments */
        GST_DEBUG_OBJECT (qtdemux,
            "let upstream handle seek for fragmented playback");
        /* note: jumps into the default case below, which forwards the
         * event (and its ownership) upstream */
        goto upstream;
      }

      /* Build complete index for seeking;
       * if not a fragmented file at least */
      if (!qtdemux->fragmented)
        if (!qtdemux_ensure_index (qtdemux))
          goto index_failed;
#ifndef GST_DISABLE_GST_DEBUG
      ts = gst_util_get_timestamp () - ts;
      GST_INFO_OBJECT (qtdemux,
          "Time taken to parse index %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
#endif
    }
      /* dispatch the seek: pull mode handles it locally; otherwise try
       * upstream first, then fall back to push-mode byte seeking */
      if (qtdemux->pullbased) {
        res = gst_qtdemux_do_seek (qtdemux, pad, event);
      } else if (gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (event))) {
        GST_DEBUG_OBJECT (qtdemux, "Upstream successfully seeked");
        res = TRUE;
      } else if (qtdemux->state == QTDEMUX_STATE_MOVIE && qtdemux->n_streams
          && !qtdemux->fragmented) {
        res = gst_qtdemux_do_push_seek (qtdemux, pad, event);
      } else {
        GST_DEBUG_OBJECT (qtdemux,
            "ignoring seek in push mode in current state");
        res = FALSE;
      }
      gst_event_unref (event);
      break;
    default:
    upstream:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

done:
  return res;

  /* ERRORS */
index_failed:
  {
    GST_ERROR_OBJECT (qtdemux, "Index failed");
    gst_event_unref (event);
    res = FALSE;
    goto done;
  }
}
/* Find, for each track, the first sample in coding order that has a file offset >= @byte_pos.
 *
 * If @fw is false, the coding order is explored backwards.
 *
 * If @set is true, each stream will be moved to its matched sample, or EOS if no matching
 * sample is found for that track.
 *
 * The stream and sample index of the sample with the minimum offset in the direction explored
 * (see @fw) is returned in the output parameters @_stream and @_index respectively.
 *
 * @_time is set to the QTSAMPLE_PTS of the matched sample with the minimum QTSAMPLE_PTS in the
 * direction explored, which may not always match the QTSAMPLE_PTS of the sample returned in
 * @_stream and @_index. All three output parameters may be NULL. */
static void
gst_qtdemux_find_sample (GstQTDemux * qtdemux, gint64 byte_pos, gboolean fw,
    gboolean set, QtDemuxStream ** _stream, gint * _index, gint64 * _time)
{
  gint i, n, index;
  gint64 time, min_time;
  QtDemuxStream *stream;

  min_time = -1;
  stream = NULL;
  index = -1;

  for (n = 0; n < qtdemux->n_streams; ++n) {
    QtDemuxStream *str;
    gint inc;
    gboolean set_sample;

    str = qtdemux->streams[n];
    /* when not moving streams, there is nothing to mark as EOS below */
    set_sample = !set;

    /* choose scan direction: forward from 0, or backward from the last
     * sample */
    if (fw) {
      i = 0;
      inc = 1;
    } else {
      i = str->n_samples - 1;
      inc = -1;
    }

    for (; (i >= 0) && (i < str->n_samples); i += inc) {
      /* zero-sized samples carry no data, skip */
      if (str->samples[i].size == 0)
        continue;

      /* forward: need a sample starting at or after byte_pos;
       * backward: need a sample ending at or before byte_pos */
      if (fw && (str->samples[i].offset < byte_pos))
        continue;

      if (!fw && (str->samples[i].offset + str->samples[i].size > byte_pos))
        continue;

      /* move stream to first available sample */
      if (set) {
        gst_qtdemux_move_stream (qtdemux, str, i);
        set_sample = TRUE;
      }

      /* avoid index from sparse streams since they might be far away */
      if (!CUR_STREAM (str)->sparse) {
        /* determine min/max time */
        time = QTSAMPLE_PTS (str, &str->samples[i]);
        if (min_time == -1 || (!fw && time > min_time) ||
            (fw && time < min_time)) {
          min_time = time;
        }

        /* determine stream with leading sample, to get its position */
        if (!stream ||
            (fw && (str->samples[i].offset < stream->samples[index].offset)) ||
            (!fw && (str->samples[i].offset > stream->samples[index].offset))) {
          stream = str;
          index = i;
        }
      }
      /* first match in this stream is enough */
      break;
    }

    /* no sample for this stream, mark eos */
    if (!set_sample)
      gst_qtdemux_move_stream (qtdemux, str, str->n_samples);
  }

  if (_time)
    *_time = min_time;
  if (_stream)
    *_stream = stream;
  if (_index)
    *_index = index;
}
/* Allocate and initialize a new QtDemuxStream.
 *
 * Memory is zeroed by g_new0(); the explicit assignments below set the
 * fields whose defaults differ from zero and restate a few zero defaults
 * for clarity. Caller owns the returned stream. */
static QtDemuxStream *
_create_stream (void)
{
  QtDemuxStream *stream = g_new0 (QtDemuxStream, 1);

  /* new streams always need a discont */
  stream->discont = TRUE;
  /* we enable clipping for raw audio/video streams */
  stream->need_clip = FALSE;
  stream->need_process = FALSE;
  stream->new_stream = TRUE;

  /* positions are unknown until a segment/sample is activated */
  stream->segment_index = -1;
  stream->sample_index = -1;
  stream->time_position = 0;
  stream->offset_in_sample = 0;

  stream->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
  stream->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* no protection info until a protected sample entry is parsed */
  stream->protected = FALSE;
  stream->protection_scheme_type = 0;
  stream->protection_scheme_version = 0;
  stream->protection_scheme_info = NULL;

  stream->n_samples_moof = 0;
  stream->duration_moof = 0;
  stream->duration_last_moof = 0;
  stream->alignment = 1;

  stream->stream_tags = gst_tag_list_new_empty ();
  gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
  g_queue_init (&stream->protection_scheme_event_queue);

  return stream;
}
/* Sink pad caps handler.
 *
 * Detects the Smooth Streaming ("mss-fragmented") variant and, for it,
 * creates/updates the single stream from the "media-caps" and "timescale"
 * caps fields. Any other caps simply clear mss mode. Returns FALSE only
 * when mss renegotiation is impossible (more than one stream exposed). */
static gboolean
gst_qtdemux_setcaps (GstQTDemux * demux, GstCaps * caps)
{
  GstStructure *structure;
  const gchar *variant;
  const GstCaps *mediacaps = NULL;

  GST_DEBUG_OBJECT (demux, "Sink set caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);
  variant = gst_structure_get_string (structure, "variant");

  if (variant && strcmp (variant, "mss-fragmented") == 0) {
    QtDemuxStream *stream;
    const GValue *value;

    demux->fragmented = TRUE;
    demux->mss_mode = TRUE;

    if (demux->n_streams > 1) {
      /* can't do this, we can only renegotiate for another mss format */
      return FALSE;
    }

    value = gst_structure_get_value (structure, "media-caps");
    /* create stream */
    if (value) {
      const GValue *timescale_v;

      /* TODO update when stream changes during playback */

      if (demux->n_streams == 0) {
        stream = _create_stream ();
        demux->streams[demux->n_streams] = stream;
        demux->n_streams = 1;
        /* mss has no stsd/stsd entry, use id 0 as default */
        stream->stsd_entries_length = 1;
        stream->stsd_sample_description_id = stream->cur_stsd_entry_index = 0;
        stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, 1);
      } else {
        stream = demux->streams[0];
      }

      timescale_v = gst_structure_get_value (structure, "timescale");
      if (timescale_v) {
        stream->timescale = g_value_get_uint64 (timescale_v);
      } else {
        /* default mss timescale */
        stream->timescale = 10000000;
      }
      demux->timescale = stream->timescale;

      mediacaps = gst_value_get_caps (value);
      /* flag a caps change so the streaming thread renegotiates */
      if (!CUR_STREAM (stream)->caps
          || !gst_caps_is_equal_fixed (mediacaps, CUR_STREAM (stream)->caps)) {
        GST_DEBUG_OBJECT (demux, "We have a new caps %" GST_PTR_FORMAT,
            mediacaps);
        stream->new_caps = TRUE;
      }
      gst_caps_replace (&CUR_STREAM (stream)->caps, (GstCaps *) mediacaps);
      structure = gst_caps_get_structure (mediacaps, 0);
      /* derive stream type and basic parameters from the media caps */
      if (g_str_has_prefix (gst_structure_get_name (structure), "video")) {
        stream->subtype = FOURCC_vide;

        gst_structure_get_int (structure, "width", &CUR_STREAM (stream)->width);
        gst_structure_get_int (structure, "height",
            &CUR_STREAM (stream)->height);
        gst_structure_get_fraction (structure, "framerate",
            &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
      } else if (g_str_has_prefix (gst_structure_get_name (structure), "audio")) {
        gint rate = 0;

        stream->subtype = FOURCC_soun;
        gst_structure_get_int (structure, "channels",
            &CUR_STREAM (stream)->n_channels);
        gst_structure_get_int (structure, "rate", &rate);
        CUR_STREAM (stream)->rate = rate;
      }
    }
    gst_caps_replace (&demux->media_caps, (GstCaps *) mediacaps);
  } else {
    demux->mss_mode = FALSE;
  }

  return TRUE;
}
/* Reset the demuxer state.
 *
 * @hard resets everything (element going back to READY/NULL): all parsing
 * state, streams and global tags are released. A soft reset (e.g. flushing
 * seek in push mode) only clears per-segment state; in mss mode streams are
 * cleared but kept, otherwise a pending newsegment is prepared so streaming
 * can resume. */
static void
gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard)
{
  gint n;

  GST_DEBUG_OBJECT (qtdemux, "Resetting demux");
  gst_pad_stop_task (qtdemux->sinkpad);

  /* upstream_format_is_time means upstream drives positions, so a new
   * segment requires the same full reset as a hard one */
  if (hard || qtdemux->upstream_format_is_time) {
    qtdemux->state = QTDEMUX_STATE_INITIAL;
    qtdemux->neededbytes = 16;  /* enough for an atom header */
    qtdemux->todrop = 0;
    qtdemux->pullbased = FALSE;
    qtdemux->posted_redirect = FALSE;
    qtdemux->first_mdat = -1;
    qtdemux->header_size = 0;
    qtdemux->mdatoffset = -1;
    qtdemux->restoredata_offset = -1;
    if (qtdemux->mdatbuffer)
      gst_buffer_unref (qtdemux->mdatbuffer);
    if (qtdemux->restoredata_buffer)
      gst_buffer_unref (qtdemux->restoredata_buffer);
    qtdemux->mdatbuffer = NULL;
    qtdemux->restoredata_buffer = NULL;
    qtdemux->mdatleft = 0;
    qtdemux->mdatsize = 0;
    if (qtdemux->comp_brands)
      gst_buffer_unref (qtdemux->comp_brands);
    qtdemux->comp_brands = NULL;
    qtdemux->last_moov_offset = -1;
    /* when the moov was compressed, moov_node points into separately
     * allocated (decompressed) data that must be freed explicitly.
     * NOTE(review): this frees moov_node->data only in the compressed
     * case -- confirm the uncompressed node's data is owned elsewhere */
    if (qtdemux->moov_node_compressed) {
      g_node_destroy (qtdemux->moov_node_compressed);
      if (qtdemux->moov_node)
        g_free (qtdemux->moov_node->data);
    }
    qtdemux->moov_node_compressed = NULL;
    if (qtdemux->moov_node)
      g_node_destroy (qtdemux->moov_node);
    qtdemux->moov_node = NULL;
    if (qtdemux->tag_list)
      gst_mini_object_unref (GST_MINI_OBJECT_CAST (qtdemux->tag_list));
    qtdemux->tag_list = gst_tag_list_new_empty ();
    gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
#if 0
    if (qtdemux->element_index)
      gst_object_unref (qtdemux->element_index);
    qtdemux->element_index = NULL;
#endif
    qtdemux->major_brand = 0;
    if (qtdemux->pending_newsegment)
      gst_event_unref (qtdemux->pending_newsegment);
    qtdemux->pending_newsegment = NULL;
    qtdemux->upstream_format_is_time = FALSE;
    qtdemux->upstream_seekable = FALSE;
    qtdemux->upstream_size = 0;

    qtdemux->fragment_start = -1;
    qtdemux->fragment_start_offset = -1;
    qtdemux->duration = 0;
    qtdemux->moof_offset = 0;
    qtdemux->chapters_track_id = 0;
    qtdemux->have_group_id = FALSE;
    qtdemux->group_id = G_MAXUINT;

    g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
        NULL);
    g_queue_clear (&qtdemux->protection_event_queue);
  }
  /* state cleared on every (hard or soft) reset */
  qtdemux->offset = 0;
  gst_adapter_clear (qtdemux->adapter);
  gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
  qtdemux->segment_seqnum = GST_SEQNUM_INVALID;

  if (hard) {
    /* drop all streams entirely */
    for (n = 0; n < qtdemux->n_streams; n++) {
      gst_qtdemux_stream_free (qtdemux, qtdemux->streams[n]);
      qtdemux->streams[n] = NULL;
    }
    qtdemux->n_streams = 0;
    qtdemux->n_video_streams = 0;
    qtdemux->n_audio_streams = 0;
    qtdemux->n_sub_streams = 0;
    qtdemux->exposed = FALSE;
    qtdemux->fragmented = FALSE;
    qtdemux->mss_mode = FALSE;
    gst_caps_replace (&qtdemux->media_caps, NULL);
    qtdemux->timescale = 0;
    qtdemux->got_moov = FALSE;
    if (qtdemux->protection_system_ids) {
      g_ptr_array_free (qtdemux->protection_system_ids, TRUE);
      qtdemux->protection_system_ids = NULL;
    }
  } else if (qtdemux->mss_mode) {
    /* mss: keep the stream objects but clear their parsed data */
    gst_flow_combiner_reset (qtdemux->flowcombiner);
    for (n = 0; n < qtdemux->n_streams; n++)
      gst_qtdemux_stream_clear (qtdemux, qtdemux->streams[n]);
  } else {
    /* soft reset: rewind stream positions and queue a fresh segment */
    gst_flow_combiner_reset (qtdemux->flowcombiner);
    for (n = 0; n < qtdemux->n_streams; n++) {
      qtdemux->streams[n]->sent_eos = FALSE;
      qtdemux->streams[n]->time_position = 0;
      qtdemux->streams[n]->accumulated_base = 0;
    }
    if (!qtdemux->pending_newsegment) {
      qtdemux->pending_newsegment = gst_event_new_segment (&qtdemux->segment);
      if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID)
        gst_event_set_seqnum (qtdemux->pending_newsegment,
            qtdemux->segment_seqnum);
    }
  }
}
/* Maps the @segment to the qt edts internal segments and pushes
 * the correspnding segment event.
 *
 * If it ends up being at a empty segment, a gap will be pushed and the next
 * edts segment will be activated in sequence.
 *
 * To be used in push-mode only */
static void
gst_qtdemux_map_and_push_segments (GstQTDemux * qtdemux, GstSegment * segment)
{
  gint n, i;

  for (n = 0; n < qtdemux->n_streams; n++) {
    QtDemuxStream *stream = qtdemux->streams[n];

    stream->time_position = segment->start;

    /* in push mode we should be guaranteed that we will have empty segments
     * at the beginning and then one segment after, other scenarios are not
     * supported and are discarded when parsing the edts */
    for (i = 0; i < stream->n_segments; i++) {
      if (stream->segments[i].stop_time > segment->start) {
        /* activate the edts segment our position falls into */
        gst_qtdemux_activate_segment (qtdemux, stream, i,
            stream->time_position);
        if (QTSEGMENT_IS_EMPTY (&stream->segments[i])) {
          /* push the empty segment and move to the next one */
          gst_qtdemux_send_gap_for_segment (qtdemux, stream, i,
              stream->time_position);
          continue;
        }

        /* per the guarantee above, the first non-empty segment we land on
         * must be the last one */
        g_assert (i == stream->n_segments - 1);
      }
    }
  }
}
/* Sink pad event handler (push mode).
 *
 * SEGMENT events are translated from upstream's notion (BYTES or TIME) into
 * the demuxer's TIME segment and distributed to the source pads; other
 * events (flush, EOS, caps, protection) update internal state and/or are
 * forwarded downstream.  Returns TRUE if the event was handled. */
static gboolean
gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstObject * parent,
    GstEvent * event)
{
  GstQTDemux *demux = GST_QTDEMUX (parent);
  gboolean res = TRUE;

  GST_LOG_OBJECT (demux, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      gint64 offset = 0;
      QtDemuxStream *stream;
      gint idx;
      GstSegment segment;

      /* some debug output */
      gst_event_copy_segment (event, &segment);
      GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
          &segment);

      /* erase any previously set segment */
      gst_event_replace (&demux->pending_newsegment, NULL);

      if (segment.format == GST_FORMAT_TIME) {
        /* upstream drives in TIME (e.g. adaptive streaming): keep the event
         * pending so it is pushed once pads exist */
        GST_DEBUG_OBJECT (demux, "new pending_newsegment");
        gst_event_replace (&demux->pending_newsegment, event);
        demux->upstream_format_is_time = TRUE;
      } else {
        GST_DEBUG_OBJECT (demux, "Not storing upstream newsegment, "
            "not in time format");
        /* chain will send initial newsegment after pads have been added */
        if (demux->state != QTDEMUX_STATE_MOVIE || !demux->n_streams) {
          GST_DEBUG_OBJECT (demux, "still starting, eating event");
          goto exit;
        }
      }

      /* check if this matches a time seek we received previously
       * FIXME for backwards compatibility reasons we use the
       * seek_offset here to compare. In the future we might want to
       * change this to use the seqnum as it uniquely should identify
       * the segment that corresponds to the seek. */
      GST_DEBUG_OBJECT (demux, "Stored seek offset: %" G_GINT64_FORMAT
          ", received segment offset %" G_GINT64_FORMAT,
          demux->seek_offset, segment.start);
      if (segment.format == GST_FORMAT_BYTES
          && demux->seek_offset == segment.start) {
        /* this BYTE segment is the response to our own seek: substitute the
         * TIME values we stored when the seek was issued */
        GST_OBJECT_LOCK (demux);
        offset = segment.start;

        segment.format = GST_FORMAT_TIME;
        segment.start = demux->push_seek_start;
        segment.stop = demux->push_seek_stop;
        GST_DEBUG_OBJECT (demux, "Replaced segment with stored seek "
            "segment %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
            GST_TIME_ARGS (segment.start), GST_TIME_ARGS (segment.stop));
        GST_OBJECT_UNLOCK (demux);
      }

      /* we only expect a BYTE segment, e.g. following a seek */
      if (segment.format == GST_FORMAT_BYTES) {
        /* map the byte offsets onto sample timestamps */
        if (GST_CLOCK_TIME_IS_VALID (segment.start)) {
          offset = segment.start;

          gst_qtdemux_find_sample (demux, segment.start, TRUE, FALSE, NULL,
              NULL, (gint64 *) & segment.start);
          if ((gint64) segment.start < 0)
            segment.start = 0;
        }
        if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
          gst_qtdemux_find_sample (demux, segment.stop, FALSE, FALSE, NULL,
              NULL, (gint64 *) & segment.stop);
          /* keyframe seeking should already arrange for start >= stop,
           * but make sure in other rare cases */
          segment.stop = MAX (segment.stop, segment.start);
        }
      } else if (segment.format == GST_FORMAT_TIME) {
        /* push all data on the adapter before starting this
         * new segment */
        gst_qtdemux_process_adapter (demux, TRUE);
      } else {
        GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
        goto exit;
      }

      /* We shouldn't modify upstream driven TIME FORMAT segment */
      if (!demux->upstream_format_is_time) {
        /* accept upstream's notion of segment and distribute along */
        segment.format = GST_FORMAT_TIME;
        segment.position = segment.time = segment.start;
        segment.duration = demux->segment.duration;
        segment.base = gst_segment_to_running_time (&demux->segment,
            GST_FORMAT_TIME, demux->segment.position);
      }

      gst_segment_copy_into (&segment, &demux->segment);
      GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);

      /* map segment to internal qt segments and push on each stream */
      if (demux->n_streams) {
        if (demux->fragmented) {
          /* fragmented: push the segment as-is on all pads */
          GstEvent *segment_event = gst_event_new_segment (&segment);

          gst_event_replace (&demux->pending_newsegment, NULL);
          gst_event_set_seqnum (segment_event, demux->segment_seqnum);
          gst_qtdemux_push_event (demux, segment_event);
        } else {
          /* non-fragmented: map via the edts segments per stream */
          gst_event_replace (&demux->pending_newsegment, NULL);
          gst_qtdemux_map_and_push_segments (demux, &segment);
        }
      }

      /* clear leftover in current segment, if any */
      gst_adapter_clear (demux->adapter);

      /* set up streaming thread */
      demux->offset = offset;
      if (demux->upstream_format_is_time) {
        GST_DEBUG_OBJECT (demux, "Upstream is driving in time format, "
            "set values to restart reading from a new atom");
        /* 16 bytes: enough for an atom header incl. 64-bit size extension */
        demux->neededbytes = 16;
        demux->todrop = 0;
      } else {
        /* position the byte-parsing state on the sample that covers offset */
        gst_qtdemux_find_sample (demux, offset, TRUE, TRUE, &stream, &idx,
            NULL);
        if (stream) {
          demux->todrop = stream->samples[idx].offset - offset;
          demux->neededbytes = demux->todrop + stream->samples[idx].size;
        } else {
          /* set up for EOS */
          demux->neededbytes = -1;
          demux->todrop = 0;
        }
      }
    exit:
      gst_event_unref (event);
      res = TRUE;
      goto drop;
    }
    case GST_EVENT_FLUSH_START:
    {
      /* swallow flushes resulting from our own internal offset seek */
      if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
        gst_event_unref (event);
        goto drop;
      }
      break;
    }
    case GST_EVENT_FLUSH_STOP:
    {
      guint64 dur;

      /* reset, but keep the known duration across the flush */
      dur = demux->segment.duration;
      gst_qtdemux_reset (demux, FALSE);
      demux->segment.duration = dur;

      if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
        gst_event_unref (event);
        goto drop;
      }
      break;
    }
    case GST_EVENT_EOS:
      /* If we are in push mode, and get an EOS before we've seen any streams,
       * then error out - we have nowhere to send the EOS */
      if (!demux->pullbased) {
        gint i;
        gboolean has_valid_stream = FALSE;
        for (i = 0; i < demux->n_streams; i++) {
          if (demux->streams[i]->pad != NULL) {
            has_valid_stream = TRUE;
            break;
          }
        }
        if (!has_valid_stream)
          gst_qtdemux_post_no_playable_stream_error (demux);
        else {
          /* flush whatever is still buffered before forwarding EOS */
          GST_DEBUG_OBJECT (demux, "Data still available after EOS: %u",
              (guint) gst_adapter_available (demux->adapter));
          if (gst_qtdemux_process_adapter (demux, TRUE) != GST_FLOW_OK) {
            res = FALSE;
          }
        }
      }
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps = NULL;

      gst_event_parse_caps (event, &caps);
      gst_qtdemux_setcaps (demux, caps);
      res = TRUE;
      gst_event_unref (event);
      goto drop;
    }
    case GST_EVENT_PROTECTION:
    {
      const gchar *system_id = NULL;

      gst_event_parse_protection (event, &system_id, NULL, NULL);
      GST_DEBUG_OBJECT (demux, "Received protection event for system ID %s",
          system_id);
      gst_qtdemux_append_protection_system_id (demux, system_id);
      /* save the event for later, for source pads that have not been created */
      g_queue_push_tail (&demux->protection_event_queue, gst_event_ref (event));
      /* send it to all pads that already exist */
      gst_qtdemux_push_event (demux, event);
      res = TRUE;
      goto drop;
    }
    default:
      break;
  }

  res = gst_pad_event_default (demux->sinkpad, parent, event) & res;

drop:
  return res;
}
#if 0
/* NOTE: disabled GstIndex support, kept for reference.  These implement the
 * GstElement set_index/get_index vfuncs to attach an external index object
 * to the demuxer. */
static void
gst_qtdemux_set_index (GstElement * element, GstIndex * index)
{
  GstQTDemux *demux = GST_QTDEMUX (element);

  GST_OBJECT_LOCK (demux);
  if (demux->element_index)
    gst_object_unref (demux->element_index);
  if (index) {
    demux->element_index = gst_object_ref (index);
  } else {
    demux->element_index = NULL;
  }
  GST_OBJECT_UNLOCK (demux);
  /* object lock might be taken again */
  if (index)
    gst_index_get_writer_id (index, GST_OBJECT (element), &demux->index_id);
  GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT "for writer id %d",
      demux->element_index, demux->index_id);
}

/* Returns a new reference to the currently set index, or NULL. */
static GstIndex *
gst_qtdemux_get_index (GstElement * element)
{
  GstIndex *result = NULL;
  GstQTDemux *demux = GST_QTDEMUX (element);

  GST_OBJECT_LOCK (demux);
  if (demux->element_index)
    result = gst_object_ref (demux->element_index);
  GST_OBJECT_UNLOCK (demux);

  GST_DEBUG_OBJECT (demux, "Returning index %" GST_PTR_FORMAT, result);
  return result;
}
#endif
/* Frees the cached sample-table (stbl) atom data of @stream and clears the
 * stale pointers so a later free cannot double-free them. */
static void
gst_qtdemux_stbl_free (QtDemuxStream * stream)
{
#define QTDEMUX_CLEAR_TABLE(table) \
  do { \
    g_free ((gpointer) (table).data); \
    (table).data = NULL; \
  } while (0)

  QTDEMUX_CLEAR_TABLE (stream->stco);   /* chunk offsets */
  QTDEMUX_CLEAR_TABLE (stream->stsz);   /* sample sizes */
  QTDEMUX_CLEAR_TABLE (stream->stsc);   /* sample-to-chunk mapping */
  QTDEMUX_CLEAR_TABLE (stream->stts);   /* decoding time-to-sample */
  QTDEMUX_CLEAR_TABLE (stream->stss);   /* sync (keyframe) samples */
  QTDEMUX_CLEAR_TABLE (stream->stps);   /* partial sync samples */
  QTDEMUX_CLEAR_TABLE (stream->ctts);   /* composition time offsets */

#undef QTDEMUX_CLEAR_TABLE
}
/* Drops the edit-list derived segments of @stream and resets the segment
 * tracking state. */
static void
gst_qtdemux_stream_flush_segments_data (GstQTDemux * qtdemux,
    QtDemuxStream * stream)
{
  /* -1 marks "no segment currently active" */
  stream->segment_index = -1;
  stream->accumulated_base = 0;

  g_free (stream->segments);
  stream->segments = NULL;
}
/* Releases all sample-related data of @stream (sample array, stbl tables
 * and fragment bookkeeping) and resets the associated cursors. */
static void
gst_qtdemux_stream_flush_samples_data (GstQTDemux * qtdemux,
    QtDemuxStream * stream)
{
  /* built sample list */
  g_free (stream->samples);
  stream->samples = NULL;
  stream->n_samples = 0;
  stream->sample_index = -1;

  /* raw sample-table atoms */
  gst_qtdemux_stbl_free (stream);
  stream->stbl_index = -1;

  /* fragments */
  g_free (stream->ra_entries);
  stream->ra_entries = NULL;
  stream->n_ra_entries = 0;
  stream->n_samples_moof = 0;
  stream->duration_moof = 0;
  stream->duration_last_moof = 0;

  stream->time_position = 0;
}
/* Clears all runtime state of @stream (pending buffers, palettes, tags,
 * protection info, segments and samples) without freeing the stream itself
 * or its stsd entries/caps. */
static void
gst_qtdemux_stream_clear (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  gint i;

  /* NOTE(review): the allocator pointer is unreffed but not set to NULL here;
   * presumably a second clear cannot happen before it is re-set — confirm */
  if (stream->allocator)
    gst_object_unref (stream->allocator);
  /* drop any queued output buffers */
  while (stream->buffers) {
    gst_buffer_unref (GST_BUFFER_CAST (stream->buffers->data));
    stream->buffers = g_slist_delete_link (stream->buffers, stream->buffers);
  }
  for (i = 0; i < stream->stsd_entries_length; i++) {
    QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
    if (entry->rgb8_palette) {
      gst_memory_unref (entry->rgb8_palette);
      entry->rgb8_palette = NULL;
    }
    entry->sparse = FALSE;
  }

  /* replace the tag list with a fresh, empty, stream-scoped one */
  gst_tag_list_unref (stream->stream_tags);
  stream->stream_tags = gst_tag_list_new_empty ();
  gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
  g_free (stream->redirect_uri);
  stream->redirect_uri = NULL;
  stream->sent_eos = FALSE;
  stream->protected = FALSE;
  if (stream->protection_scheme_info) {
    /* only cenc allocates the extra sample-set info payload */
    if (stream->protection_scheme_type == FOURCC_cenc) {
      QtDemuxCencSampleSetInfo *info =
          (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
      if (info->default_properties)
        gst_structure_free (info->default_properties);
      if (info->crypto_info)
        g_ptr_array_free (info->crypto_info, TRUE);
    }
    g_free (stream->protection_scheme_info);
    stream->protection_scheme_info = NULL;
  }
  stream->protection_scheme_type = 0;
  stream->protection_scheme_version = 0;
  /* drop protection events queued for not-yet-created pads */
  g_queue_foreach (&stream->protection_scheme_event_queue,
      (GFunc) gst_event_unref, NULL);
  g_queue_clear (&stream->protection_scheme_event_queue);
  gst_qtdemux_stream_flush_segments_data (qtdemux, stream);
  gst_qtdemux_stream_flush_samples_data (qtdemux, stream);
}
/* Fully resets @stream: clears all runtime state and additionally releases
 * the sample description (stsd) entries and their caps. */
static void
gst_qtdemux_stream_reset (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  gint idx;

  gst_qtdemux_stream_clear (qtdemux, stream);

  for (idx = 0; idx < stream->stsd_entries_length; idx++) {
    QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[idx];

    if (entry->caps == NULL)
      continue;
    gst_caps_unref (entry->caps);
    entry->caps = NULL;
  }
  g_free (stream->stsd_entries);
  stream->stsd_entries = NULL;
  stream->stsd_entries_length = 0;
}
/* Destroys @stream: resets all state, releases the tag list, removes the
 * source pad (if any) from the element and flow combiner, and frees the
 * stream structure itself. */
static void
gst_qtdemux_stream_free (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  gst_qtdemux_stream_reset (qtdemux, stream);

  /* the reset above leaves a fresh empty tag list behind; drop it too */
  gst_tag_list_unref (stream->stream_tags);

  if (stream->pad != NULL) {
    gst_element_remove_pad (GST_ELEMENT_CAST (qtdemux), stream->pad);
    gst_flow_combiner_remove_pad (qtdemux->flowcombiner, stream->pad);
  }
  g_free (stream);
}
/* Removes stream @i via swap-remove: the last stream takes its slot, so
 * stream ordering is not preserved. */
static void
gst_qtdemux_remove_stream (GstQTDemux * qtdemux, int i)
{
  gint last;

  g_assert (i >= 0 && i < qtdemux->n_streams && qtdemux->streams[i] != NULL);

  gst_qtdemux_stream_free (qtdemux, qtdemux->streams[i]);
  last = qtdemux->n_streams - 1;
  qtdemux->streams[i] = qtdemux->streams[last];
  qtdemux->streams[last] = NULL;
  qtdemux->n_streams = last;
}
/* GstElement::change_state vfunc.
 *
 * Chains up to the parent class first, then performs the full demuxer reset
 * on the downward PAUSED->READY transition.  The original code also had a
 * pre-chain-up switch whose every case was an empty break — pure dead code,
 * removed here; no transition needs work before chaining up. */
static GstStateChangeReturn
gst_qtdemux_change_state (GstElement * element, GstStateChange transition)
{
  GstQTDemux *qtdemux = GST_QTDEMUX (element);
  GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      /* hard reset: drop all parsed headers, streams and buffered data */
      gst_qtdemux_reset (qtdemux, TRUE);
      break;
    }
    default:
      break;
  }

  return result;
}
/* Parses an ftyp atom: records the major brand and keeps the compatible
 * brands blob for later use.  @buffer points at the start of the atom
 * (including the 8-byte size/type header), @length is the full atom size. */
static void
qtdemux_parse_ftyp (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
{
  /* counts as header data */
  qtdemux->header_size += length;

  /* need at least size/type (8) + major brand (4) + minor version (4) +
   * one compatible brand (4) to be worth parsing */
  if (length < 20)
    return;

  qtdemux->major_brand = QT_FOURCC (buffer + 8);
  GST_DEBUG_OBJECT (qtdemux, "major brand: %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (qtdemux->major_brand));

  /* stash the compatible brands: everything after the 16-byte prefix */
  if (qtdemux->comp_brands != NULL)
    gst_buffer_unref (qtdemux->comp_brands);
  qtdemux->comp_brands = gst_buffer_new_and_alloc (length - 16);
  gst_buffer_fill (qtdemux->comp_brands, 0, buffer + 16, length - 16);
}
/* Merges @xmptaglist into @taglist after stripping fields that do not
 * belong at @taglist's scope.  Takes ownership of (and unrefs) @xmptaglist;
 * a NULL @xmptaglist is a no-op. */
static void
qtdemux_handle_xmp_taglist (GstQTDemux * qtdemux, GstTagList * taglist,
    GstTagList * xmptaglist)
{
  if (xmptaglist == NULL)
    return;

  /* Strip out bogus fields */
  if (gst_tag_list_get_scope (taglist) == GST_TAG_SCOPE_GLOBAL) {
    /* global scope: codec tags belong to individual streams */
    gst_tag_list_remove_tag (xmptaglist, GST_TAG_VIDEO_CODEC);
    gst_tag_list_remove_tag (xmptaglist, GST_TAG_AUDIO_CODEC);
  } else {
    /* stream scope: the container format is a global-only tag */
    gst_tag_list_remove_tag (xmptaglist, GST_TAG_CONTAINER_FORMAT);
  }

  GST_DEBUG_OBJECT (qtdemux, "Found XMP tags %" GST_PTR_FORMAT, xmptaglist);

  /* prioritize native tags using _KEEP mode */
  gst_tag_list_insert (taglist, xmptaglist, GST_TAG_MERGE_KEEP);
  gst_tag_list_unref (xmptaglist);
}
static void
qtdemux_parse_piff (GstQTDemux * qtdemux, const guint8 * buffer, gint length,
guint offset)
{
GstByteReader br;
guint8 version;
guint32 flags = 0;
guint i;
guint8 iv_size = 8;
QtDemuxStream *stream;
GstStructure *structure;
QtDemuxCencSampleSetInfo *ss_info = NULL;
const gchar *system_id;
gboolean uses_sub_sample_encryption = FALSE;
guint32 sample_count;
if (qtdemux->n_streams == 0)
return;
stream = qtdemux->streams[0];
structure = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
if (!gst_structure_has_name (structure, "application/x-cenc")) {
GST_WARNING_OBJECT (qtdemux,
"Attempting PIFF box parsing on an unencrypted stream.");
return;
}
gst_structure_get (structure, GST_PROTECTION_SYSTEM_ID_CAPS_FIELD,
G_TYPE_STRING, &system_id, NULL);
gst_qtdemux_append_protection_system_id (qtdemux, system_id);
stream->protected = TRUE;
stream->protection_scheme_type = FOURCC_cenc;
if (!stream->protection_scheme_info)
stream->protection_scheme_info = g_new0 (QtDemuxCencSampleSetInfo, 1);
ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
if (ss_info->default_properties)
gst_structure_free (ss_info->default_properties);
ss_info->default_properties =
gst_structure_new ("application/x-cenc",
"iv_size", G_TYPE_UINT, iv_size, "encrypted", G_TYPE_BOOLEAN, TRUE, NULL);
if (ss_info->crypto_info) {
GST_LOG_OBJECT (qtdemux, "unreffing existing crypto_info");
g_ptr_array_free (ss_info->crypto_info, TRUE);
ss_info->crypto_info = NULL;
}
/* skip UUID */
gst_byte_reader_init (&br, buffer + offset + 16, length - offset - 16);
if (!gst_byte_reader_get_uint8 (&br, &version)) {
GST_ERROR_OBJECT (qtdemux, "Error getting box's version field");
return;
}
if (!gst_byte_reader_get_uint24_be (&br, &flags)) {
GST_ERROR_OBJECT (qtdemux, "Error getting box's flags field");
return;
}
if ((flags & 0x000001)) {
guint32 algorithm_id = 0;
const guint8 *kid;
GstBuffer *kid_buf;
gboolean is_encrypted = TRUE;
if (!gst_byte_reader_get_uint24_le (&br, &algorithm_id)) {
GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
return;
}
algorithm_id >>= 8;
if (algorithm_id == 0) {
is_encrypted = FALSE;
} else if (algorithm_id == 1) {
/* FIXME: maybe store this in properties? */
GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
} else if (algorithm_id == 2) {
/* FIXME: maybe store this in properties? */
GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
}
if (!gst_byte_reader_get_uint8 (&br, &iv_size))
return;
if (!gst_byte_reader_get_data (&br, 16, &kid))
return;
kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
gst_buffer_fill (kid_buf, 0, kid, 16);
if (ss_info->default_properties)
gst_structure_free (ss_info->default_properties);
ss_info->default_properties =
gst_structure_new ("application/x-cenc",
"iv_size", G_TYPE_UINT, iv_size,
"encrypted", G_TYPE_BOOLEAN, is_encrypted,
"kid", GST_TYPE_BUFFER, kid_buf, NULL);
GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
"is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
gst_buffer_unref (kid_buf);
} else if ((flags & 0x000002)) {
uses_sub_sample_encryption = TRUE;
}
if (!gst_byte_reader_get_uint32_be (&br, &sample_count)) {
GST_ERROR_OBJECT (qtdemux, "Error getting box's sample count field");
return;
}
ss_info->crypto_info =
g_ptr_array_new_full (sample_count,
(GDestroyNotify) qtdemux_gst_structure_free);
for (i = 0; i < sample_count;