update gst-plugins-good to 1.14.4

MediaTek V4L2 changes carried in this update:
- add v4l2mtkvpudec and v4l2mtkjpegdec decoder elements
- add MediaTek pixel/bitstream formats (MT21, H265, DivX, S263, RV30/RV40,
  WMV) to the format table and caps mapping
- add encoder format/control setup, crop query, event subscription and poll
  helpers to GstV4l2Object
- handle resolution-change events in the video decoder streaming loop

Signed-off-by: Qian Hu <Qian.Hu@mediatek.com>
diff --git a/sys/v4l2/Makefile.am b/sys/v4l2/Makefile.am
index 3126c11..2e810fa 100644
--- a/sys/v4l2/Makefile.am
+++ b/sys/v4l2/Makefile.am
@@ -14,13 +14,13 @@
 				gstv4l2tuner.c \
 				gstv4l2transform.c \
 				gstv4l2videodec.c \
+				gstv4l2mtkvpudec.c \
+				gstv4l2mtkjpegdec.c \
 				gstv4l2videoenc.c \
 				gstv4l2h263enc.c \
 				gstv4l2h264enc.c \
 				gstv4l2mpeg4enc.c \
 				gstv4l2vidorient.c \
-				gstv4l2vp8enc.c \
-				gstv4l2vp9enc.c \
 				v4l2_calls.c \
 				v4l2-utils.c \
 				tuner.c \
@@ -60,13 +60,13 @@
 	gstv4l2tuner.h \
 	gstv4l2transform.h \
 	gstv4l2videodec.h \
+	gstv4l2mtkvpudec.h \
+	gstv4l2mtkjpegdec.h \
 	gstv4l2videoenc.h \
 	gstv4l2h263enc.h \
 	gstv4l2h264enc.h \
 	gstv4l2mpeg4enc.h \
 	gstv4l2vidorient.h \
-	gstv4l2vp8enc.h \
-	gstv4l2vp9enc.h \
 	v4l2-utils.h \
 	tuner.h \
 	tunerchannel.h \
diff --git a/sys/v4l2/ext/videodev2.h b/sys/v4l2/ext/videodev2.h
index 59e1f3d..c0b023d 100644
--- a/sys/v4l2/ext/videodev2.h
+++ b/sys/v4l2/ext/videodev2.h
@@ -557,6 +557,9 @@
 #define V4L2_PIX_FMT_NV12MT  v4l2_fourcc('T', 'M', '1', '2') /* 12  Y/CbCr 4:2:0 64x32 macroblocks */
 #define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12  Y/CbCr 4:2:0 16x16 macroblocks */
 
+#define V4L2_PIX_FMT_MT21    v4l2_fourcc('M', 'M', '2', '1') /* MediaTek proprietary block-mode YUV 4:2:0 */
+#define V4L2_PIX_FMT_YUV422M v4l2_fourcc('Y', 'M', '1', '6') /* 16  YUV422 planar */
+
 /* three planes - Y Cb, Cr */
 #define V4L2_PIX_FMT_YUV410  v4l2_fourcc('Y', 'U', 'V', '9') /*  9  YUV 4:1:0     */
 #define V4L2_PIX_FMT_YVU410  v4l2_fourcc('Y', 'V', 'U', '9') /*  9  YVU 4:1:0     */
@@ -615,14 +618,24 @@
 #define V4L2_PIX_FMT_JPEG     v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG     */
 #define V4L2_PIX_FMT_DV       v4l2_fourcc('d', 'v', 's', 'd') /* 1394          */
 #define V4L2_PIX_FMT_MPEG     v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 Multiplexed */
+#define V4L2_PIX_FMT_H265     v4l2_fourcc('H', '2', '6', '5') /* H265 with start codes */
 #define V4L2_PIX_FMT_H264     v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */
 #define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') /* H264 without start codes */
 #define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') /* H264 MVC */
 #define V4L2_PIX_FMT_H263     v4l2_fourcc('H', '2', '6', '3') /* H263          */
+#define V4L2_PIX_FMT_S263     v4l2_fourcc('S', '2', '6', '3') /* S263          */
 #define V4L2_PIX_FMT_MPEG1    v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES     */
 #define V4L2_PIX_FMT_MPEG2    v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES     */
 #define V4L2_PIX_FMT_MPEG4    v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
+#define V4L2_PIX_FMT_RV30    v4l2_fourcc('R', 'V', '3', '0') /* RV30 ES */
+#define V4L2_PIX_FMT_RV40    v4l2_fourcc('R', 'V', '4', '0') /* RV40 ES */
 #define V4L2_PIX_FMT_XVID     v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid           */
+#define V4L2_PIX_FMT_DIVX     v4l2_fourcc('D', 'I', 'V', 'X') /* Divx           */
+#define V4L2_PIX_FMT_DIVX3    v4l2_fourcc('D', 'I', 'V', '3') /* Divx3           */
+#define V4L2_PIX_FMT_DIVX4    v4l2_fourcc('D', 'I', 'V', '4') /* Divx4           */
+#define V4L2_PIX_FMT_DIVX5    v4l2_fourcc('D', 'I', 'V', '5') /* Divx5           */
+#define V4L2_PIX_FMT_DIVX6    v4l2_fourcc('D', 'I', 'V', '6') /* Divx6           */
+
 #define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
 #define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
 #define V4L2_PIX_FMT_VP8      v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
@@ -655,6 +668,12 @@
 #define V4L2_PIX_FMT_JPGL	v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */
 #define V4L2_PIX_FMT_SE401      v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */
 #define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */
+#define V4L2_PIX_FMT_WMV1	v4l2_fourcc('W', 'M', 'V', '1') /* WMV7 */
+#define V4L2_PIX_FMT_WMV2	v4l2_fourcc('W', 'M', 'V', '2') /* WMV8 */
+#define V4L2_PIX_FMT_WMV3	v4l2_fourcc('W', 'M', 'V', '3') /* WMV9 */
+#define V4L2_PIX_FMT_WMVA	v4l2_fourcc('W', 'M', 'V', 'A') /* WMVA */
+#define V4L2_PIX_FMT_WVC1	v4l2_fourcc('W', 'V', 'C', '1') /* VC1 */
+
 #define V4L2_PIX_FMT_Y8I      v4l2_fourcc('Y', '8', 'I', ' ') /* Greyscale 8-bit L/R interleaved */
 #define V4L2_PIX_FMT_Y12I     v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */
 #define V4L2_PIX_FMT_Z16      v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */
diff --git a/sys/v4l2/gstv4l2.c b/sys/v4l2/gstv4l2.c
index 2674d9c..29e83a7 100644
--- a/sys/v4l2/gstv4l2.c
+++ b/sys/v4l2/gstv4l2.c
@@ -54,6 +54,9 @@
 #include "gstv4l2vp9enc.h"
 #include "gstv4l2deviceprovider.h"
 #include "gstv4l2transform.h"
+#include "gstv4l2h264enc.h"
+#include "gstv4l2mtkvpudec.h"
+#include "gstv4l2mtkjpegdec.h"
 
 /* used in gstv4l2object.c and v4l2_calls.c */
 GST_DEBUG_CATEGORY (v4l2_debug);
@@ -181,10 +184,16 @@
 
     basename = g_path_get_basename (it->device_path);
 
-    if (gst_v4l2_is_video_dec (sink_caps, src_caps)) {
-      gst_v4l2_video_dec_register (plugin, basename, it->device_path,
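+    /* The generic video decoder registration is replaced by the MTK JPEG and
+     * VPU decoder elements; the standalone H.264 encoder check runs before
+     * the generic encoder registration below. */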
+    if (gst_v4l2_is_mtk_jpeg_dec (sink_caps, src_caps))
+      gst_v4l2_mtk_jpeg_dec_register (plugin, basename, it->device_path,
           sink_caps, src_caps);
-    } else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
+    else if (gst_v4l2_is_mtk_vpu_dec (sink_caps, src_caps))
+      gst_v4l2_mtk_vpu_dec_register (plugin, basename, it->device_path,
+          sink_caps, src_caps);
+    else if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
+      gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
+          sink_caps, src_caps);
+    else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
       if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
         gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
             sink_caps, src_caps);
@@ -197,13 +206,13 @@
         gst_v4l2_h263_enc_register (plugin, basename, it->device_path,
             sink_caps, src_caps);
 
-      if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
-        gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
-            sink_caps, src_caps);
+      //if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
+        //gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
+            //sink_caps, src_caps);
 
-      if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
-        gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
-            sink_caps, src_caps);
+      //if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
+        //gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
+            //sink_caps, src_caps);
     } else if (gst_v4l2_is_transform (sink_caps, src_caps)) {
       gst_v4l2_transform_register (plugin, basename, it->device_path,
           sink_caps, src_caps);
diff --git a/sys/v4l2/gstv4l2bufferpool.c b/sys/v4l2/gstv4l2bufferpool.c
index 5a92dbb..124fef1 100644
--- a/sys/v4l2/gstv4l2bufferpool.c
+++ b/sys/v4l2/gstv4l2bufferpool.c
@@ -1715,6 +1715,7 @@
         case GST_V4L2_IO_DMABUF:
         {
           GstBuffer *tmp;
+          struct v4l2_pix_format *pix_fmt = &(obj->format.fmt.pix);
 
           if ((*buf)->pool == bpool) {
             guint num_queued;
@@ -1742,7 +1743,7 @@
             }
 
             /* start copying buffers when we are running low on buffers */
-            if (num_queued < pool->copy_threshold) {
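+            /* MT21 buffers always take the copy path, not only when the pool
+             * is running low on buffers. */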
+            if (num_queued < pool->copy_threshold || pix_fmt->pixelformat == V4L2_PIX_FMT_MT21) {
               GstBuffer *copy;
 
               if (GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP)) {
@@ -1781,6 +1782,8 @@
 
           ret = gst_v4l2_buffer_pool_copy_buffer (pool, *buf, tmp);
 
+          /* keep the timestamp that came with the dequeued capture buffer */
+          GST_BUFFER_TIMESTAMP (*buf) = GST_BUFFER_TIMESTAMP (tmp);
+
           /* an queue the buffer again after the copy */
           gst_v4l2_buffer_pool_release_buffer (bpool, tmp);
 
diff --git a/sys/v4l2/gstv4l2mtkjpegdec.c b/sys/v4l2/gstv4l2mtkjpegdec.c
new file mode 100644
index 0000000..a33d8f0
--- /dev/null
+++ b/sys/v4l2/gstv4l2mtkjpegdec.c
@@ -0,0 +1,183 @@
+/*
+ * Copyright (c) 2016 MediaTek Inc
+ *     Author: Rick Chang <rick.chang@mediatek.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <unistd.h>
+#include <string.h>
+
+#include "gstv4l2mtkjpegdec.h"
+#include "v4l2_calls.h"
+
+#include <string.h>
+#include <gst/gst-i18n-plugin.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_v4l2_mtk_jpeg_dec_debug);
+#define GST_CAT_DEFAULT gst_v4l2_mtk_jpeg_dec_debug
+
+typedef struct
+{
+  gchar *device;
+  GstCaps *sink_caps;
+  GstCaps *src_caps;
+} GstV4l2MtkJpegDecCData;
+
+enum
+{
+  PROP_0,
+  V4L2_STD_OBJECT_PROPS,
+};
+
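+/* Raw formats the MTK JPEG decoder element advertises on its src pad; at
+ * open() these are intersected with the formats probed from the device. */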
+static GstStaticPadTemplate gst_mtk_jpeg_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ "
+            "I420, Y42B, I422 } "))
+    );
+
+#define gst_v4l2_mtk_jpeg_dec_parent_class parent_class
+G_DEFINE_TYPE (GstV4l2MtkJpegDec, gst_v4l2_mtk_jpeg_dec, GST_TYPE_V4L2_VIDEO_DEC);
+
+static void
+gst_v4l2_mtk_jpeg_dec_init (GstV4l2MtkJpegDec * self)
+{
+}
+
+static void
+gst_v4l2_mtk_jpeg_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
+{
+  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
+  GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);
+
+  gst_video_decoder_set_packetized (decoder, TRUE);
+
+  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_VIDEO_DECODER_SINK_PAD (decoder), V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
+      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
+  self->v4l2output->no_initial_format = TRUE;
+  self->v4l2output->keep_aspect = FALSE;
+
+  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_VIDEO_DECODER_SRC_PAD (decoder), V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
+      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+  self->v4l2capture->no_initial_format = TRUE;
+  self->v4l2capture->keep_aspect = FALSE;
+}
+
+static void
+gst_v4l2_mtk_jpeg_dec_class_init (GstV4l2MtkJpegDecClass * klass)
+{
+  GstElementClass *element_class;
+  GObjectClass *gobject_class;
+  GstV4l2VideoDecClass *v4l2_decoder_class;
+  GstVideoDecoderClass *baseclass;
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  element_class = (GstElementClass *) klass;
+  gobject_class = (GObjectClass *) klass;
+  v4l2_decoder_class = GST_V4L2_VIDEO_DEC_CLASS (klass);
+  baseclass = GST_VIDEO_DECODER_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_v4l2_mtk_jpeg_dec_debug, "v4l2mtkjpegdec", 0,
+      "V4L2 Mtk Jpeg HW Decoder");
+
+  gst_element_class_set_static_metadata (element_class,
+      "V4L2 Mtk Jpeg HW Decoder",
+      "Codec/Decoder/Image",
+      "MTK jpeg HW decode via V4L2 API",
+      "Rick Chang <rick.chang@mediatek.com>");
+}
+
+static void
+gst_v4l2_mtk_jpeg_dec_subclass_init (gpointer g_class, gpointer data)
+{
+  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+  GstV4l2MtkJpegDecCData *cdata = data;
+
+  klass->default_device = cdata->device;
+
+  /* Note: gst_pad_template_new() takes the floating ref from the caps */
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+          cdata->sink_caps));
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+          cdata->src_caps));
+
+  g_free (cdata);
+}
+
+gboolean
+gst_v4l2_is_mtk_jpeg_dec (GstCaps * sink_caps, GstCaps * src_caps)
+{
+  gboolean ret = FALSE;
+  GstCaps *caps = gst_caps_new_empty_simple ("image/jpeg");
+
+  if (gst_caps_is_subset (sink_caps, caps)
+      && gst_caps_is_subset (src_caps, gst_v4l2_object_get_raw_caps ()))
+    ret = TRUE;
+
+  gst_caps_unref (caps);
+  return ret;
+}
+
+gboolean
+gst_v4l2_mtk_jpeg_dec_register (GstPlugin * plugin, const gchar * basename,
+    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+{
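+  /* Register a per-device subclass of the MTK JPEG decoder; the class data
+   * carries the device path and the pad caps, following the same pattern as
+   * the other v4l2 wrapper elements. */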
+  GTypeQuery type_query;
+  GTypeInfo type_info = { 0, };
+  GType type, subtype;
+  gchar *type_name;
+  GstV4l2MtkJpegDecCData *cdata;
+
+  cdata = g_new0 (GstV4l2MtkJpegDecCData, 1);
+  cdata->device = g_strdup (device_path);
+  cdata->sink_caps = gst_caps_ref (sink_caps);
+  cdata->src_caps = gst_static_pad_template_get_caps(&gst_mtk_jpeg_src_template);
+
+  type = gst_v4l2_mtk_jpeg_dec_get_type ();
+  g_type_query (type, &type_query);
+  memset (&type_info, 0, sizeof (type_info));
+  type_info.class_size = type_query.class_size;
+  type_info.instance_size = type_query.instance_size;
+  type_info.class_init = gst_v4l2_mtk_jpeg_dec_subclass_init;
+  type_info.class_data = cdata;
+  type_info.instance_init = gst_v4l2_mtk_jpeg_dec_subinstance_init;
+
+  type_name = g_strdup_printf ("v4l2mtkjpegdec");
+  subtype = g_type_register_static (type, type_name, &type_info, 0);
+
+  gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype);
+
+  g_free (type_name);
+
+  return TRUE;
+}
\ No newline at end of file
diff --git a/sys/v4l2/gstv4l2mtkjpegdec.h b/sys/v4l2/gstv4l2mtkjpegdec.h
new file mode 100644
index 0000000..9be6b04
--- /dev/null
+++ b/sys/v4l2/gstv4l2mtkjpegdec.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2016 MediaTek Inc
+ *     Author: Rick Chang <rick.chang@mediatek.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef __GST_V4L2_MTK_JPEG_DEC_H__
+#define __GST_V4L2_MTK_JPEG_DEC_H__
+
+#include <gst/gst.h>
+#include "gstv4l2videodec.h"
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2mtkjpegdec_debug);
+
+G_BEGIN_DECLS
+#define GST_TYPE_V4L2_MTK_JPEG_DEC \
+  (gst_v4l2_mtk_jpeg_dec_get_type())
+#define GST_V4L2_MTK_JPEG_DEC(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_MTK_JPEG_DEC,GstV4l2MtkJpegDec))
+#define GST_V4L2_MTK_JPEG_DEC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_MTK_JPEG_DEC,GstV4l2MtkJpegDecClass))
+#define GST_IS_V4L2_MTK_JPEG_DEC(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_MTK_JPEG_DEC))
+#define GST_IS_V4L2_MTK_JPEG_DEC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_MTK_JPEG_DEC))
+typedef struct _GstV4l2MtkJpegDec GstV4l2MtkJpegDec;
+typedef struct _GstV4l2MtkJpegDecClass GstV4l2MtkJpegDecClass;
+
+struct _GstV4l2MtkJpegDec
+{
+  GstV4l2VideoDec parent;
+};
+
+struct _GstV4l2MtkJpegDecClass
+{
+  GstV4l2VideoDecClass parent_class;
+};
+
+GType gst_v4l2_mtk_jpeg_dec_get_type (void);
+
+gboolean gst_v4l2_is_mtk_jpeg_dec (GstCaps * sink_caps, GstCaps * src_caps);
+
+gboolean gst_v4l2_mtk_jpeg_dec_register (GstPlugin * plugin,
+    const gchar * basename,
+    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
+
+G_END_DECLS
+#endif /* __GST_V4L2_MTK_JPEG_DEC_H__ */
\ No newline at end of file
diff --git a/sys/v4l2/gstv4l2mtkvpudec.c b/sys/v4l2/gstv4l2mtkvpudec.c
new file mode 100644
index 0000000..15658e6
--- /dev/null
+++ b/sys/v4l2/gstv4l2mtkvpudec.c
@@ -0,0 +1,181 @@
+/*
+ * Copyright (c) 2016 MediaTek Inc
+ *     Author: Rick Chang <rick.chang@mediatek.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <unistd.h>
+#include <string.h>
+
+#include "gstv4l2mtkvpudec.h"
+#include "v4l2_calls.h"
+
+#include <string.h>
+#include <gst/gst-i18n-plugin.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_v4l2_mtk_vpu_dec_debug);
+#define GST_CAT_DEFAULT gst_v4l2_mtk_vpu_dec_debug
+
+typedef struct
+{
+  gchar *device;
+  GstCaps *sink_caps;
+  GstCaps *src_caps;
+} GstV4l2MtkVpuDecCData;
+
+enum
+{
+  PROP_0,
+  V4L2_STD_OBJECT_PROPS,
+};
+
+static GstStaticPadTemplate gst_mtk_vpu_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ "
+            "MT21 } "))
+    );
+
+#define gst_v4l2_mtk_vpu_dec_parent_class parent_class
+G_DEFINE_TYPE (GstV4l2MtkVpuDec, gst_v4l2_mtk_vpu_dec, GST_TYPE_V4L2_VIDEO_DEC);
+
+static void
+gst_v4l2_mtk_vpu_dec_init (GstV4l2MtkVpuDec * self)
+{
+}
+
+static void
+gst_v4l2_mtk_vpu_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
+{
+  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
+  GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);
+
+  gst_video_decoder_set_packetized (decoder, TRUE);
+
+  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_VIDEO_DECODER_SINK_PAD (decoder), V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
+      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
+  self->v4l2output->no_initial_format = TRUE;
+  self->v4l2output->keep_aspect = FALSE;
+
+  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+      GST_VIDEO_DECODER_SRC_PAD (decoder), V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
+      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+  self->v4l2capture->no_initial_format = TRUE;
+  self->v4l2capture->keep_aspect = FALSE;
+  self->v4l2capture->req_mode = GST_V4L2_IO_MMAP;
+}
+
+static void
+gst_v4l2_mtk_vpu_dec_class_init (GstV4l2MtkVpuDecClass * klass)
+{
+  GstElementClass *element_class;
+  GObjectClass *gobject_class;
+  GstV4l2VideoDecClass *v4l2_decoder_class;
+  GstVideoDecoderClass *baseclass;
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  element_class = (GstElementClass *) klass;
+  gobject_class = (GObjectClass *) klass;
+  v4l2_decoder_class = GST_V4L2_VIDEO_DEC_CLASS (klass);
+  baseclass = GST_VIDEO_DECODER_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_v4l2_mtk_vpu_dec_debug, "v4l2mtkvpudec", 0,
+      "V4L2 Mtk Vpu HW Decoder");
+
+  gst_element_class_set_static_metadata (element_class,
+      "V4L2 Mtk Vpu HW Decoder",
+      "Codec/Decoder/Video",
+      "MTK video HW decode via V4L2 API",
+      "Rick Chang <rick.chang@mediatek.com>");
+}
+
+static void
+gst_v4l2_mtk_vpu_dec_subclass_init (gpointer g_class, gpointer data)
+{
+  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+  GstV4l2MtkVpuDecCData *cdata = data;
+
+  klass->default_device = cdata->device;
+
+  /* Note: gst_pad_template_new() takes the floating ref from the caps */
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+          cdata->sink_caps));
+  gst_element_class_add_pad_template (element_class,
+      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+          cdata->src_caps));
+
+  g_free (cdata);
+}
+
+gboolean
+gst_v4l2_is_mtk_vpu_dec (GstCaps * sink_caps, GstCaps * src_caps)
+{
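+  /* Same shape as the generic video decoder check (any codec format in, raw
+   * video out); JPEG-only devices are matched earlier by
+   * gst_v4l2_is_mtk_jpeg_dec(). */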
+  gboolean ret = FALSE;
+
+  if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_codec_caps ())
+      && gst_caps_is_subset (src_caps, gst_v4l2_object_get_raw_caps ()))
+    ret = TRUE;
+
+  return ret;
+}
+
+gboolean
+gst_v4l2_mtk_vpu_dec_register (GstPlugin * plugin, const gchar * basename,
+    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
+{
+  GTypeQuery type_query;
+  GTypeInfo type_info = { 0, };
+  GType type, subtype;
+  gchar *type_name;
+  GstV4l2MtkVpuDecCData *cdata;
+
+  cdata = g_new0 (GstV4l2MtkVpuDecCData, 1);
+  cdata->device = g_strdup (device_path);
+  cdata->sink_caps = gst_caps_ref (sink_caps);
+  cdata->src_caps = gst_static_pad_template_get_caps(&gst_mtk_vpu_src_template);
+
+  type = gst_v4l2_mtk_vpu_dec_get_type ();
+  g_type_query (type, &type_query);
+  memset (&type_info, 0, sizeof (type_info));
+  type_info.class_size = type_query.class_size;
+  type_info.instance_size = type_query.instance_size;
+  type_info.class_init = gst_v4l2_mtk_vpu_dec_subclass_init;
+  type_info.class_data = cdata;
+  type_info.instance_init = gst_v4l2_mtk_vpu_dec_subinstance_init;
+
+  type_name = g_strdup_printf ("v4l2mtkvpudec");
+  subtype = g_type_register_static (type, type_name, &type_info, 0);
+
+  gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype);
+
+  g_free (type_name);
+
+  return TRUE;
+}
\ No newline at end of file
diff --git a/sys/v4l2/gstv4l2mtkvpudec.h b/sys/v4l2/gstv4l2mtkvpudec.h
new file mode 100644
index 0000000..3a6c736
--- /dev/null
+++ b/sys/v4l2/gstv4l2mtkvpudec.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2016 MediaTek Inc
+ *     Author: Rick Chang <rick.chang@mediatek.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef __GST_V4L2_MTK_VPU_DEC_H__
+#define __GST_V4L2_MTK_VPU_DEC_H__
+
+#include <gst/gst.h>
+#include "gstv4l2videodec.h"
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2mtkvpudec_debug);
+
+G_BEGIN_DECLS
+#define GST_TYPE_V4L2_MTK_VPU_DEC \
+  (gst_v4l2_mtk_vpu_dec_get_type())
+#define GST_V4L2_MTK_VPU_DEC(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_MTK_VPU_DEC,GstV4l2MtkVpuDec))
+#define GST_V4L2_MTK_VPU_DEC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_MTK_VPU_DEC,GstV4l2MtkVpuDecClass))
+#define GST_IS_V4L2_MTK_VPU_DEC(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_MTK_VPU_DEC))
+#define GST_IS_V4L2_MTK_VPU_DEC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_MTK_VPU_DEC))
+typedef struct _GstV4l2MtkVpuDec GstV4l2MtkVpuDec;
+typedef struct _GstV4l2MtkVpuDecClass GstV4l2MtkVpuDecClass;
+
+struct _GstV4l2MtkVpuDec
+{
+  GstV4l2VideoDec parent;
+};
+
+struct _GstV4l2MtkVpuDecClass
+{
+  GstV4l2VideoDecClass parent_class;
+};
+
+GType gst_v4l2_mtk_vpu_dec_get_type (void);
+
+gboolean gst_v4l2_is_mtk_vpu_dec (GstCaps * sink_caps, GstCaps * src_caps);
+
+gboolean gst_v4l2_mtk_vpu_dec_register (GstPlugin * plugin,
+    const gchar * basename,
+    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
+
+G_END_DECLS
+#endif /* __GST_V4L2_MTK_VPU_DEC_H__ */
\ No newline at end of file
diff --git a/sys/v4l2/gstv4l2object.c b/sys/v4l2/gstv4l2object.c
index 124c778..94ed123 100644
--- a/sys/v4l2/gstv4l2object.c
+++ b/sys/v4l2/gstv4l2object.c
@@ -42,6 +42,8 @@
 #include "gst/gst-i18n-plugin.h"
 
 #include <gst/video/video.h>
+#include <sys/poll.h>
+#include <poll.h>
 
 GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
 #define GST_CAT_DEFAULT v4l2_debug
@@ -138,6 +140,7 @@
   {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
   {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
   {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
+  {V4L2_PIX_FMT_YUV422M, TRUE, GST_V4L2_RAW},
   {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
   {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
   {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
@@ -186,6 +189,23 @@
   {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
   {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
   {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
+
+  {V4L2_PIX_FMT_MT21, TRUE, GST_V4L2_RAW},
+  {V4L2_PIX_FMT_DIVX, FALSE, GST_V4L2_CODEC},
+  {V4L2_PIX_FMT_DIVX3, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+  {V4L2_PIX_FMT_DIVX4, FALSE, GST_V4L2_CODEC},
+  {V4L2_PIX_FMT_DIVX5, FALSE, GST_V4L2_CODEC},
+  {V4L2_PIX_FMT_DIVX6, FALSE, GST_V4L2_CODEC},
+  {V4L2_PIX_FMT_S263, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+  {V4L2_PIX_FMT_H265, FALSE, GST_V4L2_CODEC},
+  /*WMV not parseable */
+  {V4L2_PIX_FMT_WMV1, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+  {V4L2_PIX_FMT_WMV2, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+  {V4L2_PIX_FMT_WMV3, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+
+  /*realvideo not parseable */
+  {V4L2_PIX_FMT_RV30, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+  {V4L2_PIX_FMT_RV40, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
 };
 
 #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
@@ -1055,6 +1075,7 @@
     case V4L2_PIX_FMT_NV61:    /* 16  Y/CrCb 4:2:2  */
     case V4L2_PIX_FMT_NV61M:   /* Same as NV61      */
     case V4L2_PIX_FMT_NV24:    /* 24  Y/CrCb 4:4:4  */
+    case V4L2_PIX_FMT_MT21:
       rank = YUV_ODD_BASE_RANK;
       break;
 
@@ -1066,6 +1087,7 @@
       break;
     case V4L2_PIX_FMT_YUV420:  /* I420, 12 bits per pixel */
     case V4L2_PIX_FMT_YUV420M:
+    case V4L2_PIX_FMT_YUV422M:
       rank = YUV_BASE_RANK + 7;
       break;
     case V4L2_PIX_FMT_YUYV:    /* YUY2, 16 bits per pixel */
@@ -1329,10 +1351,14 @@
     case V4L2_PIX_FMT_YUV420M:
       format = GST_VIDEO_FORMAT_I420;
       break;
+    case V4L2_PIX_FMT_YUV422M:
+      format = GST_VIDEO_FORMAT_I422;
+      break;
     case V4L2_PIX_FMT_YUYV:
       format = GST_VIDEO_FORMAT_YUY2;
       break;
     case V4L2_PIX_FMT_YVU420:
+    case V4L2_PIX_FMT_YVU420M:
       format = GST_VIDEO_FORMAT_YV12;
       break;
     case V4L2_PIX_FMT_UYVY:
@@ -1358,6 +1384,9 @@
     case V4L2_PIX_FMT_NV24:
       format = GST_VIDEO_FORMAT_NV24;
       break;
+    case V4L2_PIX_FMT_MT21:
+      format = GST_VIDEO_FORMAT_MT21;
+      break;
     default:
       format = GST_VIDEO_FORMAT_UNKNOWN;
       break;
@@ -1407,18 +1436,45 @@
       break;
     case V4L2_PIX_FMT_MPEG1:
       structure = gst_structure_new ("video/mpeg",
-          "mpegversion", G_TYPE_INT, 2, NULL);
+          "mpegversion", G_TYPE_INT, 1, NULL);
       break;
     case V4L2_PIX_FMT_MPEG2:
       structure = gst_structure_new ("video/mpeg",
           "mpegversion", G_TYPE_INT, 2, NULL);
       break;
     case V4L2_PIX_FMT_MPEG4:
-    case V4L2_PIX_FMT_XVID:
       structure = gst_structure_new ("video/mpeg",
           "mpegversion", G_TYPE_INT, 4, "systemstream",
           G_TYPE_BOOLEAN, FALSE, NULL);
       break;
+    case V4L2_PIX_FMT_XVID:
+      structure = gst_structure_new_empty ("video/x-xvid");
+      break;
+    case V4L2_PIX_FMT_DIVX3:
+      structure = gst_structure_new("video/x-divx",
+          "divxversion", G_TYPE_INT, 3, NULL);
+      break;
+    case V4L2_PIX_FMT_DIVX4:
+      structure = gst_structure_new("video/x-divx",
+          "divxversion", G_TYPE_INT, 4, NULL);
+      break;
+    case V4L2_PIX_FMT_DIVX5:
+      structure = gst_structure_new("video/x-divx",
+          "divxversion", G_TYPE_INT, 5, NULL);
+      break;
+    case V4L2_PIX_FMT_DIVX6:
+      structure = gst_structure_new("video/x-divx",
+          "divxversion", G_TYPE_INT, 6, NULL);
+      break;
+    case V4L2_PIX_FMT_S263:
+      structure = gst_structure_new ("video/x-flash-video",
+          "flvversion", G_TYPE_INT, 1, NULL);
+      break;
+    case V4L2_PIX_FMT_H265:    /* H.265 */
+      structure = gst_structure_new ("video/x-h265",
+          "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+          G_TYPE_STRING, "au", NULL);
+      break;
     case V4L2_PIX_FMT_H263:
       structure = gst_structure_new ("video/x-h263",
           "variant", G_TYPE_STRING, "itu", NULL);
@@ -1444,6 +1500,20 @@
     case V4L2_PIX_FMT_VP9:
       structure = gst_structure_new_empty ("video/x-vp9");
       break;
+    case V4L2_PIX_FMT_WMV1:
+    case V4L2_PIX_FMT_WMV2:
+    case V4L2_PIX_FMT_WMV3:
+    case V4L2_PIX_FMT_WMVA:
+    case V4L2_PIX_FMT_WVC1:
+      structure = gst_structure_new_empty ("video/x-wmv");
+      break;
+    case V4L2_PIX_FMT_RV30:
+    case V4L2_PIX_FMT_RV40:
+      structure = gst_structure_new_empty ("video/x-pn-realvideo");
+      break;
+    case V4L2_PIX_FMT_MT21:
+      structure = gst_structure_new_empty ("video/x-raw");
+      break;
     case V4L2_PIX_FMT_GREY:    /*  8  Greyscale     */
     case V4L2_PIX_FMT_Y16:
     case V4L2_PIX_FMT_Y16_BE:
@@ -1474,6 +1544,8 @@
     case V4L2_PIX_FMT_YUV410:
     case V4L2_PIX_FMT_YUV420:  /* I420/IYUV */
     case V4L2_PIX_FMT_YUV420M:
+    case V4L2_PIX_FMT_YUV422M:
+    case V4L2_PIX_FMT_YVU420M:
     case V4L2_PIX_FMT_YUYV:
     case V4L2_PIX_FMT_YVU420:
     case V4L2_PIX_FMT_UYVY:
@@ -1680,6 +1752,7 @@
   guint32 fourcc = 0, fourcc_nc = 0;
   const gchar *mimetype;
   struct v4l2_fmtdesc *fmt = NULL;
+  gboolean prefered_non_contiguous = TRUE;
 
   structure = gst_caps_get_structure (caps, 0);
 
@@ -1694,6 +1767,9 @@
         fourcc = V4L2_PIX_FMT_YUV420;
         fourcc_nc = V4L2_PIX_FMT_YUV420M;
         break;
+      case GST_VIDEO_FORMAT_I422:
+        fourcc = V4L2_PIX_FMT_YUV422M;
+        break;
       case GST_VIDEO_FORMAT_YUY2:
         fourcc = V4L2_PIX_FMT_YUYV;
         break;
@@ -1702,6 +1778,7 @@
         break;
       case GST_VIDEO_FORMAT_YV12:
         fourcc = V4L2_PIX_FMT_YVU420;
+        fourcc_nc = V4L2_PIX_FMT_YVU420M;
         break;
       case GST_VIDEO_FORMAT_Y41B:
         fourcc = V4L2_PIX_FMT_YUV411P;
@@ -1771,7 +1848,10 @@
         break;
       case GST_VIDEO_FORMAT_GRAY16_BE:
         fourcc = V4L2_PIX_FMT_Y16_BE;
         break;
+      case GST_VIDEO_FORMAT_MT21:
+        fourcc = V4L2_PIX_FMT_MT21;
+        break;
       default:
         break;
     }
@@ -1831,10 +1910,54 @@
       fourcc = V4L2_PIX_FMT_PWC1;
     } else if (g_str_equal (mimetype, "video/x-pwc2")) {
       fourcc = V4L2_PIX_FMT_PWC2;
+    } else if (g_str_equal (mimetype, "video/x-xvid")) {
+      fourcc = V4L2_PIX_FMT_XVID;
+    } else if (g_str_equal (mimetype, "video/x-divx")) {
+      gint version;
+      if (gst_structure_get_int (structure, "divxversion", &version)) {
+        switch (version) {
+          case 3:
+            fourcc = V4L2_PIX_FMT_DIVX3;
+            break;
+          case 4:
+            fourcc = V4L2_PIX_FMT_DIVX4;
+            break;
+          case 5:
+            fourcc = V4L2_PIX_FMT_DIVX5;
+            break;
+          case 6:
+            fourcc = V4L2_PIX_FMT_DIVX6;
+            break;
+          default:
+            break;
+        }
+      }
+    } else if (g_str_equal (mimetype, "video/x-flash-video")) {
+      fourcc = V4L2_PIX_FMT_S263;
+    } else if (g_str_equal (mimetype, "video/x-h265")) {
+      fourcc = V4L2_PIX_FMT_H265;
+    } else if (g_str_equal (mimetype, "video/x-vp9")) {
+      fourcc = V4L2_PIX_FMT_VP9;
+    } else if (g_str_equal (mimetype, "video/x-wmv")) {
+      fourcc = V4L2_PIX_FMT_WMV3;
+    } else if (g_str_equal (mimetype, "video/x-pn-realvideo")) {
+      gint version;
+      if (gst_structure_get_int (structure, "rmversion", &version)) {
+        switch (version) {
+          case 3:
+            fourcc = V4L2_PIX_FMT_RV30;
+            break;
+          case 4:
+            fourcc = V4L2_PIX_FMT_RV40;
+            break;
+          default:
+            break;
+        }
+      }
     }
   }
 
-
+  prefered_non_contiguous = v4l2object->prefered_non_contiguous;
   /* Prefer the non-contiguous if supported */
   v4l2object->prefered_non_contiguous = TRUE;
 
@@ -1845,7 +1968,8 @@
 
   if (fmt == NULL) {
     fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
-    v4l2object->prefered_non_contiguous = FALSE;
+    if (!prefered_non_contiguous)
+      v4l2object->prefered_non_contiguous = FALSE;
   }
 
   if (fmt == NULL)
@@ -2264,6 +2388,12 @@
   if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0) {
     if (gst_v4l2_object_get_colorspace (&fmt, &cinfo))
       gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
+    else {
+      if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+        g_value_unset (&list);
+        return;
+      }
+    }
   }
 
   /* step 2: probe all colorspace other than default
@@ -2660,6 +2790,9 @@
       gst_value_set_int_range_step (&step_range, h, maxh, step_h);
       gst_structure_take_value (tmp, "height", &step_range);
 
+      gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
+          (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
+          NULL);
       /* no point using the results list here, since there's only one struct */
       gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
     }
@@ -3935,8 +4068,8 @@
     align.padding_top = r->top;
     align.padding_right = width - r->width - r->left;
     align.padding_bottom = height - r->height - r->top;
-    width = r->width;
-    height = r->height;
+    //width = r->width;
+    //height = r->height;
   }
 
   gst_video_info_set_format (info, format, width, height);
@@ -3995,6 +4128,516 @@
 }
 
 gboolean
+gst_v4l2_object_set_enc_format (GstV4l2Object * v4l2object, GstCaps * caps, gboolean active)
+{
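+  /* Largely mirrors gst_v4l2_object_set_format(), with encoder-oriented
+   * additions: bitrate / GOP size / header-mode are taken from the caps and
+   * pushed as V4L2 controls, and the frame rate is only set (never queried)
+   * on the OUTPUT queue. */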
+  gint fd = v4l2object->video_fd;
+  struct v4l2_format format;
+  struct v4l2_streamparm streamparm;
+  enum v4l2_field field;
+  guint32 pixelformat;
+  struct v4l2_fmtdesc *fmtdesc;
+  GstVideoInfo info;
+  GstVideoAlignment align;
+  gint width, height, fps_n, fps_d;
+  gint n_v4l_planes;
+  gint i = 0;
+  gboolean is_mplane;
+  enum v4l2_colorspace colorspace = 0;
+
+  GST_V4L2_CHECK_OPEN (v4l2object);
+  if (active)
+    GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+  is_mplane = V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type);
+
+  gst_video_info_init (&info);
+  gst_video_alignment_reset (&align);
+
+  if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
+    goto invalid_caps;
+
+  pixelformat = fmtdesc->pixelformat;
+  width = GST_VIDEO_INFO_WIDTH (&info);
+  height = GST_VIDEO_INFO_HEIGHT (&info);
+  fps_n = GST_VIDEO_INFO_FPS_N (&info);
+  fps_d = GST_VIDEO_INFO_FPS_D (&info);
+
+  /* if encoded format (GST_VIDEO_INFO_N_PLANES return 0)
+   * or if contiguous is prefered */
+  n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
+  /* Rick Chang
+     Our driver will check the number of planes. Can't change it.
+     if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
+  */
+  if (!n_v4l_planes || (!v4l2object->prefered_non_contiguous && !V4L2_TYPE_IS_OUTPUT(v4l2object->type)))
+    n_v4l_planes = 1;
+
+  if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
+    GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
+    /* ideally we would differentiate between types of interlaced video
+     * but there is not sufficient information in the caps..
+     */
+    field = V4L2_FIELD_INTERLACED;
+  } else {
+    GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
+    field = V4L2_FIELD_NONE;
+  }
+
+  if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+    /* We should set colorspace if we have it */
+    if (gst_video_colorimetry_matches (&info.colorimetry, "bt601")) {
+      colorspace = V4L2_COLORSPACE_SMPTE170M;
+    } else if (gst_video_colorimetry_matches (&info.colorimetry, "bt709")) {
+      colorspace = V4L2_COLORSPACE_REC709;
+    } else if (gst_video_colorimetry_matches (&info.colorimetry, "smpte240m")) {
+      colorspace = V4L2_COLORSPACE_SMPTE240M;
+    } else {
+      /* Try to guess colorspace according to pixelformat and size */
+      if (GST_VIDEO_INFO_IS_YUV (&info)) {
+        /* SD streams likely use SMPTE170M and HD streams REC709 */
+        if (width <= 720 && height <= 576)
+          colorspace = V4L2_COLORSPACE_SMPTE170M;
+        else
+          colorspace = V4L2_COLORSPACE_REC709;
+      } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
+        colorspace = V4L2_COLORSPACE_SRGB;
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
+      "%" GST_FOURCC_FORMAT " stride: %d", width, height,
+      GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
+
+  memset (&format, 0x00, sizeof (struct v4l2_format));
+  format.type = v4l2object->type;
+
+  if (is_mplane) {
+    format.type = v4l2object->type;
+    format.fmt.pix_mp.pixelformat = pixelformat;
+    format.fmt.pix_mp.width = width;
+    format.fmt.pix_mp.height = height;
+    format.fmt.pix_mp.field = field;
+    format.fmt.pix_mp.num_planes = n_v4l_planes;
+
+    /* try to ask our prefered stride but it's not a failure if not
+     * accepted */
+    for (i = 0; i < n_v4l_planes; i++) {
+      gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
+
+      if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
+        stride = GST_VIDEO_TILE_X_TILES (stride) <<
+            GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+
+      format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+    }
+
+    if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
+      format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
+  } else {
+    gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
+
+    format.type = v4l2object->type;
+    format.fmt.pix.width = width;
+    format.fmt.pix.height = height;
+    format.fmt.pix.pixelformat = pixelformat;
+    format.fmt.pix.field = field;
+
+    if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
+      stride = GST_VIDEO_TILE_X_TILES (stride) <<
+          GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+
+    /* try to ask our prefered stride */
+    format.fmt.pix.bytesperline = stride;
+
+    if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
+      format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
+  }
+
+  GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format "
+      "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
+      format.fmt.pix_mp.height,
+      GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
+      is_mplane ? format.fmt.pix_mp.num_planes : 1);
+
+#ifndef GST_DISABLE_GST_DEBUG
+  if (is_mplane) {
+    for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
+      GST_DEBUG_OBJECT (v4l2object->element, "  stride %d",
+          format.fmt.pix_mp.plane_fmt[i].bytesperline);
+  } else {
+    GST_DEBUG_OBJECT (v4l2object->element, "  stride %d",
+        format.fmt.pix.bytesperline);
+  }
+#endif
+
+  if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+    if (is_mplane)
+      format.fmt.pix_mp.colorspace = colorspace;
+    else
+      format.fmt.pix.colorspace = colorspace;
+
+    GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d",
+        colorspace);
+  }
+
+  if (ioctl (fd, VIDIOC_S_FMT, &format) < 0)
+    goto set_fmt_failed;
+
+  GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format "
+      "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d",
+      format.fmt.pix.width, format.fmt.pix_mp.height,
+      GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
+      is_mplane ? format.fmt.pix_mp.num_planes : 1,
+      is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace);
+
+#ifndef GST_DISABLE_GST_DEBUG
+  if (is_mplane) {
+    for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
+      GST_DEBUG_OBJECT (v4l2object->element, "  stride %d, sizeimage %d",
+          format.fmt.pix_mp.plane_fmt[i].bytesperline,
+          format.fmt.pix_mp.plane_fmt[i].sizeimage);
+  } else {
+    GST_DEBUG_OBJECT (v4l2object->element, "  stride %d, sizeimage %d",
+        format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
+  }
+#endif
+
+  if (format.fmt.pix.pixelformat != pixelformat)
+    goto invalid_pixelformat;
+
+  /* Only negotiate size with raw data.
+   * For some codecs the dimensions are *not* in the bitstream, IIRC VC1
+   * in ASF mode for example, there is also not reason for a driver to
+   * change the size. */
+  if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED) {
+    /* We can crop larger images */
+    if (format.fmt.pix.width < width || format.fmt.pix.height < height)
+      goto invalid_dimensions;
+
+    /* Note, this will be adjusted if upstream has non-centered cropping. */
+    align.padding_top = 0;
+    align.padding_bottom = format.fmt.pix.height - height;
+    align.padding_left = 0;
+    align.padding_right = format.fmt.pix.width - width;
+  }
+
+  if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
+    goto invalid_planes;
+
+  if (GST_VIDEO_INFO_HAS_ALPHA (&info)) {
+    struct v4l2_control ctl = { 0, };
+    ctl.id = V4L2_CID_ALPHA_COMPONENT;
+    ctl.value = 0xff;
+
+    if (ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
+      GST_WARNING_OBJECT (v4l2object->element,
+          "Failed to set alpha component value");
+  }
+
+  if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+    gint bitrate = 0;
+    gint gop = 0;
+    gint prepend_hdr = 0;
+    struct v4l2_control gop_ctl = { 0, };
+    struct v4l2_control bps_ctl = { 0, };
+    struct v4l2_control prepend_hdr_ctl = { 0, };
+    GstStructure *st;
+
+    st = gst_caps_get_structure (caps, 0);
+    if (gst_structure_has_field (st, "bitrate")) {
+      gst_structure_get_int(st, "bitrate", &bitrate);
+    }
+
+    if (gst_structure_has_field (st, "gop")) {
+      gst_structure_get_int(st, "gop", &gop);
+    }
+
+    if (gst_structure_has_field (st, "prepend_hdr")) {
+      gst_structure_get_int(st, "prepend_hdr", &prepend_hdr);
+    }
+
+    bps_ctl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
+    bps_ctl.value = bitrate;
+    if (ioctl (fd, VIDIOC_S_CTRL, &bps_ctl) < 0) {
+      GST_WARNING_OBJECT (v4l2object->element,
+        "Failed to set bps_ctl component value");
+    }
+
+    gop_ctl.id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
+    gop_ctl.value = gop;
+    if (ioctl (fd, VIDIOC_S_CTRL, &gop_ctl) < 0) {
+      GST_WARNING_OBJECT (v4l2object->element,
+        "Failed to set gop_ctl component value");
+    }
+
+    prepend_hdr_ctl.id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
+    prepend_hdr_ctl.value = prepend_hdr;
+    if (ioctl (fd, VIDIOC_S_CTRL, &prepend_hdr_ctl) < 0) {
+      GST_WARNING_OBJECT (v4l2object->element,
+        "Failed to set prepend_hdr_ctl component value");
+    }
+
+    GST_INFO_OBJECT (v4l2object->element, "bitrate = %d, gop=%d, prepend_hdr=%d",
+      bitrate, gop, prepend_hdr);
+  }
+
+  /* Is there a reason we require the caller to always specify a framerate? */
+  GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
+      fps_d);
+
+  memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
+  streamparm.type = v4l2object->type;
+
+  /* Rick Chang
+     In encode flow, the frame rate is decided by client not driver.
+  */
+#if 0
+  if (ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
+    goto get_parm_failed;
+
+  GST_VIDEO_INFO_FPS_N (&info) =
+      streamparm.parm.capture.timeperframe.denominator;
+  GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator;
+
+  if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
+      || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+    GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
+        streamparm.parm.capture.timeperframe.denominator,
+        streamparm.parm.capture.timeperframe.numerator);
+
+    /* We used to skip frame rate setup if the camera was already setup
+     * with the requested frame rate. This breaks some cameras though,
+     * causing them to not output data (several models of Thinkpad cameras
+     * have this problem at least).
+     * So, don't skip. */
+    GST_LOG_OBJECT (v4l2object->element, "Setting framerate to %u/%u", fps_n,
+        fps_d);
+    /* We want to change the frame rate, so check whether we can. Some cheap USB
+     * cameras don't have the capability */
+    if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
+      GST_DEBUG_OBJECT (v4l2object->element,
+          "Not setting framerate (not supported)");
+      goto done;
+    }
+#endif
+    if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+      /* Note: V4L2 wants the frame interval, we have the frame rate */
+      streamparm.parm.capture.timeperframe.numerator = fps_d;
+      streamparm.parm.capture.timeperframe.denominator = fps_n;
+
+      /* some cheap USB cam's won't accept any change */
+      if (ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+        goto set_parm_failed;
+
+      /* get new values */
+      fps_d = streamparm.parm.capture.timeperframe.numerator;
+      fps_n = streamparm.parm.capture.timeperframe.denominator;
+
+      GST_INFO_OBJECT (v4l2object->element, "Set framerate to %u/%u", fps_n,
+          fps_d);
+
+      GST_VIDEO_INFO_FPS_N (&info) = fps_n;
+      GST_VIDEO_INFO_FPS_D (&info) = fps_d;
+    }
+#if 0
+  }
+#endif
+
+done:
+  if (!active)
+    return TRUE;
+
+  /* add boolean return, so we can fail on drivers bugs */
+  gst_v4l2_object_save_format (v4l2object, fmtdesc, &format, &info, &align);
+
+  /* now configure the pool */
+  if (!gst_v4l2_object_setup_pool (v4l2object, caps))
+    goto pool_failed;
+
+  return TRUE;
+
+  /* ERRORS */
+invalid_caps:
+  {
+    GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
+        caps);
+    return FALSE;
+  }
+set_fmt_failed:
+  {
+    if (errno == EBUSY) {
+      GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, BUSY,
+          (_("Device '%s' is busy"), v4l2object->videodev),
+          ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+              GST_FOURCC_ARGS (pixelformat), width, height,
+              g_strerror (errno)));
+    } else {
+      GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+          (_("Device '%s' cannot capture at %dx%d"),
+              v4l2object->videodev, width, height),
+          ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+              GST_FOURCC_ARGS (pixelformat), width, height,
+              g_strerror (errno)));
+    }
+    return FALSE;
+  }
+invalid_dimensions:
+  {
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+        (_("Device '%s' cannot capture at %dx%d"),
+            v4l2object->videodev, width, height),
+        ("Tried to capture at %dx%d, but device returned size %dx%d",
+            width, height, format.fmt.pix.width, format.fmt.pix.height));
+    return FALSE;
+  }
+invalid_pixelformat:
+  {
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+        (_("Device '%s' cannot capture in the specified format"),
+            v4l2object->videodev),
+        ("Tried to capture in %" GST_FOURCC_FORMAT
+            ", but device returned format" " %" GST_FOURCC_FORMAT,
+            GST_FOURCC_ARGS (pixelformat),
+            GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
+    return FALSE;
+  }
+invalid_planes:
+  {
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+        (_("Device '%s' does support non-contiguous planes"),
+            v4l2object->videodev),
+        ("Device wants %d planes", format.fmt.pix_mp.num_planes));
+    return FALSE;
+  }
+get_parm_failed:
+  {
+    /* it's possible that this call is not supported */
+    if (errno != EINVAL && errno != ENOTTY) {
+      GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+          (_("Could not get parameters on device '%s'"),
+              v4l2object->videodev), GST_ERROR_SYSTEM);
+    }
+    goto done;
+  }
+set_parm_failed:
+  {
+    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+        (_("Video device did not accept new frame rate setting.")),
+        GST_ERROR_SYSTEM);
+    goto done;
+  }
+pool_failed:
+  {
+    /* setup_pool already send the error */
+    return FALSE;
+  }
+}
+
+gboolean
+gst_v4l2_object_get_crop (GstV4l2Object * obj, guint *crop_width, guint *crop_height)
+{
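+  /* Ask the driver for the active crop rectangle (VIDIOC_G_CROP); the decoder
+   * loop uses it as the display size after a resolution change. */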
+  struct v4l2_crop crop = { 0 };
+
+  if ((crop_width == NULL) || (crop_height == NULL))
+    return FALSE;
+
+  crop.type = obj->type;
+
+  if (ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+    GST_WARNING_OBJECT (obj->element, "VIDIOC_G_CROP failed");
+    return FALSE;
+  }
+  *crop_width = crop.c.width;
+  *crop_height = crop.c.height;
+
+  GST_INFO_OBJECT (obj->element,
+      "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
+      crop.c.width, crop.c.height);
+
+  return TRUE;
+}
+
+gint
+gst_v4l2_object_sub_event (GstV4l2Object * v4l2object)
+{
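+  /* Subscribe to SOURCE_CHANGE and EOS so that resolution changes and
+   * unsupported-stream notifications can be picked up via poll(POLLPRI). */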
+  gint r;
+  struct v4l2_event_subscription sub = { 0 };
+
+  sub.type = V4L2_EVENT_SOURCE_CHANGE;
+  r = ioctl (v4l2object->video_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+
+  sub.type = V4L2_EVENT_EOS;
+  r = ioctl (v4l2object->video_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+
+  return r;
+}
+
+gint
+gst_v4l2_object_check_res_change (GstV4l2Object * v4l2object)
+{
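+  /* Dequeue one pending V4L2 event and translate it into a GST_V4L2_RET_*
+   * code for the caller. */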
+  struct v4l2_event e = { 0 };
+  __u32 change;
+
+  ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, &e);
+  GST_LOG ("e.type=%d",e.type);
+
+  switch (e.type) {
+    case V4L2_EVENT_SOURCE_CHANGE:
+    {
+      change = e.u.src_change.changes;
+      if (change & V4L2_EVENT_SRC_CH_RESOLUTION) {
+        GST_LOG ("Got resolution change, changes=%u", change);
+        return GST_V4L2_RET_RES_CHANGE;
+      }
+      break;
+    }
+    case V4L2_EVENT_EOS:
+    {
+      GST_LOG ("Decoder does not support the source, stop playing it");
+      return GST_V4L2_RET_SRC_NOT_SUPPORT;
+    }
+    default:
+      goto err;
+  }
+  return GST_V4L2_RET_OK;
+err:
+  GST_LOG ("Got unknonw event");
+  return GST_V4L2_RET_FAIL;
+}
+
+gint
+gst_v4l2_object_poll (GstV4l2Object * v4l2object, gint timeout)
+{
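+  /* Wait (up to 'timeout' ms) for data on the queue matching this object's
+   * direction, or for an exceptional condition; POLLPRI is translated into a
+   * resolution-change / EOS return code via check_res_change(). */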
+  struct pollfd pfd;
+  gshort wait_event = V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? POLLOUT : POLLIN;
+  gint ret = 0;
+  pfd.fd = v4l2object->video_fd;
+  pfd.events = POLLERR;
+  pfd.events |= wait_event;
+  pfd.events |= POLLPRI;
+
+  GST_LOG ("before poll");
+  ret = poll (&pfd, 1, timeout);
+  GST_LOG ("after poll, pfd.revents=%d", pfd.revents);
+
+  if (ret == -1) {
+    GST_LOG ("poll fail");
+    return GST_V4L2_RET_FAIL;
+  }
+
+  if (pfd.revents & POLLERR)
+    return GST_V4L2_RET_FAIL;
+
+  if (pfd.revents & POLLPRI) {
+    ret = gst_v4l2_object_check_res_change (v4l2object);
+    return ret;
+  }
+
+  if ((pfd.revents & wait_event) == wait_event)
+    return GST_V4L2_RET_OK;
+
+  /* timed out or nothing of interest pending */
+  return GST_V4L2_RET_OK;
+}
+
+gboolean
 gst_v4l2_object_set_crop (GstV4l2Object * obj)
 {
   struct v4l2_selection sel = { 0 };
diff --git a/sys/v4l2/gstv4l2object.h b/sys/v4l2/gstv4l2object.h
index 7871eaf..92f22a7 100644
--- a/sys/v4l2/gstv4l2object.h
+++ b/sys/v4l2/gstv4l2object.h
@@ -64,6 +64,17 @@
   GST_V4L2_IO_DMABUF_IMPORT = 5
 } GstV4l2IOMode;
 
+enum gst_V4L2_ret_type {
+  GST_V4L2_RET_OK = 0,
+  GST_V4L2_RET_FAIL = -1,
+  GST_V4L2_RET_NO_FILE = -2,
+  GST_V4L2_RET_NO_FREE_BUF = -3,
+  GST_V4L2_RET_EOS = -4,
+  GST_V4L2_RET_RES_CHANGE = -5,
+  GST_V4L2_RET_SRC_NOT_SUPPORT = -6,
+};
+
+
 typedef gboolean  (*GstV4l2GetInOutFunction)  (GstV4l2Object * v4l2object, gint * input);
 typedef gboolean  (*GstV4l2SetInOutFunction)  (GstV4l2Object * v4l2object, gint input);
 typedef gboolean  (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
@@ -324,6 +335,16 @@
 gboolean     gst_v4l2_set_attribute   (GstV4l2Object * v4l2object, int attribute, const int value);
 gboolean     gst_v4l2_set_controls    (GstV4l2Object * v4l2object, GstStructure * controls);
 
+gboolean      gst_v4l2_object_get_crop (GstV4l2Object * obj, guint *crop_width, guint *crop_height);
+
+gboolean      gst_v4l2_object_set_enc_format  (GstV4l2Object * v4l2object, GstCaps * caps, gboolean active);
+
+gint gst_v4l2_object_sub_event (GstV4l2Object * v4l2object);
+
+gint gst_v4l2_object_check_res_change (GstV4l2Object * v4l2object);
+
+gint gst_v4l2_object_poll (GstV4l2Object * v4l2object, gint timeout);
+
 G_END_DECLS
 
 #endif /* __GST_V4L2_OBJECT_H__ */
diff --git a/sys/v4l2/gstv4l2src.c b/sys/v4l2/gstv4l2src.c
index 88c813f..25b84e1 100644
--- a/sys/v4l2/gstv4l2src.c
+++ b/sys/v4l2/gstv4l2src.c
@@ -200,6 +200,8 @@
       GST_OBJECT (GST_BASE_SRC_PAD (v4l2src)), V4L2_BUF_TYPE_VIDEO_CAPTURE,
       DEFAULT_PROP_DEVICE, gst_v4l2_get_input, gst_v4l2_set_input, NULL);
 
+  v4l2src->v4l2object->prefered_non_contiguous = TRUE;
+
   /* Avoid the slow probes */
   v4l2src->v4l2object->skip_try_fmt_probes = TRUE;
 
diff --git a/sys/v4l2/gstv4l2videodec.c b/sys/v4l2/gstv4l2videodec.c
index 838ebff..847c080 100644
--- a/sys/v4l2/gstv4l2videodec.c
+++ b/sys/v4l2/gstv4l2videodec.c
@@ -35,9 +35,20 @@
 #include <string.h>
 #include <gst/gst-i18n-plugin.h>
 
+#include <sys/poll.h>
+#include <poll.h>
 GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
 #define GST_CAT_DEFAULT gst_v4l2_video_dec_debug
 
+#define FPS_COUNT_NUM  120
+
+static gint fps_count = 0;
+static gint64 time_start = 0;
+static gint64 time_end = 0;
+
+static gint64 time_pre_frame = 0;
+static guint32 video_count = 0;
+static gboolean gst_v4l2_video_dec_flush (GstVideoDecoder * decoder);
 typedef struct
 {
   gchar *device;
@@ -111,7 +122,11 @@
 gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
 {
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
   GstCaps *codec_caps;
+  GstCaps *src_caps, *caps;
+  GstElementClass *element_class = GST_ELEMENT_GET_CLASS (decoder);
+  GstPadTemplate *pad_template;
 
   GST_DEBUG_OBJECT (self, "Opening");
 
@@ -129,8 +144,17 @@
   if (gst_caps_is_empty (self->probed_sinkcaps))
     goto no_encoded_format;
 
-  self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
-      gst_v4l2_object_get_raw_caps ());
+//  self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+//      gst_v4l2_object_get_raw_caps ());
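+  /* Intersect the device-probed raw caps with the subclass src pad template
+   * so that the MTK-specific elements only expose the formats they declare. */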
+  src_caps = gst_v4l2_object_get_caps (self->v4l2capture,
+       gst_v4l2_object_get_raw_caps ());
+  pad_template = gst_element_class_get_pad_template (element_class, "src");
+  caps = gst_pad_template_get_caps (pad_template);
+  self->probed_srccaps = gst_caps_intersect_full (src_caps, caps, GST_CAPS_INTERSECT_FIRST);
+  gst_caps_unref (src_caps);
+  gst_caps_unref (caps);
+
+  GST_INFO_OBJECT (self, "probed src caps: %" GST_PTR_FORMAT, self->probed_srccaps);
 
   if (gst_caps_is_empty (self->probed_srccaps))
     goto no_raw_format;
@@ -458,6 +482,86 @@
 }
 
 static void
+gst_v4l2_update_caps (GstVideoDecoder * decoder, guint width, guint height, guint crop_width, guint crop_height)
+{
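+  /* Compare the current src caps against the new coded size and crop
+   * (display) size; if either changed, update width/height and the
+   * display_width/display_height fields and push the new caps downstream. */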
+  GstCaps *prevcaps = NULL;
+  GstCaps *updatecaps = NULL;
+  GstStructure *s = NULL;
+
+  prevcaps = gst_pad_get_current_caps (decoder->srcpad);
+
+  if (prevcaps) {
+    gboolean ret = TRUE;
+    gboolean res_changed = FALSE;
+    gint disp_width = 0;
+    gint disp_height = 0;
+    gint pre_width = 0;
+    gint pre_height = 0;
+
+    s = gst_caps_get_structure (prevcaps, 0);
+    if (s && gst_structure_has_field (s, "display_width"))
+      gst_structure_get_int (s, "display_width", &disp_width);
+
+    if (s && gst_structure_has_field (s, "display_height"))
+      gst_structure_get_int (s, "display_height", &disp_height);
+
+    if (s && gst_structure_has_field (s, "width"))
+      gst_structure_get_int (s, "width", &pre_width);
+
+    if (s && gst_structure_has_field (s, "height"))
+      gst_structure_get_int (s, "height", &pre_height);
+
+    GST_INFO("display_width=%d,display_height=%d,crop.width=%d,crop.height=%d,prewidth=%d,preheight=%d,width=%d,height=%d",
+      disp_width, disp_height, crop_width, crop_height, pre_width, pre_height, width, height);
+
+    updatecaps = gst_caps_copy_nth (prevcaps, 0);
+
+    if ((crop_width != disp_width) || (crop_height != disp_height)) {
+      res_changed = TRUE;
+      gst_caps_set_simple (updatecaps, "display_width", G_TYPE_INT, crop_width, NULL);
+      gst_caps_set_simple (updatecaps, "display_height", G_TYPE_INT, crop_height, NULL);
+    }
+
+    if ((pre_width != width) || (pre_height != height)) {
+      res_changed = TRUE;
+      gst_caps_set_simple (updatecaps, "width", G_TYPE_INT, width, NULL);
+      gst_caps_set_simple (updatecaps, "height", G_TYPE_INT, height, NULL);
+    }
+
+    if (res_changed) {
+      GstVideoCodecState *state = gst_video_decoder_get_output_state (decoder);
+
+      /* keep the decoder output state in sync with the updated caps;
+       * gst_caps_replace() handles the reference counting for us */
+      if (state) {
+        gst_caps_replace (&state->caps, updatecaps);
+        gst_video_codec_state_unref (state);
+      }
+      ret = gst_pad_set_caps (decoder->srcpad, updatecaps);
+      if (!ret)
+        GST_INFO_OBJECT (decoder, "gst_pad_set_caps FAILED");
+    }
+
+    gst_caps_unref (prevcaps);
+    gst_caps_unref (updatecaps);
+  }
+  return;
+}
+
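+/* TRUE when the compressed input format is RealVideo (RV30/RV40); these
+ * streams keep their capture pool active across resolution changes below */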
+static gboolean
+gst_v4l2_video_codec_is_rm (GstVideoDecoder * decoder)
+{
+  gboolean rtn = FALSE;
+  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+  if (self->v4l2output->format.fmt.pix.pixelformat == V4L2_PIX_FMT_RV30
+    ||self->v4l2output->format.fmt.pix.pixelformat == V4L2_PIX_FMT_RV40) {
+    rtn = TRUE;
+  }
+
+  return rtn;
+}
+
+static void
 gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
 {
   GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
@@ -466,6 +570,9 @@
   GstVideoCodecFrame *frame;
   GstBuffer *buffer = NULL;
   GstFlowReturn ret;
+  gint res_changed = 0;
 
   GST_LOG_OBJECT (decoder, "Allocate output buffer");
 
@@ -475,6 +582,47 @@
      * stream lock. we know that the acquire may need to poll until more frames
      * comes in and holding this lock would prevent that.
      */
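+    /* Check for pending V4L2 events first: on a resolution change the capture
+     * pool is deactivated (kept active for RealVideo), the new format and crop
+     * are acquired, the src caps are updated and renegotiated, and the pool is
+     * re-activated; an unsupported stream bails out with an error message. */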
+    res_changed = gst_v4l2_object_poll (self->v4l2capture, 0);
+    if (res_changed == GST_V4L2_RET_SRC_NOT_SUPPORT) {
+      goto src_not_support;
+    }
+    if (res_changed == GST_V4L2_RET_RES_CHANGE) {
+      GstVideoInfo info;
+      guint crop_width = 0;
+      guint crop_height = 0;
+
+      if (!gst_v4l2_video_codec_is_rm (decoder)) {
+        if (self->v4l2capture->pool) {
+          GST_INFO_OBJECT (decoder, "deactivating pool");
+          gst_buffer_pool_set_active (self->v4l2capture->pool, FALSE);
+        }
+      }
+
+      if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info)) {
+        GST_INFO_OBJECT (decoder, "gst_v4l2_object_acquire_format failed");
+        goto beach;
+      }
+
+      if (gst_v4l2_object_get_crop (self->v4l2capture, &crop_width,
+              &crop_height)) {
+        gst_v4l2_update_caps (decoder, info.width, info.height,
+            crop_width, crop_height);
+      } else {
+        GST_WARNING_OBJECT (decoder, "gst_v4l2_object_get_crop failed");
+        goto beach;
+      }
+
+      if (!gst_video_decoder_negotiate (decoder)) {
+        GST_ERROR_OBJECT (decoder, "negotiate error");
+        goto beach;
+      }
+
+      if (!gst_v4l2_video_codec_is_rm (decoder)) {
+        if (self->v4l2capture->pool) {
+          GST_INFO_OBJECT (decoder, "activating pool");
+          gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool), TRUE);
+        }
+      }
+    }
     pool = gst_video_decoder_get_buffer_pool (decoder);
 
     /* Pool may be NULL if we started going to READY state */
@@ -501,9 +649,34 @@
 
   if (frame) {
     frame->output_buffer = buffer;
+    /* only take the buffer timestamp as the frame pts when it is neither 0
+     * nor the special (((GstClockTime)-1) - 999) value */
+    if (GST_BUFFER_TIMESTAMP (buffer) != 0 &&
+        GST_BUFFER_TIMESTAMP (buffer) != (((GstClockTime)-1) - 999)) {
+      frame->pts = GST_BUFFER_TIMESTAMP (buffer);
+    }
     buffer = NULL;
     ret = gst_video_decoder_finish_frame (decoder, frame);
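+    /* lightweight throughput probe: every FPS_COUNT_NUM frames log
+     * fps = FPS_COUNT_NUM * 1000000.0 / elapsed_us, plus per-frame intervals */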
 
+    gint64 fps_time = 0;
+    gfloat fps = 0;
+
+    if (fps_count == 0) {
+      time_start = g_get_monotonic_time();
+    }
+
+    fps_count++;
+    if (fps_count == FPS_COUNT_NUM) {
+      time_end = g_get_monotonic_time();
+      fps_time = time_end - time_start;
+      fps = FPS_COUNT_NUM * 1000000.0 / fps_time;
+      GST_INFO_OBJECT (decoder, "fps = %f", fps);
+      fps_count = 0;
+    }
+
+    video_count++;
+    gint64 time_cur = g_get_monotonic_time ();
+    GST_DEBUG_OBJECT (decoder, "[%u] frame time %" G_GINT64_FORMAT " us",
+        video_count, time_cur - time_pre_frame);
+    time_pre_frame = time_cur;
+
     if (ret != GST_FLOW_OK)
       goto beach;
   } else {
@@ -513,6 +686,18 @@
 
   return;
 
+src_not_support:
+  {
+    GError *gerror;
+    gchar *sent_debug;
+
+    GST_ERROR_OBJECT (decoder,
+        "Vdec does not support this source, posting an error message on the bus to stop playback");
+    gerror = g_error_new_literal (G_FILE_ERROR,
+        G_FILE_ERROR_NOENT, "Vdec does not support this source");
+    sent_debug = g_strdup_printf ("%s(%d): %s ()", __FILE__, __LINE__, __FUNCTION__);
+
+    /* gst_message_new_error() takes copies, so release our references */
+    gst_element_post_message (GST_ELEMENT_CAST (decoder),
+        gst_message_new_error (GST_OBJECT_CAST (decoder), gerror, sent_debug));
+    g_error_free (gerror);
+    g_free (sent_debug);
+    ret = GST_FLOW_ERROR;
+  }
+
 beach:
   GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s",
       gst_flow_get_name (ret));
@@ -557,6 +742,7 @@
   return TRUE;
 }
 
+gboolean polling_flag = FALSE;
 static GstFlowReturn
 gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
     GstVideoCodecFrame * frame)
@@ -567,6 +753,8 @@
   gboolean processed = FALSE;
   GstBuffer *tmp;
   GstTaskState task_state;
+  GstElementClass *element_class = GST_ELEMENT_GET_CLASS (decoder);
+  GstPadTemplate *pad_template;
 
   GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
 
@@ -584,6 +772,8 @@
   if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
     GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
     GstVideoInfo info;
+    GstVideoInfo input_info;
+    GstVideoCodecState *input_state;
     GstVideoCodecState *output_state;
     GstBuffer *codec_data;
     GstCaps *acquired_caps, *available_caps, *caps, *filter;
@@ -619,6 +809,8 @@
     }
 
     GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
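+    /* subscribe to V4L2 events on the output (bitstream) queue before pushing
+     * the codec data, so later polls can pick up e.g. a source-change event */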
+    if (gst_v4l2_object_sub_event (self->v4l2output) < 0)
+      goto register_sub_event_failed;
     ret =
         gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
             v4l2output->pool), &codec_data);
@@ -626,6 +818,9 @@
 
     gst_buffer_unref (codec_data);
 
+    if (gst_v4l2_object_poll (self->v4l2output, 0) == GST_V4L2_RET_SRC_NOT_SUPPORT)
+      goto src_not_support;
+
     /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
      * in the compose rectangle. gst_v4l2_object_acquire_format() checks both
      * and returns the visible size as with/height and the coded size as
@@ -635,9 +830,11 @@
 
     /* Create caps from the acquired format, remove the format field */
     acquired_caps = gst_video_info_to_caps (&info);
+    if (self->v4l2capture->format.fmt.pix.pixelformat == V4L2_PIX_FMT_MT21) {
     GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
     st = gst_caps_get_structure (acquired_caps, 0);
     gst_structure_remove_field (st, "format");
+    }
 
     /* Probe currently available pixel formats */
     available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
@@ -679,6 +876,23 @@
 
     /* Copy the rest of the information, there might be more in the future */
     output_state->info.interlace_mode = info.interlace_mode;
+
+    input_state = self->input_state;
+    if (!input_state) {
+      GST_ERROR_OBJECT (self, "input_state is null");
+    } else {
+      gst_video_info_from_caps (&input_info, input_state->caps);
+      GST_DEBUG_OBJECT (self, "input_info.width=%d input_info.height=%d",
+          input_info.width, input_info.height);
+
+      if (output_state->caps == NULL)
+        output_state->caps = gst_video_info_to_caps (&output_state->info);
+
+      /* advertise the upstream (display) resolution on the src caps */
+      gst_caps_set_simple (output_state->caps,
+          "display_width", G_TYPE_INT, input_info.width,
+          "display_height", G_TYPE_INT, input_info.height, NULL);
+    }
+
     gst_video_codec_state_unref (output_state);
 
     if (!gst_video_decoder_negotiate (decoder)) {
@@ -709,6 +923,8 @@
     /* Start the processing task, when it quits, the task will disable input
      * processing to unlock input if draining, or prevent potential block */
     self->output_flow = GST_FLOW_FLUSHING;
+    if (gst_v4l2_object_sub_event (self->v4l2capture) < 0)
+      goto register_sub_event_failed;
     if (!gst_pad_start_task (decoder->srcpad,
             (GstTaskFunction) gst_v4l2_video_dec_loop, self, NULL))
       goto start_task_failed;
@@ -779,6 +995,21 @@
     ret = GST_FLOW_ERROR;
     goto drop;
   }
+
+src_not_support:
+  {
+    GST_ERROR_OBJECT (self, "Vdec not support the source");
+    ret = GST_FLOW_ERROR;
+    goto drop;
+  }
+
+register_sub_event_failed:
+  {
+    GST_ERROR_OBJECT (self, "register sub event to driver failed");
+    ret = GST_FLOW_ERROR;
+    goto drop;
+  }
+
 drop:
   {
     gst_video_decoder_drop_frame (decoder, frame);
@@ -939,6 +1170,8 @@
 gst_v4l2_video_dec_init (GstV4l2VideoDec * self)
 {
   /* V4L2 object are created in subinstance_init */
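+  /* reply to accept-caps queries with GstPad's default handler
+   * (gst_video_decoder_set_use_default_pad_acceptcaps) */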
+  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
+      (self), TRUE);
 }
 
 static void
diff --git a/sys/v4l2/gstv4l2videoenc.h b/sys/v4l2/gstv4l2videoenc.h
index f03acd5..db39a56 100644
--- a/sys/v4l2/gstv4l2videoenc.h
+++ b/sys/v4l2/gstv4l2videoenc.h
@@ -30,6 +30,8 @@
 #include <gstv4l2object.h>
 #include <gstv4l2bufferpool.h>
 
+GST_DEBUG_CATEGORY_EXTERN (v4l2videoenc_debug);
+
 G_BEGIN_DECLS
 #define GST_TYPE_V4L2_VIDEO_ENC \
   (gst_v4l2_video_enc_get_type())
@@ -43,7 +45,6 @@
   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_ENC))
 #define GST_V4L2_VIDEO_ENC_GET_CLASS(obj) \
   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_VIDEO_ENC, GstV4l2VideoEncClass))
-
 typedef struct _GstV4l2VideoEnc GstV4l2VideoEnc;
 typedef struct _GstV4l2VideoEncClass GstV4l2VideoEncClass;
 
@@ -63,8 +64,14 @@
   GstVideoCodecState *input_state;
   gboolean active;
   gboolean processing;
+  gboolean finish;
   GstFlowReturn output_flow;
 
+  /* properties */
+  gint bitrate;
+  gint gop;
+  gint prepend_hdr;
+
 };
 
 struct _GstV4l2VideoEncClass
@@ -72,6 +79,9 @@
   GstVideoEncoderClass parent_class;
 
   gchar *default_device;
+
+  /* subclass handle_frame hook, also given the negotiated output caps */
+  GstFlowReturn (*handle_frame) (GstVideoEncoder * encoder,
+      GstVideoCodecFrame * frame, GstCaps * outcaps);
   const char *codec_name;
 
   guint32 profile_cid;
@@ -85,10 +95,17 @@
 
 GType gst_v4l2_video_enc_get_type (void);
 
-
 gboolean gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
     GstCaps * codec_caps);
 
+static void
+gst_v4l2_video_enc_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+
+static void
+gst_v4l2_video_enc_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
+
 void gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
     const char *codec, const gchar * basename, const gchar * device_path,
     GstCaps * sink_caps, GstCaps *codec_caps, GstCaps * src_caps);
diff --git a/sys/v4l2/v4l2_calls.c b/sys/v4l2/v4l2_calls.c
index d3dbd42..60fb6ce 100644
--- a/sys/v4l2/v4l2_calls.c
+++ b/sys/v4l2/v4l2_calls.c
@@ -38,6 +38,8 @@
 #include <stropts.h>
 #include <sys/ioccom.h>
 #endif
+#include "v4l2-utils.h"
+
 #include "gstv4l2object.h"
 #include "gstv4l2tuner.h"
 #include "gstv4l2colorbalance.h"
@@ -520,6 +522,12 @@
   struct stat st;
   int libv4l2_fd = -1;
 
+  GstV4l2Iterator *it;
+  gboolean ret = TRUE;
+  it = gst_v4l2_iterator_new ();
+
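+  /* if the opened node is not a capture device, the block below iterates over
+   * the remaining /dev/video* nodes and retries */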
+retry:
+
   GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to open device %s",
       v4l2object->videodev);
 
@@ -567,7 +575,23 @@
   if (GST_IS_V4L2SRC (v4l2object->element) &&
       !(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE |
               V4L2_CAP_VIDEO_CAPTURE_MPLANE)))
-    goto not_capture;
+  {
+    ret = gst_v4l2_iterator_next (it);
+    if (ret) {
+      /* not a capture device: try the next /dev/video* node */
+      g_free (v4l2object->videodev);
+      v4l2object->videodev = g_strdup (it->device_path);
+      if (GST_V4L2_IS_OPEN (v4l2object)) {
+        close (v4l2object->video_fd);
+        v4l2object->video_fd = -1;
+      }
+      goto retry;
+    } else {
+      GST_DEBUG_OBJECT (v4l2object->element, "Cannot find capture device");
+      gst_v4l2_iterator_free (it);
+      goto not_capture;
+    }
+  }
+  gst_v4l2_iterator_free (it);
 
   if (GST_IS_V4L2SINK (v4l2object->element) &&
       !(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
@@ -588,6 +612,9 @@
       "Opened device '%s' (%s) successfully",
       v4l2object->vcap.card, v4l2object->videodev);
 
+  /* prefer the multi-planar capture interface when the driver advertises it */
+  if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
+    v4l2object->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
   if (v4l2object->extra_controls)
     gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
 
diff --git a/sys/v4l2/v4l2_calls.h b/sys/v4l2/v4l2_calls.h
new file mode 100644
index 0000000..1bd7f62
--- /dev/null
+++ b/sys/v4l2/v4l2_calls.h
@@ -0,0 +1,138 @@
+/* GStreamer
+ *
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *               2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * v4l2_calls.h - generic V4L2 calls handling
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __V4L2_CALLS_H__
+#define __V4L2_CALLS_H__
+
+#include "gstv4l2object.h"
+
+#ifdef HAVE_LIBV4L2
+#  include <libv4l2.h>
+#else
+#  include "ext/videodev2.h"
+#  include <sys/ioctl.h>
+#  include <sys/mman.h>
+#  include <unistd.h>
+#  define v4l2_fd_open(fd, flags) (fd)
+#  define v4l2_close    close
+#  define v4l2_dup      dup
+#  define v4l2_ioctl    ioctl
+#  define v4l2_read     read
+#  define v4l2_mmap     mmap
+#  define v4l2_munmap   munmap
+#endif
+
+#define GST_V4L2_IS_OVERLAY(v4l2object) \
+  (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OVERLAY)
+
+/* checks whether the current v4l2object has already been open()'ed or not */
+#define GST_V4L2_CHECK_OPEN(v4l2object)				\
+  if (!GST_V4L2_IS_OPEN(v4l2object))				\
+  {								\
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,	\
+      (_("Device is not open.")), (NULL));                      \
+    return FALSE;						\
+  }
+
+/* checks whether the current v4l2object is close()'ed or whether it is still open */
+#define GST_V4L2_CHECK_NOT_OPEN(v4l2object)			\
+  if (GST_V4L2_IS_OPEN(v4l2object))				\
+  {								\
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,	\
+      (_("Device is open.")), (NULL));                          \
+    return FALSE;						\
+  }
+
+/* checks whether the current v4l2object does video overlay */
+#define GST_V4L2_CHECK_OVERLAY(v4l2object)			\
+  if (!GST_V4L2_IS_OVERLAY(v4l2object))				\
+  {								\
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+      (NULL), ("Device cannot handle overlay"));                \
+    return FALSE;						\
+  }
+
+/* checks whether the device is in streaming mode or not */
+#define GST_V4L2_CHECK_ACTIVE(v4l2object)			\
+  if (!GST_V4L2_IS_ACTIVE(v4l2object))				\
+  {								\
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+      (NULL), ("Device is not in streaming mode"));             \
+    return FALSE;						\
+  }
+
+/* checks whether the device is out of streaming mode or not */
+#define GST_V4L2_CHECK_NOT_ACTIVE(v4l2object)			\
+  if (GST_V4L2_IS_ACTIVE(v4l2object))				\
+  {								\
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+      (NULL), ("Device is in streaming mode"));                 \
+    return FALSE;						\
+  }
+
+
+/* open/close the device */
+gboolean	gst_v4l2_open			(GstV4l2Object *v4l2object);
+gboolean	gst_v4l2_dup			(GstV4l2Object *v4l2object, GstV4l2Object *other);
+gboolean	gst_v4l2_close			(GstV4l2Object *v4l2object);
+
+/* norm/input/output */
+gboolean	gst_v4l2_get_norm		(GstV4l2Object *v4l2object,
+						 v4l2_std_id    *norm);
+gboolean	gst_v4l2_set_norm		(GstV4l2Object *v4l2object,
+						 v4l2_std_id     norm);
+gboolean        gst_v4l2_get_input              (GstV4l2Object * v4l2object,
+                                                 gint * input);
+gboolean        gst_v4l2_set_input              (GstV4l2Object * v4l2object,
+                                                 gint input);
+gboolean	gst_v4l2_get_output		(GstV4l2Object *v4l2object,
+						 gint           *output);
+gboolean	gst_v4l2_set_output		(GstV4l2Object *v4l2object,
+						 gint            output);
+
+/* frequency control */
+gboolean	gst_v4l2_get_frequency		(GstV4l2Object *v4l2object,
+						 gint            tunernum,
+						 gulong         *frequency);
+gboolean	gst_v4l2_set_frequency		(GstV4l2Object *v4l2object,
+						 gint            tunernum,
+						 gulong          frequency);
+gboolean	gst_v4l2_signal_strength	(GstV4l2Object *v4l2object,
+						 gint            tunernum,
+						 gulong         *signal);
+
+/* attribute control */
+gboolean	gst_v4l2_get_attribute		(GstV4l2Object *v4l2object,
+						 int             attribute,
+						 int            *value);
+gboolean	gst_v4l2_set_attribute		(GstV4l2Object *v4l2object,
+						 int             attribute,
+						 const int       value);
+
+gboolean	gst_v4l2_set_controls		(GstV4l2Object * v4l2object,
+						 GstStructure * controls);
+
+gboolean        gst_v4l2_get_capabilities       (GstV4l2Object * v4l2object);
+
+
+#endif /* __V4L2_CALLS_H__ */