| /* GStreamer |
| * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> |
| * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk> |
| * |
| * This library is free software; you can redistribute it and/or |
| * modify it under the terms of the GNU Library General Public |
| * License as published by the Free Software Foundation; either |
| * version 2 of the License, or (at your option) any later version. |
| * |
| * This library is distributed in the hope that it will be useful, |
| * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| * Library General Public License for more details. |
| * |
| * You should have received a copy of the GNU Library General Public |
| * License along with this library; if not, write to the |
| * Free Software Foundation, Inc., 59 Temple Place - Suite 330, |
| * Boston, MA 02111-1307, USA. |
| */ |
| /** |
| * SECTION:element-videobox |
| * @see_also: #GstVideoCrop |
| * |
| * This plugin crops or enlarges the image. It takes 4 values as input: a |
| * top, bottom, left and right offset. Positive values crop that many |
| * pixels from the respective border of the image, negative values add |
| * that many pixels. When pixels are added, you can specify their color. |
| * Some predefined colors can be selected with an enum property. |
| * |
| * The plugin is alpha channel aware and will try to negotiate a format |
| * that supports an alpha channel first. When an alpha channel is active, two |
| * other properties, alpha and border-alpha, can be used to set the alpha |
| * values of the inner picture and of the border respectively. An alpha value |
| * of 0.0 means total transparency, 1.0 is fully opaque. |
| * |
| * The videobox plugin has many uses, such as creating a mosaic of pictures, |
| * letterboxing video, cutting out pieces of video, picture-in-picture, etc. |
| * |
| * Setting autocrop to true changes the behavior of the plugin so that |
| * caps determine crop properties rather than the other way around: given |
| * input and output dimensions, the crop values are selected so that the |
| * smaller frame is effectively centered in the larger frame. This |
| * involves either cropping or padding. |
| * |
| * If you use autocrop, there is little point in setting the other |
| * properties manually because they will be overridden whenever the caps |
| * change, but nothing stops you from doing so. |
| * |
| * Sample pipeline: |
| * |[ |
| * gst-launch videotestsrc ! videobox autocrop=true ! \ |
| * "video/x-raw-yuv, width=600, height=400" ! ffmpegcolorspace ! ximagesink |
| * ]| |
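| * |
| * Another sample pipeline, this time adding a 50 pixel green border around |
| * the video instead of cropping it (ffmpegcolorspace and ximagesink are only |
| * examples here, any raw video sink can be used): |
| * |[ |
| * gst-launch videotestsrc ! videobox fill=green \ |
| *     top=-50 bottom=-50 left=-50 right=-50 ! ffmpegcolorspace ! ximagesink |
| * ]| |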
| */ |
| |
| #ifdef HAVE_CONFIG_H |
| #include "config.h" |
| #endif |
| |
| #include "gstvideobox.h" |
| |
| #include <math.h> |
| #include <liboil/liboil.h> |
| #include <string.h> |
| |
| #include <gst/controller/gstcontroller.h> |
| |
| GST_DEBUG_CATEGORY_STATIC (videobox_debug); |
| #define GST_CAT_DEFAULT videobox_debug |
| |
| /* From videotestsrc.c */ |
| static const guint8 yuv_sdtv_colors_Y[VIDEO_BOX_FILL_LAST] = |
| { 16, 145, 41, 81, 210, 235 }; |
| static const guint8 yuv_sdtv_colors_U[VIDEO_BOX_FILL_LAST] = |
| { 128, 54, 240, 90, 16, 128 }; |
| static const guint8 yuv_sdtv_colors_V[VIDEO_BOX_FILL_LAST] = |
| { 128, 34, 110, 240, 146, 128 }; |
| |
| static const guint8 yuv_hdtv_colors_Y[VIDEO_BOX_FILL_LAST] = |
| { 16, 173, 32, 63, 219, 235 }; |
| static const guint8 yuv_hdtv_colors_U[VIDEO_BOX_FILL_LAST] = |
| { 128, 42, 240, 102, 16, 128 }; |
| static const guint8 yuv_hdtv_colors_V[VIDEO_BOX_FILL_LAST] = |
| { 128, 26, 118, 240, 138, 128 }; |
| |
| static const guint8 rgb_colors_R[VIDEO_BOX_FILL_LAST] = |
| { 0, 0, 0, 255, 255, 255 }; |
| static const guint8 rgb_colors_G[VIDEO_BOX_FILL_LAST] = |
| { 0, 255, 0, 0, 255, 255 }; |
| static const guint8 rgb_colors_B[VIDEO_BOX_FILL_LAST] = |
| { 0, 0, 255, 0, 0, 255 }; |
| |
| /* Generated by -bad/ext/cog/generate_tables */ |
| static const gint cog_ycbcr_to_rgb_matrix_8bit_hdtv[] = { |
| 298, 0, 459, -63514, |
| 298, -55, -136, 19681, |
| 298, 541, 0, -73988, |
| }; |
| |
| static const gint cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = { |
| 298, 0, 409, -57068, |
| 298, -100, -208, 34707, |
| 298, 516, 0, -70870, |
| }; |
| |
| static const gint cog_rgb_to_ycbcr_matrix_8bit_hdtv[] = { |
| 47, 157, 16, 4096, |
| -26, -87, 112, 32768, |
| 112, -102, -10, 32768, |
| }; |
| |
| static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = { |
| 66, 129, 25, 4096, |
| -38, -74, 112, 32768, |
| 112, -94, -18, 32768, |
| }; |
| |
| static const gint cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit[] = { |
| 256, -30, -53, 10600, |
| 0, 261, 29, -4367, |
| 0, 19, 262, -3289, |
| }; |
| |
| static const gint cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit[] = { |
| 256, 25, 49, -9536, |
| 0, 253, -28, 3958, |
| 0, -19, 252, 2918, |
| }; |
| |
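| /* Multiplies the component triple (v1, v2, v3) with row o of the 3x4 matrix |
|  * m. The coefficients are 8 bit fixed point values and the last column of |
|  * each row is a constant offset, so the result is scaled back by >> 8. */ |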
| #define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8) |
| |
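| /* The fill_*() functions below fill a complete frame of the given format |
|  * with the selected border color, the copy_*() functions copy (and convert, |
|  * if the formats or color matrices differ) a rectangular region of a source |
|  * frame into a destination frame. */ |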
| static void |
| fill_ayuv (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFormat format, |
| guint8 * dest, gboolean sdtv, gint width, gint height) |
| { |
| guint32 empty_pixel; |
| |
| if (sdtv) |
| empty_pixel = GUINT32_FROM_BE ((b_alpha << 24) | |
| (yuv_sdtv_colors_Y[fill_type] << 16) | |
| (yuv_sdtv_colors_U[fill_type] << 8) | yuv_sdtv_colors_V[fill_type]); |
| else |
| empty_pixel = GUINT32_FROM_BE ((b_alpha << 24) | |
| (yuv_hdtv_colors_Y[fill_type] << 16) | |
| (yuv_hdtv_colors_U[fill_type] << 8) | yuv_hdtv_colors_V[fill_type]); |
| |
| oil_splat_u32_ns ((guint32 *) dest, &empty_pixel, width * height); |
| } |
| |
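| /* Copies a w x h region of an AYUV frame into an AYUV frame, scaling the |
|  * alpha channel by i_alpha and converting between the SDTV and HDTV color |
|  * matrices if source and destination colorimetry differ. */ |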
| static void |
| copy_ayuv_ayuv (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i, j; |
| gint src_stride = 4 * src_width; |
| gint dest_stride = 4 * dest_width; |
| |
| dest = dest + dest_y * dest_width * 4 + dest_x * 4; |
| src = src + src_y * src_width * 4 + src_x * 4; |
| |
| w *= 4; |
| |
| if (dest_sdtv != src_sdtv) { |
| gint matrix[12]; |
| gint y, u, v; |
| |
| memcpy (matrix, |
| dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit : |
| cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint)); |
| |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| dest[j] = (src[j] * i_alpha) >> 8; |
| y = src[j + 1]; |
| u = src[j + 2]; |
| v = src[j + 3]; |
| dest[j + 1] = APPLY_MATRIX (matrix, 0, y, u, v); |
| dest[j + 2] = APPLY_MATRIX (matrix, 1, y, u, v); |
| dest[j + 3] = APPLY_MATRIX (matrix, 2, y, u, v); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else { |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| dest[j] = (src[j] * i_alpha) >> 8; |
| dest[j + 1] = src[j + 1]; |
| dest[j + 2] = src[j + 2]; |
| dest[j + 3] = src[j + 3]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } |
| } |
| |
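| /* Copies a w x h region of an AYUV frame into an I420/YV12 frame. The source |
|  * is processed in blocks of 2x2 pixels: each luma value is kept while the |
|  * four chroma values are averaged to build the subsampled U and V planes. |
|  * SDTV/HDTV colorimetry is converted if needed; the source alpha channel is |
|  * discarded. */ |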
| static void |
| copy_ayuv_i420 (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i, j; |
| guint8 *destY, *destU, *destV; |
| gint dest_strideY, dest_strideUV; |
| gint widthY, widthUV; |
| gint hY, hUV; |
| |
| dest_strideY = gst_video_format_get_row_stride (dest_format, 0, dest_width); |
| dest_strideUV = gst_video_format_get_row_stride (dest_format, 1, dest_width); |
| |
| destY = |
| dest + gst_video_format_get_component_offset (dest_format, 0, |
| dest_width, dest_height); |
| destU = |
| dest + gst_video_format_get_component_offset (dest_format, 1, |
| dest_width, dest_height); |
| destV = |
| dest + gst_video_format_get_component_offset (dest_format, 2, |
| dest_width, dest_height); |
| |
| destY = destY + dest_y * dest_strideY + dest_x; |
| destU = destU + (dest_y / 2) * dest_strideUV + dest_x / 2; |
| destV = destV + (dest_y / 2) * dest_strideUV + dest_x / 2; |
| |
| src = src + src_y * src_width * 4 + src_x * 4; |
| |
| widthY = w; |
| widthUV = w / 2; |
| |
| hY = h; |
| hUV = h / 2; |
| |
| if (src_sdtv != dest_sdtv) { |
| gint matrix[12]; |
| gint y1, y2, y3, y4; |
| gint u1, u2, u3, u4; |
| gint v1, v2, v3, v4; |
| guint8 *destY2 = destY + dest_strideY; |
| const guint8 *src2 = src + src_width * 4; |
| |
| dest_strideY *= 2; |
| |
| memcpy (matrix, |
| dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit : |
| cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint)); |
| |
| for (i = 0; i < hUV; i++) { |
| if (i * 2 == hY) { |
| destY2 = destY; |
| } |
| |
| for (j = 0; j < widthUV; j++) { |
| y1 = src[8 * j + 1]; |
| u1 = src[8 * j + 2]; |
| v1 = src[8 * j + 3]; |
| y2 = src[8 * j + 5]; |
| u2 = src[8 * j + 6]; |
| v2 = src[8 * j + 7]; |
| |
| if (j * 2 < widthY) { |
| y3 = src2[8 * j + 1]; |
| u3 = src2[8 * j + 2]; |
| v3 = src2[8 * j + 3]; |
| y4 = src2[8 * j + 5]; |
| u4 = src2[8 * j + 6]; |
| v4 = src2[8 * j + 7]; |
| } else { |
| y3 = y1; |
| u3 = u1; |
| v3 = v1; |
| y4 = y2; |
| u4 = u2; |
| v4 = v2; |
| } |
| |
| y1 = APPLY_MATRIX (matrix, 0, y1, u1, v1); |
| u1 = APPLY_MATRIX (matrix, 1, y1, u1, v1); |
| v1 = APPLY_MATRIX (matrix, 2, y1, u1, v1); |
| |
| y3 = APPLY_MATRIX (matrix, 0, y3, u3, v3); |
| u3 = APPLY_MATRIX (matrix, 1, y3, u3, v3); |
| v3 = APPLY_MATRIX (matrix, 2, y3, u3, v3); |
| |
| if (j * 2 < widthY) { |
| y2 = APPLY_MATRIX (matrix, 0, y2, u2, v2); |
| u2 = APPLY_MATRIX (matrix, 1, y2, u2, v2); |
| v2 = APPLY_MATRIX (matrix, 2, y2, u2, v2); |
| |
| y4 = APPLY_MATRIX (matrix, 0, y4, u4, v4); |
| u4 = APPLY_MATRIX (matrix, 1, y4, u4, v4); |
| v4 = APPLY_MATRIX (matrix, 2, y4, u4, v4); |
| } else { |
| u2 = u1; |
| v2 = v1; |
| u4 = u3; |
| v4 = v3; |
| } |
| |
| destY[2 * j] = y1; |
| destY2[2 * j] = y3; |
| if (j * 2 < widthY) { |
| destY[2 * j + 1] = y2; |
| destY2[2 * j + 1] = y4; |
| } |
| |
| destU[j] = (u1 + u2 + u3 + u4) / 4; |
| destV[j] = (v1 + v2 + v3 + v4) / 4; |
| } |
| src += src_width * 8; |
| destY += dest_strideY; |
| src2 += src_width * 8; |
| destY2 += dest_strideY; |
| |
| destU += dest_strideUV; |
| destV += dest_strideUV; |
| } |
| } else { |
| gint y1, y2, y3, y4; |
| gint u1, u2, u3, u4; |
| gint v1, v2, v3, v4; |
| guint8 *destY2 = destY + dest_strideY; |
| const guint8 *src2 = src + src_width * 4; |
| |
| dest_strideY *= 2; |
| |
| for (i = 0; i < hUV; i++) { |
| if (i * 2 == hY) { |
| destY2 = destY; |
| } |
| |
| for (j = 0; j < widthUV; j++) { |
| y1 = src[8 * j + 1]; |
| u1 = src[8 * j + 2]; |
| v1 = src[8 * j + 3]; |
| y2 = src[8 * j + 5]; |
| u2 = src[8 * j + 6]; |
| v2 = src[8 * j + 7]; |
| |
| if (j * 2 < widthY) { |
| y3 = src2[8 * j + 1]; |
| u3 = src2[8 * j + 2]; |
| v3 = src2[8 * j + 3]; |
| y4 = src2[8 * j + 5]; |
| u4 = src2[8 * j + 6]; |
| v4 = src2[8 * j + 7]; |
| } else { |
| y3 = y1; |
| u3 = u1; |
| v3 = v1; |
| y4 = y2; |
| u4 = u2; |
| v4 = v2; |
| } |
| |
| destY[2 * j] = y1; |
| destY2[2 * j] = y3; |
| if (j * 2 < widthY) { |
| destY[2 * j + 1] = y2; |
| destY2[2 * j + 1] = y4; |
| } |
| |
| destU[j] = (u1 + u2 + u3 + u4) / 4; |
| destV[j] = (v1 + v2 + v3 + v4) / 4; |
| } |
| src += src_width * 8; |
| destY += dest_strideY; |
| src2 += src_width * 8; |
| destY2 += dest_strideY; |
| |
| destU += dest_strideUV; |
| destV += dest_strideUV; |
| } |
| } |
| } |
| |
| static void |
| fill_i420 (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFormat format, |
| guint8 * dest, gboolean sdtv, gint width, gint height) |
| { |
| guint8 empty_pixel[3]; |
| guint8 *destY, *destU, *destV; |
| gint strideY, strideUV; |
| gint heightY, heightUV; |
| |
| if (sdtv) { |
| empty_pixel[0] = yuv_sdtv_colors_Y[fill_type]; |
| empty_pixel[1] = yuv_sdtv_colors_U[fill_type]; |
| empty_pixel[2] = yuv_sdtv_colors_V[fill_type]; |
| } else { |
| empty_pixel[0] = yuv_hdtv_colors_Y[fill_type]; |
| empty_pixel[1] = yuv_hdtv_colors_U[fill_type]; |
| empty_pixel[2] = yuv_hdtv_colors_V[fill_type]; |
| } |
| |
| strideY = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, width); |
| strideUV = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, width); |
| |
| destY = |
| dest + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, |
| width, height); |
| destU = |
| dest + gst_video_format_get_component_offset (format, 1, width, height); |
| destV = |
| dest + gst_video_format_get_component_offset (format, 2, width, height); |
| |
| heightY = |
| gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 0, height); |
| heightUV = |
| gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 1, height); |
| |
| oil_splat_u8_ns (destY, &empty_pixel[0], strideY * heightY); |
| oil_splat_u8_ns (destU, &empty_pixel[1], strideUV * heightUV); |
| oil_splat_u8_ns (destV, &empty_pixel[2], strideUV * heightUV); |
| } |
| |
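| /* Copies a w x h region between two I420/YV12 frames. If source and |
|  * destination use the same color matrix the planes are copied row by row; |
|  * otherwise every pixel is converted between SDTV and HDTV colorimetry and |
|  * the chroma of each 2x2 block is re-averaged. */ |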
| static void |
| copy_i420_i420 (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i; |
| guint8 *destY, *destU, *destV; |
| const guint8 *srcY, *srcU, *srcV; |
| gint dest_strideY, dest_strideUV; |
| gint src_strideY, src_strideUV; |
| gint widthY, widthUV; |
| gint hY, hUV; |
| |
| dest_strideY = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, dest_width); |
| dest_strideUV = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, dest_width); |
| src_strideY = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, src_width); |
| src_strideUV = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, src_width); |
| |
| destY = |
| dest + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, |
| dest_width, dest_height); |
| destU = |
| dest + gst_video_format_get_component_offset (dest_format, 1, |
| dest_width, dest_height); |
| destV = |
| dest + gst_video_format_get_component_offset (dest_format, 2, |
| dest_width, dest_height); |
| |
| srcY = |
| src + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, |
| src_width, src_height); |
| srcU = |
| src + gst_video_format_get_component_offset (src_format, 1, |
| src_width, src_height); |
| srcV = |
| src + gst_video_format_get_component_offset (src_format, 2, |
| src_width, src_height); |
| |
| |
| destY = destY + dest_y * dest_strideY + dest_x; |
| destU = destU + (dest_y / 2) * dest_strideUV + dest_x / 2; |
| destV = destV + (dest_y / 2) * dest_strideUV + dest_x / 2; |
| |
| srcY = srcY + src_y * src_strideY + src_x; |
| srcU = srcU + (src_y / 2) * src_strideUV + src_x / 2; |
| srcV = srcV + (src_y / 2) * src_strideUV + src_x / 2; |
| |
| widthY = w; |
| widthUV = (w + 1) / 2; |
| |
| hY = h; |
| hUV = (h + 1) / 2; |
| |
| if (src_sdtv != dest_sdtv) { |
| gint matrix[12]; |
| gint y1, y2, y3, y4; |
| gint u1, u2, u3, u4; |
| gint v1, v2, v3, v4; |
| gint j; |
| guint8 *destY2 = destY + dest_strideY; |
| const guint8 *srcY2 = srcY + src_strideY; |
| |
| dest_strideY *= 2; |
| src_strideY *= 2; |
| |
| memcpy (matrix, |
| dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit : |
| cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint)); |
| |
| for (i = 0; i < hUV; i++) { |
| if (i * 2 == hY) { |
| destY2 = destY; |
| srcY2 = srcY; |
| } |
| |
| for (j = 0; j < widthUV; j++) { |
| y1 = srcY[2 * j]; |
| y2 = srcY[2 * j + 1]; |
| y3 = srcY2[2 * j]; |
| y4 = srcY2[2 * j + 1]; |
| |
| u1 = u2 = u3 = u4 = srcU[j]; |
| v1 = v2 = v3 = v4 = srcV[j]; |
| |
| y1 = APPLY_MATRIX (matrix, 0, y1, u1, v1); |
| u1 = APPLY_MATRIX (matrix, 1, y1, u1, v1); |
| v1 = APPLY_MATRIX (matrix, 2, y1, u1, v1); |
| |
| y2 = APPLY_MATRIX (matrix, 0, y2, u2, v2); |
| u2 = APPLY_MATRIX (matrix, 1, y2, u2, v2); |
| v2 = APPLY_MATRIX (matrix, 2, y2, u2, v2); |
| |
| y3 = APPLY_MATRIX (matrix, 0, y3, u3, v3); |
| u3 = APPLY_MATRIX (matrix, 1, y3, u3, v3); |
| v3 = APPLY_MATRIX (matrix, 2, y3, u3, v3); |
| |
| y4 = APPLY_MATRIX (matrix, 0, y4, u4, v4); |
| u4 = APPLY_MATRIX (matrix, 1, y4, u4, v4); |
| v4 = APPLY_MATRIX (matrix, 2, y4, u4, v4); |
| |
| destY[2 * j] = y1; |
| destY[2 * j + 1] = y2; |
| destY2[2 * j] = y3; |
| destY2[2 * j + 1] = y4; |
| |
| destU[j] = (u1 + u2 + u3 + u4) / 4; |
| destV[j] = (v1 + v2 + v3 + v4) / 4; |
| } |
| destY += dest_strideY; |
| srcY += src_strideY; |
| destY2 += dest_strideY; |
| srcY2 += src_strideY; |
| |
| destU += dest_strideUV; |
| destV += dest_strideUV; |
| srcU += src_strideUV; |
| srcV += src_strideUV; |
| } |
| } else { |
| for (i = 0; i < hY; i++) { |
| oil_copy_u8 (destY, srcY, widthY); |
| destY += dest_strideY; |
| srcY += src_strideY; |
| } |
| |
| for (i = 0; i < hUV; i++) { |
| oil_copy_u8 (destU, srcU, widthUV); |
| oil_copy_u8 (destV, srcV, widthUV); |
| destU += dest_strideUV; |
| destV += dest_strideUV; |
| srcU += src_strideUV; |
| srcV += src_strideUV; |
| } |
| } |
| } |
| |
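| /* Copies a w x h region of an I420/YV12 frame into an AYUV frame. The |
|  * subsampled chroma is upsampled by replication, i_alpha is written into the |
|  * alpha channel of every output pixel and SDTV/HDTV colorimetry is converted |
|  * if needed. */ |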
| static void |
| copy_i420_ayuv (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i; |
| const guint8 *srcY, *srcU, *srcV; |
| gint src_strideY, src_strideUV; |
| gint widthY, widthUV; |
| gint hY, hUV; |
| |
| src_strideY = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 0, src_width); |
| src_strideUV = |
| gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420, 1, src_width); |
| |
| srcY = |
| src + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420, 0, |
| src_width, src_height); |
| srcU = |
| src + gst_video_format_get_component_offset (src_format, 1, |
| src_width, src_height); |
| srcV = |
| src + gst_video_format_get_component_offset (src_format, 2, |
| src_width, src_height); |
| |
| |
| dest = dest + dest_y * dest_width * 4 + dest_x * 4; |
| |
| srcY = srcY + src_y * src_strideY + src_x; |
| srcU = srcU + (src_y / 2) * src_strideUV + src_x / 2; |
| srcV = srcV + (src_y / 2) * src_strideUV + src_x / 2; |
| |
| widthY = w; |
| widthUV = (w + 1) / 2; |
| |
| hY = h; |
| hUV = (h + 1) / 2; |
| |
| if (src_sdtv != dest_sdtv) { |
| gint matrix[12]; |
| gint y1, y2, y3, y4; |
| gint u1, u2, u3, u4; |
| gint v1, v2, v3, v4; |
| gint j; |
| guint8 *dest2; |
| const guint8 *srcY2 = srcY + src_strideY; |
| |
| dest2 = dest + dest_width * 4; |
| |
| src_strideY *= 2; |
| |
| memcpy (matrix, |
| dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit : |
| cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint)); |
| |
| for (i = 0; i < hUV; i++) { |
| if (i * 2 == hY) { |
| srcY2 = srcY; |
| } |
| |
| for (j = 0; j < widthUV; j++) { |
| y1 = srcY[2 * j]; |
| y3 = srcY2[2 * j]; |
| y2 = (j * 2 < widthY) ? srcY[2 * j + 1] : y1; |
| y4 = (j * 2 < widthY) ? srcY2[2 * j + 1] : y3; |
| |
| u1 = u2 = u3 = u4 = srcU[j]; |
| v1 = v2 = v3 = v4 = srcV[j]; |
| |
| y1 = APPLY_MATRIX (matrix, 0, y1, u1, v1); |
| u1 = APPLY_MATRIX (matrix, 1, y1, u1, v1); |
| v1 = APPLY_MATRIX (matrix, 2, y1, u1, v1); |
| |
| y3 = APPLY_MATRIX (matrix, 0, y3, u3, v3); |
| u3 = APPLY_MATRIX (matrix, 1, y3, u3, v3); |
| v3 = APPLY_MATRIX (matrix, 2, y3, u3, v3); |
| |
| if (j * 2 < widthY) { |
| y2 = APPLY_MATRIX (matrix, 0, y2, u2, v2); |
| u2 = APPLY_MATRIX (matrix, 1, y2, u2, v2); |
| v2 = APPLY_MATRIX (matrix, 2, y2, u2, v2); |
| |
| y4 = APPLY_MATRIX (matrix, 0, y4, u4, v4); |
| u4 = APPLY_MATRIX (matrix, 1, y4, u4, v4); |
| v4 = APPLY_MATRIX (matrix, 2, y4, u4, v4); |
| } |
| |
| dest[8 * j] = i_alpha; |
| dest[8 * j + 1] = y1; |
| dest[8 * j + 2] = u1; |
| dest[8 * j + 3] = v1; |
| dest2[8 * j] = i_alpha; |
| dest2[8 * j + 1] = y3; |
| dest2[8 * j + 2] = u3; |
| dest2[8 * j + 3] = v3; |
| if (j * 2 < widthY) { |
| dest[8 * j + 4] = i_alpha; |
| dest[8 * j + 5] = y2; |
| dest[8 * j + 6] = u2; |
| dest[8 * j + 7] = v2; |
| dest2[8 * j + 4] = i_alpha; |
| dest2[8 * j + 5] = y4; |
| dest2[8 * j + 6] = u4; |
| dest2[8 * j + 7] = v4; |
| } |
| } |
| dest += dest_width * 8; |
| srcY += src_strideY; |
| dest2 += dest_width * 8; |
| srcY2 += src_strideY; |
| |
| srcU += src_strideUV; |
| srcV += src_strideUV; |
| } |
| } else { |
| gint y1, y2, y3, y4; |
| gint u1, u2, u3, u4; |
| gint v1, v2, v3, v4; |
| gint j; |
| guint8 *dest2; |
| const guint8 *srcY2 = srcY + src_strideY; |
| |
| dest2 = dest + dest_width * 4; |
| |
| src_strideY *= 2; |
| |
| for (i = 0; i < hUV; i++) { |
| if (i * 2 == hY) { |
| srcY2 = srcY; |
| } |
| |
| for (j = 0; j < widthUV; j++) { |
| y1 = srcY[2 * j]; |
| y3 = srcY2[2 * j]; |
| y2 = (j * 2 < widthY) ? srcY[2 * j + 1] : y1; |
| y4 = (j * 2 < widthY) ? srcY2[2 * j + 1] : y3; |
| |
| u1 = u2 = u3 = u4 = srcU[j]; |
| v1 = v2 = v3 = v4 = srcV[j]; |
| |
| dest[8 * j] = i_alpha; |
| dest[8 * j + 1] = y1; |
| dest[8 * j + 2] = u1; |
| dest[8 * j + 3] = v1; |
| dest2[8 * j] = i_alpha; |
| dest2[8 * j + 1] = y3; |
| dest2[8 * j + 2] = u3; |
| dest2[8 * j + 3] = v3; |
| if (j * 2 < widthY) { |
| dest[8 * j + 4] = i_alpha; |
| dest[8 * j + 5] = y2; |
| dest[8 * j + 6] = u2; |
| dest[8 * j + 7] = v2; |
| dest2[8 * j + 4] = i_alpha; |
| dest2[8 * j + 5] = y4; |
| dest2[8 * j + 6] = u4; |
| dest2[8 * j + 7] = v4; |
| } |
| } |
| dest += dest_width * 8; |
| srcY += src_strideY; |
| dest2 += dest_width * 8; |
| srcY2 += src_strideY; |
| |
| srcU += src_strideUV; |
| srcV += src_strideUV; |
| } |
| } |
| } |
| |
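| /* Returns in p[] the byte offsets of the A, R, G and B components for the |
|  * given packed RGB format and sets *alpha for the variants that carry a real |
|  * alpha channel. The case fall-throughs are intentional: ARGB shares its |
|  * component order with xRGB, RGBA with RGBx/RGB, and so on. */ |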
| static void |
| _argb_order (GstVideoFormat format, gint p[4], gboolean * alpha) |
| { |
| *alpha = FALSE; |
| switch (format) { |
| case GST_VIDEO_FORMAT_ARGB: |
| *alpha = TRUE; |
| case GST_VIDEO_FORMAT_xRGB: |
| p[0] = 0; |
| p[1] = 1; |
| p[2] = 2; |
| p[3] = 3; |
| break; |
| case GST_VIDEO_FORMAT_ABGR: |
| *alpha = TRUE; |
| case GST_VIDEO_FORMAT_xBGR: |
| p[0] = 0; |
| p[1] = 3; |
| p[2] = 2; |
| p[3] = 1; |
| break; |
| case GST_VIDEO_FORMAT_RGBA: |
| *alpha = TRUE; |
| case GST_VIDEO_FORMAT_RGBx: |
| case GST_VIDEO_FORMAT_RGB: |
| p[0] = 3; |
| p[1] = 0; |
| p[2] = 1; |
| p[3] = 2; |
| break; |
| case GST_VIDEO_FORMAT_BGRA: |
| *alpha = TRUE; |
| case GST_VIDEO_FORMAT_BGRx: |
| case GST_VIDEO_FORMAT_BGR: |
| p[0] = 3; |
| p[1] = 2; |
| p[2] = 1; |
| p[3] = 0; |
| break; |
| default: |
| g_assert_not_reached (); |
| } |
| } |
| |
| static void |
| fill_rgb32 (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFormat format, |
| guint8 * dest, gboolean sdtv, gint width, gint height) |
| { |
| guint32 empty_pixel; |
| gint p[4]; |
| gboolean alpha; |
| |
| _argb_order (format, p, &alpha); |
| |
| empty_pixel = GUINT32_FROM_LE ((b_alpha << (p[0] * 8)) | |
| (rgb_colors_R[fill_type] << (p[1] * 8)) | |
| (rgb_colors_G[fill_type] << (p[2] * 8)) | |
| (rgb_colors_B[fill_type] << (p[3] * 8))); |
| |
| oil_splat_u32_ns ((guint32 *) dest, &empty_pixel, width * height); |
| } |
| |
| static void |
| fill_rgb24 (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFormat format, |
| guint8 * dest, gboolean sdtv, gint width, gint height) |
| { |
| gint dest_stride = GST_ROUND_UP_4 (width * 3); |
| gint p[4]; |
| gboolean alpha; |
| gint i, j; |
| |
| _argb_order (format, p, &alpha); |
| |
| for (i = 0; i < height; i++) { |
| for (j = 0; j < width; j++) { |
| dest[3 * j + p[1]] = rgb_colors_R[fill_type]; |
| dest[3 * j + p[2]] = rgb_colors_G[fill_type]; |
| dest[3 * j + p[3]] = rgb_colors_B[fill_type]; |
| } |
| dest += dest_stride; |
| } |
| } |
| |
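| /* Copies a w x h region between two packed 24/32 bpp RGB frames, reordering |
|  * the components as needed. The alpha channel is scaled by i_alpha if both |
|  * formats carry alpha and set from i_alpha if only the destination does. */ |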
| static void |
| copy_rgb32 (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i, j; |
| gint src_stride, dest_stride; |
| gboolean in_alpha, out_alpha; |
| gint in_bpp, out_bpp; |
| gint p_out[4]; |
| gint p_in[4]; |
| gboolean packed_out = (dest_format == GST_VIDEO_FORMAT_RGB |
| || dest_format == GST_VIDEO_FORMAT_BGR); |
| gboolean packed_in = (src_format == GST_VIDEO_FORMAT_RGB |
| || src_format == GST_VIDEO_FORMAT_BGR); |
| |
| src_stride = (packed_in) ? GST_ROUND_UP_4 (3 * src_width) : 4 * src_width; |
| dest_stride = (packed_out) ? GST_ROUND_UP_4 (3 * dest_width) : 4 * dest_width; |
| in_bpp = (packed_in) ? 3 : 4; |
| out_bpp = (packed_out) ? 3 : 4; |
| |
| _argb_order (dest_format, p_out, &out_alpha); |
| _argb_order (src_format, p_in, &in_alpha); |
| |
| dest = dest + dest_y * dest_stride + dest_x * out_bpp; |
| src = src + src_y * src_stride + src_x * in_bpp; |
| |
| if (in_alpha && out_alpha) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| dest[j + p_out[0]] = (src[j + p_in[0]] * i_alpha) >> 8; |
| dest[j + p_out[1]] = src[j + p_in[1]]; |
| dest[j + p_out[2]] = src[j + p_in[2]]; |
| dest[j + p_out[3]] = src[j + p_in[3]]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else if (out_alpha && !packed_in) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| dest[j + p_out[0]] = i_alpha & 0xff; |
| dest[j + p_out[1]] = src[j + p_in[1]]; |
| dest[j + p_out[2]] = src[j + p_in[2]]; |
| dest[j + p_out[3]] = src[j + p_in[3]]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else if (out_alpha && packed_in) { |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j++) { |
| dest[4 * j + p_out[0]] = i_alpha & 0xff; |
| dest[4 * j + p_out[1]] = src[in_bpp * j + p_in[1]]; |
| dest[4 * j + p_out[2]] = src[in_bpp * j + p_in[2]]; |
| dest[4 * j + p_out[3]] = src[in_bpp * j + p_in[3]]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else if (!packed_out && !packed_in) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| dest[j + p_out[1]] = src[j + p_in[1]]; |
| dest[j + p_out[2]] = src[j + p_in[2]]; |
| dest[j + p_out[3]] = src[j + p_in[3]]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else { |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j++) { |
| dest[out_bpp * j + p_out[1]] = src[in_bpp * j + p_in[1]]; |
| dest[out_bpp * j + p_out[2]] = src[in_bpp * j + p_in[2]]; |
| dest[out_bpp * j + p_out[3]] = src[in_bpp * j + p_in[3]]; |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } |
| } |
| |
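| /* Copies a w x h region of a packed 24/32 bpp RGB frame into an AYUV frame, |
|  * converting with the RGB -> YCbCr matrix that matches the destination |
|  * colorimetry (SDTV or HDTV). */ |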
| static void |
| copy_rgb32_ayuv (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i, j; |
| gint src_stride, dest_stride; |
| gboolean in_alpha; |
| gint in_bpp; |
| gint p_in[4]; |
| gboolean packed_in = (src_format == GST_VIDEO_FORMAT_RGB |
| || src_format == GST_VIDEO_FORMAT_BGR); |
| gint matrix[12]; |
| gint a; |
| gint y, u, v; |
| gint r, g, b; |
| |
| src_stride = (packed_in) ? GST_ROUND_UP_4 (3 * src_width) : 4 * src_width; |
| dest_stride = 4 * dest_width; |
| in_bpp = (packed_in) ? 3 : 4; |
| |
| _argb_order (src_format, p_in, &in_alpha); |
| |
| memcpy (matrix, |
| (dest_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv : |
| cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint)); |
| |
| dest = dest + dest_y * dest_stride + dest_x * 4; |
| src = src + src_y * src_stride + src_x * in_bpp; |
| |
| if (in_alpha) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| a = (src[j + p_in[0]] * i_alpha) >> 8; |
| r = src[j + p_in[1]]; |
| g = src[j + p_in[2]]; |
| b = src[j + p_in[3]]; |
| |
| y = APPLY_MATRIX (matrix, 0, r, g, b); |
| u = APPLY_MATRIX (matrix, 1, r, g, b); |
| v = APPLY_MATRIX (matrix, 2, r, g, b); |
| |
| dest[j + 0] = a; |
| dest[j + 1] = CLAMP (y, 0, 255); |
| dest[j + 2] = CLAMP (u, 0, 255); |
| dest[j + 3] = CLAMP (v, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else if (!packed_in) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| a = i_alpha & 0xff; |
| r = src[j + p_in[1]]; |
| g = src[j + p_in[2]]; |
| b = src[j + p_in[3]]; |
| |
| y = APPLY_MATRIX (matrix, 0, r, g, b); |
| u = APPLY_MATRIX (matrix, 1, r, g, b); |
| v = APPLY_MATRIX (matrix, 2, r, g, b); |
| |
| dest[j + 0] = a; |
| dest[j + 1] = CLAMP (y, 0, 255); |
| dest[j + 2] = CLAMP (u, 0, 255); |
| dest[j + 3] = CLAMP (v, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else { |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j++) { |
| a = i_alpha & 0xff; |
| r = src[in_bpp * j + p_in[1]]; |
| g = src[in_bpp * j + p_in[2]]; |
| b = src[in_bpp * j + p_in[3]]; |
| |
| y = APPLY_MATRIX (matrix, 0, r, g, b); |
| u = APPLY_MATRIX (matrix, 1, r, g, b); |
| v = APPLY_MATRIX (matrix, 2, r, g, b); |
| |
| dest[4 * j + 0] = a; |
| dest[4 * j + 1] = CLAMP (y, 0, 255); |
| dest[4 * j + 2] = CLAMP (u, 0, 255); |
| dest[4 * j + 3] = CLAMP (v, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } |
| } |
| |
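| /* Copies a w x h region of an AYUV frame into a packed 24/32 bpp RGB frame, |
|  * converting with the YCbCr -> RGB matrix that matches the source |
|  * colorimetry (SDTV or HDTV). */ |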
| static void |
| copy_ayuv_rgb32 (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i, j; |
| gint src_stride, dest_stride; |
| gboolean out_alpha; |
| gint out_bpp; |
| gint p_out[4]; |
| gboolean packed_out = (dest_format == GST_VIDEO_FORMAT_RGB |
| || dest_format == GST_VIDEO_FORMAT_BGR); |
| gint matrix[12]; |
| gint a; |
| gint y, u, v; |
| gint r, g, b; |
| |
| dest_stride = (packed_out) ? GST_ROUND_UP_4 (3 * dest_width) : 4 * dest_width; |
| src_stride = 4 * src_width; |
| out_bpp = (packed_out) ? 3 : 4; |
| |
| _argb_order (dest_format, p_out, &out_alpha); |
| |
| memcpy (matrix, |
| (src_sdtv) ? cog_ycbcr_to_rgb_matrix_8bit_sdtv : |
| cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint)); |
| |
| dest = dest + dest_y * dest_stride + dest_x * out_bpp; |
| src = src + src_y * src_stride + src_x * 4; |
| |
| if (out_alpha) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| a = (src[j + 0] * i_alpha) >> 8; |
| y = src[j + 1]; |
| u = src[j + 2]; |
| v = src[j + 3]; |
| |
| r = APPLY_MATRIX (matrix, 0, y, u, v); |
| g = APPLY_MATRIX (matrix, 1, y, u, v); |
| b = APPLY_MATRIX (matrix, 2, y, u, v); |
| |
| dest[j + p_out[0]] = a; |
| dest[j + p_out[1]] = CLAMP (r, 0, 255); |
| dest[j + p_out[2]] = CLAMP (g, 0, 255); |
| dest[j + p_out[3]] = CLAMP (b, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else if (!packed_out) { |
| w *= 4; |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j += 4) { |
| y = src[j + 1]; |
| u = src[j + 2]; |
| v = src[j + 3]; |
| |
| r = APPLY_MATRIX (matrix, 0, y, u, v); |
| g = APPLY_MATRIX (matrix, 1, y, u, v); |
| b = APPLY_MATRIX (matrix, 2, y, u, v); |
| |
| dest[j + p_out[1]] = CLAMP (r, 0, 255); |
| dest[j + p_out[2]] = CLAMP (g, 0, 255); |
| dest[j + p_out[3]] = CLAMP (b, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } else { |
| for (i = 0; i < h; i++) { |
| for (j = 0; j < w; j++) { |
| y = src[4 * j + 1]; |
| u = src[4 * j + 2]; |
| v = src[4 * j + 3]; |
| |
| r = APPLY_MATRIX (matrix, 0, y, u, v); |
| g = APPLY_MATRIX (matrix, 1, y, u, v); |
| b = APPLY_MATRIX (matrix, 2, y, u, v); |
| |
| dest[out_bpp * j + p_out[1]] = CLAMP (r, 0, 255); |
| dest[out_bpp * j + p_out[2]] = CLAMP (g, 0, 255); |
| dest[out_bpp * j + p_out[3]] = CLAMP (b, 0, 255); |
| } |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } |
| } |
| |
| static void |
| fill_gray (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFormat format, |
| guint8 * dest, gboolean sdtv, gint width, gint height) |
| { |
| gint i, j; |
| gint dest_stride; |
| |
| if (format == GST_VIDEO_FORMAT_GRAY8) { |
| guint8 val = yuv_sdtv_colors_Y[fill_type]; |
| |
| dest_stride = GST_ROUND_UP_4 (width); |
| for (i = 0; i < height; i++) { |
| oil_splat_u8_ns (dest, &val, width); |
| dest += dest_stride; |
| } |
| } else { |
| guint16 val = yuv_sdtv_colors_Y[fill_type] << 8; |
| |
| dest_stride = GST_ROUND_UP_4 (width * 2); |
| if (format == GST_VIDEO_FORMAT_GRAY16_BE) { |
| for (i = 0; i < height; i++) { |
| for (j = 0; j < width; j++) { |
| GST_WRITE_UINT16_BE (dest + 2 * j, val); |
| } |
| dest += dest_stride; |
| } |
| } else { |
| for (i = 0; i < height; i++) { |
| for (j = 0; j < width; j++) { |
| GST_WRITE_UINT16_LE (dest + 2 * j, val); |
| } |
| dest += dest_stride; |
| } |
| } |
| } |
| } |
| |
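| /* Copies a w x h region between two frames of the same single-plane format |
|  * (used for the GRAY formats) with a plain row-by-row copy. */ |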
| static void |
| copy_packed_simple (guint i_alpha, GstVideoFormat dest_format, guint8 * dest, |
| gboolean dest_sdtv, gint dest_width, gint dest_height, gint dest_x, |
| gint dest_y, GstVideoFormat src_format, const guint8 * src, |
| gboolean src_sdtv, gint src_width, gint src_height, gint src_x, gint src_y, |
| gint w, gint h) |
| { |
| gint i; |
| gint src_stride, dest_stride; |
| gint pixel_stride, row_size; |
| |
| src_stride = gst_video_format_get_row_stride (src_format, 0, src_width); |
| dest_stride = gst_video_format_get_row_stride (dest_format, 0, dest_width); |
| pixel_stride = gst_video_format_get_pixel_stride (dest_format, 0); |
| row_size = w * pixel_stride; |
| |
| dest = dest + dest_y * dest_stride + dest_x * pixel_stride; |
| src = src + src_y * src_stride + src_x * pixel_stride; |
| |
| for (i = 0; i < h; i++) { |
| oil_copy_u8 (dest, src, row_size); |
| dest += dest_stride; |
| src += src_stride; |
| } |
| } |
| |
| #define DEFAULT_LEFT 0 |
| #define DEFAULT_RIGHT 0 |
| #define DEFAULT_TOP 0 |
| #define DEFAULT_BOTTOM 0 |
| #define DEFAULT_FILL_TYPE VIDEO_BOX_FILL_BLACK |
| #define DEFAULT_ALPHA 1.0 |
| #define DEFAULT_BORDER_ALPHA 1.0 |
| |
| enum |
| { |
| PROP_0, |
| PROP_LEFT, |
| PROP_RIGHT, |
| PROP_TOP, |
| PROP_BOTTOM, |
| PROP_FILL_TYPE, |
| PROP_ALPHA, |
| PROP_BORDER_ALPHA, |
| PROP_AUTOCROP |
| /* FILL ME */ |
| }; |
| |
| static GstStaticPadTemplate gst_video_box_src_template = |
| GST_STATIC_PAD_TEMPLATE ("src", |
| GST_PAD_SRC, |
| GST_PAD_ALWAYS, |
| GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" |
| GST_VIDEO_CAPS_YUV ("I420") ";" |
| GST_VIDEO_CAPS_YUV ("YV12") ";" |
| GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" |
| GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_RGBx ";" |
| GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" |
| GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";" |
| GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";" |
| GST_VIDEO_CAPS_GRAY8 ";" |
| GST_VIDEO_CAPS_GRAY16 ("BIG_ENDIAN") ";" |
| GST_VIDEO_CAPS_GRAY16 ("LITTLE_ENDIAN")) |
| ); |
| |
| static GstStaticPadTemplate gst_video_box_sink_template = |
| GST_STATIC_PAD_TEMPLATE ("sink", |
| GST_PAD_SINK, |
| GST_PAD_ALWAYS, |
| GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" |
| GST_VIDEO_CAPS_YUV ("I420") ";" |
| GST_VIDEO_CAPS_YUV ("YV12") ";" |
| GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" |
| GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_RGBx ";" |
| GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" |
| GST_VIDEO_CAPS_ABGR ";" GST_VIDEO_CAPS_RGBA ";" |
| GST_VIDEO_CAPS_RGB ";" GST_VIDEO_CAPS_BGR ";" |
| GST_VIDEO_CAPS_GRAY8 ";" |
| GST_VIDEO_CAPS_GRAY16 ("BIG_ENDIAN") ";" |
| GST_VIDEO_CAPS_GRAY16 ("LITTLE_ENDIAN")) |
| ); |
| |
| GST_BOILERPLATE (GstVideoBox, gst_video_box, GstBaseTransform, |
| GST_TYPE_BASE_TRANSFORM); |
| |
| static void gst_video_box_set_property (GObject * object, guint prop_id, |
| const GValue * value, GParamSpec * pspec); |
| static void gst_video_box_get_property (GObject * object, guint prop_id, |
| GValue * value, GParamSpec * pspec); |
| |
| static gboolean gst_video_box_recalc_transform (GstVideoBox * video_box); |
| static GstCaps *gst_video_box_transform_caps (GstBaseTransform * trans, |
| GstPadDirection direction, GstCaps * from); |
| static gboolean gst_video_box_set_caps (GstBaseTransform * trans, |
| GstCaps * in, GstCaps * out); |
| static gboolean gst_video_box_get_unit_size (GstBaseTransform * trans, |
| GstCaps * caps, guint * size); |
| static GstFlowReturn gst_video_box_transform (GstBaseTransform * trans, |
| GstBuffer * in, GstBuffer * out); |
| static void gst_video_box_fixate_caps (GstBaseTransform * trans, |
| GstPadDirection direction, GstCaps * caps, GstCaps * othercaps); |
| |
| #define GST_TYPE_VIDEO_BOX_FILL (gst_video_box_fill_get_type()) |
| static GType |
| gst_video_box_fill_get_type (void) |
| { |
| static GType video_box_fill_type = 0; |
| static const GEnumValue video_box_fill[] = { |
| {VIDEO_BOX_FILL_BLACK, "Black", "black"}, |
| {VIDEO_BOX_FILL_GREEN, "Green", "green"}, |
| {VIDEO_BOX_FILL_BLUE, "Blue", "blue"}, |
| {VIDEO_BOX_FILL_RED, "Red", "red"}, |
| {VIDEO_BOX_FILL_YELLOW, "Yellow", "yellow"}, |
| {VIDEO_BOX_FILL_WHITE, "White", "white"}, |
| {0, NULL, NULL}, |
| }; |
| |
| if (!video_box_fill_type) { |
| video_box_fill_type = |
| g_enum_register_static ("GstVideoBoxFill", video_box_fill); |
| } |
| return video_box_fill_type; |
| } |
| |
| |
| static void |
| gst_video_box_base_init (gpointer g_class) |
| { |
| GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); |
| |
| gst_element_class_set_details_simple (element_class, "Video box filter", |
| "Filter/Effect/Video", |
| "Resizes a video by adding borders or cropping", |
| "Wim Taymans <wim@fluendo.com>"); |
| |
| gst_element_class_add_pad_template (element_class, |
| gst_static_pad_template_get (&gst_video_box_sink_template)); |
| gst_element_class_add_pad_template (element_class, |
| gst_static_pad_template_get (&gst_video_box_src_template)); |
| } |
| |
| static void |
| gst_video_box_finalize (GObject * object) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (object); |
| |
| if (video_box->mutex) { |
| g_mutex_free (video_box->mutex); |
| video_box->mutex = NULL; |
| } |
| |
| G_OBJECT_CLASS (parent_class)->finalize (object); |
| } |
| |
| static void |
| gst_video_box_class_init (GstVideoBoxClass * klass) |
| { |
| GObjectClass *gobject_class = (GObjectClass *) klass; |
| GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; |
| |
| gobject_class->set_property = gst_video_box_set_property; |
| gobject_class->get_property = gst_video_box_get_property; |
| gobject_class->finalize = gst_video_box_finalize; |
| |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FILL_TYPE, |
| g_param_spec_enum ("fill", "Fill", "How to fill the borders", |
| GST_TYPE_VIDEO_BOX_FILL, DEFAULT_FILL_TYPE, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_LEFT, |
| g_param_spec_int ("left", "Left", |
| "Pixels to box at left (<0 = add a border)", G_MININT, G_MAXINT, |
| DEFAULT_LEFT, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RIGHT, |
| g_param_spec_int ("right", "Right", |
| "Pixels to box at right (<0 = add a border)", G_MININT, G_MAXINT, |
| DEFAULT_RIGHT, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TOP, |
| g_param_spec_int ("top", "Top", |
| "Pixels to box at top (<0 = add a border)", G_MININT, G_MAXINT, |
| DEFAULT_TOP, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BOTTOM, |
| g_param_spec_int ("bottom", "Bottom", |
| "Pixels to box at bottom (<0 = add a border)", G_MININT, G_MAXINT, |
| DEFAULT_BOTTOM, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ALPHA, |
| g_param_spec_double ("alpha", "Alpha", "Alpha value picture", 0.0, 1.0, |
| DEFAULT_ALPHA, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BORDER_ALPHA, |
| g_param_spec_double ("border-alpha", "Border Alpha", |
| "Alpha value of the border", 0.0, 1.0, DEFAULT_BORDER_ALPHA, |
| G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); |
| /** |
| * GstVideoBox:autocrop: |
| * |
| * If set to %TRUE, videobox will automatically crop/pad the input |
| * video so that it is centered in the output. |
| * |
| * Since: 0.10.16 |
| **/ |
| g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_AUTOCROP, |
| g_param_spec_boolean ("autocrop", "Auto crop", |
| "Auto crop", FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); |
| |
| trans_class->transform = GST_DEBUG_FUNCPTR (gst_video_box_transform); |
| trans_class->transform_caps = |
| GST_DEBUG_FUNCPTR (gst_video_box_transform_caps); |
| trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_box_set_caps); |
| trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_video_box_get_unit_size); |
| trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_box_fixate_caps); |
| } |
| |
| static void |
| gst_video_box_init (GstVideoBox * video_box, GstVideoBoxClass * g_class) |
| { |
| video_box->box_right = DEFAULT_RIGHT; |
| video_box->box_left = DEFAULT_LEFT; |
| video_box->box_top = DEFAULT_TOP; |
| video_box->box_bottom = DEFAULT_BOTTOM; |
| video_box->crop_right = 0; |
| video_box->crop_left = 0; |
| video_box->crop_top = 0; |
| video_box->crop_bottom = 0; |
| video_box->fill_type = DEFAULT_FILL_TYPE; |
| video_box->alpha = DEFAULT_ALPHA; |
| video_box->border_alpha = DEFAULT_BORDER_ALPHA; |
| video_box->autocrop = FALSE; |
| |
| video_box->mutex = g_mutex_new (); |
| } |
| |
| static void |
| gst_video_box_set_property (GObject * object, guint prop_id, |
| const GValue * value, GParamSpec * pspec) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (object); |
| |
| g_mutex_lock (video_box->mutex); |
| switch (prop_id) { |
| case PROP_LEFT: |
| video_box->box_left = g_value_get_int (value); |
| if (video_box->box_left < 0) { |
| video_box->border_left = -video_box->box_left; |
| video_box->crop_left = 0; |
| } else { |
| video_box->border_left = 0; |
| video_box->crop_left = video_box->box_left; |
| } |
| break; |
| case PROP_RIGHT: |
| video_box->box_right = g_value_get_int (value); |
| if (video_box->box_right < 0) { |
| video_box->border_right = -video_box->box_right; |
| video_box->crop_right = 0; |
| } else { |
| video_box->border_right = 0; |
| video_box->crop_right = video_box->box_right; |
| } |
| break; |
| case PROP_TOP: |
| video_box->box_top = g_value_get_int (value); |
| if (video_box->box_top < 0) { |
| video_box->border_top = -video_box->box_top; |
| video_box->crop_top = 0; |
| } else { |
| video_box->border_top = 0; |
| video_box->crop_top = video_box->box_top; |
| } |
| break; |
| case PROP_BOTTOM: |
| video_box->box_bottom = g_value_get_int (value); |
| if (video_box->box_bottom < 0) { |
| video_box->border_bottom = -video_box->box_bottom; |
| video_box->crop_bottom = 0; |
| } else { |
| video_box->border_bottom = 0; |
| video_box->crop_bottom = video_box->box_bottom; |
| } |
| break; |
| case PROP_FILL_TYPE: |
| video_box->fill_type = g_value_get_enum (value); |
| break; |
| case PROP_ALPHA: |
| video_box->alpha = g_value_get_double (value); |
| break; |
| case PROP_BORDER_ALPHA: |
| video_box->border_alpha = g_value_get_double (value); |
| break; |
| case PROP_AUTOCROP: |
| video_box->autocrop = g_value_get_boolean (value); |
| break; |
| default: |
| G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); |
| break; |
| } |
| gst_video_box_recalc_transform (video_box); |
| |
| GST_DEBUG_OBJECT (video_box, "Calling reconfigure"); |
| gst_base_transform_reconfigure (GST_BASE_TRANSFORM_CAST (video_box)); |
| |
| g_mutex_unlock (video_box->mutex); |
| } |
| |
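| /* Derives the crop/border values from the negotiated input and output sizes |
|  * so that the smaller frame ends up centered in the larger one. Odd size |
|  * differences are split unevenly between the two opposite borders. */ |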
| static void |
| gst_video_box_autocrop (GstVideoBox * video_box) |
| { |
| gint crop_w = video_box->in_width - video_box->out_width; |
| gint crop_h = video_box->in_height - video_box->out_height; |
| |
| video_box->box_left = crop_w / 2; |
| if (video_box->box_left < 0) { |
| video_box->border_left = -video_box->box_left; |
| video_box->crop_left = 0; |
| } else { |
| video_box->border_left = 0; |
| video_box->crop_left = video_box->box_left; |
| } |
| |
| /* Round down/up for odd width differences */ |
| if (crop_w < 0) |
| crop_w -= 1; |
| else |
| crop_w += 1; |
| |
| video_box->box_right = crop_w / 2; |
| if (video_box->box_right < 0) { |
| video_box->border_right = -video_box->box_right; |
| video_box->crop_right = 0; |
| } else { |
| video_box->border_right = 0; |
| video_box->crop_right = video_box->box_right; |
| } |
| |
| video_box->box_top = crop_h / 2; |
| if (video_box->box_top < 0) { |
| video_box->border_top = -video_box->box_top; |
| video_box->crop_top = 0; |
| } else { |
| video_box->border_top = 0; |
| video_box->crop_top = video_box->box_top; |
| } |
| |
| /* Round down/up for odd height differences */ |
| if (crop_h < 0) |
| crop_h -= 1; |
| else |
| crop_h += 1; |
| video_box->box_bottom = crop_h / 2; |
| |
| if (video_box->box_bottom < 0) { |
| video_box->border_bottom = -video_box->box_bottom; |
| video_box->crop_bottom = 0; |
| } else { |
| video_box->border_bottom = 0; |
| video_box->crop_bottom = video_box->box_bottom; |
| } |
| } |
| |
| static void |
| gst_video_box_get_property (GObject * object, guint prop_id, GValue * value, |
| GParamSpec * pspec) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (object); |
| |
| switch (prop_id) { |
| case PROP_LEFT: |
| g_value_set_int (value, video_box->box_left); |
| break; |
| case PROP_RIGHT: |
| g_value_set_int (value, video_box->box_right); |
| break; |
| case PROP_TOP: |
| g_value_set_int (value, video_box->box_top); |
| break; |
| case PROP_BOTTOM: |
| g_value_set_int (value, video_box->box_bottom); |
| break; |
| case PROP_FILL_TYPE: |
| g_value_set_enum (value, video_box->fill_type); |
| break; |
| case PROP_ALPHA: |
| g_value_set_double (value, video_box->alpha); |
| break; |
| case PROP_BORDER_ALPHA: |
| g_value_set_double (value, video_box->border_alpha); |
| break; |
| case PROP_AUTOCROP: |
| g_value_set_boolean (value, video_box->autocrop); |
| break; |
| default: |
| G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); |
| break; |
| } |
| } |
| |
| static GstCaps * |
| gst_video_box_transform_caps (GstBaseTransform * trans, |
| GstPadDirection direction, GstCaps * from) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (trans); |
| GstCaps *to, *ret; |
| const GstCaps *templ; |
| const gchar *name; |
| GstStructure *structure; |
| GstPad *other; |
| gint width, height; |
| |
| to = gst_caps_copy (from); |
| /* Just to be sure... */ |
| gst_caps_truncate (to); |
| structure = gst_caps_get_structure (to, 0); |
| |
| /* Transform width/height */ |
| if (video_box->autocrop) { |
| gst_structure_remove_field (structure, "width"); |
| gst_structure_remove_field (structure, "height"); |
| } else { |
| /* calculate width and height */ |
| if (gst_structure_get_int (structure, "width", &width)) { |
| if (direction == GST_PAD_SINK) { |
| width -= video_box->box_left; |
| width -= video_box->box_right; |
| } else { |
| width += video_box->box_left; |
| width += video_box->box_right; |
| } |
| if (width <= 0) |
| width = 1; |
| |
| GST_DEBUG_OBJECT (trans, "New caps width: %d", width); |
| gst_structure_set (structure, "width", G_TYPE_INT, width, NULL); |
| } |
| |
| if (gst_structure_get_int (structure, "height", &height)) { |
| if (direction == GST_PAD_SINK) { |
| height -= video_box->box_top; |
| height -= video_box->box_bottom; |
| } else { |
| height += video_box->box_top; |
| height += video_box->box_bottom; |
| } |
| |
| if (height <= 0) |
| height = 1; |
| |
| GST_DEBUG_OBJECT (trans, "New caps height: %d", height); |
| gst_structure_set (structure, "height", G_TYPE_INT, height, NULL); |
| } |
| } |
| |
| /* Supported conversions: |
| * I420->AYUV |
| * I420->YV12 |
| * YV12->AYUV |
| * YV12->I420 |
| * AYUV->I420 |
| * AYUV->YV12 |
| * AYUV->xRGB (24bpp, 32bpp, incl. alpha) |
| * xRGB->xRGB (24bpp, 32bpp, from/to all variants, incl. alpha) |
| * xRGB->AYUV (24bpp, 32bpp, incl. alpha) |
| * |
| * Passthrough only for everything else. |
| */ |
| name = gst_structure_get_name (structure); |
| if (g_str_equal (name, "video/x-raw-yuv")) { |
| guint32 fourcc; |
| |
| if (gst_structure_get_fourcc (structure, "format", &fourcc) && |
| (fourcc == GST_STR_FOURCC ("AYUV") || |
| fourcc == GST_STR_FOURCC ("I420") || |
| fourcc == GST_STR_FOURCC ("YV12"))) { |
| GValue list = { 0, }; |
| GValue val = { 0, }; |
| GstStructure *s2; |
| |
| /* get rid of format */ |
| gst_structure_remove_field (structure, "format"); |
| gst_structure_remove_field (structure, "color-matrix"); |
| gst_structure_remove_field (structure, "chroma-site"); |
| |
| s2 = gst_structure_copy (structure); |
| |
| g_value_init (&list, GST_TYPE_LIST); |
| g_value_init (&val, GST_TYPE_FOURCC); |
| gst_value_set_fourcc (&val, GST_STR_FOURCC ("AYUV")); |
| gst_value_list_append_value (&list, &val); |
| g_value_reset (&val); |
| gst_value_set_fourcc (&val, GST_STR_FOURCC ("I420")); |
| gst_value_list_append_value (&list, &val); |
| g_value_reset (&val); |
| gst_value_set_fourcc (&val, GST_STR_FOURCC ("YV12")); |
| gst_value_list_append_value (&list, &val); |
| g_value_unset (&val); |
| gst_structure_set_value (structure, "format", &list); |
| g_value_unset (&list); |
| |
| gst_structure_set_name (s2, "video/x-raw-rgb"); |
| g_value_init (&list, GST_TYPE_LIST); |
| g_value_init (&val, G_TYPE_INT); |
| g_value_set_int (&val, 32); |
| gst_value_list_append_value (&list, &val); |
| g_value_reset (&val); |
| g_value_set_int (&val, 24); |
| gst_value_list_append_value (&list, &val); |
| g_value_unset (&val); |
| gst_structure_set_value (s2, "depth", &list); |
| gst_structure_set_value (s2, "bpp", &list); |
| g_value_unset (&list); |
| gst_caps_append_structure (to, s2); |
| } |
| } else if (g_str_equal (name, "video/x-raw-rgb")) { |
| gint bpp; |
| |
| if (gst_structure_get_int (structure, "bpp", &bpp) && |
| (bpp == 32 || bpp == 24)) { |
| GValue list = { 0, }; |
| GValue val = { 0, }; |
| GstStructure *s2; |
| |
| /* get rid of the RGB specific fields */ |
| gst_structure_remove_field (structure, "depth"); |
| gst_structure_remove_field (structure, "bpp"); |
| gst_structure_remove_field (structure, "red_mask"); |
| gst_structure_remove_field (structure, "green_mask"); |
| gst_structure_remove_field (structure, "blue_mask"); |
| gst_structure_remove_field (structure, "alpha_mask"); |
| |
| s2 = gst_structure_copy (structure); |
| |
| g_value_init (&list, GST_TYPE_LIST); |
| g_value_init (&val, G_TYPE_INT); |
| g_value_set_int (&val, 32); |
| gst_value_list_append_value (&list, &val); |
| g_value_reset (&val); |
| g_value_set_int (&val, 24); |
| gst_value_list_append_value (&list, &val); |
| g_value_unset (&val); |
| gst_structure_set_value (structure, "depth", &list); |
| gst_structure_set_value (structure, "bpp", &list); |
| g_value_unset (&list); |
| |
| gst_structure_set_name (s2, "video/x-raw-yuv"); |
| gst_structure_set (s2, "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("AYUV"), |
| NULL); |
| gst_caps_append_structure (to, s2); |
| } |
| } |
| |
| /* filter against set allowed caps on the pad */ |
| other = (direction == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad; |
| |
| templ = gst_pad_get_pad_template_caps (other); |
| ret = gst_caps_intersect (to, templ); |
| gst_caps_unref (to); |
| |
| GST_DEBUG_OBJECT (video_box, "direction %d, transformed %" GST_PTR_FORMAT |
| " to %" GST_PTR_FORMAT, direction, from, ret); |
| |
| return ret; |
| } |
| |
| static gboolean |
| gst_video_box_recalc_transform (GstVideoBox * video_box) |
| { |
| gboolean res = TRUE; |
| |
| /* if we have the same format in and out and we don't need to perform any |
| * cropping at all, we can just operate in passthrough mode */ |
| if (video_box->in_format == video_box->out_format && |
| video_box->box_left == 0 && video_box->box_right == 0 && |
| video_box->box_top == 0 && video_box->box_bottom == 0 && |
| video_box->in_sdtv == video_box->out_sdtv) { |
| |
| GST_LOG_OBJECT (video_box, "we are using passthrough"); |
| gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (video_box), |
| TRUE); |
| } else { |
| GST_LOG_OBJECT (video_box, "we are not using passthrough"); |
| gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (video_box), |
| FALSE); |
| } |
| return res; |
| } |
| |
| static gboolean |
| gst_video_box_select_processing_functions (GstVideoBox * video_box) |
| { |
| switch (video_box->out_format) { |
| case GST_VIDEO_FORMAT_AYUV: |
| video_box->fill = fill_ayuv; |
| switch (video_box->in_format) { |
| case GST_VIDEO_FORMAT_AYUV: |
| video_box->copy = copy_ayuv_ayuv; |
| break; |
| case GST_VIDEO_FORMAT_I420: |
| case GST_VIDEO_FORMAT_YV12: |
| video_box->copy = copy_i420_ayuv; |
| break; |
| case GST_VIDEO_FORMAT_ARGB: |
| case GST_VIDEO_FORMAT_ABGR: |
| case GST_VIDEO_FORMAT_RGBA: |
| case GST_VIDEO_FORMAT_BGRA: |
| case GST_VIDEO_FORMAT_xRGB: |
| case GST_VIDEO_FORMAT_xBGR: |
| case GST_VIDEO_FORMAT_RGBx: |
| case GST_VIDEO_FORMAT_BGRx: |
| case GST_VIDEO_FORMAT_RGB: |
| case GST_VIDEO_FORMAT_BGR: |
| video_box->copy = copy_rgb32_ayuv; |
| break; |
| default: |
| break; |
| } |
| break; |
| case GST_VIDEO_FORMAT_I420: |
| case GST_VIDEO_FORMAT_YV12: |
| video_box->fill = fill_i420; |
| switch (video_box->in_format) { |
| case GST_VIDEO_FORMAT_AYUV: |
| video_box->copy = copy_ayuv_i420; |
| break; |
| case GST_VIDEO_FORMAT_I420: |
| case GST_VIDEO_FORMAT_YV12: |
| video_box->copy = copy_i420_i420; |
| break; |
| default: |
| break; |
| } |
| break; |
| case GST_VIDEO_FORMAT_ARGB: |
| case GST_VIDEO_FORMAT_ABGR: |
| case GST_VIDEO_FORMAT_RGBA: |
| case GST_VIDEO_FORMAT_BGRA: |
| case GST_VIDEO_FORMAT_xRGB: |
| case GST_VIDEO_FORMAT_xBGR: |
| case GST_VIDEO_FORMAT_RGBx: |
| case GST_VIDEO_FORMAT_BGRx: |
| case GST_VIDEO_FORMAT_RGB: |
| case GST_VIDEO_FORMAT_BGR: |
| video_box->fill = (video_box->out_format == GST_VIDEO_FORMAT_BGR |
| || video_box->out_format == |
| GST_VIDEO_FORMAT_RGB) ? fill_rgb24 : fill_rgb32; |
| switch (video_box->in_format) { |
| case GST_VIDEO_FORMAT_ARGB: |
| case GST_VIDEO_FORMAT_ABGR: |
| case GST_VIDEO_FORMAT_RGBA: |
| case GST_VIDEO_FORMAT_BGRA: |
| case GST_VIDEO_FORMAT_xRGB: |
| case GST_VIDEO_FORMAT_xBGR: |
| case GST_VIDEO_FORMAT_RGBx: |
| case GST_VIDEO_FORMAT_BGRx: |
| case GST_VIDEO_FORMAT_RGB: |
| case GST_VIDEO_FORMAT_BGR: |
| video_box->copy = copy_rgb32; |
| break; |
| case GST_VIDEO_FORMAT_AYUV: |
| video_box->copy = copy_ayuv_rgb32; |
| break; |
| default: |
| break; |
| } |
| break; |
| case GST_VIDEO_FORMAT_GRAY8: |
| case GST_VIDEO_FORMAT_GRAY16_BE: |
| case GST_VIDEO_FORMAT_GRAY16_LE: |
| video_box->fill = fill_gray; |
| switch (video_box->in_format) { |
| case GST_VIDEO_FORMAT_GRAY8: |
| case GST_VIDEO_FORMAT_GRAY16_BE: |
| case GST_VIDEO_FORMAT_GRAY16_LE: |
| video_box->copy = copy_packed_simple; |
| break; |
| default: |
| break; |
| } |
| break; |
| default: |
| break; |
| } |
| |
| return video_box->fill != NULL && video_box->copy != NULL; |
| } |
| |
| static gboolean |
| gst_video_box_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (trans); |
| gboolean ret; |
| const gchar *matrix; |
| |
| g_mutex_lock (video_box->mutex); |
| |
| ret = |
| gst_video_format_parse_caps (in, &video_box->in_format, |
| &video_box->in_width, &video_box->in_height); |
| ret &= |
| gst_video_format_parse_caps (out, &video_box->out_format, |
| &video_box->out_width, &video_box->out_height); |
| |
| matrix = gst_video_parse_caps_color_matrix (in); |
| video_box->in_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE; |
| matrix = gst_video_parse_caps_color_matrix (out); |
| video_box->out_sdtv = matrix ? g_str_equal (matrix, "sdtv") : TRUE; |
| |
| /* something wrong getting the caps */ |
| if (!ret) |
| goto no_caps; |
| |
| GST_DEBUG_OBJECT (trans, "Input w: %d h: %d", video_box->in_width, |
| video_box->in_height); |
| GST_DEBUG_OBJECT (trans, "Output w: %d h: %d", video_box->out_width, |
| video_box->out_height); |
| |
| if (video_box->autocrop) |
| gst_video_box_autocrop (video_box); |
| |
| /* recalc the transformation strategy */ |
| ret = gst_video_box_recalc_transform (video_box); |
| |
| if (ret) |
| ret = gst_video_box_select_processing_functions (video_box); |
| g_mutex_unlock (video_box->mutex); |
| |
| return ret; |
| |
| /* ERRORS */ |
| no_caps: |
| { |
| GST_DEBUG_OBJECT (video_box, |
| "Invalid caps: %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, in, out); |
| g_mutex_unlock (video_box->mutex); |
| return FALSE; |
| } |
| } |
| |
| static gboolean |
| gst_video_box_get_unit_size (GstBaseTransform * trans, GstCaps * caps, |
| guint * size) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (trans); |
| GstVideoFormat format; |
| gint width, height; |
| gboolean ret; |
| |
| g_assert (size); |
| |
| ret = gst_video_format_parse_caps (caps, &format, &width, &height); |
| if (!ret) { |
| GST_ERROR_OBJECT (video_box, "Invalid caps: %" GST_PTR_FORMAT, caps); |
| return FALSE; |
| } |
| |
| *size = gst_video_format_get_size (format, width, height); |
| |
| GST_LOG_OBJECT (video_box, "Returning from _unit_size %d", *size); |
| |
| return TRUE; |
| } |
| |
| static void |
| gst_video_box_fixate_caps (GstBaseTransform * trans, |
| GstPadDirection direction, GstCaps * caps, GstCaps * othercaps) |
| { |
| gint width, height; |
| GstStructure *s; |
| gboolean ret; |
| |
| ret = gst_video_format_parse_caps (caps, NULL, &width, &height); |
| if (!ret) |
| return; |
| |
| s = gst_caps_get_structure (othercaps, 0); |
| gst_structure_fixate_field_nearest_int (s, "width", width); |
| gst_structure_fixate_field_nearest_int (s, "height", height); |
| } |
| |
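| /* Renders one output frame: computes the visible region of the input, fills |
|  * the output with the border color if any border has to be added and then |
|  * copies (and converts, if necessary) the cropped input into place. */ |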
| static void |
| gst_video_box_process (GstVideoBox * video_box, const guint8 * src, |
| guint8 * dest) |
| { |
| guint b_alpha = CLAMP ((guint) (video_box->border_alpha * 256.0), 0, 255); |
| guint i_alpha = CLAMP ((guint) (video_box->alpha * 256.0), 0, 255); |
| GstVideoBoxFill fill_type = video_box->fill_type; |
| gint br, bl, bt, bb, crop_w, crop_h; |
| |
| crop_h = 0; |
| crop_w = 0; |
| |
| br = video_box->box_right; |
| bl = video_box->box_left; |
| bt = video_box->box_top; |
| bb = video_box->box_bottom; |
| |
| if (br >= 0 && bl >= 0) { |
| crop_w = video_box->in_width - (br + bl); |
| } else if (br >= 0 && bl < 0) { |
| crop_w = video_box->in_width - (br); |
| } else if (br < 0 && bl >= 0) { |
| crop_w = video_box->in_width - (bl); |
| } else if (br < 0 && bl < 0) { |
| crop_w = video_box->in_width; |
| } |
| |
| if (bb >= 0 && bt >= 0) { |
| crop_h = video_box->in_height - (bb + bt); |
| } else if (bb >= 0 && bt < 0) { |
| crop_h = video_box->in_height - (bb); |
| } else if (bb < 0 && bt >= 0) { |
| crop_h = video_box->in_height - (bt); |
| } else if (bb < 0 && bt < 0) { |
| crop_h = video_box->in_height; |
| } |
| |
| GST_DEBUG_OBJECT (video_box, "Borders are: L:%d, R:%d, T:%d, B:%d", bl, br, |
| bt, bb); |
| GST_DEBUG_OBJECT (video_box, "Alpha value is: %u (frame) %u (border)", |
| i_alpha, b_alpha); |
| |
| if (crop_h < 0 || crop_w < 0) { |
| video_box->fill (fill_type, b_alpha, video_box->out_format, dest, |
| video_box->out_sdtv, video_box->out_width, video_box->out_height); |
| } else if (bb == 0 && bt == 0 && br == 0 && bl == 0) { |
| video_box->copy (i_alpha, video_box->out_format, dest, video_box->out_sdtv, |
| video_box->out_width, video_box->out_height, 0, 0, video_box->in_format, |
| src, video_box->in_sdtv, video_box->in_width, video_box->in_height, 0, |
| 0, crop_w, crop_h); |
| } else { |
| gint src_x = 0, src_y = 0; |
| gint dest_x = 0, dest_y = 0; |
| |
| /* Fill everything if a border should be added somewhere */ |
| if (bt < 0 || bb < 0 || br < 0 || bl < 0) |
| video_box->fill (fill_type, b_alpha, video_box->out_format, dest, |
| video_box->out_sdtv, video_box->out_width, video_box->out_height); |
| |
| /* Top border */ |
| if (bt < 0) { |
| dest_y += -bt; |
| } else { |
| src_y += bt; |
| } |
| |
| /* Left border */ |
| if (bl < 0) { |
| dest_x += -bl; |
| } else { |
| src_x += bl; |
| } |
| |
| /* Frame */ |
| video_box->copy (i_alpha, video_box->out_format, dest, video_box->out_sdtv, |
| video_box->out_width, video_box->out_height, dest_x, dest_y, |
| video_box->in_format, src, video_box->in_sdtv, video_box->in_width, |
| video_box->in_height, src_x, src_y, crop_w, crop_h); |
| } |
| |
| GST_LOG_OBJECT (video_box, "image created"); |
| } |
| |
| static GstFlowReturn |
| gst_video_box_transform (GstBaseTransform * trans, GstBuffer * in, |
| GstBuffer * out) |
| { |
| GstVideoBox *video_box = GST_VIDEO_BOX (trans); |
| const guint8 *indata; |
| guint8 *outdata; |
| GstClockTime timestamp, stream_time; |
| |
| timestamp = GST_BUFFER_TIMESTAMP (in); |
| stream_time = |
| gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp); |
| |
| GST_DEBUG_OBJECT (video_box, "sync to %" GST_TIME_FORMAT, |
| GST_TIME_ARGS (timestamp)); |
| |
| if (GST_CLOCK_TIME_IS_VALID (stream_time)) |
| gst_object_sync_values (G_OBJECT (video_box), stream_time); |
| |
| indata = GST_BUFFER_DATA (in); |
| outdata = GST_BUFFER_DATA (out); |
| |
| g_mutex_lock (video_box->mutex); |
| gst_video_box_process (video_box, indata, outdata); |
| g_mutex_unlock (video_box->mutex); |
| return GST_FLOW_OK; |
| } |
| |
| /* FIXME: 0.11 merge with videocrop plugin */ |
| static gboolean |
| plugin_init (GstPlugin * plugin) |
| { |
| oil_init (); |
| |
| gst_controller_init (NULL, NULL); |
| |
| GST_DEBUG_CATEGORY_INIT (videobox_debug, "videobox", 0, |
| "Resizes a video by adding borders or cropping"); |
| |
| return gst_element_register (plugin, "videobox", GST_RANK_NONE, |
| GST_TYPE_VIDEO_BOX); |
| } |
| |
| GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, |
| GST_VERSION_MINOR, |
| "videobox", |
| "resizes a video by adding borders or cropping", |
| plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) |