diff -Nru gst-plugins-bad1.0-1.1.4/debian/build-deps gst-plugins-bad1.0-1.1.4/debian/build-deps --- gst-plugins-bad1.0-1.1.4/debian/build-deps 2013-09-04 11:21:25.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/build-deps 2013-09-17 19:04:09.000000000 +0000 @@ -40,6 +40,7 @@ libiptcdata0-dev (>= 1.0.2) libjasper-dev libkate-dev (>= 0.1.7) +libmedia-dev (>= 0.1.0+git20130606+c5d897a-0ubuntu26) [i386 armhf] libmimic-dev (>= 1.0) libmms-dev (>= 0.4) libmodplug-dev diff -Nru gst-plugins-bad1.0-1.1.4/debian/build-deps.in gst-plugins-bad1.0-1.1.4/debian/build-deps.in --- gst-plugins-bad1.0-1.1.4/debian/build-deps.in 2013-08-30 11:02:50.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/build-deps.in 2013-09-12 17:48:21.000000000 +0000 @@ -1,6 +1,7 @@ @GST_LIB_DEV_DEP@ @GST_EXTRA_BUILD_DEPENDS@ libgstreamer-plugins-base@GST_ABI@-dev (>= 1.1.4) +libgstreamer-plugins-good@GST_ABI@-dev (>= 1.1.4) autotools-dev dh-autoreconf automake (>= 1.11) @@ -40,6 +41,7 @@ libkate-dev (>= 0.1.7) libschroedinger-dev (>= 1.0.7) libdirac-dev (>= 0.10) +libmedia-dev (>= 0.1.0+git20130606+c5d897a-0ubuntu26) [i386 armhf] libmimic-dev (>= 1.0) libgme-dev librsvg2-dev (>= 2.36) diff -Nru gst-plugins-bad1.0-1.1.4/debian/changelog gst-plugins-bad1.0-1.1.4/debian/changelog --- gst-plugins-bad1.0-1.1.4/debian/changelog 2013-09-10 09:12:49.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/changelog 2013-09-20 14:01:12.000000000 +0000 @@ -1,3 +1,14 @@ +gst-plugins-bad1.0 (1.1.4-2ubuntu2) saucy; urgency=low + + * debian/patches/adding-mirsink-and-android-media-over-hybris-support.patch: + - Adding mirsink and Android media over hybris support, for hardware + accelerated decode using libstagefright and the hybris compat layer. 
+ * debian/control.in: + - Making the hybris plugin as part of a separated package, and i386 and + armhf only (can work with android compatible archs) + + -- Ricardo Salveti de Araujo Tue, 17 Sep 2013 16:05:57 -0300 + gst-plugins-bad1.0 (1.1.4-2ubuntu1) saucy; urgency=low * Merge from Debian unstable. Remaining changes: diff -Nru gst-plugins-bad1.0-1.1.4/debian/control gst-plugins-bad1.0-1.1.4/debian/control --- gst-plugins-bad1.0-1.1.4/debian/control 2013-09-04 11:21:51.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/control 2013-09-17 19:28:25.000000000 +0000 @@ -50,6 +50,7 @@ libiptcdata0-dev (>= 1.0.2), libjasper-dev, libkate-dev (>= 0.1.7), + libmedia-dev (>= 0.1.0+git20130606+c5d897a-0ubuntu26) [i386 armhf], libmimic-dev (>= 1.0), libmms-dev (>= 0.4), libmodplug-dev, @@ -108,6 +109,30 @@ real live maintainer, or some actual wide use. . This package contains the documentation for plugins from the "bad" set. + +Package: gstreamer1.0-hybris +Architecture: i386 armhf +Multi-Arch: same +Depends: ${misc:Depends}, + ${shlibs:Depends} +Provides: ${gstreamer:Provides} +XB-GStreamer-Version: ${gstreamer:Version} +XB-GStreamer-Elements: ${gstreamer:Elements} +XB-GStreamer-URI-Sources: ${gstreamer:URISources} +XB-GStreamer-URI-Sinks: ${gstreamer:URISinks} +XB-GStreamer-Encoders: ${gstreamer:Encoders} +XB-GStreamer-Decoders: ${gstreamer:Decoders} +Description: GStreamer plugins from hybris + GStreamer is a streaming media framework, based on graphs of filters + which operate on media data. Applications using this library can do + anything from real-time sound processing to playing videos, and just + about anything else media-related. Its plugin-based architecture means + that new data types or processing capabilities can be added simply by + installing new plug-ins. + . + This package contains the GStreamer plugin for hybris, which provides + a way to do hardware decode using both hybris and libstagefright from + Android. 
Package: gstreamer1.0-plugins-bad Architecture: any @@ -115,6 +140,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, + gstreamer1.0-hybris, gstreamer1.0-plugins-base, libgstreamer-plugins-bad1.0-0 (= ${binary:Version}), Provides: ${gstreamer:Provides} @@ -145,7 +171,8 @@ Multi-Arch: same Section: debug Priority: extra -Depends: gstreamer1.0-plugins-bad (= ${binary:Version}), +Depends: gstreamer1.0-hybris (= ${binary:Version}), + gstreamer1.0-plugins-bad (= ${binary:Version}), ${misc:Depends} Replaces: gstreamer1.0-plugins-base-dbg (<< 0.11.94), gstreamer1.0-plugins-good (<< 1.1.2) Conflicts: gstreamer1.0-plugins-base-dbg (<< 0.11.94), gstreamer1.0-plugins-good (<< 1.1.2) diff -Nru gst-plugins-bad1.0-1.1.4/debian/control.in gst-plugins-bad1.0-1.1.4/debian/control.in --- gst-plugins-bad1.0-1.1.4/debian/control.in 2013-09-04 11:21:07.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/control.in 2013-09-17 18:01:10.000000000 +0000 @@ -30,6 +30,30 @@ real live maintainer, or some actual wide use. . This package contains the documentation for plugins from the "bad" set. + +Package: @GST_PKGNAME@-hybris +Architecture: any +Multi-Arch: same +Depends: ${misc:Depends}, + ${shlibs:Depends} +Provides: ${gstreamer:Provides} +XB-GStreamer-Version: ${gstreamer:Version} +XB-GStreamer-Elements: ${gstreamer:Elements} +XB-GStreamer-URI-Sources: ${gstreamer:URISources} +XB-GStreamer-URI-Sinks: ${gstreamer:URISinks} +XB-GStreamer-Encoders: ${gstreamer:Encoders} +XB-GStreamer-Decoders: ${gstreamer:Decoders} +Description: GStreamer plugins from hybris + GStreamer is a streaming media framework, based on graphs of filters + which operate on media data. Applications using this library can do + anything from real-time sound processing to playing videos, and just + about anything else media-related. Its plugin-based architecture means + that new data types or processing capabilities can be added simply by + installing new plug-ins. + . 
+ This package contains the GStreamer plugin for hybris, which provides + a way to do hardware decode using both hybris and libstagefright from + Android. Package: @GST_PKGNAME@-plugins-bad Architecture: any @@ -37,6 +61,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, + gstreamer@GST_ABI@-hybris, gstreamer@GST_ABI@-plugins-base, libgstreamer-plugins-bad@GST_DEB_ABI@ (= ${binary:Version}), Provides: ${gstreamer:Provides} @@ -67,7 +92,8 @@ Multi-Arch: same Section: debug Priority: extra -Depends: @GST_PKGNAME@-plugins-bad (= ${binary:Version}), +Depends: @GST_PKGNAME@-hybris (= ${binary:Version}), + @GST_PKGNAME@-plugins-bad (= ${binary:Version}), ${misc:Depends} Replaces: gstreamer1.0-plugins-base-dbg (<< 0.11.94), gstreamer1.0-plugins-good (<< 1.1.2) Conflicts: gstreamer1.0-plugins-base-dbg (<< 0.11.94), gstreamer1.0-plugins-good (<< 1.1.2) diff -Nru gst-plugins-bad1.0-1.1.4/debian/gstreamer-hybris.install gst-plugins-bad1.0-1.1.4/debian/gstreamer-hybris.install --- gst-plugins-bad1.0-1.1.4/debian/gstreamer-hybris.install 1970-01-01 00:00:00.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/gstreamer-hybris.install 2013-09-17 19:04:12.000000000 +0000 @@ -0,0 +1,2 @@ +debian/tmp/usr/lib/*/gstreamer-@GST_ABI@/libgstandroidmedia.so +debian/tmp/usr/lib/*/gstreamer-@GST_ABI@/libgstmirsink.so diff -Nru gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad-dev.install gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad-dev.install --- gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad-dev.install 2013-09-04 11:21:07.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad-dev.install 2013-09-12 18:11:51.000000000 +0000 @@ -9,3 +9,4 @@ debian/tmp/usr/lib/*/libgstinsertbin-@GST_ABI@.so debian/tmp/usr/lib/*/libgstmpegts-@GST_ABI@.so debian/tmp/usr/lib/*/libgsturidownloader-@GST_ABI@.so +@mirallocdev@ diff -Nru gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad.install 
gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad.install --- gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad.install 2013-09-04 11:21:07.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/libgstreamer-plugins-bad.install 2013-09-12 18:07:03.000000000 +0000 @@ -3,3 +3,4 @@ debian/tmp/usr/lib/*/libgstcodecparsers-@GST_ABI@.so.* debian/tmp/usr/lib/*/libgstmpegts-@GST_ABI@.so.* debian/tmp/usr/lib/*/libgsturidownloader-@GST_ABI@.so.* +@miralloc@ diff -Nru gst-plugins-bad1.0-1.1.4/debian/patches/adding-mirsink-and-android-media-over-hybris-support.patch gst-plugins-bad1.0-1.1.4/debian/patches/adding-mirsink-and-android-media-over-hybris-support.patch --- gst-plugins-bad1.0-1.1.4/debian/patches/adding-mirsink-and-android-media-over-hybris-support.patch 1970-01-01 00:00:00.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/patches/adding-mirsink-and-android-media-over-hybris-support.patch 2013-09-19 16:36:45.000000000 +0000 @@ -0,0 +1,6951 @@ +Description: Adding mirsink and Android media over hybris support, +for hardware accelerated decode using libstagefright and the hybris +compat layer. 
+Author: Jim Hodapp + +Origin: vendor +Forwarded: no + +diff --git a/configure.ac b/configure.ac +index 9a4a2d1..0322218 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -844,6 +844,14 @@ AG_GST_CHECK_FEATURE(ANDROID_MEDIA, [Android Media], androidmedia, [ + esac + ]) + ++dnl *** A Hybris-based Platform *** ++translit(dnm, m, l) AM_CONDITIONAL(USE_ANDROID_MEDIA_HYBRIS, true) ++HAVE_ANDROID_MEDIA_HYBRIS="no" ++dnl Check for the presence of Hybris (Ubuntu Touch) ++AG_GST_CHECK_FEATURE(ANDROID_MEDIA_HYBRIS, [Android Media Hybris], androidmediahybris, [ ++ AC_CHECK_HEADER(hybris/media/media_codec_layer.h, HAVE_ANDROID_MEDIA_HYBRIS="yes", HAVE_ANDROID_MEDIA_HYBRIS="no") ++]) ++ + dnl *** AppleMedia (OS X and iOS) *** + translit(dnm, m, l) AM_CONDITIONAL(USE_APPLE_MEDIA, true) + HAVE_APPLE_MEDIA="no" +@@ -2178,6 +2186,7 @@ AM_CONDITIONAL(DECKLINK_OSX, false) + AM_CONDITIONAL(USE_DIRECTFB, false) + AM_CONDITIONAL(USE_WAYLAND, false) + AM_CONDITIONAL(USE_DAALA, false) ++AM_CONDITIONAL(USE_ANDROID_MEDIA_HYBRIS, false) + AM_CONDITIONAL(USE_DTS, false) + AM_CONDITIONAL(USE_EXIF, false) + AM_CONDITIONAL(USE_RESINDVD, false) +@@ -2384,6 +2393,7 @@ gst-libs/Makefile + gst-libs/gst/Makefile + gst-libs/gst/basecamerabinsrc/Makefile + gst-libs/gst/egl/Makefile ++gst-libs/gst/mir/Makefile + gst-libs/gst/insertbin/Makefile + gst-libs/gst/interfaces/Makefile + gst-libs/gst/codecparsers/Makefile +@@ -2444,6 +2454,7 @@ ext/dc1394/Makefile + ext/directfb/Makefile + ext/wayland/Makefile + ext/daala/Makefile ++ext/mir/Makefile + ext/dts/Makefile + ext/eglgles/Makefile + ext/faac/Makefile +diff --git a/ext/Makefile.am b/ext/Makefile.am +index e13ce6a..b22cd95 100644 +--- a/ext/Makefile.am ++++ b/ext/Makefile.am +@@ -76,6 +76,12 @@ else + DAALA_DIR= + endif + ++if USE_ANDROID_MEDIA_HYBRIS ++MIR_DIR=mir ++else ++MIR_DIR= ++endif ++ + if USE_DTS + DTS_DIR=dts + else +@@ -407,6 +413,7 @@ SUBDIRS=\ + $(LV2_DIR) \ + $(LIBFAME_DIR) \ + $(LIBMMS_DIR) \ ++ $(MIR_DIR) \ + $(MODPLUG_DIR) \ + 
$(MPEG2ENC_DIR) \ + $(MPG123_DIR) \ +@@ -452,6 +459,7 @@ DIST_SUBDIRS = \ + dc1394 \ + directfb \ + wayland \ ++ mir \ + faac \ + faad \ + flite \ +diff --git a/ext/mir/Makefile.am b/ext/mir/Makefile.am +new file mode 100644 +index 0000000..2121504 +--- /dev/null ++++ b/ext/mir/Makefile.am +@@ -0,0 +1,16 @@ ++plugin_LTLIBRARIES = libgstmirsink.la ++ ++libgstmirsink_la_SOURCES = gstmirsink.c mirpool.c ++libgstmirsink_la_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \ ++ $(MIR_CFLAGS) \ ++ -I../../gst-libs/ ++libgstmirsink_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \ ++ -lgstvideo-$(GST_API_VERSION) \ ++ $(top_builddir)/gst-libs/gst/mir/libgstmiralloc-$(GST_API_VERSION).la \ ++ -lmedia \ ++ $(EGL_LIBS) $(EGLGLES_LIBS) \ ++ $(MIR_LIBS) ++libgstmirsink_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) ++libgstmirsink_la_LIBTOOLFLAGS = $(GST_PLUGIN_LIBTOOLFLAGS) ++include_HEADERS = mirpool.h gstmirsink.h ++noinst_HEADERS = +diff --git a/ext/mir/gstmirsink.c b/ext/mir/gstmirsink.c +new file mode 100644 +index 0000000..c8d901b +--- /dev/null ++++ b/ext/mir/gstmirsink.c +@@ -0,0 +1,551 @@ ++/* ++ * GStreamer Mir video sink ++ * Copyright (C) 2013 Canonical Ltd ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the Free ++ * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, ++ * Boston, MA 02110-1301 USA. 
++ */ ++ ++/** ++ * SECTION:element-mirsink ++ * ++ * The mirsink creates its own window and renders the decoded video frames there. ++ * Setup the Mir environment as described in ++ * Mir home page. ++ * ++ * ++ * Example pipeline ++ * |[ ++ * gst-launch -v filesrc ! qtdemux ! h264parse ! queue ! amcviddec-omxtiducati1videodecoder ! mirsink ++ * ]| test the video rendering with mirsink ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include "gstmirsink.h" ++#include "mirpool.h" ++ ++#include ++ ++#include ++#include ++#include ++#include ++#include ++ ++/* signals */ ++enum ++{ ++ SIGNAL_0, ++ LAST_SIGNAL ++}; ++ ++/* Properties */ ++enum ++{ ++ PROP_0, ++ PROP_MIR_TEXTURE_ID ++}; ++ ++GST_DEBUG_CATEGORY (gstmir_debug); ++#define GST_CAT_DEFAULT gstmir_debug ++ ++#if G_BYTE_ORDER == G_BIG_ENDIAN ++#define CAPS "{NV12, xRGB, ARGB}" ++#else ++#define CAPS "{NV21, BGRx, BGRA}" ++#endif ++ ++static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("video/x-raw, " ++ "format=(string)NV12, " ++ "width=(int)[ 1, MAX ], " "height=(int)[ 1, MAX ], " ++ "framerate=(fraction)[ 0, MAX ] ")); ++ ++static guint frame_ready_signal = 0; ++ ++/* Fixme: Add more interfaces */ ++#define gst_mir_sink_parent_class parent_class ++G_DEFINE_TYPE (GstMirSink, gst_mir_sink, GST_TYPE_VIDEO_SINK); ++ ++static void gst_mir_sink_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec); ++static void gst_mir_sink_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec); ++static void gst_mir_sink_finalize (GObject * object); ++static GstCaps *gst_mir_sink_get_caps (GstBaseSink * bsink, GstCaps * filter); ++static gboolean gst_mir_sink_set_caps (GstBaseSink * bsink, GstCaps * caps); ++static gboolean gst_mir_sink_start (GstBaseSink * bsink); ++static gboolean gst_mir_sink_stop (GstBaseSink * bsink); ++static gboolean gst_mir_sink_preroll 
(GstBaseSink * bsink, GstBuffer * buffer); ++static gboolean ++gst_mir_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query); ++static gboolean gst_mir_sink_render (GstBaseSink * bsink, GstBuffer * buffer); ++ ++static void ++gst_mir_sink_class_init (GstMirSinkClass * klass) ++{ ++ GObjectClass *gobject_class; ++ GstElementClass *gstelement_class; ++ GstBaseSinkClass *gstbasesink_class; ++ ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ gstbasesink_class = (GstBaseSinkClass *) klass; ++ ++ gobject_class->set_property = gst_mir_sink_set_property; ++ gobject_class->get_property = gst_mir_sink_get_property; ++ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_mir_sink_finalize); ++ ++ gst_element_class_add_pad_template (gstelement_class, ++ gst_static_pad_template_get (&sink_template)); ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "Mir video sink", "Sink/Video", ++ "Output to Mir surface", "Jim Hodapp "); ++ ++ gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_mir_sink_get_caps); ++ gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_mir_sink_set_caps); ++ gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_mir_sink_start); ++ gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_mir_sink_stop); ++ gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_mir_sink_preroll); ++ gstbasesink_class->propose_allocation = ++ GST_DEBUG_FUNCPTR (gst_mir_sink_propose_allocation); ++ gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_mir_sink_render); ++ ++ /* This signal is for being notified when a frame is ready to be rendered. This ++ * is useful for anything outside of the sink that needs to know when each frame ++ * is ready. 
*/ ++ frame_ready_signal = ++ g_signal_new ("frame-ready", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_FIRST, 0, NULL, NULL, g_cclosure_marshal_generic, ++ G_TYPE_NONE, 1, G_TYPE_POINTER); ++ ++ g_object_class_install_property (gobject_class, PROP_MIR_TEXTURE_ID, ++ g_param_spec_uint ("texture-id", "Texture ID", ++ "Texture ID to render video to, created by the application", 0, ++ UINT_MAX, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_mir_sink_init (GstMirSink * sink) ++{ ++ GST_DEBUG_OBJECT (sink, "Initializing mir sink!"); ++ sink->pool = NULL; ++ ++ g_mutex_init (&sink->mir_lock); ++} ++ ++static void ++gst_mir_sink_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstMirSink *sink = GST_MIR_SINK (object); ++ ++ switch (prop_id) { ++ case PROP_MIR_TEXTURE_ID: ++ g_value_set_uint (value, sink->texture_id); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_mir_sink_create_surface_texture (GObject * object) ++{ ++ GstMirSink *sink = GST_MIR_SINK (object); ++ ++ surface_texture_client_create_by_id (sink->texture_id); ++ GST_DEBUG_OBJECT (sink, "Created new SurfaceTextureClientHybris instance"); ++} ++ ++static void ++gst_mir_sink_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstMirSink *sink = GST_MIR_SINK (object); ++ ++ switch (prop_id) { ++ case PROP_MIR_TEXTURE_ID: ++ sink->texture_id = g_value_get_uint (value); ++ GST_DEBUG_OBJECT (object, "texture_id: %d", sink->texture_id); ++ gst_mir_sink_create_surface_texture (object); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_mir_sink_finalize (GObject * object) ++{ ++ GstMirSink *sink = GST_MIR_SINK (object); ++ ++ GST_DEBUG_OBJECT (sink, "Finalizing the sink.."); ++ ++ if (sink->surface_texture_client) ++ surface_texture_client_destroy 
(sink->surface_texture_client); ++ ++ g_mutex_clear (&sink->mir_lock); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static GstCaps * ++gst_mir_sink_get_caps (GstBaseSink * bsink, GstCaps * filter) ++{ ++ GstMirSink *sink; ++ GstCaps *caps; ++ ++ sink = GST_MIR_SINK (bsink); ++ ++ GST_DEBUG_OBJECT (sink, "%s", __PRETTY_FUNCTION__); ++ ++ caps = gst_pad_get_pad_template_caps (GST_VIDEO_SINK_PAD (sink)); ++ if (filter) { ++ GstCaps *intersection; ++ ++ intersection = ++ gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (caps); ++ caps = intersection; ++ } ++ return caps; ++} ++ ++static gboolean ++gst_mir_sink_set_caps (GstBaseSink * bsink, GstCaps * caps) ++{ ++ GstMirSink *sink = GST_MIR_SINK (bsink); ++ GstBufferPool *newpool, *oldpool; ++ GstMirBufferPool *m_pool; ++ GstVideoInfo info; ++ GstStructure *config; ++ static GstAllocationParams params = { ++ 0, 0, 0, 15, ++ }; ++ guint size; ++ ++ sink = GST_MIR_SINK (bsink); ++ ++ GST_DEBUG_OBJECT (sink, "set caps %" GST_PTR_FORMAT, caps); ++ ++ if (!gst_video_info_from_caps (&info, caps)) ++ goto invalid_format; ++ ++ sink->video_width = info.width; ++ sink->video_height = info.height; ++ size = info.size; ++ ++ GST_DEBUG_OBJECT (sink, "Creating new GstMirBufferPool"); ++ /* Create a new pool for the new configuration */ ++ newpool = gst_mir_buffer_pool_new (sink); ++ ++ if (!newpool) { ++ GST_ERROR_OBJECT (sink, "Failed to create new pool"); ++ return FALSE; ++ } ++ ++ GST_DEBUG_OBJECT (sink, ++ "Setting SurfaceTextureClientHybris instance in m_pool"); ++ /* Add the SurfaceTextureClientHybris instance to the pool for later use */ ++ gst_mir_buffer_pool_set_surface_texture_client (newpool, ++ sink->surface_texture_client); ++ GST_WARNING_OBJECT (sink, "SurfaceTextureClientHybris: %p", ++ sink->surface_texture_client); ++ ++ m_pool = GST_MIR_BUFFER_POOL_CAST (newpool); ++ GST_WARNING_OBJECT (sink, "m_pool SurfaceTextureClientHybris: %p", ++ 
m_pool->surface_texture_client); ++ m_pool->width = sink->video_width; ++ m_pool->height = sink->video_height; ++ ++ config = gst_buffer_pool_get_config (newpool); ++ gst_buffer_pool_config_set_params (config, caps, size, 2, 0); ++ gst_buffer_pool_config_set_allocator (config, NULL, ¶ms); ++ if (!gst_buffer_pool_set_config (newpool, config)) ++ goto config_failed; ++ ++ GST_OBJECT_LOCK (sink); ++ oldpool = sink->pool; ++ sink->pool = newpool; ++ GST_OBJECT_UNLOCK (sink); ++ ++ GST_DEBUG_OBJECT (sink, "Finishing up set_caps"); ++ ++ if (oldpool) ++ gst_object_unref (oldpool); ++ ++ return TRUE; ++ ++invalid_format: ++ { ++ GST_DEBUG_OBJECT (sink, ++ "Could not locate image format from caps %" GST_PTR_FORMAT, caps); ++ return FALSE; ++ } ++config_failed: ++ { ++ GST_DEBUG_OBJECT (bsink, "failed setting config"); ++ return FALSE; ++ } ++} ++ ++static gboolean ++gst_mir_sink_start (GstBaseSink * bsink) ++{ ++ GstMirSink *sink = (GstMirSink *) bsink; ++ ++ GST_DEBUG_OBJECT (sink, "start"); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_mir_sink_stop (GstBaseSink * bsink) ++{ ++ GstMirSink *sink = (GstMirSink *) bsink; ++ ++ GST_DEBUG_OBJECT (sink, "stop"); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_mir_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query) ++{ ++ GstMirSink *sink = GST_MIR_SINK (bsink); ++ GstBufferPool *pool; ++ GstStructure *config; ++ GstCaps *caps; ++ guint size = 0; ++ gboolean need_pool; ++ GstAllocator *allocator; ++ GstAllocationParams params; ++ ++ GST_DEBUG_OBJECT (sink, "%s", __PRETTY_FUNCTION__); ++ GST_DEBUG_OBJECT (sink, "Proposing ALLOCATION params"); ++ ++ gst_allocation_params_init (¶ms); ++ ++ gst_query_parse_allocation (query, &caps, &need_pool); ++ if (!caps) ++ goto no_caps; ++ ++ GST_OBJECT_LOCK (sink); ++ pool = sink->pool ? 
gst_object_ref (sink->pool) : NULL; ++ GST_OBJECT_UNLOCK (sink); ++ ++ GST_DEBUG_OBJECT (sink, "pool: %p, need_pool: %d", pool, need_pool); ++ ++ if (pool) { ++ GstCaps *pcaps; ++ GST_WARNING_OBJECT (sink, "already have a pool"); ++ ++ /* We had a pool, check caps */ ++ config = gst_buffer_pool_get_config (pool); ++ gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL); ++ ++ if (!gst_caps_is_equal (caps, pcaps)) { ++ /* Different caps, we can't use this pool */ ++ gst_object_unref (pool); ++ pool = NULL; ++ } ++ gst_structure_free (config); ++ } ++ ++ if (pool == NULL && need_pool) { ++ GstVideoInfo info; ++ info.size = 0; ++ ++ if (!gst_video_info_from_caps (&info, caps)) ++ goto invalid_caps; ++ ++ GST_DEBUG_OBJECT (sink, "size: %d", size); ++ GST_DEBUG_OBJECT (sink, "caps %" GST_PTR_FORMAT, caps); ++ GST_DEBUG_OBJECT (sink, "create new pool"); ++ pool = gst_mir_buffer_pool_new (sink); ++ ++#if 0 ++ gst_mir_buffer_pool_set_surface_texture_client (pool, ++ sink->surface_texture_client); ++ GST_WARNING_OBJECT (sink, "SurfaceTextureClientHybris: %p", ++ sink->surface_texture_client); ++#endif ++ ++ /* The normal size of a frame */ ++ size = (info.size == 0) ? info.height * info.width : info.size; ++ ++ config = gst_buffer_pool_get_config (pool); ++ gst_buffer_pool_config_set_params (config, caps, size, 2, 0); ++ if (!gst_buffer_pool_set_config (pool, config)) ++ goto config_failed; ++ } ++ ++ if (pool) { ++ gst_mir_buffer_pool_set_surface_texture_client (pool, ++ sink->surface_texture_client); ++ GST_WARNING_OBJECT (sink, "SurfaceTextureClientHybris: %p", ++ sink->surface_texture_client); ++ ++ GST_WARNING_OBJECT (sink, "adding allocation pool"); ++ // FIXME: How many buffers min do we need? It's 2 right now. 
++ GST_WARNING_OBJECT (sink, "size: %d", size); ++ gst_query_add_allocation_pool (query, pool, size, 2, 0); ++ gst_object_unref (pool); ++ } ++ ++ /* First the default allocator */ ++ if (!gst_mir_image_memory_is_mappable ()) { ++ allocator = gst_allocator_find (NULL); ++ gst_query_add_allocation_param (query, allocator, ¶ms); ++ gst_object_unref (allocator); ++ } ++ ++ allocator = gst_mir_image_allocator_obtain (); ++ if (!gst_mir_image_memory_is_mappable ()) ++ params.flags |= GST_MEMORY_FLAG_NOT_MAPPABLE; ++ gst_query_add_allocation_param (query, allocator, ¶ms); ++ gst_object_unref (allocator); ++ ++ gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL); ++ ++ return TRUE; ++ ++ /* ERRORS */ ++no_caps: ++ { ++ GST_DEBUG_OBJECT (bsink, "no caps specified"); ++ return FALSE; ++ } ++invalid_caps: ++ { ++ GST_DEBUG_OBJECT (bsink, "invalid caps specified"); ++ return FALSE; ++ } ++config_failed: ++ { ++ GST_DEBUG_OBJECT (bsink, "failed setting config"); ++ gst_object_unref (pool); ++ return FALSE; ++ } ++} ++ ++static GstFlowReturn ++gst_mir_sink_preroll (GstBaseSink * bsink, GstBuffer * buffer) ++{ ++ GST_DEBUG_OBJECT (bsink, "preroll buffer %p", buffer); ++ return gst_mir_sink_render (bsink, buffer); ++} ++ ++static GstFlowReturn ++gst_mir_sink_render (GstBaseSink * bsink, GstBuffer * buffer) ++{ ++ GstMirSink *sink = GST_MIR_SINK (bsink); ++ //GstVideoRectangle src, dst, res; ++ GstBuffer *to_render; ++ GstMirMeta *meta; ++ //GstFlowReturn ret; ++ ++ GST_DEBUG_OBJECT (sink, "render buffer %p", buffer); ++ ++ meta = gst_buffer_get_mir_meta (buffer); ++ ++ if (meta && meta->sink == sink) { ++ GST_LOG_OBJECT (sink, "buffer %p from our pool, writing directly", buffer); ++ to_render = buffer; ++ } else { ++ //GstMapInfo src; ++ GST_LOG_OBJECT (sink, "buffer %p not from our pool, copying", buffer); ++ to_render = buffer; ++ ++#if 0 ++ if (!sink->pool) ++ goto no_pool; ++ ++ if (!gst_buffer_pool_set_active (sink->pool, TRUE)) ++ goto activate_failed; ++ 
++ ret = gst_buffer_pool_acquire_buffer (sink->pool, &to_render, NULL); ++ if (ret != GST_FLOW_OK) ++ goto no_buffer; ++ ++ gst_buffer_map (buffer, &src, GST_MAP_READ); ++ gst_buffer_fill (to_render, 0, src.data, src.size); ++ gst_buffer_unmap (buffer, &src); ++ ++ meta = gst_buffer_get_mir_meta (to_render); ++#endif ++ } ++ ++ g_signal_emit_by_name (G_OBJECT (bsink), "frame-ready"); ++ ++#if 0 ++ src.w = sink->video_width; ++ src.h = sink->video_height; ++ dst.w = sink->window->width; ++ dst.h = sink->window->height; ++ ++ gst_video_sink_center_rect (src, dst, &res, FALSE); ++#endif ++ ++ if (buffer != to_render) ++ gst_buffer_unref (to_render); ++ return GST_FLOW_OK; ++ ++#if 0 ++no_buffer: ++ { ++ GST_WARNING_OBJECT (sink, "could not create image"); ++ return ret; ++ } ++no_pool: ++ { ++ GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, ++ ("Internal error: can't allocate images"), ++ ("We don't have a bufferpool negotiated")); ++ return GST_FLOW_ERROR; ++ } ++activate_failed: ++ { ++ GST_ERROR_OBJECT (sink, "failed to activate bufferpool."); ++ ret = GST_FLOW_ERROR; ++ return ret; ++ } ++#endif ++} ++ ++static gboolean ++plugin_init (GstPlugin * plugin) ++{ ++ GST_DEBUG_CATEGORY_INIT (gstmir_debug, "mirsink", 0, " mir video sink"); ++ ++ return gst_element_register (plugin, "mirsink", GST_RANK_MARGINAL, ++ GST_TYPE_MIR_SINK); ++} ++ ++GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, ++ GST_VERSION_MINOR, ++ mirsink, ++ "Mir Video Sink", plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, ++ GST_PACKAGE_ORIGIN) +diff --git a/ext/mir/gstmirsink.h b/ext/mir/gstmirsink.h +new file mode 100644 +index 0000000..b73022b +--- /dev/null ++++ b/ext/mir/gstmirsink.h +@@ -0,0 +1,87 @@ ++/* ++ * GStreamer Mir video sink ++ * Copyright (C) 2013 Canonical Ltd ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at 
your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the Free ++ * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, ++ * Boston, MA 02110-1301 USA. ++ */ ++ ++#ifndef __GST_MIR_VIDEO_SINK_H__ ++#define __GST_MIR_VIDEO_SINK_H__ ++ ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++#include ++#include ++#include ++#include ++ ++#include ++ ++#define GST_TYPE_MIR_SINK \ ++ (gst_mir_sink_get_type()) ++#define GST_MIR_SINK(obj) \ ++ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MIR_SINK,GstMirSink)) ++#define GST_MIR_SINK_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MIR_SINK,GstMirSinkClass)) ++#define GST_IS_MIR_SINK(obj) \ ++ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MIR_SINK)) ++#define GST_IS_MIR_SINK_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MIR_SINK)) ++#define GST_MIR_SINK_GET_CLASS(inst) \ ++ (G_TYPE_INSTANCE_GET_CLASS ((inst), GST_TYPE_MIR_SINK, GstMirSinkClass)) ++ ++typedef struct _GstMirSink GstMirSink; ++typedef struct _GstMirSinkClass GstMirSinkClass; ++ ++#include "mirpool.h" ++ ++struct _GstMirSink ++{ ++ GstVideoSink parent; ++ ++ SurfaceTextureClientHybris surface_texture_client; ++ ++ guint texture_id; ++ ++ GstBufferPool *pool; ++ ++ GMutex mir_lock; ++ ++ gint video_width; ++ gint video_height; ++}; ++ ++struct _GstMirSinkClass ++{ ++ GstVideoSinkClass parent; ++}; ++ ++GType gst_mir_sink_get_type (void) G_GNUC_CONST; ++ ++G_END_DECLS ++ ++#endif /* __GST_MIR_VIDEO_SINK_H__ */ +diff --git a/ext/mir/mirpool.c b/ext/mir/mirpool.c +new file mode 100644 
+index 0000000..a5d2dc1 +--- /dev/null ++++ b/ext/mir/mirpool.c +@@ -0,0 +1,407 @@ ++/* ++ * GStreamer Mir buffer pool ++ * Copyright (C) 2013 Canonical Ltd ++ ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++/* Object header */ ++#include "gstmirsink.h" ++#include "mirpool.h" ++ ++#include ++ ++/* Debugging category */ ++#include ++ ++/* Helper functions */ ++#include ++#include ++#include ++ ++GST_DEBUG_CATEGORY (gstmirbufferpool_debug); ++#define GST_CAT_DEFAULT gstmirbufferpool_debug ++ ++/* mir metadata */ ++GType ++gst_mir_meta_api_get_type (void) ++{ ++ static volatile GType type; ++ static const gchar *tags[] = ++ { "memory", "size", "colorspace", "orientation", NULL }; ++ ++ if (g_once_init_enter (&type)) { ++ GType _type = gst_meta_api_type_register ("GstMirMetaAPI", tags); ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static void ++gst_mir_meta_free (GstMirMeta * meta, GstBuffer * buffer) ++{ ++ gst_object_unref (meta->sink); ++} ++ ++const GstMetaInfo * ++gst_mir_meta_get_info (void) ++{ ++ static const GstMetaInfo *mir_meta_info = NULL; ++ ++ if (g_once_init_enter (&mir_meta_info)) { ++ const GstMetaInfo *meta = ++ gst_meta_register (GST_MIR_META_API_TYPE, 
"GstMirMeta", ++ sizeof (GstMirMeta), (GstMetaInitFunction) NULL, ++ (GstMetaFreeFunction) gst_mir_meta_free, ++ (GstMetaTransformFunction) NULL); ++ g_once_init_leave (&mir_meta_info, meta); ++ } ++ return mir_meta_info; ++} ++ ++/* bufferpool */ ++static void gst_mir_buffer_pool_finalize (GObject * object); ++ ++#define gst_mir_buffer_pool_parent_class parent_class ++G_DEFINE_TYPE (GstMirBufferPool, gst_mir_buffer_pool, GST_TYPE_BUFFER_POOL); ++ ++static gboolean ++mir_buffer_pool_set_config (GstBufferPool * pool, GstStructure * config) ++{ ++ GstMirBufferPool *mpool = GST_MIR_BUFFER_POOL_CAST (pool); ++ GstVideoInfo info; ++ GstCaps *caps; ++ ++ GST_DEBUG_OBJECT (mpool, "%s", __PRETTY_FUNCTION__); ++ ++ if (!gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL)) ++ goto wrong_config; ++ ++ if (caps == NULL) ++ goto no_caps; ++ ++ if (mpool->allocator) ++ gst_object_unref (mpool->allocator); ++ mpool->allocator = NULL; ++ ++ /* now parse the caps from the config */ ++ if (!gst_video_info_from_caps (&info, caps)) ++ goto wrong_caps; ++ ++ if (!gst_buffer_pool_config_get_allocator (config, &mpool->allocator, ++ &mpool->params)) ++ return FALSE; ++ if (mpool->allocator) ++ gst_object_ref (mpool->allocator); ++ ++ GST_LOG_OBJECT (mpool, "%dx%d, caps %" GST_PTR_FORMAT, info.width, ++ info.height, caps); ++ ++ /*Fixme: Enable metadata checking handling based on the config of pool */ ++ ++ mpool->caps = gst_caps_ref (caps); ++ mpool->info = info; ++ mpool->width = info.width; ++ mpool->height = info.height; ++ ++ GST_DEBUG_OBJECT (mpool, "Calling set_config() on the parent class"); ++ return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config); ++ /* ERRORS */ ++wrong_config: ++ { ++ GST_WARNING_OBJECT (pool, "invalid config"); ++ return FALSE; ++ } ++no_caps: ++ { ++ GST_WARNING_OBJECT (pool, "no caps in config"); ++ return FALSE; ++ } ++wrong_caps: ++ { ++ GST_WARNING_OBJECT (pool, ++ "failed getting geometry from caps %" GST_PTR_FORMAT, 
caps); ++ return FALSE; ++ } ++} ++ ++static GstMirMeta * ++gst_buffer_add_mir_meta (GstBuffer * buffer, GstMirBufferPool * mpool) ++{ ++ GstMirMeta *mmeta; ++ GstMirSink *sink; ++ guint stride = 0; ++ guint size = 0; ++ ++ sink = mpool->sink; ++ stride = mpool->width * 4; ++ size = stride * mpool->height; ++ ++ GST_DEBUG_OBJECT (mpool, "%s", __PRETTY_FUNCTION__); ++ ++ /* Add metadata so that the render function can tell the difference between a zero-copy ++ * rendering buffer vs one that it must manually copy through the main CPU */ ++ mmeta = (GstMirMeta *) gst_buffer_add_meta (buffer, GST_MIR_META_INFO, NULL); ++ mmeta->sink = gst_object_ref (sink); ++ ++ mmeta->size = size; ++ ++ return mmeta; ++} ++ ++// FIXME: rename this function since it no longer makes sense ++static GstBuffer * ++gst_mir_allocate_native_window_buffer (GstBufferPool * pool, ++ GstAllocator * allocator, GstBufferPoolAcquireParams * params, ++ GstVideoFormat format, gint width, gint height) ++{ ++ GstMirBufferPool *m_pool = GST_MIR_BUFFER_POOL_CAST (pool); ++ GstBuffer *buffer; ++ GstMemory *mem = { NULL }; ++ gsize size = 0; ++ gint stride = 0; ++ GstMemoryFlags flags = 0; ++ ++ GST_DEBUG_OBJECT (pool, "%s", __PRETTY_FUNCTION__); ++ ++ if (!gst_mir_image_memory_is_mappable ()) ++ flags |= GST_MEMORY_FLAG_NOT_MAPPABLE; ++ ++ flags |= GST_MEMORY_FLAG_NO_SHARE; ++ ++ switch (format) { ++ gsize buffer_id = 0; ++ ++ case GST_VIDEO_FORMAT_RGB: ++ case GST_VIDEO_FORMAT_BGR: ++ case GST_VIDEO_FORMAT_RGB16: ++ case GST_VIDEO_FORMAT_NV12: ++ case GST_VIDEO_FORMAT_NV21: ++ case GST_VIDEO_FORMAT_RGBA: ++ case GST_VIDEO_FORMAT_BGRA: ++ case GST_VIDEO_FORMAT_ARGB: ++ case GST_VIDEO_FORMAT_ABGR: ++ case GST_VIDEO_FORMAT_RGBx: ++ case GST_VIDEO_FORMAT_BGRx: ++ case GST_VIDEO_FORMAT_xRGB: ++ case GST_VIDEO_FORMAT_xBGR: ++ case GST_VIDEO_FORMAT_AYUV: ++ case GST_VIDEO_FORMAT_YV12: ++ case GST_VIDEO_FORMAT_I420: ++ case GST_VIDEO_FORMAT_Y444: ++ case GST_VIDEO_FORMAT_Y42B: ++ case GST_VIDEO_FORMAT_Y41B:{ ++ 
++ GST_WARNING_OBJECT (m_pool, ++ "Allocating new Mir image, height: %d, width: %d, size: %d", height, ++ width, size); ++ ++ /* A fallback to make sure we have a size */ ++ if (size == 0) ++ size = height * width; ++ ++ stride = size / height; ++ size = stride * height; ++ ++ GST_WARNING_OBJECT (m_pool, "stride: %d, size: %d", stride, size); ++ ++ //if (m_pool->sink->surface_texture_client) { ++ buffer_id = 0; ++ ++ GST_WARNING_OBJECT (m_pool, "Allocating new buffer memory of size: %d", ++ size); ++ mem = ++ gst_mir_image_allocator_wrap (allocator, m_pool->codec_delegate, ++ buffer_id, flags, size, NULL, NULL); ++ if (mem == NULL) ++ GST_WARNING_OBJECT (m_pool, "mem is NULL!"); ++ //} ++ ++ break; ++ } ++ default: ++ GST_WARNING_OBJECT (m_pool, ++ "Using the default buffer allocator, hit the default case"); ++ if (GST_BUFFER_POOL_CLASS (gst_mir_buffer_pool_parent_class)->alloc_buffer ++ (pool, &buffer, params) != GST_FLOW_OK) ++ return NULL; ++ break; ++ } ++ ++ buffer = gst_buffer_new (); ++ if (!buffer) { ++ GST_WARNING_OBJECT (m_pool, "Fallback memory allocation"); ++ if (GST_BUFFER_POOL_CLASS (gst_mir_buffer_pool_parent_class)->alloc_buffer ++ (pool, &buffer, params) != GST_FLOW_OK) ++ return NULL; ++ } ++ ++ GST_DEBUG ("Appending memory to GstBuffer"); ++ gst_buffer_append_memory (buffer, mem); ++ ++ return buffer; ++} ++ ++static GstFlowReturn ++mir_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer, ++ GstBufferPoolAcquireParams * params) ++{ ++ GstMirBufferPool *m_pool = GST_MIR_BUFFER_POOL_CAST (pool); ++ GstMirMeta *meta; ++ ++ GST_DEBUG_OBJECT (m_pool, "%s", __PRETTY_FUNCTION__); ++ ++ if (m_pool->allocator == NULL) { ++ GST_ERROR_OBJECT (m_pool, "Can't create buffer, couldn't get allocator"); ++ return GST_FLOW_ERROR; ++ } ++ ++ GST_WARNING_OBJECT (m_pool, "Height: %d, width: %d", m_pool->height, ++ m_pool->width); ++ ++ *buffer = ++ gst_mir_allocate_native_window_buffer (pool, m_pool->allocator, params, ++ m_pool->info.finfo->format, 
m_pool->width, m_pool->height); ++ ++ meta = gst_buffer_add_mir_meta (*buffer, m_pool); ++ if (meta == NULL) { ++ gst_buffer_unref (*buffer); ++ goto no_buffer; ++ } ++ ++ return GST_FLOW_OK; ++ ++ /* ERROR */ ++no_buffer: ++ { ++ GST_WARNING_OBJECT (pool, "can't create buffer"); ++ return GST_FLOW_ERROR; ++ } ++} ++ ++static void ++gst_mir_buffer_pool_release_buffer (GstBufferPool * pool, GstBuffer * buffer) ++{ ++#if 1 ++ GstMemory *mem = { NULL }; ++ int err = 0; ++ MediaCodecDelegate delegate; ++ ++ /* Get access to the GstMemory stored in the GstBuffer */ ++ if (gst_buffer_n_memory (buffer) >= 1 && ++ (mem = gst_buffer_peek_memory (buffer, 0)) ++ && gst_is_mir_image_memory (mem)) { ++ GST_DEBUG_OBJECT (pool, "It is Mir image memory"); ++ } else ++ GST_DEBUG_OBJECT (pool, "It is NOT Mir image memory"); ++ ++ delegate = gst_mir_image_memory_get_codec (mem); ++ if (!delegate) { ++ GST_WARNING_OBJECT (pool, "delegate is NULL, rendering will not function"); ++ goto done; ++ } ++ ++ GST_DEBUG_OBJECT (pool, "mem: %p", mem); ++ GST_DEBUG_OBJECT (pool, "gst_mir_image_memory_get_codec (mem): %p", delegate); ++ GST_DEBUG_OBJECT (pool, "gst_mir_image_memory_get_buffer_index (mem): %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ GST_DEBUG_OBJECT (pool, "Rendering buffer: %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ GST_DEBUG_OBJECT (pool, "Releasing output buffer index: %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ ++ /* Render and release the output buffer back to the decoder */ ++ err = ++ media_codec_release_output_buffer (delegate, ++ gst_mir_image_memory_get_buffer_index (mem)); ++ if (err < 0) ++ GST_WARNING_OBJECT (pool, ++ "Failed to release output buffer. 
Rendering will probably be affected (err: %d).", ++ err); ++#endif ++ ++done: ++ GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (pool, buffer); ++} ++ ++GstBufferPool * ++gst_mir_buffer_pool_new (GstMirSink * mirsink) ++{ ++ GstMirBufferPool *m_pool; ++ ++ GST_WARNING ("%s", __PRETTY_FUNCTION__); ++ ++ g_return_val_if_fail (GST_IS_MIR_SINK (mirsink), NULL); ++ m_pool = g_object_new (GST_TYPE_MIR_BUFFER_POOL, NULL); ++ m_pool->sink = gst_object_ref (mirsink); ++ ++ return GST_BUFFER_POOL_CAST (m_pool); ++} ++ ++void ++gst_mir_buffer_pool_set_surface_texture_client (GstBufferPool * pool, ++ SurfaceTextureClientHybris sfc) ++{ ++ GstMirBufferPool *m_pool = GST_MIR_BUFFER_POOL_CAST (pool); ++ ++ GST_DEBUG_OBJECT (m_pool, "%s", __PRETTY_FUNCTION__); ++ m_pool->surface_texture_client = sfc; ++} ++ ++void ++gst_mir_buffer_pool_set_codec_delegate (GstBufferPool * pool, ++ MediaCodecDelegate * delegate) ++{ ++ GstMirBufferPool *m_pool = GST_MIR_BUFFER_POOL_CAST (pool); ++ ++ GST_DEBUG_OBJECT (m_pool, "%s", __PRETTY_FUNCTION__); ++ m_pool->codec_delegate = delegate; ++} ++ ++static void ++gst_mir_buffer_pool_class_init (GstMirBufferPoolClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ GstBufferPoolClass *gstbufferpool_class = (GstBufferPoolClass *) klass; ++ ++ gobject_class->finalize = gst_mir_buffer_pool_finalize; ++ ++ gstbufferpool_class->set_config = mir_buffer_pool_set_config; ++ gstbufferpool_class->alloc_buffer = mir_buffer_pool_alloc; ++ gstbufferpool_class->release_buffer = gst_mir_buffer_pool_release_buffer; ++} ++ ++static void ++gst_mir_buffer_pool_init (GstMirBufferPool * pool) ++{ ++ GST_DEBUG_CATEGORY_INIT (gstmirbufferpool_debug, "mirbufferpool", 0, ++ " mir buffer pool"); ++} ++ ++static void ++gst_mir_buffer_pool_finalize (GObject * object) ++{ ++ GstMirBufferPool *pool = GST_MIR_BUFFER_POOL_CAST (object); ++ ++ GST_DEBUG_OBJECT (pool, "%s", __PRETTY_FUNCTION__); ++ gst_object_unref (pool->sink); ++ ++ G_OBJECT_CLASS 
(gst_mir_buffer_pool_parent_class)->finalize (object); ++} +diff --git a/ext/mir/mirpool.h b/ext/mir/mirpool.h +new file mode 100644 +index 0000000..bf8543a +--- /dev/null ++++ b/ext/mir/mirpool.h +@@ -0,0 +1,85 @@ ++/* ++ * GStreamer Mir buffer pool ++ * Copyright (C) 2013 Canonical Ltd ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_MIR_BUFFER_POOL_H__ ++#define __GST_MIR_BUFFER_POOL_H__ ++ ++G_BEGIN_DECLS ++ ++#include "gstmirsink.h" ++#include ++#include ++typedef struct _GstMirMeta GstMirMeta; ++ ++typedef struct _GstMirBufferPool GstMirBufferPool; ++typedef struct _GstMirBufferPoolClass GstMirBufferPoolClass; ++ ++GType gst_mir_meta_api_get_type (void); ++#define GST_MIR_META_API_TYPE (gst_mir_meta_api_get_type()) ++const GstMetaInfo * gst_mir_meta_get_info (void); ++#define GST_MIR_META_INFO (gst_mir_meta_get_info()) ++ ++#define gst_buffer_get_mir_meta(b) ((GstMirMeta*)gst_buffer_get_meta((b),GST_MIR_META_API_TYPE)) ++ ++struct _GstMirMeta { ++ GstMeta meta; ++ ++ GstMirSink *sink; ++ ++ size_t size; ++}; ++ ++/* buffer pool functions */ ++#define GST_TYPE_MIR_BUFFER_POOL (gst_mir_buffer_pool_get_type()) ++#define GST_IS_MIR_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MIR_BUFFER_POOL)) ++#define GST_MIR_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MIR_BUFFER_POOL, GstMirBufferPool)) ++#define GST_MIR_BUFFER_POOL_CAST(obj) ((GstMirBufferPool*)(obj)) ++ ++struct _GstMirBufferPool ++{ ++ GstBufferPool bufferpool; ++ ++ GstMirSink *sink; ++ ++ /*Fixme: keep all these in GstMirBufferPoolPrivate*/ ++ GstCaps *caps; ++ GstVideoInfo info; ++ guint width; ++ guint height; ++ GstAllocator *allocator; ++ GstAllocationParams params; ++ SurfaceTextureClientHybris surface_texture_client; ++ MediaCodecDelegate *codec_delegate; ++}; ++ ++struct _GstMirBufferPoolClass ++{ ++ GstBufferPoolClass parent_class; ++}; ++ ++GType gst_mir_buffer_pool_get_type (void); ++ ++GstBufferPool *gst_mir_buffer_pool_new (GstMirSink * mirsink); ++void gst_mir_buffer_pool_set_surface_texture_client (GstBufferPool * pool, SurfaceTextureClientHybris sfc); ++void gst_mir_buffer_pool_set_codec_delegate (GstBufferPool * pool, MediaCodecDelegate *delegate); ++ ++G_END_DECLS ++ ++#endif /*__GST_MIR_BUFFER_POOL_H__*/ +diff --git a/gst-libs/gst/Makefile.am 
b/gst-libs/gst/Makefile.am +index 1d6cc35..748105e 100644 +--- a/gst-libs/gst/Makefile.am ++++ b/gst-libs/gst/Makefile.am +@@ -2,8 +2,12 @@ if HAVE_EGL + EGL_DIR = egl + endif + ++if USE_ANDROID_MEDIA_HYBRIS ++MIR_DIR = mir ++endif ++ + SUBDIRS = interfaces basecamerabinsrc codecparsers \ +- insertbin uridownloader mpegts $(EGL_DIR) ++ insertbin uridownloader mpegts $(EGL_DIR) $(MIR_DIR) + + noinst_HEADERS = gst-i18n-plugin.h gettext.h glib-compat-private.h + DIST_SUBDIRS = interfaces egl basecamerabinsrc codecparsers \ +diff --git a/gst-libs/gst/mir/Makefile.am b/gst-libs/gst/mir/Makefile.am +new file mode 100644 +index 0000000..41d7e22 +--- /dev/null ++++ b/gst-libs/gst/mir/Makefile.am +@@ -0,0 +1,26 @@ ++lib_LTLIBRARIES = libgstmiralloc-@GST_API_VERSION@.la ++ ++libgstmiralloc_@GST_API_VERSION@_la_SOURCES = mirallocator.c ++ ++libgstmiralloc_@GST_API_VERSION@includedir = \ ++ $(includedir)/gstreamer-@GST_API_VERSION@/gst/mir ++ ++libgstmiralloc_@GST_API_VERSION@include_HEADERS = mirallocator.h ++ ++libgstmiralloc_@GST_API_VERSION@_la_CFLAGS = \ ++ $(GST_PLUGINS_BAD_CFLAGS) \ ++ $(GST_PLUGINS_BASE_CFLAGS) \ ++ $(GST_CFLAGS) \ ++ $(MIR_CFLAGS) ++ ++libgstmiralloc_@GST_API_VERSION@_la_LIBADD = \ ++ $(GST_PLUGINS_BASE_LIBS) \ ++ -lgstvideo-@GST_API_VERSION@ \ ++ $(GST_LIBS) \ ++ $(MIR_LIBS) ++ -lmedia ++ ++libgstmiralloc_@GST_API_VERSION@_la_LDFLAGS = \ ++ $(GST_LIB_LDFLAGS) \ ++ $(GST_ALL_LDFLAGS) \ ++ $(GST_LT_LDFLAGS) +diff --git a/gst-libs/gst/mir/mirallocator.c b/gst-libs/gst/mir/mirallocator.c +new file mode 100644 +index 0000000..826b369 +--- /dev/null ++++ b/gst-libs/gst/mir/mirallocator.c +@@ -0,0 +1,260 @@ ++/* ++ * Mir GstMemory allocator ++ * Copyright (C) 2013 Collabora Ltd. 
++ * @author: Jim Hodapp ++ * * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "mirallocator.h" ++ ++#include ++ ++#define GST_MIR_IMAGE_MEMORY(mem) ((GstMirImageMemory*)(mem)) ++ ++gboolean ++gst_mir_image_memory_is_mappable (void) ++{ ++ return FALSE; ++} ++ ++gboolean ++gst_is_mir_image_memory (GstMemory * mem) ++{ ++ g_return_val_if_fail (mem != NULL, FALSE); ++ g_return_val_if_fail (mem->allocator != NULL, FALSE); ++ ++ return g_strcmp0 (mem->allocator->mem_type, GST_MIR_IMAGE_MEMORY_TYPE) == 0; ++} ++ ++gsize ++gst_mir_image_memory_get_buffer_index (GstMemory * mem) ++{ ++ g_return_val_if_fail (gst_is_mir_image_memory (mem), 0); ++ ++ if (mem->parent) ++ mem = mem->parent; ++ ++ return GST_MIR_IMAGE_MEMORY (mem)->buffer_index; ++} ++ ++MediaCodecDelegate ++gst_mir_image_memory_get_codec (GstMemory * mem) ++{ ++ g_return_val_if_fail (gst_is_mir_image_memory (mem), 0); ++ ++ if (mem->parent) ++ mem = mem->parent; ++ ++ return GST_MIR_IMAGE_MEMORY (mem)->codec_delegate; ++} ++ ++void ++gst_mir_image_memory_set_codec (GstMemory * mem, MediaCodecDelegate delegate) ++{ ++ g_return_if_fail (gst_is_mir_image_memory (mem)); ++ g_return_if_fail (delegate != NULL); ++ ++ if (mem->parent) 
++ mem = mem->parent; ++ ++ GST_MIR_IMAGE_MEMORY (mem)->codec_delegate = delegate; ++} ++ ++void ++gst_mir_image_memory_set_buffer_index (GstMemory * mem, gsize index) ++{ ++ g_return_if_fail (gst_is_mir_image_memory (mem)); ++ ++ if (mem->parent) ++ mem = mem->parent; ++ ++ GST_MIR_IMAGE_MEMORY (mem)->buffer_index = index; ++} ++ ++static GstMemory * ++gst_mir_image_allocator_alloc_vfunc (GstAllocator * allocator, gsize size, ++ GstAllocationParams * params) ++{ ++ g_warning ++ ("Use gst_mir_image_allocator_alloc() to allocate from this allocator"); ++ ++ return NULL; ++} ++ ++static void ++gst_mir_image_allocator_free_vfunc (GstAllocator * allocator, GstMemory * mem) ++{ ++ GstMirImageMemory *emem = (GstMirImageMemory *) mem; ++ GST_WARNING ("%s", __PRETTY_FUNCTION__); ++ ++ g_return_if_fail (gst_is_mir_image_memory (mem)); ++ ++ /* Shared memory should not destroy all the data */ ++ if (!mem->parent) { ++ ++ if (emem->user_data_destroy) ++ emem->user_data_destroy (emem->user_data); ++ } ++ ++ g_slice_free (GstMirImageMemory, emem); ++} ++ ++static gpointer ++gst_mir_image_mem_map (GstMemory * mem, gsize maxsize, GstMapFlags flags) ++{ ++ return NULL; ++} ++ ++static void ++gst_mir_image_mem_unmap (GstMemory * mem) ++{ ++} ++ ++static GstMemory * ++gst_mir_image_mem_share (GstMemory * mem, gssize offset, gssize size) ++{ ++ GstMemory *sub; ++ GstMemory *parent; ++ ++ GST_WARNING ("%s", __PRETTY_FUNCTION__); ++ ++ if (offset != 0) ++ return NULL; ++ ++ if (size != -1 && size != mem->size) ++ return NULL; ++ ++ /* find the real parent */ ++ if ((parent = mem->parent) == NULL) ++ parent = (GstMemory *) mem; ++ ++ if (size == -1) ++ size = mem->size - offset; ++ ++ sub = (GstMemory *) g_slice_new (GstMirImageMemory); ++ ++ /* the shared memory is always readonly */ ++ gst_memory_init (GST_MEMORY_CAST (sub), GST_MINI_OBJECT_FLAGS (parent) | ++ GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->allocator, parent, ++ mem->maxsize, mem->align, mem->offset + offset, size); ++ ++ 
return sub; ++} ++ ++static GstMemory * ++gst_mir_image_mem_copy (GstMemory * mem, gssize offset, gssize size) ++{ ++ return NULL; ++} ++ ++static gboolean ++gst_mir_image_mem_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset) ++{ ++ return FALSE; ++} ++ ++typedef GstAllocator GstMirImageAllocator; ++typedef GstAllocatorClass GstMirImageAllocatorClass; ++ ++GType gst_mir_image_allocator_get_type (void); ++G_DEFINE_TYPE (GstMirImageAllocator, gst_mir_image_allocator, ++ GST_TYPE_ALLOCATOR); ++ ++#define GST_TYPE_MIR_IMAGE_ALLOCATOR (gst_mir_image_mem_allocator_get_type()) ++#define GST_IS_MIR_IMAGE_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MIR_IMAGE_ALLOCATOR)) ++ ++static void ++gst_mir_image_allocator_class_init (GstMirImageAllocatorClass * klass) ++{ ++ GstAllocatorClass *allocator_class = (GstAllocatorClass *) klass; ++ ++ allocator_class->alloc = gst_mir_image_allocator_alloc_vfunc; ++ allocator_class->free = gst_mir_image_allocator_free_vfunc; ++} ++ ++static void ++gst_mir_image_allocator_init (GstMirImageAllocator * allocator) ++{ ++ GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator); ++ ++ alloc->mem_type = GST_MIR_IMAGE_MEMORY_TYPE; ++ alloc->mem_map = gst_mir_image_mem_map; ++ alloc->mem_unmap = gst_mir_image_mem_unmap; ++ alloc->mem_share = gst_mir_image_mem_share; ++ alloc->mem_copy = gst_mir_image_mem_copy; ++ alloc->mem_is_span = gst_mir_image_mem_is_span; ++ ++ GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC); ++} ++ ++static gpointer ++gst_mir_image_allocator_init_instance (gpointer data) ++{ ++ return g_object_new (gst_mir_image_allocator_get_type (), NULL); ++} ++ ++GstAllocator * ++gst_mir_image_allocator_obtain (void) ++{ ++ static GOnce once = G_ONCE_INIT; ++ ++ g_once (&once, gst_mir_image_allocator_init_instance, NULL); ++ ++ g_return_val_if_fail (once.retval != NULL, NULL); ++ ++ return GST_ALLOCATOR (g_object_ref (once.retval)); ++} ++ ++GstMemory * ++gst_mir_image_allocator_alloc (GstAllocator 
* allocator, ++ gint width, gint height, gsize * size) ++{ ++ GST_WARNING ("%s", __PRETTY_FUNCTION__); ++ return NULL; ++} ++ ++GstMemory * ++gst_mir_image_allocator_wrap (GstAllocator * allocator, ++ MediaCodecDelegate delegate, gsize buffer_id, GstMemoryFlags flags, ++ gsize size, gpointer user_data, GDestroyNotify user_data_destroy) ++{ ++ GstMirImageMemory *mem; ++ ++ GST_WARNING ("%s", __PRETTY_FUNCTION__); ++ GST_WARNING ("size: %d", size); ++ GST_WARNING ("delegate: %p", delegate); ++ ++ if (!allocator) { ++ allocator = gst_mir_image_allocator_obtain (); ++ } ++ mem = g_slice_new (GstMirImageMemory); ++ // FIXME: calling gst_mir_image_allocator_obtain() is a hack to select my allocator, this really ++ // should be selected automatically by the decoder. This selection is not working correctly yet. ++ gst_memory_init (GST_MEMORY_CAST (mem), flags, ++ gst_mir_image_allocator_obtain (), NULL, size, 0, 0, size); ++ ++ mem->codec_delegate = delegate; ++ mem->buffer_index = buffer_id; ++ mem->user_data = user_data; ++ mem->user_data_destroy = user_data_destroy; ++ ++ return GST_MEMORY_CAST (mem); ++} +diff --git a/gst-libs/gst/mir/mirallocator.h b/gst-libs/gst/mir/mirallocator.h +new file mode 100644 +index 0000000..e696ea9 +--- /dev/null ++++ b/gst-libs/gst/mir/mirallocator.h +@@ -0,0 +1,66 @@ ++/* ++ * Mir GstMemory allocator ++ * Copyright (C) 2013 Collabora Ltd. ++ * @author: Jim Hodapp ++ * * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 3 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_EGL_H__ ++#define __GST_EGL_H__ ++ ++#include ++#include ++#include ++#include ++ ++#include ++#include ++ ++#define GST_MIR_IMAGE_MEMORY_TYPE "MirImage" ++ ++#define GST_CAPS_FEATURE_MEMORY_MIR_IMAGE "memory:MirImage" ++ ++typedef struct _GstMirDisplay GstMirDisplay; ++ ++typedef struct ++{ ++ GstMemory parent; ++ ++ MediaCodecDelegate codec_delegate; ++ gsize buffer_index; ++ ++ gpointer user_data; ++ GDestroyNotify user_data_destroy; ++} GstMirImageMemory; ++ ++/* MirImage GstMemory handling */ ++gboolean gst_mir_image_memory_is_mappable (void); ++gboolean gst_is_mir_image_memory (GstMemory * mem); ++gsize gst_mir_image_memory_get_buffer_index (GstMemory * mem); ++MediaCodecDelegate gst_mir_image_memory_get_codec (GstMemory * mem); ++void gst_mir_image_memory_set_codec (GstMemory * mem, MediaCodecDelegate delegate); ++void gst_mir_image_memory_set_buffer_index (GstMemory * mem, gsize index); ++ ++/* Generic MirImage allocator that doesn't support mapping, copying or anything */ ++GstAllocator *gst_mir_image_allocator_obtain (void); ++GstMemory *gst_mir_image_allocator_alloc (GstAllocator * allocator, ++ gint width, gint height, gsize * size); ++GstMemory *gst_mir_image_allocator_wrap (GstAllocator * allocator, MediaCodecDelegate delegate, ++ gsize buffer_id, GstMemoryFlags flags, gsize size, gpointer user_data, ++ GDestroyNotify user_data_destroy); ++ ++#endif /* __GST_EGL_H__ */ +diff --git a/sys/Makefile.am b/sys/Makefile.am +index b1abda6..e5bf070 100644 +--- a/sys/Makefile.am ++++ b/sys/Makefile.am +@@ -10,6 +10,12 @@ else + ANDROID_MEDIA_DIR= + endif + ++if USE_ANDROID_MEDIA_HYBRIS ++ANDROID_MEDIA_DIR=androidmedia ++else ++ANDROID_MEDIA_DIR= ++endif ++ + if USE_APPLE_MEDIA + APPLE_MEDIA_DIR=applemedia 
applemedia-nonpublic + else +diff --git a/sys/androidmedia/Makefile.am b/sys/androidmedia/Makefile.am +index fba8777..443f136 100644 +--- a/sys/androidmedia/Makefile.am ++++ b/sys/androidmedia/Makefile.am +@@ -1,9 +1,16 @@ + plugin_LTLIBRARIES = libgstandroidmedia.la + ++if USE_ANDROID_MEDIA_HYBRIS ++libgstandroidmedia_la_SOURCES = \ ++ gstamchybris.c \ ++ gstamcaudiodechybris.c \ ++ gstamcvideodechybris.c ++else + libgstandroidmedia_la_SOURCES = \ + gstamc.c \ + gstamcaudiodec.c \ + gstamcvideodec.c ++endif + + noinst_HEADERS = \ + gstamc.h \ +@@ -12,18 +19,22 @@ noinst_HEADERS = \ + gstamcvideodec.h + + libgstandroidmedia_la_CFLAGS = \ ++ $(GST_PLUGINS_BAD_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) \ + $(GST_BASE_CFLAGS) \ + $(GST_CFLAGS) \ + $(ORC_CFLAGS) + libgstandroidmedia_la_LIBADD = \ ++ $(GST_PLUGINS_BAD_LIBS) \ + $(GST_PLUGINS_BASE_LIBS) \ + -lgstaudio-@GST_API_VERSION@ \ + -lgstpbutils-@GST_API_VERSION@ \ + -lgstvideo-@GST_API_VERSION@ \ + $(GST_BASE_LIBS) \ + $(GST_LIBS) \ +- $(ORC_LIBS) ++ $(ORC_LIBS) \ ++ -lmedia \ ++ $(top_builddir)/gst-libs/gst/mir/libgstmiralloc-@GST_API_VERSION@.la + libgstandroidmedia_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) + libgstandroidmedia_la_LIBTOOLFLAGS = $(GST_PLUGIN_LIBTOOLFLAGS) + +@@ -34,7 +45,7 @@ Android.mk: Makefile.am $(BUILT_SOURCES) + -:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \ + -:SOURCES $(libgstandroidmedia_la_SOURCES) \ + $(nodist_libgstandroidmedia_la_SOURCES) \ +- -:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstandroidmedia_la_CFLAGS) \ ++ -:CPPFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstandroidmedia_la_CFLAGS) \ + -:LDFLAGS $(libgstandroidmedia_la_LDFLAGS) \ + $(libgstandroidmedia_la_LIBADD) \ + -ldl \ +diff --git a/sys/androidmedia/gstamc-constants.h b/sys/androidmedia/gstamc-constants.h +index ea6288c..f263cdd 100644 +--- a/sys/androidmedia/gstamc-constants.h ++++ b/sys/androidmedia/gstamc-constants.h +@@ -95,6 +95,7 @@ enum + COLOR_FormatAndroidOpaque = 0x7F000789, + COLOR_TI_FormatYUV420PackedSemiPlanar = 
0x7f000100, + COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00, ++ COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03, + /* From hardware/ti/omap4xxx/domx/omx_core/inc/OMX_TI_IVCommon.h */ + COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced = 0x7f000001 + }; +diff --git a/sys/androidmedia/gstamc.h b/sys/androidmedia/gstamc.h +index b48a3ee..17fc681 100644 +--- a/sys/androidmedia/gstamc.h ++++ b/sys/androidmedia/gstamc.h +@@ -24,7 +24,14 @@ + #include + #include + #include ++#ifdef HAVE_ANDROID_MEDIA + #include ++#endif ++ ++#ifdef HAVE_ANDROID_MEDIA_HYBRIS ++#include ++#include ++#endif + + G_BEGIN_DECLS + +@@ -56,19 +63,30 @@ struct _GstAmcCodecInfo { + }; + + struct _GstAmcBuffer { ++#ifdef HAVE_ANDROID_MEDIA + jobject object; /* global reference */ ++#endif + guint8 *data; + gsize size; + }; + + struct _GstAmcFormat { ++#ifdef HAVE_ANDROID_MEDIA + /* < private > */ + jobject object; /* global reference */ ++#endif ++ MediaFormat format; + }; + + struct _GstAmcCodec { ++#ifdef HAVE_ANDROID_MEDIA + /* < private > */ + jobject object; /* global reference */ ++#endif ++#ifdef HAVE_ANDROID_MEDIA_HYBRIS ++ MediaCodecDelegate *codec_delegate; ++ guint texture_id; ++#endif + }; + + struct _GstAmcBufferInfo { +@@ -83,7 +101,12 @@ extern GQuark gst_amc_codec_info_quark; + GstAmcCodec * gst_amc_codec_new (const gchar *name); + void gst_amc_codec_free (GstAmcCodec * codec); + ++#ifdef HAVE_ANDROID_MEDIA_HYBRIS ++gboolean gst_amc_codec_configure (GstAmcCodec * codec, GstAmcFormat * format, SurfaceTextureClientHybris stc, gint flags); ++#else + gboolean gst_amc_codec_configure (GstAmcCodec * codec, GstAmcFormat * format, gint flags); ++#endif ++gboolean gst_amc_codec_queue_csd (GstAmcCodec * codec, GstAmcFormat * format); + GstAmcFormat * gst_amc_codec_get_output_format (GstAmcCodec * codec); + + gboolean gst_amc_codec_start (GstAmcCodec * codec); +diff --git a/sys/androidmedia/gstamcaudiodechybris.c b/sys/androidmedia/gstamcaudiodechybris.c +new file mode 100644 
+index 0000000..fb108c0 +--- /dev/null ++++ b/sys/androidmedia/gstamcaudiodechybris.c +@@ -0,0 +1,1292 @@ ++/* ++ * Initially based on gst-omx/omx/gstomxvideodec.c ++ * ++ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P. ++ * Author: Sebastian Dröge , Collabora Ltd. ++ * ++ * Copyright (C) 2012, Collabora Ltd. ++ * Author: Sebastian Dröge ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation ++ * version 2.1 of the License. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. ++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with this library; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++#include ++ ++#ifdef HAVE_ORC ++#include ++#else ++#define orc_memcpy memcpy ++#endif ++ ++#include "gstamcaudiodec.h" ++#include "gstamc-constants.h" ++ ++GST_DEBUG_CATEGORY_STATIC (gst_amc_audio_dec_debug_category); ++#define GST_CAT_DEFAULT gst_amc_audio_dec_debug_category ++ ++/* prototypes */ ++static void gst_amc_audio_dec_finalize (GObject * object); ++ ++static GstStateChangeReturn ++gst_amc_audio_dec_change_state (GstElement * element, ++ GstStateChange transition); ++ ++static gboolean gst_amc_audio_dec_open (GstAudioDecoder * decoder); ++static gboolean gst_amc_audio_dec_close (GstAudioDecoder * decoder); ++static gboolean gst_amc_audio_dec_start (GstAudioDecoder * decoder); ++static gboolean gst_amc_audio_dec_stop (GstAudioDecoder * decoder); ++static gboolean gst_amc_audio_dec_set_format 
(GstAudioDecoder * decoder, ++ GstCaps * caps); ++static void gst_amc_audio_dec_flush (GstAudioDecoder * decoder, gboolean hard); ++static GstFlowReturn gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, ++ GstBuffer * buffer); ++ ++static GstFlowReturn gst_amc_audio_dec_drain (GstAmcAudioDec * self); ++ ++enum ++{ ++ PROP_0 ++}; ++ ++/* class initialization */ ++ ++static void gst_amc_audio_dec_class_init (GstAmcAudioDecClass * klass); ++static void gst_amc_audio_dec_init (GstAmcAudioDec * self); ++static void gst_amc_audio_dec_base_init (gpointer g_class); ++ ++static GstAudioDecoderClass *parent_class = NULL; ++ ++GType ++gst_amc_audio_dec_get_type (void) ++{ ++ static volatile gsize type = 0; ++ ++ if (g_once_init_enter (&type)) { ++ GType _type; ++ static const GTypeInfo info = { ++ sizeof (GstAmcAudioDecClass), ++ gst_amc_audio_dec_base_init, ++ NULL, ++ (GClassInitFunc) gst_amc_audio_dec_class_init, ++ NULL, ++ NULL, ++ sizeof (GstAmcAudioDec), ++ 0, ++ (GInstanceInitFunc) gst_amc_audio_dec_init, ++ NULL ++ }; ++ ++ _type = g_type_register_static (GST_TYPE_AUDIO_DECODER, "GstAmcAudioDec", ++ &info, 0); ++ ++ GST_DEBUG_CATEGORY_INIT (gst_amc_audio_dec_debug_category, "amcaudiodec", 0, ++ "Android MediaCodec audio decoder"); ++ ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static GstCaps * ++create_sink_caps (const GstAmcCodecInfo * codec_info) ++{ ++ GstCaps *ret; ++ gint i; ++ ++ ret = gst_caps_new_empty (); ++ ++ for (i = 0; i < codec_info->n_supported_types; i++) { ++ const GstAmcCodecType *type = &codec_info->supported_types[i]; ++ ++ if (strcmp (type->mime, "audio/mpeg") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/mpeg", ++ "mpegversion", G_TYPE_INT, 1, ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "parsed", G_TYPE_BOOLEAN, TRUE, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/3gpp") == 0) { ++ GstStructure 
*tmp; ++ ++ tmp = gst_structure_new ("audio/AMR", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/amr-wb") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/AMR-WB", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/mp4a-latm") == 0) { ++ gint j; ++ GstStructure *tmp, *tmp2; ++ gboolean have_profile = FALSE; ++ GValue va = { 0, }; ++ GValue v = { 0, }; ++ ++ g_value_init (&va, GST_TYPE_LIST); ++ g_value_init (&v, G_TYPE_STRING); ++ g_value_set_string (&v, "raw"); ++ gst_value_list_append_value (&va, &v); ++ g_value_set_string (&v, "adts"); ++ gst_value_list_append_value (&va, &v); ++ g_value_unset (&v); ++ ++ tmp = gst_structure_new ("audio/mpeg", ++ "mpegversion", G_TYPE_INT, 4, ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "framed", G_TYPE_BOOLEAN, TRUE, NULL); ++ gst_structure_set_value (tmp, "stream-format", &va); ++ g_value_unset (&va); ++ ++ for (j = 0; j < type->n_profile_levels; j++) { ++ const gchar *profile; ++ ++ profile = ++ gst_amc_aac_profile_to_string (type->profile_levels[j].profile); ++ ++ if (!profile) { ++ GST_ERROR ("Unable to map AAC profile 0x%08x", ++ type->profile_levels[j].profile); ++ continue; ++ } ++ ++ tmp2 = gst_structure_copy (tmp); ++ gst_structure_set (tmp2, "profile", G_TYPE_STRING, profile, NULL); ++ ret = gst_caps_merge_structure (ret, tmp2); ++ ++ have_profile = TRUE; ++ } ++ ++ if (!have_profile) { ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ gst_structure_free (tmp); ++ } ++ } else if (strcmp (type->mime, "audio/g711-alaw") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/x-alaw", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 
1, G_MAXINT, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/g711-mlaw") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/x-mulaw", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/vorbis") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/x-vorbis", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/flac") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/x-flac", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "framed", G_TYPE_BOOLEAN, TRUE, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "audio/mpeg-L2") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("audio/mpeg", ++ "mpegversion", G_TYPE_INT, 1, ++ "layer", G_TYPE_INT, 2, ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "parsed", G_TYPE_BOOLEAN, TRUE, NULL); ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ GST_WARNING ("Unsupported mimetype '%s'", type->mime); ++ } ++ } ++ ++ return ret; ++} ++ ++static const gchar * ++caps_to_mime (GstCaps * caps) ++{ ++ GstStructure *s; ++ const gchar *name; ++ ++ s = gst_caps_get_structure (caps, 0); ++ if (!s) ++ return NULL; ++ ++ name = gst_structure_get_name (s); ++ ++ if (strcmp (name, "audio/mpeg") == 0) { ++ gint mpegversion; ++ ++ if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) ++ return NULL; ++ ++ if (mpegversion == 1) { ++ gint layer; ++ ++ if (!gst_structure_get_int (s, "layer", &layer) || layer == 3) ++ return "audio/mpeg"; ++ else if (layer == 2) ++ return "audio/mpeg-L2"; ++ } else if (mpegversion == 2 || mpegversion == 4) { 
++ return "audio/mp4a-latm"; ++ } ++ } else if (strcmp (name, "audio/AMR") == 0) { ++ return "audio/3gpp"; ++ } else if (strcmp (name, "audio/AMR-WB") == 0) { ++ return "audio/amr-wb"; ++ } else if (strcmp (name, "audio/x-alaw") == 0) { ++ return "audio/g711-alaw"; ++ } else if (strcmp (name, "audio/x-mulaw") == 0) { ++ return "audio/g711-mlaw"; ++ } else if (strcmp (name, "audio/x-vorbis") == 0) { ++ return "audio/vorbis"; ++ } ++ ++ return NULL; ++} ++ ++static GstCaps * ++create_src_caps (const GstAmcCodecInfo * codec_info) ++{ ++ GstCaps *ret; ++ ++ ret = gst_caps_new_simple ("audio/x-raw", ++ "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "format", G_TYPE_STRING, GST_AUDIO_NE (S16), NULL); ++ ++ return ret; ++} ++ ++static void ++gst_amc_audio_dec_base_init (gpointer g_class) ++{ ++ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); ++ GstAmcAudioDecClass *amcaudiodec_class = GST_AMC_AUDIO_DEC_CLASS (g_class); ++ const GstAmcCodecInfo *codec_info; ++ GstPadTemplate *templ; ++ GstCaps *caps; ++ gchar *longname; ++ ++ codec_info = ++ g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), gst_amc_codec_info_quark); ++ /* This happens for the base class and abstract subclasses */ ++ if (!codec_info) ++ return; ++ ++ amcaudiodec_class->codec_info = codec_info; ++ ++ /* Add pad templates */ ++ caps = create_sink_caps (codec_info); ++ templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); ++ gst_element_class_add_pad_template (element_class, templ); ++ gst_caps_unref (caps); ++ ++ caps = create_src_caps (codec_info); ++ templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); ++ gst_element_class_add_pad_template (element_class, templ); ++ gst_caps_unref (caps); ++ ++ longname = g_strdup_printf ("Android MediaCodec %s", codec_info->name); ++ gst_element_class_set_metadata (element_class, ++ codec_info->name, ++ "Codec/Decoder/Audio", ++ longname, "Sebastian Dröge "); ++ g_free 
(longname); ++} ++ ++static void ++gst_amc_audio_dec_class_init (GstAmcAudioDecClass * klass) ++{ ++ GObjectClass *gobject_class = G_OBJECT_CLASS (klass); ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ GstAudioDecoderClass *audiodec_class = GST_AUDIO_DECODER_CLASS (klass); ++ ++ parent_class = g_type_class_peek_parent (klass); ++ ++ gobject_class->finalize = gst_amc_audio_dec_finalize; ++ ++ element_class->change_state = ++ GST_DEBUG_FUNCPTR (gst_amc_audio_dec_change_state); ++ ++ audiodec_class->start = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_start); ++ audiodec_class->stop = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_stop); ++ audiodec_class->open = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_open); ++ audiodec_class->close = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_close); ++ audiodec_class->flush = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_flush); ++ audiodec_class->set_format = GST_DEBUG_FUNCPTR (gst_amc_audio_dec_set_format); ++ audiodec_class->handle_frame = ++ GST_DEBUG_FUNCPTR (gst_amc_audio_dec_handle_frame); ++} ++ ++static void ++gst_amc_audio_dec_init (GstAmcAudioDec * self) ++{ ++ gst_audio_decoder_set_needs_format (GST_AUDIO_DECODER (self), TRUE); ++ gst_audio_decoder_set_drainable (GST_AUDIO_DECODER (self), TRUE); ++ ++ g_mutex_init (&self->drain_lock); ++ g_cond_init (&self->drain_cond); ++} ++ ++static gboolean ++gst_amc_audio_dec_open (GstAudioDecoder * decoder) ++{ ++ GstAmcAudioDec *self = GST_AMC_AUDIO_DEC (decoder); ++ GstAmcAudioDecClass *klass = GST_AMC_AUDIO_DEC_GET_CLASS (self); ++ ++ GST_DEBUG_OBJECT (self, "Opening decoder"); ++ ++ self->codec = gst_amc_codec_new (klass->codec_info->name); ++ if (!self->codec) ++ return FALSE; ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ GST_DEBUG_OBJECT (self, "Opened decoder"); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_audio_dec_close (GstAudioDecoder * decoder) ++{ ++ GstAmcAudioDec *self = GST_AMC_AUDIO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Closing decoder"); ++ ++ if 
(self->codec) ++ gst_amc_codec_free (self->codec); ++ self->codec = NULL; ++ ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ GST_DEBUG_OBJECT (self, "Closed decoder"); ++ ++ return TRUE; ++} ++ ++static void ++gst_amc_audio_dec_finalize (GObject * object) ++{ ++ GstAmcAudioDec *self = GST_AMC_AUDIO_DEC (object); ++ ++ g_mutex_clear (&self->drain_lock); ++ g_cond_clear (&self->drain_cond); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static GstStateChangeReturn ++gst_amc_audio_dec_change_state (GstElement * element, GstStateChange transition) ++{ ++ GstAmcAudioDec *self; ++ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; ++ ++ g_return_val_if_fail (GST_IS_AMC_AUDIO_DEC (element), ++ GST_STATE_CHANGE_FAILURE); ++ self = GST_AMC_AUDIO_DEC (element); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_NULL_TO_READY: ++ break; ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ self->downstream_flow_ret = GST_FLOW_OK; ++ self->draining = FALSE; ++ self->started = FALSE; ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_PLAYING: ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ self->flushing = TRUE; ++ gst_amc_codec_flush (self->codec); ++ g_mutex_lock (&self->drain_lock); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ g_mutex_unlock (&self->drain_lock); ++ break; ++ default: ++ break; ++ } ++ ++ if (ret == GST_STATE_CHANGE_FAILURE) ++ return ret; ++ ++ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++ ++ if (ret == GST_STATE_CHANGE_FAILURE) ++ return ret; ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_PLAYING_TO_PAUSED: ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ self->started = FALSE; ++ break; ++ case GST_STATE_CHANGE_READY_TO_NULL: ++ break; ++ default: ++ break; ++ } ++ ++ return ret; ++} ++ ++static gboolean ++gst_amc_audio_dec_set_src_caps (GstAmcAudioDec * self, GstAmcFormat * format) ++{ ++ gint rate, channels; ++ 
guint32 channel_mask = 0; ++ GstAudioChannelPosition to[64]; ++ ++ if (!gst_amc_format_get_int (format, "sample-rate", &rate) || ++ !gst_amc_format_get_int (format, "channel-count", &channels)) { ++ GST_ERROR_OBJECT (self, "Failed to get output format metadata"); ++ return FALSE; ++ } ++ ++ if (rate == 0 || channels == 0) { ++ GST_ERROR_OBJECT (self, "Rate or channels not set"); ++ return FALSE; ++ } ++ ++ /* Not always present */ ++ if (gst_amc_format_contains_key (format, "channel-mask")) ++ gst_amc_format_get_int (format, "channel-mask", (gint *) & channel_mask); ++ ++ gst_amc_audio_channel_mask_to_positions (channel_mask, channels, ++ self->positions); ++ memcpy (to, self->positions, sizeof (to)); ++ gst_audio_channel_positions_to_valid_order (to, channels); ++ self->needs_reorder = ++ (memcmp (self->positions, to, ++ sizeof (GstAudioChannelPosition) * channels) != 0); ++ if (self->needs_reorder) ++ gst_audio_get_channel_reorder_map (channels, self->positions, to, ++ self->reorder_map); ++ ++ gst_audio_info_init (&self->info); ++ gst_audio_info_set_format (&self->info, GST_AUDIO_FORMAT_S16, rate, channels, ++ to); ++ ++ if (!gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (self), ++ &self->info)) ++ return FALSE; ++ ++ self->input_caps_changed = FALSE; ++ ++ return TRUE; ++} ++ ++static void ++gst_amc_audio_dec_loop (GstAmcAudioDec * self) ++{ ++ GstFlowReturn flow_ret = GST_FLOW_OK; ++ gboolean is_eos; ++ GstAmcBufferInfo buffer_info; ++ gint idx; ++ ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ ++retry: ++ /*if (self->input_caps_changed) { ++ idx = INFO_OUTPUT_FORMAT_CHANGED; ++ } else { */ ++ GST_DEBUG_OBJECT (self, "Waiting for available output buffer"); ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ /* Wait at most 100ms here, some codecs don't fail dequeueing if ++ * the codec is flushing, causing deadlocks during shutdown */ ++ idx = gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ 
/*} */ ++ ++ if (idx < 0) { ++ if (self->flushing) ++ goto flushing; ++ ++ switch (idx) { ++ case INFO_OUTPUT_BUFFERS_CHANGED:{ ++ GST_DEBUG_OBJECT (self, "Output buffers have changed"); ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, ++ self->n_output_buffers); ++ self->output_buffers = ++ gst_amc_codec_get_output_buffers (self->codec, ++ &self->n_output_buffers); ++ if (!self->output_buffers) ++ goto get_output_buffers_error; ++ break; ++ } ++ case INFO_OUTPUT_FORMAT_CHANGED:{ ++ GstAmcFormat *format; ++ gchar *format_string; ++ ++ GST_DEBUG_OBJECT (self, "Output format has changed"); ++ ++ format = gst_amc_codec_get_output_format (self->codec); ++ if (!format) ++ goto format_error; ++ ++ format_string = gst_amc_format_to_string (format); ++ GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string); ++ g_free (format_string); ++ ++ if (!gst_amc_audio_dec_set_src_caps (self, format)) { ++ gst_amc_format_free (format); ++ goto format_error; ++ } ++ gst_amc_format_free (format); ++ ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, ++ self->n_output_buffers); ++ self->output_buffers = ++ gst_amc_codec_get_output_buffers (self->codec, ++ &self->n_output_buffers); ++ if (!self->output_buffers) ++ goto get_output_buffers_error; ++ ++ goto retry; ++ break; ++ } ++ case INFO_TRY_AGAIN_LATER: ++ GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out"); ++ goto retry; ++ break; ++ case G_MININT: ++ GST_ERROR_OBJECT (self, "Failure dequeueing output buffer"); ++ goto dequeue_error; ++ break; ++ default: ++ g_assert_not_reached (); ++ break; ++ } ++ ++ goto retry; ++ } ++ ++ GST_DEBUG_OBJECT (self, ++ "Got output buffer at index %d: size %d time %" G_GINT64_FORMAT ++ " flags 0x%08x", idx, buffer_info.size, buffer_info.presentation_time_us, ++ buffer_info.flags); ++ ++ is_eos = ! 
!(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM); ++ self->n_buffers++; ++ ++ if (buffer_info.size > 0) { ++ GstAmcAudioDecClass *klass = GST_AMC_AUDIO_DEC_GET_CLASS (self); ++ GstBuffer *outbuf; ++ GstAmcBuffer *buf; ++ GstMapInfo minfo; ++ ++ /* This sometimes happens at EOS or if the input is not properly framed, ++ * let's handle it gracefully by allocating a new buffer for the current ++ * caps and filling it ++ */ ++ if (idx >= self->n_output_buffers) ++ goto invalid_buffer_index; ++ ++ if (strcmp (klass->codec_info->name, "OMX.google.mp3.decoder") == 0) { ++ /* Google's MP3 decoder outputs garbage in the first output buffer ++ * so we just drop it here */ ++ if (self->n_buffers == 1) { ++ GST_DEBUG_OBJECT (self, ++ "Skipping first buffer of Google MP3 decoder output"); ++ goto done; ++ } ++ } ++ ++ outbuf = ++ gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self), ++ buffer_info.size); ++ if (!outbuf) ++ goto failed_allocate; ++ ++ gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE); ++ buf = &self->output_buffers[idx]; ++ if (self->needs_reorder) { ++ gint i, n_samples, c, n_channels; ++ gint *reorder_map = self->reorder_map; ++ gint16 *dest, *source; ++ ++ dest = (gint16 *) minfo.data; ++ source = (gint16 *) (buf->data + buffer_info.offset); ++ n_samples = buffer_info.size / self->info.bpf; ++ n_channels = self->info.channels; ++ ++ for (i = 0; i < n_samples; i++) { ++ for (c = 0; c < n_channels; c++) { ++ dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c]; ++ } ++ } ++ } else { ++ orc_memcpy (minfo.data, buf->data + buffer_info.offset, buffer_info.size); ++ } ++ gst_buffer_unmap (outbuf, &minfo); ++ ++ /* FIXME: We should get one decoded input frame here for ++ * every buffer. 
If this is not the case somewhere, we will ++ * error out at some point and will need to add workarounds ++ */ ++ flow_ret = ++ gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf, 1); ++ } ++ ++done: ++ if (!gst_amc_codec_release_output_buffer (self->codec, idx)) ++ goto failed_release; ++ ++ if (is_eos || flow_ret == GST_FLOW_EOS) { ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ g_mutex_lock (&self->drain_lock); ++ if (self->draining) { ++ GST_DEBUG_OBJECT (self, "Drained"); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ } else if (flow_ret == GST_FLOW_OK) { ++ GST_DEBUG_OBJECT (self, "Component signalled EOS"); ++ flow_ret = GST_FLOW_EOS; ++ } ++ g_mutex_unlock (&self->drain_lock); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ } else { ++ GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret)); ++ } ++ ++ self->downstream_flow_ret = flow_ret; ++ ++ if (flow_ret != GST_FLOW_OK) ++ goto flow_error; ++ ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ ++ return; ++ ++dequeue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to dequeue output buffer")); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++ ++get_output_buffers_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to get output buffers")); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++ ++format_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to handle format")); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD 
(self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++failed_release: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to release output buffer index %d", idx)); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++flushing: ++ { ++ GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++ ++flow_error: ++ { ++ if (flow_ret == GST_FLOW_EOS) { ++ GST_DEBUG_OBJECT (self, "EOS"); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), ++ gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { ++ GST_ELEMENT_ERROR (self, STREAM, FAILED, ++ ("Internal data stream error."), ("stream stopped, reason %s", ++ gst_flow_get_name (flow_ret))); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), ++ gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ } ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++ ++invalid_buffer_index: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Invalid input buffer index %d of %d", idx, self->n_input_buffers)); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++ ++failed_allocate: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ++ ("Failed to allocate output buffer")); ++ gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ 
gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++} ++ ++static gboolean ++gst_amc_audio_dec_start (GstAudioDecoder * decoder) ++{ ++ GstAmcAudioDec *self; ++ ++ self = GST_AMC_AUDIO_DEC (decoder); ++ self->last_upstream_ts = 0; ++ self->eos = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_audio_dec_stop (GstAudioDecoder * decoder) ++{ ++ GstAmcAudioDec *self; ++ ++ self = GST_AMC_AUDIO_DEC (decoder); ++ GST_DEBUG_OBJECT (self, "Stopping decoder"); ++ self->flushing = TRUE; ++ if (self->started) { ++ gst_amc_codec_flush (self->codec); ++ gst_amc_codec_stop (self->codec); ++ self->started = FALSE; ++ if (self->input_buffers) ++ gst_amc_codec_free_buffers (self->input_buffers, self->n_input_buffers); ++ self->input_buffers = NULL; ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, self->n_output_buffers); ++ self->output_buffers = NULL; ++ } ++ gst_pad_stop_task (GST_AUDIO_DECODER_SRC_PAD (decoder)); ++ ++ memset (self->positions, 0, sizeof (self->positions)); ++ ++ g_list_foreach (self->codec_datas, (GFunc) g_free, NULL); ++ g_list_free (self->codec_datas); ++ self->codec_datas = NULL; ++ ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ self->eos = FALSE; ++ g_mutex_lock (&self->drain_lock); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ g_mutex_unlock (&self->drain_lock); ++ ++ GST_DEBUG_OBJECT (self, "Stopped decoder"); ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps) ++{ ++ GstAmcAudioDec *self; ++ GstStructure *s; ++ GstAmcFormat *format; ++ const gchar *mime; ++ gboolean is_format_change = FALSE; ++ gboolean needs_disable = FALSE; ++ gchar *format_string; ++ gint rate, channels; ++ ++ self = GST_AMC_AUDIO_DEC 
(decoder); ++ ++ GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps); ++ ++ /* Check if the caps change is a real format change or if only irrelevant ++ * parts of the caps have changed or nothing at all. ++ */ ++ is_format_change |= (!self->input_caps ++ || !gst_caps_is_equal (self->input_caps, caps)); ++ ++ needs_disable = self->started; ++ ++ /* If the component is not started and a real format change happens ++ * we have to restart the component. If no real format change ++ * happened we can just exit here. ++ */ ++ if (needs_disable && !is_format_change) { ++ /* Framerate or something minor changed */ ++ self->input_caps_changed = TRUE; ++ GST_DEBUG_OBJECT (self, ++ "Already running and caps did not change the format"); ++ return TRUE; ++ } ++ ++ if (needs_disable && is_format_change) { ++ gst_amc_audio_dec_drain (self); ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ gst_amc_audio_dec_stop (GST_AUDIO_DECODER (self)); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ gst_amc_audio_dec_close (GST_AUDIO_DECODER (self)); ++ if (!gst_amc_audio_dec_open (GST_AUDIO_DECODER (self))) { ++ GST_ERROR_OBJECT (self, "Failed to open codec again"); ++ return FALSE; ++ } ++ ++ if (!gst_amc_audio_dec_start (GST_AUDIO_DECODER (self))) { ++ GST_ERROR_OBJECT (self, "Failed to start codec again"); ++ } ++ } ++ /* srcpad task is not running at this point */ ++ ++ mime = caps_to_mime (caps); ++ if (!mime) { ++ GST_ERROR_OBJECT (self, "Failed to convert caps to mime"); ++ return FALSE; ++ } ++ ++ s = gst_caps_get_structure (caps, 0); ++ if (!gst_structure_get_int (s, "rate", &rate) || ++ !gst_structure_get_int (s, "channels", &channels)) { ++ GST_ERROR_OBJECT (self, "Failed to get rate/channels"); ++ return FALSE; ++ } ++ ++ format = gst_amc_format_new_audio (mime, rate, channels); ++ if (!format) { ++ GST_ERROR_OBJECT (self, "Failed to create audio format"); ++ return FALSE; ++ } ++ ++ /* FIXME: These buffers needs to be valid until the codec is stopped again */ ++ 
g_list_foreach (self->codec_datas, (GFunc) gst_buffer_unref, NULL); ++ g_list_free (self->codec_datas); ++ self->codec_datas = NULL; ++ if (gst_structure_has_field (s, "codec_data")) { ++ const GValue *h = gst_structure_get_value (s, "codec_data"); ++ GstBuffer *codec_data = gst_value_get_buffer (h); ++ GstMapInfo minfo; ++ guint8 *data; ++ ++ gst_buffer_map (codec_data, &minfo, GST_MAP_READ); ++ data = g_memdup (minfo.data, minfo.size); ++ self->codec_datas = g_list_prepend (self->codec_datas, data); ++ gst_amc_format_set_buffer (format, "csd-0", data, minfo.size); ++ gst_buffer_unmap (codec_data, &minfo); ++ } else if (gst_structure_has_field (s, "streamheader")) { ++ const GValue *sh = gst_structure_get_value (s, "streamheader"); ++ gint nsheaders = gst_value_array_get_size (sh); ++ GstBuffer *buf; ++ const GValue *h; ++ gint i, j; ++ gchar *fname; ++ GstMapInfo minfo; ++ guint8 *data; ++ ++ for (i = 0, j = 0; i < nsheaders; i++) { ++ h = gst_value_array_get_value (sh, i); ++ buf = gst_value_get_buffer (h); ++ ++ if (strcmp (mime, "audio/vorbis") == 0) { ++ guint8 header_type; ++ ++ gst_buffer_extract (buf, 0, &header_type, 1); ++ ++ /* Only use the identification and setup packets */ ++ if (header_type != 0x01 && header_type != 0x05) ++ continue; ++ } ++ ++ fname = g_strdup_printf ("csd-%d", j); ++ gst_buffer_map (buf, &minfo, GST_MAP_READ); ++ data = g_memdup (minfo.data, minfo.size); ++ self->codec_datas = g_list_prepend (self->codec_datas, data); ++ gst_amc_format_set_buffer (format, fname, data, minfo.size); ++ gst_buffer_unmap (buf, &minfo); ++ g_free (fname); ++ j++; ++ } ++ } ++ ++ format_string = gst_amc_format_to_string (format); ++ GST_DEBUG_OBJECT (self, "Configuring codec with format: %s", format_string); ++ g_free (format_string); ++ ++ self->n_buffers = 0; ++ if (!gst_amc_codec_configure (self->codec, format, NULL, 0)) { ++ GST_ERROR_OBJECT (self, "Failed to configure codec"); ++ return FALSE; ++ } ++ ++ gst_amc_format_free (format); ++ ++ if 
(!gst_amc_codec_start (self->codec)) { ++ GST_ERROR_OBJECT (self, "Failed to start codec"); ++ return FALSE; ++ } ++ ++ if (self->input_buffers) ++ gst_amc_codec_free_buffers (self->input_buffers, self->n_input_buffers); ++ self->input_buffers = ++ gst_amc_codec_get_input_buffers (self->codec, &self->n_input_buffers); ++ if (!self->input_buffers) { ++ GST_ERROR_OBJECT (self, "Failed to get input buffers"); ++ return FALSE; ++ } ++ ++ self->started = TRUE; ++ self->input_caps_changed = TRUE; ++ ++ /* Start the srcpad loop again */ ++ self->flushing = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self), ++ (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL); ++ ++ return TRUE; ++} ++ ++static void ++gst_amc_audio_dec_flush (GstAudioDecoder * decoder, gboolean hard) ++{ ++ GstAmcAudioDec *self; ++ ++ self = GST_AMC_AUDIO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Resetting decoder"); ++ ++ if (!self->started) { ++ GST_DEBUG_OBJECT (self, "Codec not started yet"); ++ return; ++ } ++ ++ self->flushing = TRUE; ++ gst_amc_codec_flush (self->codec); ++ ++ /* Wait until the srcpad loop is finished, ++ * unlock GST_AUDIO_DECODER_STREAM_LOCK to prevent deadlocks ++ * caused by using this lock from inside the loop function */ ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ GST_PAD_STREAM_LOCK (GST_AUDIO_DECODER_SRC_PAD (self)); ++ GST_PAD_STREAM_UNLOCK (GST_AUDIO_DECODER_SRC_PAD (self)); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ self->flushing = FALSE; ++ ++ /* Start the srcpad loop again */ ++ self->last_upstream_ts = 0; ++ self->eos = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self), ++ (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL); ++ ++ GST_DEBUG_OBJECT (self, "Reset decoder"); ++} ++ ++static GstFlowReturn ++gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf) ++{ ++ GstAmcAudioDec *self; ++ gint idx; ++ GstAmcBuffer 
*buf; ++ GstAmcBufferInfo buffer_info; ++ guint offset = 0; ++ GstClockTime timestamp, duration, timestamp_offset = 0; ++ GstMapInfo minfo; ++ ++ memset (&minfo, 0, sizeof (minfo)); ++ ++ self = GST_AMC_AUDIO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Handling frame"); ++ ++ /* Make sure to keep a reference to the input here, ++ * it can be unreffed from the other thread if ++ * finish_frame() is called */ ++ if (inbuf) ++ inbuf = gst_buffer_ref (inbuf); ++ ++ if (!self->started) { ++ GST_ERROR_OBJECT (self, "Codec not started yet"); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_NOT_NEGOTIATED; ++ } ++ ++ if (self->eos) { ++ GST_WARNING_OBJECT (self, "Got frame after EOS"); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_EOS; ++ } ++ ++ if (self->flushing) ++ goto flushing; ++ ++ if (self->downstream_flow_ret != GST_FLOW_OK) ++ goto downstream_error; ++ ++ if (!inbuf) ++ return gst_amc_audio_dec_drain (self); ++ ++ timestamp = GST_BUFFER_PTS (inbuf); ++ duration = GST_BUFFER_DURATION (inbuf); ++ ++ gst_buffer_map (inbuf, &minfo, GST_MAP_READ); ++ ++ while (offset < minfo.size) { ++ /* Make sure to release the base class stream lock, otherwise ++ * _loop() can't call _finish_frame() and we might block forever ++ * because no input buffers are released */ ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ /* Wait at most 100ms here, some codecs don't fail dequeueing if ++ * the codec is flushing, causing deadlocks during shutdown */ ++ idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ ++ if (idx < 0) { ++ if (self->flushing) ++ goto flushing; ++ switch (idx) { ++ case INFO_TRY_AGAIN_LATER: ++ GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out"); ++ continue; /* next try */ ++ break; ++ case G_MININT: ++ GST_ERROR_OBJECT (self, "Failed to dequeue input buffer"); ++ goto dequeue_error; ++ default: ++ g_assert_not_reached (); ++ break; ++ } ++ ++ continue; ++ } ++ ++ if (idx >= 
self->n_input_buffers) ++ goto invalid_buffer_index; ++ ++ if (self->flushing) ++ goto flushing; ++ ++ if (self->downstream_flow_ret != GST_FLOW_OK) { ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info); ++ goto downstream_error; ++ } ++ ++ /* Now handle the frame */ ++ ++ /* Copy the buffer content in chunks of size as requested ++ * by the port */ ++ buf = &self->input_buffers[idx]; ++ ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ buffer_info.offset = 0; ++ buffer_info.size = MIN (minfo.size - offset, buf->size); ++ ++ orc_memcpy (buf->data, minfo.data + offset, buffer_info.size); ++ ++ /* Interpolate timestamps if we're passing the buffer ++ * in multiple chunks */ ++ if (offset != 0 && duration != GST_CLOCK_TIME_NONE) { ++ timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size); ++ } ++ ++ if (timestamp != GST_CLOCK_TIME_NONE) { ++ buffer_info.presentation_time_us = ++ gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND); ++ self->last_upstream_ts = timestamp + timestamp_offset; ++ } ++ if (duration != GST_CLOCK_TIME_NONE) ++ self->last_upstream_ts += duration; ++ ++ if (offset == 0) { ++ if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT)) ++ buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME; ++ } ++ ++ offset += buffer_info.size; ++ GST_DEBUG_OBJECT (self, ++ "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x", ++ idx, buffer_info.size, buffer_info.presentation_time_us, ++ buffer_info.flags); ++ if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info)) ++ goto queue_error; ++ } ++ gst_buffer_unmap (inbuf, &minfo); ++ gst_buffer_unref (inbuf); ++ ++ return self->downstream_flow_ret; ++ ++downstream_error: ++ { ++ GST_ERROR_OBJECT (self, "Downstream returned %s", ++ gst_flow_get_name (self->downstream_flow_ret)); ++ if (minfo.data) ++ gst_buffer_unmap (inbuf, &minfo); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ 
return self->downstream_flow_ret; ++ } ++invalid_buffer_index: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Invalid input buffer index %d of %d", idx, self->n_input_buffers)); ++ if (minfo.data) ++ gst_buffer_unmap (inbuf, &minfo); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_ERROR; ++ } ++dequeue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to dequeue input buffer")); ++ if (minfo.data) ++ gst_buffer_unmap (inbuf, &minfo); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_ERROR; ++ } ++queue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to queue input buffer")); ++ if (minfo.data) ++ gst_buffer_unmap (inbuf, &minfo); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_ERROR; ++ } ++flushing: ++ { ++ GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING"); ++ if (minfo.data) ++ gst_buffer_unmap (inbuf, &minfo); ++ if (inbuf) ++ gst_buffer_unref (inbuf); ++ return GST_FLOW_FLUSHING; ++ } ++} ++ ++static GstFlowReturn ++gst_amc_audio_dec_drain (GstAmcAudioDec * self) ++{ ++ GstFlowReturn ret; ++ gint idx; ++ ++ GST_DEBUG_OBJECT (self, "Draining codec"); ++ if (!self->started) { ++ GST_DEBUG_OBJECT (self, "Codec not started yet"); ++ return GST_FLOW_OK; ++ } ++ ++ /* Don't send EOS buffer twice, this doesn't work */ ++ if (self->eos) { ++ GST_DEBUG_OBJECT (self, "Codec is EOS already"); ++ return GST_FLOW_OK; ++ } ++ ++ /* Make sure to release the base class stream lock, otherwise ++ * _loop() can't call _finish_frame() and we might block forever ++ * because no input buffers are released */ ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ /* Send an EOS buffer to the component and let the base ++ * class drop the EOS event. We will send it later when ++ * the EOS buffer arrives on the output port. ++ * Wait at most 0.5s here. 
*/ ++ idx = gst_amc_codec_dequeue_input_buffer (self->codec, 500000); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ ++ if (idx >= 0 && idx < self->n_input_buffers) { ++ GstAmcBufferInfo buffer_info; ++ ++ GST_AUDIO_DECODER_STREAM_UNLOCK (self); ++ g_mutex_lock (&self->drain_lock); ++ self->draining = TRUE; ++ ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ buffer_info.size = 0; ++ buffer_info.presentation_time_us = ++ gst_util_uint64_scale (self->last_upstream_ts, 1, GST_USECOND); ++ buffer_info.flags |= BUFFER_FLAG_END_OF_STREAM; ++ ++ if (gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info)) { ++ GST_DEBUG_OBJECT (self, "Waiting until codec is drained"); ++ g_cond_wait (&self->drain_cond, &self->drain_lock); ++ GST_DEBUG_OBJECT (self, "Drained codec"); ++ ret = GST_FLOW_OK; ++ } else { ++ GST_ERROR_OBJECT (self, "Failed to queue input buffer"); ++ ret = GST_FLOW_ERROR; ++ } ++ ++ g_mutex_unlock (&self->drain_lock); ++ GST_AUDIO_DECODER_STREAM_LOCK (self); ++ } else if (idx >= self->n_input_buffers) { ++ GST_ERROR_OBJECT (self, "Invalid input buffer index %d of %d", ++ idx, self->n_input_buffers); ++ ret = GST_FLOW_ERROR; ++ } else { ++ GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", idx); ++ ret = GST_FLOW_ERROR; ++ } ++ ++ return ret; ++} +diff --git a/sys/androidmedia/gstamchybris.c b/sys/androidmedia/gstamchybris.c +new file mode 100644 +index 0000000..2120ad9 +--- /dev/null ++++ b/sys/androidmedia/gstamchybris.c +@@ -0,0 +1,1836 @@ ++/* ++ * Initially based on gstamc.c ++ * ++ * Copyright (C) 2013, Canonical Ltd. ++ * Author: Jim Hodapp ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation ++ * version 2.1 of the License. 
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. ++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with this library; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gstamc.h" ++#include "gstamc-constants.h" ++ ++#include "gstamcvideodec.h" ++#include "gstamcaudiodec.h" ++ ++#include ++#include ++#include ++#include ++#include ++ ++#include ++#include ++#include ++#include ++ ++#include ++ ++GST_DEBUG_CATEGORY (gst_amc_debug); ++#define GST_CAT_DEFAULT gst_amc_debug ++ ++GQuark gst_amc_codec_info_quark = 0; ++ ++static GList *codec_infos = NULL; ++#ifdef GST_AMC_IGNORE_UNKNOWN_COLOR_FORMATS ++static gboolean ignore_unknown_color_formats = TRUE; ++#else ++static gboolean ignore_unknown_color_formats = FALSE; ++#endif ++ ++static gboolean accepted_color_formats (GstAmcCodecType * type, ++ gboolean is_encoder); ++ ++static gchar * ++locale_to_utf8 (gchar * str, gssize len) ++{ ++ GError *error = NULL; ++ gsize bytes_read = 0, bytes_written = 0; ++ gchar *out = NULL; ++ ++ out = g_locale_to_utf8 (str, len, &bytes_read, &bytes_written, &error); ++ if (bytes_read == 0) ++ GST_WARNING ("Zero bytes read for UTF8 string conversion"); ++ if (bytes_written == 0) ++ GST_WARNING ("Zero bytes written for UTF8 string conversion"); ++ ++ return out; ++} ++ ++GstAmcCodec * ++gst_amc_codec_new (const gchar * name) ++{ ++ GstAmcCodec *codec = NULL; ++ gchar *name_str = NULL; ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ g_return_val_if_fail (name != NULL, NULL); ++ ++ codec = g_slice_new0 (GstAmcCodec); ++ ++ name_str = g_strdup (name); ++ name_str = locale_to_utf8 
(name_str, strlen (name)); ++ if (name_str == NULL) ++ goto error; ++ GST_DEBUG ("codec name '%s'", name_str); ++ ++ codec->codec_delegate = media_codec_create_by_codec_name (name_str); ++ if (codec->codec_delegate == NULL) { ++ GST_ERROR ("Failed to create codec '%s'", name_str); ++ goto error; ++ } ++ ++done: ++ if (name_str) ++ g_free (name_str); ++ name_str = NULL; ++ ++ return codec; ++ ++error: ++ if (codec) ++ g_slice_free (GstAmcCodec, codec); ++ codec = NULL; ++ goto done; ++} ++ ++void ++gst_amc_codec_free (GstAmcCodec * codec) ++{ ++ g_return_if_fail (codec != NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ media_codec_delegate_unref (codec->codec_delegate); ++ media_codec_delegate_destroy (codec->codec_delegate); ++ ++ g_slice_free (GstAmcCodec, codec); ++} ++ ++gboolean ++gst_amc_codec_configure (GstAmcCodec * codec, GstAmcFormat * format, ++ SurfaceTextureClientHybris stc, gint flags) ++{ ++ gboolean ret = TRUE; ++ int err = 0; ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ g_return_val_if_fail (format != NULL, FALSE); ++ ++ err = media_codec_configure (codec->codec_delegate, format->format, stc, 0); ++ if (err > 0) { ++ GST_ERROR ("Failed to configure media codec"); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_queue_csd (GstAmcCodec * codec, GstAmcFormat * format) ++{ ++ gboolean ret = TRUE; ++ int err = 0; ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ g_return_val_if_fail (format != NULL, FALSE); ++ ++ err = media_codec_queue_csd (codec->codec_delegate, format->format); ++ if (err > 0) { ++ GST_ERROR ("Failed to queue codec specific data"); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ ++ return ret; ++} ++ ++GstAmcFormat * ++gst_amc_codec_get_output_format (GstAmcCodec * codec) ++{ ++ GstAmcFormat *ret = NULL; ++ ++ g_return_val_if_fail (codec != NULL, NULL); ++ ++ GST_DEBUG ("%s", 
__PRETTY_FUNCTION__); ++ ++ ret = g_slice_new0 (GstAmcFormat); ++ ++ ret->format = media_codec_get_output_format (codec->codec_delegate); ++ if (ret->format == NULL) { ++ GST_ERROR ("Failed to get output format"); ++ g_slice_free (GstAmcFormat, ret); ++ ret = NULL; ++ goto done; ++ } ++ ++done: ++ ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_start (GstAmcCodec * codec) ++{ ++ gboolean ret = TRUE; ++ int err = 0; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ err = media_codec_start (codec->codec_delegate); ++ if (err > 0) { ++ GST_ERROR ("Failed to start media codec"); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_stop (GstAmcCodec * codec) ++{ ++ gboolean ret = TRUE; ++ int err = 0; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ err = media_codec_stop (codec->codec_delegate); ++ if (err > 0) { ++ GST_ERROR ("Failed to stop media codec"); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_flush (GstAmcCodec * codec) ++{ ++ gboolean ret = TRUE; ++ gint err = 0; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ err = media_codec_flush (codec->codec_delegate); ++ if (err < 0) { ++ GST_ERROR ("Failed to flush the media codec (err: %d)", err); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_release (GstAmcCodec * codec) ++{ ++ gboolean ret = TRUE; ++ gint err = 0; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ err = media_codec_release (codec->codec_delegate); ++ if (err < 0) { ++ GST_ERROR ("Failed to release media codec (err: %d)", err); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++void ++gst_amc_codec_free_buffers (GstAmcBuffer * buffers, gsize n_buffers) ++{ ++ g_return_if_fail 
(buffers != NULL); ++ ++ g_free (buffers); ++} ++ ++GstAmcBuffer * ++gst_amc_codec_get_output_buffers (GstAmcCodec * codec, gsize * n_buffers) ++{ ++ size_t n_output_buffers; ++ GstAmcBuffer *ret = NULL; ++ size_t i; ++ ++ g_return_val_if_fail (codec != NULL, NULL); ++ g_return_val_if_fail (n_buffers != NULL, NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ *n_buffers = 0; ++ n_output_buffers = ++ media_codec_get_output_buffers_size (codec->codec_delegate); ++ if (n_output_buffers == 0) { ++ GST_ERROR ("Failed to get output buffers array length"); ++ goto done; ++ } ++ GST_DEBUG ("n_output_buffers: %u", n_output_buffers); ++ ++ *n_buffers = n_output_buffers; ++ ret = g_new0 (GstAmcBuffer, n_output_buffers); ++ ++ for (i = 0; i < n_output_buffers; i++) { ++ ret[i].data = media_codec_get_nth_output_buffer (codec->codec_delegate, i); ++ if (!ret[i].data) { ++ GST_ERROR ("Failed to get output buffer address %d", i); ++ goto error; ++ } ++ ret[i].size = ++ media_codec_get_nth_output_buffer_capacity (codec->codec_delegate, i); ++ GST_DEBUG ("output buffer[%d] size: %d", i, ret[i].size); ++ } ++ ++done: ++ return ret; ++ ++error: ++ if (ret) ++ gst_amc_codec_free_buffers (ret, n_output_buffers); ++ ret = NULL; ++ *n_buffers = 0; ++ goto done; ++} ++ ++GstAmcBuffer * ++gst_amc_codec_get_input_buffers (GstAmcCodec * codec, gsize * n_buffers) ++{ ++ size_t n_input_buffers; ++ GstAmcBuffer *ret = NULL; ++ size_t i; ++ ++ g_return_val_if_fail (codec != NULL, NULL); ++ g_return_val_if_fail (n_buffers != NULL, NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ *n_buffers = 0; ++ n_input_buffers = media_codec_get_input_buffers_size (codec->codec_delegate); ++ if (n_input_buffers == 0) { ++ GST_ERROR ("Failed to get input buffers array length"); ++ goto done; ++ } ++ GST_DEBUG ("n_input_buffers: %u", n_input_buffers); ++ ++ *n_buffers = n_input_buffers; ++ ret = g_new0 (GstAmcBuffer, n_input_buffers); ++ ++ for (i = 0; i < n_input_buffers; i++) { ++ ret[i].data = 
media_codec_get_nth_input_buffer (codec->codec_delegate, i); ++ if (!ret[i].data) { ++ GST_ERROR ("Failed to get input buffer address %d", i); ++ goto error; ++ } ++ ret[i].size = ++ media_codec_get_nth_input_buffer_capacity (codec->codec_delegate, i); ++ GST_DEBUG ("input buffer[%d] size: %d", i, ret[i].size); ++ } ++ ++done: ++ return ret; ++ ++error: ++ if (ret) ++ gst_amc_codec_free_buffers (ret, n_input_buffers); ++ ret = NULL; ++ *n_buffers = 0; ++ goto done; ++} ++ ++gint ++gst_amc_codec_dequeue_input_buffer (GstAmcCodec * codec, gint64 timeoutUs) ++{ ++ gint ret = G_MININT; ++ size_t index = 0; ++ ++ g_return_val_if_fail (codec != NULL, G_MININT); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ ret = ++ media_codec_dequeue_input_buffer (codec->codec_delegate, &index, ++ timeoutUs); ++ if (ret < 0) { ++ GST_WARNING ("Failed to dequeue input buffer (ret: %d)", ret); ++ if (ret == -11) ++ ret = INFO_TRY_AGAIN_LATER; ++ goto done; ++ } ++ ret = index; ++ ++ GST_DEBUG ("Dequeued input buffer #%d", index); ++ ++done: ++ return ret; ++} ++ ++gint ++gst_amc_codec_dequeue_output_buffer (GstAmcCodec * codec, ++ GstAmcBufferInfo * info, gint64 timeoutUs) ++{ ++ gint ret = G_MININT; ++ MediaCodecBufferInfo priv_info; ++ ++ g_return_val_if_fail (codec != NULL, G_MININT); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ ret = ++ media_codec_dequeue_output_buffer (codec->codec_delegate, &priv_info, ++ timeoutUs); ++ GST_DEBUG ("dequeue output buffer ret: %d", ret); ++ if (ret == INFO_TRY_AGAIN_LATER) { ++ GST_WARNING ("media_codec_dequeue_output_buffer timed out, trying again"); ++ info->flags = 0; ++ info->offset = 0; ++ info->size = 0; ++ info->presentation_time_us = 0; ++ goto done; ++ } else if (ret == INFO_OUTPUT_FORMAT_CHANGED) { ++ GST_INFO ("Output format has changed"); ++ goto done; ++ } else if (ret == INFO_OUTPUT_BUFFERS_CHANGED) { ++ GST_INFO ("Output buffers have changed"); ++ goto done; ++ } ++ ++ info->flags = priv_info.flags; ++ info->offset = 
priv_info.offset; ++ info->size = priv_info.size; ++ info->presentation_time_us = priv_info.presentation_time_us; ++ ++ GST_DEBUG ("info->flags: %d", info->flags); ++ GST_DEBUG ("info->offset: %d", info->offset); ++ GST_DEBUG ("info->size: %d", info->size); ++ GST_DEBUG ("info->presentation_time_us: %lld", info->presentation_time_us); ++ ++done: ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_queue_input_buffer (GstAmcCodec * codec, gint index, ++ const GstAmcBufferInfo * info) ++{ ++ gboolean ret = TRUE; ++ gint err = 0; ++ MediaCodecBufferInfo buf_info; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ g_return_val_if_fail (info != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ buf_info.index = index; ++ buf_info.offset = info->offset; ++ buf_info.size = info->size; ++ buf_info.presentation_time_us = info->presentation_time_us; ++ buf_info.flags = info->flags; ++ GST_DEBUG ("buf_info.index: %d", buf_info.index); ++ GST_DEBUG ("buf_info.offset %d", buf_info.offset); ++ GST_DEBUG ("buf_info.size %d", buf_info.size); ++ GST_DEBUG ("buf_info.presentation_time_us %lld", ++ buf_info.presentation_time_us); ++ GST_DEBUG ("buf_info.flags %d", buf_info.flags); ++ ++ err = media_codec_queue_input_buffer (codec->codec_delegate, &buf_info); ++ if (err < 0) { ++ GST_ERROR ("Failed to queue input buffer (err: %d, index: %d)", err, index); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++gboolean ++gst_amc_codec_release_output_buffer (GstAmcCodec * codec, gint index) ++{ ++ gboolean ret = TRUE; ++ gint err = 0; ++ ++ g_return_val_if_fail (codec != NULL, FALSE); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ err = media_codec_release_output_buffer (codec->codec_delegate, index); ++ if (err < 0) { ++ GST_ERROR ("Failed to release output buffer (err: %d, index: %d)", err, ++ index); ++ ret = FALSE; ++ goto done; ++ } ++ ++done: ++ return ret; ++} ++ ++GstAmcFormat * ++gst_amc_format_new_audio (const gchar * mime, gint sample_rate, 
gint channels) ++{ ++#if 0 ++ JNIEnv *env; ++ GstAmcFormat *format = NULL; ++ jstring mime_str; ++ jobject object = NULL; ++#endif ++ ++ g_return_val_if_fail (mime != NULL, NULL); ++ ++#if 0 ++ env = gst_amc_get_jni_env (); ++ ++ mime_str = (*env)->NewStringUTF (env, mime); ++ if (mime_str == NULL) ++ goto error; ++ ++ format = g_slice_new0 (GstAmcFormat); ++ ++ object = ++ (*env)->CallStaticObjectMethod (env, media_format.klass, ++ media_format.create_audio_format, mime_str, sample_rate, channels); ++ if ((*env)->ExceptionCheck (env) || !object) { ++ (*env)->ExceptionClear (env); ++ GST_ERROR ("Failed to create format '%s'", mime); ++ goto error; ++ } ++ ++ format->object = (*env)->NewGlobalRef (env, object); ++ if (!format->object) { ++ GST_ERROR ("Failed to create global reference"); ++ (*env)->ExceptionClear (env); ++ goto error; ++ } ++ ++done: ++ if (object) ++ (*env)->DeleteLocalRef (env, object); ++ if (mime_str) ++ (*env)->DeleteLocalRef (env, mime_str); ++ mime_str = NULL; ++ ++ ++ return format; ++ ++error: ++ if (format) ++ g_slice_free (GstAmcFormat, format); ++ format = NULL; ++ goto done; ++#endif ++ ++ return NULL; ++} ++ ++GstAmcFormat * ++gst_amc_format_new_video (const gchar * mime, gint width, gint height) ++{ ++ GstAmcFormat *format = NULL; ++ gchar *mime_str = NULL; ++ ++ g_return_val_if_fail (mime != NULL, NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ mime_str = g_strdup (mime); ++ mime_str = locale_to_utf8 (mime_str, strlen (mime_str)); ++ if (mime_str == NULL) ++ goto error; ++ ++ format = g_slice_new0 (GstAmcFormat); ++ ++ format->format = ++ media_format_create_video_format (mime_str, width, height, 0, 0); ++ if (format->format == NULL) { ++ GST_ERROR ("Failed to create format '%s'", mime); ++ goto error; ++ } ++ ++done: ++ if (mime_str) ++ g_free (mime_str); ++ mime_str = NULL; ++ ++ return format; ++ ++error: ++ if (format) ++ g_slice_free (GstAmcFormat, format); ++ format = NULL; ++ goto done; ++} ++ ++void 
++gst_amc_format_free (GstAmcFormat * format) ++{ ++ g_return_if_fail (format != NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ media_format_destroy (format->format); ++ g_slice_free (GstAmcFormat, format); ++} ++ ++gchar * ++gst_amc_format_to_string (GstAmcFormat * format) ++{ ++ return NULL; ++} ++ ++gboolean ++gst_amc_format_contains_key (GstAmcFormat * format, const gchar * key) ++{ ++ return FALSE; ++} ++ ++gboolean ++gst_amc_format_get_float (GstAmcFormat * format, const gchar * key, ++ gfloat * value) ++{ ++ return FALSE; ++} ++ ++void ++gst_amc_format_set_float (GstAmcFormat * format, const gchar * key, ++ gfloat value) ++{ ++} ++ ++gboolean ++gst_amc_format_get_int (GstAmcFormat * format, const gchar * key, gint * value) ++{ ++ return FALSE; ++} ++ ++void ++gst_amc_format_set_int (GstAmcFormat * format, const gchar * key, gint value) ++{ ++} ++ ++gboolean ++gst_amc_format_get_string (GstAmcFormat * format, const gchar * key, ++ gchar ** value) ++{ ++ return FALSE; ++} ++ ++void ++gst_amc_format_set_string (GstAmcFormat * format, const gchar * key, ++ const gchar * value) ++{ ++} ++ ++gboolean ++gst_amc_format_get_buffer (GstAmcFormat * format, const gchar * key, ++ guint8 ** data, gsize * size) ++{ ++ return FALSE; ++} ++ ++void ++gst_amc_format_set_buffer (GstAmcFormat * format, const gchar * key, ++ guint8 * data, gsize size) ++{ ++ gchar *key_str = NULL; ++ ++ g_return_if_fail (format != NULL); ++ g_return_if_fail (key != NULL); ++ g_return_if_fail (data != NULL); ++ ++ GST_DEBUG ("%s", __PRETTY_FUNCTION__); ++ ++ key_str = g_strdup (key); ++ key_str = locale_to_utf8 (key_str, strlen (key)); ++ if (!key_str) ++ goto done; ++ ++ media_format_set_byte_buffer (format->format, key_str, data, size); ++ ++done: ++ if (key_str) ++ g_free (key_str); ++ key_str = NULL; ++} ++ ++static gboolean ++scan_codecs (GstPlugin * plugin) ++{ ++ gboolean ret = TRUE; ++ guint32 codec_count, i; ++ const GstStructure *cache_data; ++ ++ GST_DEBUG ("Scanning available 
codecs"); ++ ++ if ((cache_data = gst_plugin_get_cache_data (plugin))) { ++ const GValue *arr = gst_structure_get_value (cache_data, "codecs"); ++ guint i, n; ++ ++ GST_DEBUG ("Getting codecs from cache"); ++ n = gst_value_array_get_size (arr); ++ for (i = 0; i < n; i++) { ++ const GValue *cv = gst_value_array_get_value (arr, i); ++ const GstStructure *cs = gst_value_get_structure (cv); ++ const gchar *name; ++ gboolean is_encoder; ++ const GValue *starr; ++ guint j, n2; ++ GstAmcCodecInfo *gst_codec_info; ++ ++ gst_codec_info = g_new0 (GstAmcCodecInfo, 1); ++ ++ name = gst_structure_get_string (cs, "name"); ++ gst_structure_get_boolean (cs, "is-encoder", &is_encoder); ++ gst_codec_info->name = g_strdup (name); ++ gst_codec_info->is_encoder = is_encoder; ++ ++ starr = gst_structure_get_value (cs, "supported-types"); ++ n2 = gst_value_array_get_size (starr); ++ ++ gst_codec_info->n_supported_types = n2; ++ gst_codec_info->supported_types = g_new0 (GstAmcCodecType, n2); ++ ++ for (j = 0; j < n2; j++) { ++ const GValue *stv = gst_value_array_get_value (starr, j); ++ const GstStructure *sts = gst_value_get_structure (stv); ++ const gchar *mime; ++ const GValue *cfarr; ++ const GValue *plarr; ++ guint k, n3; ++ GstAmcCodecType *gst_codec_type = &gst_codec_info->supported_types[j]; ++ ++ mime = gst_structure_get_string (sts, "mime"); ++ gst_codec_type->mime = g_strdup (mime); ++ ++ cfarr = gst_structure_get_value (sts, "color-formats"); ++ n3 = gst_value_array_get_size (cfarr); ++ ++ gst_codec_type->n_color_formats = n3; ++ gst_codec_type->color_formats = g_new0 (gint, n3); ++ ++ for (k = 0; k < n3; k++) { ++ const GValue *cfv = gst_value_array_get_value (cfarr, k); ++ gint cf = g_value_get_int (cfv); ++ ++ gst_codec_type->color_formats[k] = cf; ++ } ++ ++ plarr = gst_structure_get_value (sts, "profile-levels"); ++ n3 = gst_value_array_get_size (plarr); ++ ++ gst_codec_type->n_profile_levels = n3; ++ gst_codec_type->profile_levels = ++ g_malloc0 (sizeof 
(gst_codec_type->profile_levels[0]) * n3); ++ ++ for (k = 0; k < n3; k++) { ++ const GValue *plv = gst_value_array_get_value (plarr, k); ++ const GValue *p, *l; ++ ++ p = gst_value_array_get_value (plv, 0); ++ l = gst_value_array_get_value (plv, 1); ++ gst_codec_type->profile_levels[k].profile = g_value_get_int (p); ++ gst_codec_type->profile_levels[k].level = g_value_get_int (l); ++ } ++ } ++ ++ codec_infos = g_list_append (codec_infos, gst_codec_info); ++ } ++ ++ return TRUE; ++ } ++ ++ codec_count = media_codec_list_count_codecs (); ++ if (codec_count == 0) { ++ GST_ERROR ("Failed to get number of available codecs"); ++ goto done; ++ } ++ ++ GST_DEBUG ("Found %d available codecs", codec_count); ++ ++ for (i = 0; i < codec_count; i++) { ++ GstAmcCodecInfo *gst_codec_info; ++ const gchar *name_str = NULL; ++ gboolean is_encoder; ++ size_t n_supported_types = 0; ++ size_t j; ++ gboolean valid_codec = TRUE; ++ ++ gst_codec_info = g_new0 (GstAmcCodecInfo, 1); ++ ++ media_codec_list_get_codec_info_at_id (i); ++ ++ name_str = media_codec_list_get_codec_name (i); ++ if (!name_str) { ++ GST_ERROR ("Failed to get codec name"); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++ ++ GST_INFO ("Checking codec '%s'", name_str); ++ ++ /* Compatibility codec names */ ++ if (strcmp (name_str, "AACEncoder") == 0 || ++ strcmp (name_str, "OMX.google.raw.decoder") == 0) { ++ GST_INFO ("Skipping compatibility codec '%s'", name_str); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++ ++ if (g_str_has_suffix (name_str, ".secure")) { ++ GST_INFO ("Skipping DRM codec '%s'", name_str); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++ ++ /* FIXME: Non-Google codecs usually just don't work and hang forever ++ * or crash when not used from a process that started the Java ++ * VM via the non-public AndroidRuntime class. Can we somehow ++ * initialize all this? 
++ */ ++#if 0 ++ if (!g_str_has_prefix (name_str, "OMX.google.")) { ++ GST_INFO ("Skipping non-Google codec '%s' in standalone mode", name_str); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++#endif ++ ++ if (g_str_has_prefix (name_str, "OMX.ARICENT.")) { ++ GST_INFO ("Skipping possible broken codec '%s'", name_str); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++ ++ /* FIXME: ++ * - Vorbis: Generates clicks for multi-channel streams ++ * - *Law: Generates output with too low frequencies ++ */ ++ if (strcmp (name_str, "OMX.google.vorbis.decoder") == 0 || ++ strcmp (name_str, "OMX.google.g711.alaw.decoder") == 0 || ++ strcmp (name_str, "OMX.google.g711.mlaw.decoder") == 0) { ++ GST_INFO ("Skipping known broken codec '%s'", name_str); ++ valid_codec = FALSE; ++ goto next_codec; ++ } ++ gst_codec_info->name = g_strdup (name_str); ++ ++ is_encoder = media_codec_list_is_encoder (i); ++ gst_codec_info->is_encoder = is_encoder; ++ ++ n_supported_types = media_codec_list_get_num_supported_types (i); ++ ++ GST_INFO ("Codec '%s' has %d supported types", name_str, n_supported_types); ++ ++ gst_codec_info->supported_types = ++ g_new0 (GstAmcCodecType, n_supported_types); ++ gst_codec_info->n_supported_types = n_supported_types; ++ ++ if (n_supported_types == 0) { ++ valid_codec = FALSE; ++ GST_ERROR ("Codec has no supported types"); ++ goto next_codec; ++ } ++ ++ for (j = 0; j < n_supported_types; j++) { ++ GstAmcCodecType *gst_codec_type; ++ gchar *supported_type_str; ++ guint32 *color_formats_elems = NULL; ++ size_t n_elems = 0, k; ++ int err = 0; ++ size_t len = 0; ++ gchar *mime = NULL; ++ ++ gst_codec_type = &gst_codec_info->supported_types[j]; ++ ++ len = media_codec_list_get_nth_supported_type_len (i, j); ++ supported_type_str = g_malloc (len); ++ err = media_codec_list_get_nth_supported_type (i, supported_type_str, j); ++ if (err > 0 || !supported_type_str) { ++ GST_ERROR ("Failed to get %d-th supported type", j); ++ valid_codec = FALSE; ++ goto 
next_supported_type; ++ } ++ ++ mime = g_malloc (len); ++ mime = locale_to_utf8 (supported_type_str, len); ++ if (!mime) { ++ GST_ERROR ("Failed to convert supported type to UTF8"); ++ valid_codec = FALSE; ++ goto next_supported_type; ++ } ++ ++ GST_INFO ("Supported type '%s'", mime); ++ gst_codec_type->mime = g_strdup (mime); ++ ++ n_elems = media_codec_list_get_num_color_formats (i, mime); ++ GST_INFO ("Type '%s' has %d supported color formats", mime, n_elems); ++ if (n_elems == 0) { ++ GST_INFO ("Zero supported color formats for type '%s'", mime); ++ valid_codec = FALSE; ++ goto next_supported_type; ++ } ++ gst_codec_type->n_color_formats = n_elems; ++ gst_codec_type->color_formats = g_new0 (gint, n_elems); ++ ++ color_formats_elems = g_new0 (guint32, n_elems); ++ err = ++ media_codec_list_get_codec_color_formats (i, mime, ++ color_formats_elems); ++ if (!color_formats_elems) { ++ GST_ERROR ("Failed to get color format elements"); ++ valid_codec = FALSE; ++ goto next_supported_type; ++ } ++ ++ for (k = 0; k < n_elems; k++) { ++ GST_INFO ("Color format %d: %d", k, color_formats_elems[k]); ++ gst_codec_type->color_formats[k] = color_formats_elems[k]; ++ } ++ ++ if (g_str_has_prefix (gst_codec_type->mime, "video/")) { ++ if (!n_elems) { ++ GST_ERROR ("No supported color formats for video codec"); ++ valid_codec = FALSE; ++ goto next_supported_type; ++ } ++ ++ if (!ignore_unknown_color_formats ++ && !accepted_color_formats (gst_codec_type, is_encoder)) { ++ GST_ERROR ("Codec has unknown color formats, ignoring"); ++ valid_codec = FALSE; ++ g_assert_not_reached (); ++ goto next_supported_type; ++ } ++ } ++ ++ n_elems = media_codec_list_get_num_profile_levels (i, mime); ++ GST_INFO ("Type '%s' has %d supported profile levels", mime, n_elems); ++ if (n_elems == 0) { ++ GST_INFO ("Zero supported profile levels for type '%s'", mime); ++ valid_codec = FALSE; ++ goto next_supported_type; ++ } ++ gst_codec_type->n_profile_levels = n_elems; ++ gst_codec_type->profile_levels 
= ++ g_malloc0 (sizeof (gst_codec_type->profile_levels[0]) * n_elems); ++ ++ for (k = 0; k < n_elems; k++) { ++ guint32 level = 0, profile = 0; ++ profile_level pro_level; ++ ++ err = ++ media_codec_list_get_nth_codec_profile_level (i, mime, &pro_level, ++ k); ++ if (err > 0) { ++ GST_ERROR ("Failed to get %d-th profile/level", k); ++ valid_codec = FALSE; ++ goto next_profile_level; ++ } ++ ++ level = pro_level.level; ++ profile = pro_level.profile; ++ ++ GST_INFO ("Level %d: 0x%08x", k, level); ++ gst_codec_type->profile_levels[k].level = level; ++ ++ GST_INFO ("Profile %d: 0x%08x", k, profile); ++ gst_codec_type->profile_levels[k].profile = profile; ++ ++ next_profile_level: ++ if (!valid_codec) ++ break; ++ } ++ ++ next_supported_type: ++ if (color_formats_elems) ++ g_free (color_formats_elems); ++ color_formats_elems = NULL; ++ if (supported_type_str) ++ g_free (supported_type_str); ++ supported_type_str = NULL; ++ if (mime) ++ g_free (mime); ++ mime = NULL; ++ if (!valid_codec) ++ break; ++ } ++ ++ /* We need at least a valid supported type */ ++ if (valid_codec) { ++ GST_LOG ("Successfully scanned codec '%s'", name_str); ++ codec_infos = g_list_append (codec_infos, gst_codec_info); ++ gst_codec_info = NULL; ++ } ++ ++ /* Clean up of all local references we got */ ++ next_codec: ++ if (gst_codec_info) { ++ gint j; ++ ++ for (j = 0; j < gst_codec_info->n_supported_types; j++) { ++ GstAmcCodecType *gst_codec_type = &gst_codec_info->supported_types[j]; ++ ++ g_free (gst_codec_type->mime); ++ g_free (gst_codec_type->color_formats); ++ g_free (gst_codec_type->profile_levels); ++ } ++ g_free (gst_codec_info->supported_types); ++ g_free (gst_codec_info->name); ++ g_free (gst_codec_info); ++ } ++ gst_codec_info = NULL; ++ valid_codec = TRUE; ++ } ++ ++ ret = codec_infos != NULL; ++ ++ /* If successful we store a cache of the codec information in ++ * the registry. 
Otherwise we would always load all codecs during ++ * plugin initialization which can take quite some time (because ++ * of hardware) and also loads lots of shared libraries (which ++ * number is limited by 64 in Android). ++ */ ++ if (ret) { ++ GstStructure *new_cache_data = gst_structure_new_empty ("gst-amc-cache"); ++ GList *l; ++ GValue arr = { 0, }; ++ ++ g_value_init (&arr, GST_TYPE_ARRAY); ++ ++ for (l = codec_infos; l; l = l->next) { ++ GstAmcCodecInfo *gst_codec_info = l->data; ++ GValue cv = { 0, }; ++ GstStructure *cs = gst_structure_new_empty ("gst-amc-codec"); ++ GValue starr = { 0, }; ++ gint i; ++ ++ gst_structure_set (cs, "name", G_TYPE_STRING, gst_codec_info->name, ++ "is-encoder", G_TYPE_BOOLEAN, gst_codec_info->is_encoder, NULL); ++ ++ g_value_init (&starr, GST_TYPE_ARRAY); ++ ++ for (i = 0; i < gst_codec_info->n_supported_types; i++) { ++ GstAmcCodecType *gst_codec_type = &gst_codec_info->supported_types[i]; ++ GstStructure *sts = gst_structure_new_empty ("gst-amc-supported-type"); ++ GValue stv = { 0, }; ++ GValue tmparr = { 0, }; ++ gint j; ++ ++ gst_structure_set (sts, "mime", G_TYPE_STRING, gst_codec_type->mime, ++ NULL); ++ ++ g_value_init (&tmparr, GST_TYPE_ARRAY); ++ for (j = 0; j < gst_codec_type->n_color_formats; j++) { ++ GValue tmp = { 0, }; ++ ++ g_value_init (&tmp, G_TYPE_INT); ++ g_value_set_int (&tmp, gst_codec_type->color_formats[j]); ++ gst_value_array_append_value (&tmparr, &tmp); ++ g_value_unset (&tmp); ++ } ++ gst_structure_set_value (sts, "color-formats", &tmparr); ++ g_value_unset (&tmparr); ++ ++ g_value_init (&tmparr, GST_TYPE_ARRAY); ++ for (j = 0; j < gst_codec_type->n_profile_levels; j++) { ++ GValue tmparr2 = { 0, }; ++ GValue tmp = { 0, }; ++ ++ g_value_init (&tmparr2, GST_TYPE_ARRAY); ++ g_value_init (&tmp, G_TYPE_INT); ++ g_value_set_int (&tmp, gst_codec_type->profile_levels[j].profile); ++ gst_value_array_append_value (&tmparr2, &tmp); ++ g_value_set_int (&tmp, gst_codec_type->profile_levels[j].level); ++ 
gst_value_array_append_value (&tmparr2, &tmp); ++ gst_value_array_append_value (&tmparr, &tmparr2); ++ g_value_unset (&tmp); ++ g_value_unset (&tmparr2); ++ } ++ gst_structure_set_value (sts, "profile-levels", &tmparr); ++ ++ g_value_init (&stv, GST_TYPE_STRUCTURE); ++ gst_value_set_structure (&stv, sts); ++ gst_value_array_append_value (&starr, &stv); ++ g_value_unset (&tmparr); ++ gst_structure_free (sts); ++ } ++ ++ gst_structure_set_value (cs, "supported-types", &starr); ++ g_value_unset (&starr); ++ ++ g_value_init (&cv, GST_TYPE_STRUCTURE); ++ gst_value_set_structure (&cv, cs); ++ gst_value_array_append_value (&arr, &cv); ++ g_value_unset (&cv); ++ gst_structure_free (cs); ++ } ++ ++ gst_structure_set_value (new_cache_data, "codecs", &arr); ++ g_value_unset (&arr); ++ ++ gst_plugin_set_cache_data (plugin, new_cache_data); ++ } ++ ++done: ++ return ret; ++} ++ ++static const struct ++{ ++ gint color_format; ++ GstVideoFormat video_format; ++} color_format_mapping_table[] = { ++ { ++ COLOR_FormatYUV420Planar, GST_VIDEO_FORMAT_I420}, { ++ COLOR_FormatYUV420SemiPlanar, GST_VIDEO_FORMAT_NV12}, { ++ COLOR_TI_FormatYUV420PackedSemiPlanar, GST_VIDEO_FORMAT_NV12}, { ++ COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced, GST_VIDEO_FORMAT_NV12}, { ++ COLOR_QCOM_FormatYUV420SemiPlanar, GST_VIDEO_FORMAT_NV12}, { ++ COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, GST_VIDEO_FORMAT_NV12}, { ++ 256, GST_VIDEO_FORMAT_NV12} ++}; ++ ++static gboolean ++accepted_color_formats (GstAmcCodecType * type, gboolean is_encoder) ++{ ++ gint i, j; ++ gint accepted = 0, all = type->n_color_formats; ++ ++ for (i = 0; i < type->n_color_formats; i++) { ++ gboolean found = FALSE; ++ /* We ignore this one */ ++ if (type->color_formats[i] == COLOR_FormatAndroidOpaque) ++ all--; ++ ++ for (j = 0; j < G_N_ELEMENTS (color_format_mapping_table); j++) { ++ //g_print("color_format_mapping_table[%d].color_format: %d, type->color_formats[%d]: %d", j, color_format_mapping_table[j].color_format, i, 
type->color_formats[i]); ++ if (color_format_mapping_table[j].color_format == type->color_formats[i]) { ++ found = TRUE; ++ break; ++ } ++ } ++ ++ if (found) ++ accepted++; ++ } ++ ++ if (is_encoder) ++ return accepted > 0; ++ else ++ return accepted == all && all > 0; ++} ++ ++GstVideoFormat ++gst_amc_color_format_to_video_format (gint color_format) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (color_format_mapping_table); i++) { ++ if (color_format_mapping_table[i].color_format == color_format) ++ return color_format_mapping_table[i].video_format; ++ } ++ ++ return GST_VIDEO_FORMAT_UNKNOWN; ++} ++ ++gint ++gst_amc_video_format_to_color_format (GstVideoFormat video_format) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (color_format_mapping_table); i++) { ++ if (color_format_mapping_table[i].video_format == video_format) ++ return color_format_mapping_table[i].color_format; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ const gchar *str; ++ const gchar *alt_str; ++} avc_profile_mapping_table[] = { ++ { ++ AVCProfileBaseline, "baseline", "constrained-baseline"}, { ++ AVCProfileMain, "main", NULL}, { ++ AVCProfileExtended, "extended", NULL}, { ++ AVCProfileHigh, "high"}, { ++ AVCProfileHigh10, "high-10", "high-10-intra"}, { ++ AVCProfileHigh422, "high-4:2:2", "high-4:2:2-intra"}, { ++ AVCProfileHigh444, "high-4:4:4", "high-4:4:4-intra"} ++}; ++ ++const gchar * ++gst_amc_avc_profile_to_string (gint profile, const gchar ** alternative) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (avc_profile_mapping_table); i++) { ++ if (avc_profile_mapping_table[i].id == profile) { ++ *alternative = avc_profile_mapping_table[i].alt_str; ++ return avc_profile_mapping_table[i].str; ++ } ++ } ++ ++ return NULL; ++} ++ ++gint ++gst_amc_avc_profile_from_string (const gchar * profile) ++{ ++ gint i; ++ ++ g_return_val_if_fail (profile != NULL, -1); ++ ++ for (i = 0; i < G_N_ELEMENTS (avc_profile_mapping_table); i++) { ++ if (strcmp 
(avc_profile_mapping_table[i].str, profile) == 0) ++ return avc_profile_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ const gchar *str; ++} avc_level_mapping_table[] = { ++ { ++ AVCLevel1, "1"}, { ++ AVCLevel1b, "1b"}, { ++ AVCLevel11, "1.1"}, { ++ AVCLevel12, "1.2"}, { ++ AVCLevel13, "1.3"}, { ++ AVCLevel2, "2"}, { ++ AVCLevel21, "2.1"}, { ++ AVCLevel22, "2.2"}, { ++ AVCLevel3, "3"}, { ++ AVCLevel31, "3.1"}, { ++ AVCLevel32, "3.2"}, { ++ AVCLevel4, "4"}, { ++ AVCLevel41, "4.1"}, { ++ AVCLevel42, "4.2"}, { ++ AVCLevel5, "5"}, { ++ AVCLevel51, "5.1"} ++}; ++ ++const gchar * ++gst_amc_avc_level_to_string (gint level) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (avc_level_mapping_table); i++) { ++ if (avc_level_mapping_table[i].id == level) ++ return avc_level_mapping_table[i].str; ++ } ++ ++ return NULL; ++} ++ ++gint ++gst_amc_avc_level_from_string (const gchar * level) ++{ ++ gint i; ++ ++ g_return_val_if_fail (level != NULL, -1); ++ ++ for (i = 0; i < G_N_ELEMENTS (avc_level_mapping_table); i++) { ++ if (strcmp (avc_level_mapping_table[i].str, level) == 0) ++ return avc_level_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ gint gst_id; ++} h263_profile_mapping_table[] = { ++ { ++ H263ProfileBaseline, 0}, { ++ H263ProfileH320Coding, 1}, { ++ H263ProfileBackwardCompatible, 2}, { ++ H263ProfileISWV2, 3}, { ++ H263ProfileISWV3, 4}, { ++ H263ProfileHighCompression, 5}, { ++ H263ProfileInternet, 6}, { ++ H263ProfileInterlace, 7}, { ++ H263ProfileHighLatency, 8} ++}; ++ ++gint ++gst_amc_h263_profile_to_gst_id (gint profile) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (h263_profile_mapping_table); i++) { ++ if (h263_profile_mapping_table[i].id == profile) ++ return h263_profile_mapping_table[i].gst_id; ++ } ++ ++ return -1; ++} ++ ++gint ++gst_amc_h263_profile_from_gst_id (gint profile) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (h263_profile_mapping_table); i++) { 
++ if (h263_profile_mapping_table[i].gst_id == profile) ++ return h263_profile_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ gint gst_id; ++} h263_level_mapping_table[] = { ++ { ++ H263Level10, 10}, { ++ H263Level20, 20}, { ++ H263Level30, 30}, { ++ H263Level40, 40}, { ++ H263Level50, 50}, { ++ H263Level60, 60}, { ++ H263Level70, 70} ++}; ++ ++gint ++gst_amc_h263_level_to_gst_id (gint level) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (h263_level_mapping_table); i++) { ++ if (h263_level_mapping_table[i].id == level) ++ return h263_level_mapping_table[i].gst_id; ++ } ++ ++ return -1; ++} ++ ++gint ++gst_amc_h263_level_from_gst_id (gint level) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (h263_level_mapping_table); i++) { ++ if (h263_level_mapping_table[i].gst_id == level) ++ return h263_level_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ const gchar *str; ++} mpeg4_profile_mapping_table[] = { ++ { ++ MPEG4ProfileSimple, "simple"}, { ++ MPEG4ProfileSimpleScalable, "simple-scalable"}, { ++ MPEG4ProfileCore, "core"}, { ++ MPEG4ProfileMain, "main"}, { ++ MPEG4ProfileNbit, "n-bit"}, { ++ MPEG4ProfileScalableTexture, "scalable"}, { ++ MPEG4ProfileSimpleFace, "simple-face"}, { ++ MPEG4ProfileSimpleFBA, "simple-fba"}, { ++ MPEG4ProfileBasicAnimated, "basic-animated-texture"}, { ++ MPEG4ProfileHybrid, "hybrid"}, { ++ MPEG4ProfileAdvancedRealTime, "advanced-real-time"}, { ++ MPEG4ProfileCoreScalable, "core-scalable"}, { ++ MPEG4ProfileAdvancedCoding, "advanced-coding-efficiency"}, { ++ MPEG4ProfileAdvancedCore, "advanced-core"}, { ++ MPEG4ProfileAdvancedScalable, "advanced-scalable-texture"}, { ++ MPEG4ProfileAdvancedSimple, "advanced-simple"} ++}; ++ ++const gchar * ++gst_amc_mpeg4_profile_to_string (gint profile) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (mpeg4_profile_mapping_table); i++) { ++ if (mpeg4_profile_mapping_table[i].id == profile) ++ return 
mpeg4_profile_mapping_table[i].str; ++ } ++ ++ return NULL; ++} ++ ++gint ++gst_amc_avc_mpeg4_profile_from_string (const gchar * profile) ++{ ++ gint i; ++ ++ g_return_val_if_fail (profile != NULL, -1); ++ ++ for (i = 0; i < G_N_ELEMENTS (mpeg4_profile_mapping_table); i++) { ++ if (strcmp (mpeg4_profile_mapping_table[i].str, profile) == 0) ++ return mpeg4_profile_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ const gchar *str; ++} mpeg4_level_mapping_table[] = { ++ { ++ MPEG4Level0, "0"}, { ++ MPEG4Level0b, "0b"}, { ++ MPEG4Level1, "1"}, { ++ MPEG4Level2, "2"}, { ++ MPEG4Level3, "3"}, { ++ MPEG4Level4, "4"}, { ++ MPEG4Level4a, "4a"}, { ++MPEG4Level5, "5"},}; ++ ++const gchar * ++gst_amc_mpeg4_level_to_string (gint level) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (mpeg4_level_mapping_table); i++) { ++ if (mpeg4_level_mapping_table[i].id == level) ++ return mpeg4_level_mapping_table[i].str; ++ } ++ ++ return NULL; ++} ++ ++gint ++gst_amc_mpeg4_level_from_string (const gchar * level) ++{ ++ gint i; ++ ++ g_return_val_if_fail (level != NULL, -1); ++ ++ for (i = 0; i < G_N_ELEMENTS (mpeg4_level_mapping_table); i++) { ++ if (strcmp (mpeg4_level_mapping_table[i].str, level) == 0) ++ return mpeg4_level_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ gint id; ++ const gchar *str; ++} aac_profile_mapping_table[] = { ++ { ++ AACObjectMain, "main"}, { ++ AACObjectLC, "lc"}, { ++ AACObjectSSR, "ssr"}, { ++ AACObjectLTP, "ltp"} ++}; ++ ++const gchar * ++gst_amc_aac_profile_to_string (gint profile) ++{ ++ gint i; ++ ++ for (i = 0; i < G_N_ELEMENTS (aac_profile_mapping_table); i++) { ++ if (aac_profile_mapping_table[i].id == profile) ++ return aac_profile_mapping_table[i].str; ++ } ++ ++ return NULL; ++} ++ ++gint ++gst_amc_aac_profile_from_string (const gchar * profile) ++{ ++ gint i; ++ ++ g_return_val_if_fail (profile != NULL, -1); ++ ++ for (i = 0; i < G_N_ELEMENTS (aac_profile_mapping_table); 
i++) { ++ if (strcmp (aac_profile_mapping_table[i].str, profile) == 0) ++ return aac_profile_mapping_table[i].id; ++ } ++ ++ return -1; ++} ++ ++static const struct ++{ ++ guint32 mask; ++ GstAudioChannelPosition pos; ++} channel_mapping_table[] = { ++ { ++ CHANNEL_OUT_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { ++ CHANNEL_OUT_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, { ++ CHANNEL_OUT_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, { ++ CHANNEL_OUT_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE1}, { ++ CHANNEL_OUT_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, { ++ CHANNEL_OUT_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, { ++ CHANNEL_OUT_FRONT_LEFT_OF_CENTER, ++ GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, { ++ CHANNEL_OUT_FRONT_RIGHT_OF_CENTER, ++ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, { ++ CHANNEL_OUT_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, { ++ CHANNEL_OUT_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT}, { ++ CHANNEL_OUT_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}, { ++ CHANNEL_OUT_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_INVALID}, { ++ CHANNEL_OUT_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_INVALID} ++}; ++ ++gboolean ++gst_amc_audio_channel_mask_to_positions (guint32 channel_mask, gint channels, ++ GstAudioChannelPosition * pos) ++{ ++ gint i, j; ++ ++ if (channel_mask == 0) { ++ if (channels == 1) { ++ pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO; ++ return TRUE; ++ } ++ if (channels == 2) { ++ pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT; ++ pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT; ++ return TRUE; ++ } ++ ++ /* Now let the guesswork 
begin, these are the ++ * AAC default channel assignments for these numbers ++ * of channels */ ++ if (channels == 3) { ++ channel_mask = ++ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | ++ CHANNEL_OUT_FRONT_CENTER; ++ } else if (channels == 4) { ++ channel_mask = ++ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | ++ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER; ++ } else if (channels == 5) { ++ channel_mask = ++ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | ++ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_LEFT | ++ CHANNEL_OUT_BACK_RIGHT; ++ } else if (channels == 6) { ++ channel_mask = ++ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | ++ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_LEFT | ++ CHANNEL_OUT_BACK_RIGHT | CHANNEL_OUT_LOW_FREQUENCY; ++ } else if (channels == 8) { ++ channel_mask = ++ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | ++ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_LEFT | ++ CHANNEL_OUT_BACK_RIGHT | CHANNEL_OUT_LOW_FREQUENCY | ++ CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER; ++ } ++ } ++ ++ for (i = 0, j = 0; i < G_N_ELEMENTS (channel_mapping_table); i++) { ++ if ((channel_mask & channel_mapping_table[i].mask)) { ++ pos[j++] = channel_mapping_table[i].pos; ++ if (channel_mapping_table[i].pos == GST_AUDIO_CHANNEL_POSITION_INVALID) { ++ memset (pos, 0, sizeof (GstAudioChannelPosition) * channels); ++ GST_ERROR ("Unable to map channel mask 0x%08x", ++ channel_mapping_table[i].mask); ++ return FALSE; ++ } ++ if (j == channels) ++ break; ++ } ++ } ++ ++ if (j != channels) { ++ memset (pos, 0, sizeof (GstAudioChannelPosition) * channels); ++ GST_ERROR ("Unable to map all channel positions in mask 0x%08x", ++ channel_mask); ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++guint32 ++gst_amc_audio_channel_mask_from_positions (GstAudioChannelPosition * positions, ++ gint channels) ++{ ++ gint i, j; ++ guint32 channel_mask = 0; ++ ++ if (channels == 1 && !positions) ++ return CHANNEL_OUT_FRONT_CENTER; ++ 
if (channels == 2 && !positions) ++ return CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT; ++ ++ for (i = 0; i < channels; i++) { ++ if (positions[i] == GST_AUDIO_CHANNEL_POSITION_INVALID) ++ return 0; ++ ++ for (j = 0; j < G_N_ELEMENTS (channel_mapping_table); j++) { ++ if (channel_mapping_table[j].pos == positions[i]) { ++ channel_mask |= channel_mapping_table[j].mask; ++ break; ++ } ++ } ++ ++ if (j == G_N_ELEMENTS (channel_mapping_table)) { ++ GST_ERROR ("Unable to map channel position %d", positions[i]); ++ return 0; ++ } ++ } ++ ++ return channel_mask; ++} ++ ++static gchar * ++create_type_name (const gchar * parent_name, const gchar * codec_name) ++{ ++ gchar *typified_name; ++ gint i, k; ++ gint parent_name_len = strlen (parent_name); ++ gint codec_name_len = strlen (codec_name); ++ gboolean upper = TRUE; ++ ++ typified_name = g_new0 (gchar, parent_name_len + 1 + strlen (codec_name) + 1); ++ memcpy (typified_name, parent_name, parent_name_len); ++ typified_name[parent_name_len] = '-'; ++ ++ for (i = 0, k = 0; i < codec_name_len; i++) { ++ if (g_ascii_isalnum (codec_name[i])) { ++ if (upper) ++ typified_name[parent_name_len + 1 + k++] = ++ g_ascii_toupper (codec_name[i]); ++ else ++ typified_name[parent_name_len + 1 + k++] = ++ g_ascii_tolower (codec_name[i]); ++ ++ upper = FALSE; ++ } else { ++ /* Skip all non-alnum chars and start a new upper case word */ ++ upper = TRUE; ++ } ++ } ++ ++ return typified_name; ++} ++ ++static gchar * ++create_element_name (gboolean video, gboolean encoder, const gchar * codec_name) ++{ ++#define PREFIX_LEN 10 ++ static const gchar *prefixes[] = { ++ "amcviddec-", ++ "amcauddec-", ++ "amcvidenc-", ++ "amcaudenc-" ++ }; ++ gchar *element_name; ++ gint i, k; ++ gint codec_name_len = strlen (codec_name); ++ const gchar *prefix; ++ ++ if (video && !encoder) ++ prefix = prefixes[0]; ++ else if (!video && !encoder) ++ prefix = prefixes[1]; ++ else if (video && encoder) ++ prefix = prefixes[2]; ++ else ++ prefix = prefixes[3]; ++ 
++ element_name = g_new0 (gchar, PREFIX_LEN + strlen (codec_name) + 1); ++ memcpy (element_name, prefix, PREFIX_LEN); ++ ++ for (i = 0, k = 0; i < codec_name_len; i++) { ++ if (g_ascii_isalnum (codec_name[i])) { ++ element_name[PREFIX_LEN + k++] = g_ascii_tolower (codec_name[i]); ++ } ++ /* Skip all non-alnum chars */ ++ } ++ ++ return element_name; ++} ++ ++#undef PREFIX_LEN ++ ++static gboolean ++register_codecs (GstPlugin * plugin) ++{ ++ gboolean ret = TRUE; ++ GList *l; ++ ++ GST_DEBUG ("Registering plugins"); ++ ++ for (l = codec_infos; l; l = l->next) { ++ GstAmcCodecInfo *codec_info = l->data; ++ gboolean is_audio = FALSE; ++ gboolean is_video = FALSE; ++ gint i; ++ gint n_types; ++ ++ GST_DEBUG ("Registering codec '%s'", codec_info->name); ++ for (i = 0; i < codec_info->n_supported_types; i++) { ++ GstAmcCodecType *codec_type = &codec_info->supported_types[i]; ++ ++ if (g_str_has_prefix (codec_type->mime, "audio/")) ++ is_audio = TRUE; ++ else if (g_str_has_prefix (codec_type->mime, "video/")) ++ is_video = TRUE; ++ } ++ ++ n_types = 0; ++ if (is_audio) ++ n_types++; ++ if (is_video) ++ n_types++; ++ ++ for (i = 0; i < n_types; i++) { ++ GTypeQuery type_query; ++ GTypeInfo type_info = { 0, }; ++ GType type, subtype; ++ gchar *type_name, *element_name; ++ guint rank; ++ ++ if (is_video && !codec_info->is_encoder) { ++ type = gst_amc_video_dec_get_type (); ++ } else if (is_audio && !codec_info->is_encoder) { ++ type = gst_amc_audio_dec_get_type (); ++ } else { ++ GST_DEBUG ("Skipping unsupported codec type"); ++ continue; ++ } ++ ++ g_type_query (type, &type_query); ++ memset (&type_info, 0, sizeof (type_info)); ++ type_info.class_size = type_query.class_size; ++ type_info.instance_size = type_query.instance_size; ++ type_name = create_type_name (type_query.type_name, codec_info->name); ++ ++ if (g_type_from_name (type_name) != G_TYPE_INVALID) { ++ GST_ERROR ("Type '%s' already exists for codec '%s'", type_name, ++ codec_info->name); ++ g_free (type_name); 
++ continue; ++ } ++ ++ subtype = g_type_register_static (type, type_name, &type_info, 0); ++ g_free (type_name); ++ ++ g_type_set_qdata (subtype, gst_amc_codec_info_quark, codec_info); ++ ++ element_name = ++ create_element_name (is_video, codec_info->is_encoder, ++ codec_info->name); ++ ++ /* Give the Google software codec a secondary rank, ++ * everything else is likely a hardware codec */ ++ if (g_str_has_prefix (codec_info->name, "OMX.google")) ++ rank = GST_RANK_SECONDARY; ++ else ++ rank = GST_RANK_PRIMARY; ++ ++ ret |= gst_element_register (plugin, element_name, rank, subtype); ++ g_free (element_name); ++ ++ is_video = FALSE; ++ } ++ } ++ ++ return ret; ++} ++ ++static gboolean ++plugin_init (GstPlugin * plugin) ++{ ++ const gchar *ignore; ++ ++ GST_DEBUG_CATEGORY_INIT (gst_amc_debug, "amc", 0, "android-media-codec"); ++ ++ gst_plugin_add_dependency_simple (plugin, NULL, "/system/etc", "media_codecs.xml", ++ GST_PLUGIN_DEPENDENCY_FLAG_NONE); ++ ++ /* Set this to TRUE to allow registering decoders that have ++ * any unknown color formats, or encoders that only have ++ * unknown color formats ++ */ ++ ignore = g_getenv ("GST_AMC_IGNORE_UNKNOWN_COLOR_FORMATS"); ++ if (ignore && strcmp (ignore, "yes") == 0) ++ ignore_unknown_color_formats = TRUE; ++ ++ /* Check if the media compat layer is available */ ++ if (!media_compat_check_availability()) ++ return FALSE; ++ ++ if (!scan_codecs (plugin)) ++ return FALSE; ++ ++ gst_amc_codec_info_quark = g_quark_from_static_string ("gst-amc-codec-info"); ++ ++ if (!register_codecs (plugin)) ++ return FALSE; ++ ++ GST_DEBUG ("Finished %s", __PRETTY_FUNCTION__); ++ ++ return TRUE; ++} ++ ++GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, ++ GST_VERSION_MINOR, ++ androidmedia, ++ "Android Media Hybris plugin", ++ plugin_init, ++ PACKAGE_VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) +diff --git a/sys/androidmedia/gstamcvideodec.h b/sys/androidmedia/gstamcvideodec.h +index 3353dc6..ae91cf9 100644 +--- 
a/sys/androidmedia/gstamcvideodec.h ++++ b/sys/androidmedia/gstamcvideodec.h +@@ -51,6 +51,7 @@ struct _GstAmcVideoDec + + /* < private > */ + GstAmcCodec *codec; ++ GstAmcFormat *dec_format; + GstAmcBuffer *input_buffers, *output_buffers; + gsize n_input_buffers, n_output_buffers; + +diff --git a/sys/androidmedia/gstamcvideodechybris.c b/sys/androidmedia/gstamcvideodechybris.c +new file mode 100644 +index 0000000..dae45f3 +--- /dev/null ++++ b/sys/androidmedia/gstamcvideodechybris.c +@@ -0,0 +1,1999 @@ ++/* ++ * Initially based on gstamcvideodec.c ++ * ++ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P. ++ * Author: Sebastian Dröge , Collabora Ltd. ++ * ++ * Copyright (C) 2012, Collabora Ltd. ++ * Author: Sebastian Dröge ++ * ++ * Copyright (C) 2012, Rafaël Carré ++ * ++ * Copyright (C) 2013, Canonical Ltd.. ++ * Author: Jim Hodapp ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation ++ * version 2.1 of the License. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with this library; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++#include ++#include ++ ++#include ++#include ++ ++#include ++ ++#ifdef HAVE_ORC ++#include ++#else ++#define orc_memcpy memcpy ++#endif ++ ++#include "gstamcvideodec.h" ++#include "gstamc-constants.h" ++ ++GST_DEBUG_CATEGORY_STATIC (gst_amc_video_dec_debug_category); ++#define GST_CAT_DEFAULT gst_amc_video_dec_debug_category ++ ++typedef struct _BufferIdentification BufferIdentification; ++struct _BufferIdentification ++{ ++ guint64 timestamp; ++}; ++ ++static BufferIdentification * ++buffer_identification_new (GstClockTime timestamp) ++{ ++ BufferIdentification *id = g_slice_new (BufferIdentification); ++ ++ id->timestamp = timestamp; ++ ++ return id; ++} ++ ++static void ++buffer_identification_free (BufferIdentification * id) ++{ ++ g_slice_free (BufferIdentification, id); ++} ++ ++/* prototypes */ ++static void gst_amc_video_dec_finalize (GObject * object); ++ ++static GstStateChangeReturn ++gst_amc_video_dec_change_state (GstElement * element, ++ GstStateChange transition); ++ ++static gboolean gst_amc_video_dec_open (GstVideoDecoder * decoder); ++static gboolean gst_amc_video_dec_close (GstVideoDecoder * decoder); ++static gboolean gst_amc_video_dec_start (GstVideoDecoder * decoder); ++static gboolean gst_amc_video_dec_stop (GstVideoDecoder * decoder); ++static gboolean gst_amc_video_dec_set_format (GstVideoDecoder * decoder, ++ GstVideoCodecState * state); ++static gboolean gst_amc_video_dec_reset (GstVideoDecoder * decoder, ++ gboolean hard); ++static GstFlowReturn gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame); ++static GstFlowReturn gst_amc_video_dec_finish (GstVideoDecoder * decoder); ++static 
gboolean gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec, ++ GstQuery * query); ++ ++static GstFlowReturn gst_amc_video_dec_drain (GstAmcVideoDec * self, ++ gboolean at_eos); ++ ++enum ++{ ++ PROP_0 ++}; ++ ++/* class initialization */ ++ ++static void gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass); ++static void gst_amc_video_dec_init (GstAmcVideoDec * self); ++static void gst_amc_video_dec_base_init (gpointer g_class); ++ ++static GstVideoDecoderClass *parent_class = NULL; ++ ++GType ++gst_amc_video_dec_get_type (void) ++{ ++ static volatile gsize type = 0; ++ ++ if (g_once_init_enter (&type)) { ++ GType _type; ++ static const GTypeInfo info = { ++ sizeof (GstAmcVideoDecClass), ++ gst_amc_video_dec_base_init, ++ NULL, ++ (GClassInitFunc) gst_amc_video_dec_class_init, ++ NULL, ++ NULL, ++ sizeof (GstAmcVideoDec), ++ 0, ++ (GInstanceInitFunc) gst_amc_video_dec_init, ++ NULL ++ }; ++ ++ _type = g_type_register_static (GST_TYPE_VIDEO_DECODER, "GstAmcVideoDec", ++ &info, 0); ++ ++ GST_DEBUG_CATEGORY_INIT (gst_amc_video_dec_debug_category, "amcvideodec", 0, ++ "Android MediaCodec video decoder"); ++ ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static GstCaps * ++create_sink_caps (const GstAmcCodecInfo * codec_info) ++{ ++ GstCaps *ret; ++ gint i; ++ ++ ret = gst_caps_new_empty (); ++ ++ for (i = 0; i < codec_info->n_supported_types; i++) { ++ const GstAmcCodecType *type = &codec_info->supported_types[i]; ++ ++ if (strcmp (type->mime, "video/mp4v-es") == 0) { ++ gint j; ++ GstStructure *tmp, *tmp2; ++ gboolean have_profile_level = FALSE; ++ ++ tmp = gst_structure_new ("video/mpeg", ++ "width", GST_TYPE_INT_RANGE, 16, 4096, ++ "height", GST_TYPE_INT_RANGE, 16, 4096, ++ "framerate", GST_TYPE_FRACTION_RANGE, ++ 0, 1, G_MAXINT, 1, ++ "mpegversion", G_TYPE_INT, 4, ++ "systemstream", G_TYPE_BOOLEAN, FALSE, ++ "parsed", G_TYPE_BOOLEAN, TRUE, NULL); ++ ++ if (type->n_profile_levels) { ++ for (j = type->n_profile_levels - 1; j >= 0; 
j--) { ++ const gchar *profile, *level; ++ gint k; ++ GValue va = { 0, }; ++ GValue v = { 0, }; ++ ++ g_value_init (&va, GST_TYPE_LIST); ++ g_value_init (&v, G_TYPE_STRING); ++ ++ profile = ++ gst_amc_mpeg4_profile_to_string (type->profile_levels[j].profile); ++ if (!profile) { ++ GST_ERROR ("Unable to map MPEG4 profile 0x%08x", ++ type->profile_levels[j].profile); ++ continue; ++ } ++ ++ for (k = 1; k <= type->profile_levels[j].level && k != 0; k <<= 1) { ++ level = gst_amc_mpeg4_level_to_string (k); ++ if (!level) ++ continue; ++ ++ g_value_set_string (&v, level); ++ gst_value_list_append_value (&va, &v); ++ g_value_reset (&v); ++ } ++ ++ tmp2 = gst_structure_copy (tmp); ++ gst_structure_set (tmp2, "profile", G_TYPE_STRING, profile, NULL); ++ gst_structure_set_value (tmp2, "level", &va); ++ g_value_unset (&va); ++ g_value_unset (&v); ++ ret = gst_caps_merge_structure (ret, tmp2); ++ have_profile_level = TRUE; ++ } ++ } ++ ++ if (!have_profile_level) { ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ gst_structure_free (tmp); ++ } ++ } else if (strcmp (type->mime, "video/3gpp") == 0) { ++ gint j; ++ GstStructure *tmp, *tmp2; ++ gboolean have_profile_level = FALSE; ++ ++ tmp = gst_structure_new ("video/x-h263", ++ "width", GST_TYPE_INT_RANGE, 16, 4096, ++ "height", GST_TYPE_INT_RANGE, 16, 4096, ++ "framerate", GST_TYPE_FRACTION_RANGE, ++ 0, 1, G_MAXINT, 1, ++ "parsed", G_TYPE_BOOLEAN, TRUE, ++ "variant", G_TYPE_STRING, "itu", NULL); ++ ++ if (type->n_profile_levels) { ++ for (j = type->n_profile_levels - 1; j >= 0; j--) { ++ gint profile, level; ++ gint k; ++ GValue va = { 0, }; ++ GValue v = { 0, }; ++ ++ g_value_init (&va, GST_TYPE_LIST); ++ g_value_init (&v, G_TYPE_UINT); ++ ++ profile = ++ gst_amc_h263_profile_to_gst_id (type->profile_levels[j].profile); ++ ++ if (profile == -1) { ++ GST_ERROR ("Unable to map h263 profile 0x%08x", ++ type->profile_levels[j].profile); ++ continue; ++ } ++ ++ for (k = 1; k <= type->profile_levels[j].level && k != 0; k 
<<= 1) { ++ level = gst_amc_h263_level_to_gst_id (k); ++ if (level == -1) ++ continue; ++ ++ g_value_set_uint (&v, level); ++ gst_value_list_append_value (&va, &v); ++ g_value_reset (&v); ++ } ++ tmp2 = gst_structure_copy (tmp); ++ gst_structure_set (tmp2, "profile", G_TYPE_UINT, profile, NULL); ++ gst_structure_set_value (tmp2, "level", &va); ++ g_value_unset (&va); ++ g_value_unset (&v); ++ ret = gst_caps_merge_structure (ret, tmp2); ++ have_profile_level = TRUE; ++ } ++ } ++ ++ if (!have_profile_level) { ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ gst_structure_free (tmp); ++ } ++ } else if (strcmp (type->mime, "video/avc") == 0) { ++ gint j; ++ GstStructure *tmp, *tmp2; ++ gboolean have_profile_level = FALSE; ++ ++ tmp = gst_structure_new ("video/x-h264", ++ "width", GST_TYPE_INT_RANGE, 16, 4096, ++ "height", GST_TYPE_INT_RANGE, 16, 4096, ++ "framerate", GST_TYPE_FRACTION_RANGE, ++ 0, 1, G_MAXINT, 1, ++ "parsed", G_TYPE_BOOLEAN, TRUE, ++ "stream-format", G_TYPE_STRING, "byte-stream", ++ "alignment", G_TYPE_STRING, "au", NULL); ++ ++ if (type->n_profile_levels) { ++ for (j = type->n_profile_levels - 1; j >= 0; j--) { ++ const gchar *profile, *alternative = NULL, *level; ++ gint k; ++ GValue va = { 0, }; ++ GValue v = { 0, }; ++ ++ g_value_init (&va, GST_TYPE_LIST); ++ g_value_init (&v, G_TYPE_STRING); ++ ++ profile = ++ gst_amc_avc_profile_to_string (type->profile_levels[j].profile, ++ &alternative); ++ ++ if (!profile) { ++ GST_ERROR ("Unable to map H264 profile 0x%08x", ++ type->profile_levels[j].profile); ++ continue; ++ } ++ ++ for (k = 1; k <= type->profile_levels[j].level && k != 0; k <<= 1) { ++ level = gst_amc_avc_level_to_string (k); ++ if (!level) ++ continue; ++ ++ g_value_set_string (&v, level); ++ gst_value_list_append_value (&va, &v); ++ g_value_reset (&v); ++ } ++ tmp2 = gst_structure_copy (tmp); ++ gst_structure_set (tmp2, "profile", G_TYPE_STRING, profile, NULL); ++ gst_structure_set_value (tmp2, "level", &va); ++ if 
(!alternative) ++ g_value_unset (&va); ++ g_value_unset (&v); ++ ret = gst_caps_merge_structure (ret, tmp2); ++ ++ if (alternative) { ++ tmp2 = gst_structure_copy (tmp); ++ gst_structure_set (tmp2, "profile", G_TYPE_STRING, alternative, ++ NULL); ++ gst_structure_set_value (tmp2, "level", &va); ++ g_value_unset (&va); ++ ret = gst_caps_merge_structure (ret, tmp2); ++ } ++ have_profile_level = TRUE; ++ } ++ } ++ ++ if (!have_profile_level) { ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ gst_structure_free (tmp); ++ } ++ } else if (strcmp (type->mime, "video/x-vnd.on2.vp8") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("video/x-vp8", ++ "width", GST_TYPE_INT_RANGE, 16, 4096, ++ "height", GST_TYPE_INT_RANGE, 16, 4096, ++ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); ++ ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else if (strcmp (type->mime, "video/mpeg2") == 0) { ++ GstStructure *tmp; ++ ++ tmp = gst_structure_new ("video/mpeg", ++ "width", GST_TYPE_INT_RANGE, 16, 4096, ++ "height", GST_TYPE_INT_RANGE, 16, 4096, ++ "framerate", GST_TYPE_FRACTION_RANGE, ++ 0, 1, G_MAXINT, 1, ++ "mpegversion", GST_TYPE_INT_RANGE, 1, 2, ++ "systemstream", G_TYPE_BOOLEAN, FALSE, ++ "parsed", G_TYPE_BOOLEAN, TRUE, NULL); ++ ++ ret = gst_caps_merge_structure (ret, tmp); ++ } else { ++ GST_WARNING ("Unsupported mimetype '%s'", type->mime); ++ } ++ } ++ ++ return ret; ++} ++ ++static const gchar * ++caps_to_mime (GstCaps * caps) ++{ ++ GstStructure *s; ++ const gchar *name; ++ ++ s = gst_caps_get_structure (caps, 0); ++ if (!s) ++ return NULL; ++ ++ name = gst_structure_get_name (s); ++ ++ if (strcmp (name, "video/mpeg") == 0) { ++ gint mpegversion; ++ ++ if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) ++ return NULL; ++ ++ if (mpegversion == 4) ++ return "video/mp4v-es"; ++ else if (mpegversion == 1 || mpegversion == 2) ++ return "video/mpeg2"; ++ } else if (strcmp (name, "video/x-h263") == 0) { ++ return "video/3gpp"; ++ } 
else if (strcmp (name, "video/x-h264") == 0) { ++ return "video/avc"; ++ } else if (strcmp (name, "video/x-vp8") == 0) { ++ return "video/x-vnd.on2.vp8"; ++ } ++ ++ return NULL; ++} ++ ++static GstCaps * ++create_src_caps (const GstAmcCodecInfo * codec_info) ++{ ++ GstCaps *ret; ++ gint i; ++ ++ ret = gst_caps_new_empty (); ++ ++ for (i = 0; i < codec_info->n_supported_types; i++) { ++ const GstAmcCodecType *type = &codec_info->supported_types[i]; ++ gint j; ++ ++ for (j = 0; j < type->n_color_formats; j++) { ++ GstVideoFormat format; ++ GstCaps *tmp; ++ ++ format = gst_amc_color_format_to_video_format (type->color_formats[j]); ++ if (format == GST_VIDEO_FORMAT_UNKNOWN) { ++ GST_WARNING ("Unknown color format 0x%08x", type->color_formats[j]); ++ continue; ++ } ++ ++ tmp = gst_caps_new_simple ("video/x-raw", ++ "format", G_TYPE_STRING, gst_video_format_to_string (format), ++ "width", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, ++ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); ++ ret = gst_caps_merge (ret, tmp); ++ } ++ } ++ ++ return ret; ++} ++ ++static void ++gst_amc_video_dec_base_init (gpointer g_class) ++{ ++ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); ++ GstAmcVideoDecClass *amcvideodec_class = GST_AMC_VIDEO_DEC_CLASS (g_class); ++ const GstAmcCodecInfo *codec_info; ++ GstPadTemplate *templ; ++ GstCaps *caps; ++ gchar *longname; ++ ++ codec_info = ++ g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), gst_amc_codec_info_quark); ++ /* This happens for the base class and abstract subclasses */ ++ if (!codec_info) ++ return; ++ ++ amcvideodec_class->codec_info = codec_info; ++ ++ /* Add pad templates */ ++ caps = create_sink_caps (codec_info); ++ templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps); ++ gst_element_class_add_pad_template (element_class, templ); ++ gst_caps_unref (caps); ++ ++ caps = create_src_caps (codec_info); ++ templ = gst_pad_template_new ("src", 
GST_PAD_SRC, GST_PAD_ALWAYS, caps); ++ gst_element_class_add_pad_template (element_class, templ); ++ gst_caps_unref (caps); ++ ++ longname = g_strdup_printf ("Android MediaCodec Hybris %s", codec_info->name); ++ gst_element_class_set_metadata (element_class, ++ codec_info->name, ++ "Codec/Decoder/Video", longname, "Jim Hodapp "); ++ g_free (longname); ++} ++ ++static void ++gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass) ++{ ++ GObjectClass *gobject_class = G_OBJECT_CLASS (klass); ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass); ++ ++ parent_class = g_type_class_peek_parent (klass); ++ ++ gobject_class->finalize = gst_amc_video_dec_finalize; ++ ++ element_class->change_state = ++ GST_DEBUG_FUNCPTR (gst_amc_video_dec_change_state); ++ ++ videodec_class->start = GST_DEBUG_FUNCPTR (gst_amc_video_dec_start); ++ videodec_class->stop = GST_DEBUG_FUNCPTR (gst_amc_video_dec_stop); ++ videodec_class->open = GST_DEBUG_FUNCPTR (gst_amc_video_dec_open); ++ videodec_class->close = GST_DEBUG_FUNCPTR (gst_amc_video_dec_close); ++ videodec_class->reset = GST_DEBUG_FUNCPTR (gst_amc_video_dec_reset); ++ videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_format); ++ videodec_class->handle_frame = ++ GST_DEBUG_FUNCPTR (gst_amc_video_dec_handle_frame); ++ videodec_class->finish = GST_DEBUG_FUNCPTR (gst_amc_video_dec_finish); ++ videodec_class->decide_allocation = ++ GST_DEBUG_FUNCPTR (gst_amc_video_dec_decide_allocation); ++} ++ ++static void ++gst_amc_video_dec_init (GstAmcVideoDec * self) ++{ ++ gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE); ++ ++ g_mutex_init (&self->drain_lock); ++ g_cond_init (&self->drain_cond); ++ ++ self->dec_format = NULL; ++} ++ ++static gboolean ++gst_amc_video_dec_open (GstVideoDecoder * decoder) ++{ ++ GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder); ++ GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self); 
++ ++ GST_DEBUG_OBJECT (self, "Opening hardware decoder"); ++ ++ self->codec = gst_amc_codec_new (klass->codec_info->name); ++ if (!self->codec) { ++ GST_WARNING_OBJECT (self, "Failed to get valid hardware decoder"); ++ return FALSE; ++ } ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_video_dec_close (GstVideoDecoder * decoder) ++{ ++ GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Closing decoder"); ++ ++ if (self->codec) ++ gst_amc_codec_free (self->codec); ++ self->codec = NULL; ++ ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ GST_DEBUG_OBJECT (self, "Closed decoder"); ++ ++ return TRUE; ++} ++ ++static void ++gst_amc_video_dec_finalize (GObject * object) ++{ ++ GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (object); ++ ++ gst_amc_format_free (self->dec_format); ++ ++ g_mutex_clear (&self->drain_lock); ++ g_cond_clear (&self->drain_cond); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static GstStateChangeReturn ++gst_amc_video_dec_change_state (GstElement * element, GstStateChange transition) ++{ ++ GstAmcVideoDec *self; ++ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; ++ ++ g_return_val_if_fail (GST_IS_AMC_VIDEO_DEC (element), ++ GST_STATE_CHANGE_FAILURE); ++ self = GST_AMC_VIDEO_DEC (element); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_NULL_TO_READY: ++ break; ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ self->downstream_flow_ret = GST_FLOW_OK; ++ self->draining = FALSE; ++ self->started = FALSE; ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_PLAYING: ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ self->flushing = TRUE; ++ gst_amc_codec_flush (self->codec); ++ g_mutex_lock (&self->drain_lock); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ g_mutex_unlock (&self->drain_lock); ++ break; ++ default: ++ break; ++ } ++ ++ if (ret == GST_STATE_CHANGE_FAILURE) ++ return ret; ++ ++ ret = 
GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++ ++ if (ret == GST_STATE_CHANGE_FAILURE) ++ return ret; ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_PLAYING_TO_PAUSED: ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ self->started = FALSE; ++ break; ++ case GST_STATE_CHANGE_READY_TO_NULL: ++ break; ++ default: ++ break; ++ } ++ ++ return ret; ++} ++ ++#define MAX_FRAME_DIST_TIME (5 * GST_SECOND) ++#define MAX_FRAME_DIST_FRAMES (100) ++ ++static GstVideoCodecFrame * ++_find_nearest_frame (GstAmcVideoDec * self, GstClockTime reference_timestamp) ++{ ++ GList *l, *best_l = NULL; ++ GList *finish_frames = NULL; ++ GstVideoCodecFrame *best = NULL; ++ guint64 best_timestamp = 0; ++ guint64 best_diff = G_MAXUINT64; ++ BufferIdentification *best_id = NULL; ++ GList *frames; ++ ++ frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self)); ++ ++ for (l = frames; l; l = l->next) { ++ GstVideoCodecFrame *tmp = l->data; ++ BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp); ++ guint64 timestamp, diff; ++ ++ /* This happens for frames that were just added but ++ * which were not passed to the component yet. Ignore ++ * them here! 
++ */ ++ if (!id) ++ continue; ++ ++ timestamp = id->timestamp; ++ ++ if (timestamp > reference_timestamp) ++ diff = timestamp - reference_timestamp; ++ else ++ diff = reference_timestamp - timestamp; ++ ++ if (best == NULL || diff < best_diff) { ++ best = tmp; ++ best_timestamp = timestamp; ++ best_diff = diff; ++ best_l = l; ++ best_id = id; ++ ++ /* For frames without timestamp we simply take the first frame */ ++ if ((reference_timestamp == 0 && timestamp == 0) || diff == 0) ++ break; ++ } ++ } ++ ++ if (best_id) { ++ for (l = frames; l && l != best_l; l = l->next) { ++ GstVideoCodecFrame *tmp = l->data; ++ BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp); ++ guint64 diff_time, diff_frames; ++ ++ if (id->timestamp > best_timestamp) ++ break; ++ ++ if (id->timestamp == 0 || best_timestamp == 0) ++ diff_time = 0; ++ else ++ diff_time = best_timestamp - id->timestamp; ++ diff_frames = best->system_frame_number - tmp->system_frame_number; ++ ++ if (diff_time > MAX_FRAME_DIST_TIME ++ || diff_frames > MAX_FRAME_DIST_FRAMES) { ++ finish_frames = ++ g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp)); ++ } ++ } ++ } ++ ++ if (finish_frames) { ++ g_warning ("%s: Frames are too old, bug in decoder -- please file a bug", ++ GST_ELEMENT_NAME (self)); ++ for (l = finish_frames; l; l = l->next) { ++ gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), l->data); ++ } ++ } ++ ++ if (best) ++ gst_video_codec_frame_ref (best); ++ ++ g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL); ++ g_list_free (frames); ++ ++ return best; ++} ++ ++static gboolean ++gst_amc_video_dec_set_src_caps (GstAmcVideoDec * self, GstAmcFormat * format) ++{ ++ GstVideoCodecState *output_state; ++ gint color_format, width, height; ++ gint stride, slice_height; ++ gint crop_left, crop_right; ++ gint crop_top, crop_bottom; ++ GstVideoFormat gst_format; ++ ++ GST_DEBUG_OBJECT (self, "Setting src caps"); ++ ++ color_format = media_format_get_color_format 
(format->format); ++ width = media_format_get_width (format->format); ++ height = media_format_get_height (format->format); ++ if (!color_format || !height || !width) { ++ GST_ERROR_OBJECT (self, "Failed to get output format metadata"); ++ return FALSE; ++ } ++ ++ stride = media_format_get_stride (format->format); ++ slice_height = media_format_get_slice_height (format->format); ++ if (!stride || !slice_height) { ++ GST_ERROR_OBJECT (self, "Failed to get stride and slice-height"); ++ return FALSE; ++ } ++ ++ crop_left = media_format_get_crop_left (format->format); ++ crop_right = media_format_get_crop_right (format->format); ++ crop_top = media_format_get_crop_top (format->format); ++ crop_bottom = media_format_get_crop_bottom (format->format); ++ ++ if (width == 0 || height == 0) { ++ GST_ERROR_OBJECT (self, "Height or width not set"); ++ return FALSE; ++ } ++ ++ if (crop_bottom) ++ height = height - (height - crop_bottom - 1); ++ if (crop_top) ++ height = height - crop_top; ++ ++ if (crop_right) ++ width = width - (width - crop_right - 1); ++ if (crop_left) ++ width = width - crop_left; ++ ++ gst_format = gst_amc_color_format_to_video_format (color_format); ++ if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) { ++ GST_ERROR_OBJECT (self, "Unknown color format 0x%08x", color_format); ++ return FALSE; ++ } ++ ++ output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self), ++ gst_format, width, height, self->input_state); ++ ++ self->format = gst_format; ++ self->color_format = color_format; ++ self->height = height; ++ self->width = width; ++ self->stride = stride; ++ self->slice_height = slice_height; ++ self->crop_left = crop_left; ++ self->crop_right = crop_right; ++ self->crop_top = crop_top; ++ self->crop_bottom = crop_bottom; ++ ++ gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)); ++ gst_video_codec_state_unref (output_state); ++ self->input_state_changed = FALSE; ++ ++ return TRUE; ++} ++ ++/* Disabled for now since this plugin does not 
support non-hardware accelerated ++ * video rendering at the moment. ++ */ ++#if 0 ++/* ++ * The format is called QOMX_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka. ++ * Which is actually NV12 (interleaved U&V). ++ */ ++#define TILE_WIDTH 64 ++#define TILE_HEIGHT 32 ++#define TILE_SIZE (TILE_WIDTH * TILE_HEIGHT) ++#define TILE_GROUP_SIZE (4 * TILE_SIZE) ++ ++/* get frame tile coordinate. XXX: nothing to be understood here, don't try. */ ++static size_t ++tile_pos (size_t x, size_t y, size_t w, size_t h) ++{ ++ size_t flim = x + (y & ~1) * w; ++ ++ if (y & 1) { ++ flim += (x & ~3) + 2; ++ } else if ((h & 1) == 0 || y != (h - 1)) { ++ flim += (x + 2) & ~3; ++ } ++ ++ return flim; ++} ++#endif ++ ++/* The weird handling of cropping, alignment and everything is taken from ++ * platform/frameworks/media/libstagefright/colorconversion/ColorConversion.cpp ++ */ ++static gboolean ++gst_amc_video_dec_fill_buffer (GstAmcVideoDec * self, gint idx, ++ const GstAmcBufferInfo * buffer_info, GstBuffer * outbuf) ++{ ++ //GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self); ++ GstAmcBuffer *buf = &self->output_buffers[idx]; ++ GstVideoCodecState *state = ++ gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self)); ++ //GstVideoInfo *info = &state->info; ++ gboolean ret = FALSE; ++ GstMemory *mem = { NULL }; ++ ++ GST_DEBUG_OBJECT (self, "%s", __PRETTY_FUNCTION__); ++ ++ if (idx >= self->n_output_buffers) { ++ GST_ERROR_OBJECT (self, "Invalid output buffer index %d of %d", ++ idx, self->n_output_buffers); ++ goto done; ++ } ++ ++ GST_DEBUG_OBJECT (self, ++ "buffer_info->size: %d, gst_buffer_get_size (outbuf): %d", ++ buffer_info->size, gst_buffer_get_size (outbuf)); ++ /* Same video format */ ++#if 0 ++ if (buffer_info->size == gst_buffer_get_size (outbuf)) { ++ GstMemory *mem = { NULL }; ++#endif ++ ++ GST_DEBUG_OBJECT (self, "Buffer sizes equal, not doing a copy"); ++ ++ if (gst_buffer_n_memory (outbuf) >= 1 && ++ (mem = gst_buffer_peek_memory (outbuf, 0)) ++ 
&& gst_is_mir_image_memory (mem)) { ++#if 0 ++ gint err = 0; ++#endif ++ ++ GST_DEBUG_OBJECT (self, "It is Mir image memory"); ++ GST_DEBUG_OBJECT (self, "mem: %p", mem); ++ GST_DEBUG_OBJECT (self, "gst_mir_image_memory_get_codec: %p", ++ self->codec->codec_delegate); ++ gst_mir_image_memory_set_codec (mem, self->codec->codec_delegate); ++ gst_mir_image_memory_set_buffer_index (mem, idx); ++ ++#if 0 ++ if (!self->codec->codec_delegate) ++ GST_ERROR_OBJECT (self, ++ "codec_delegate is NULL, rendering will not function"); ++ ++ GST_DEBUG_OBJECT (self, "mem: %p", mem); ++ GST_DEBUG_OBJECT (self, "gst_mir_image_memory_get_codec (mem): %p", ++ self->codec->codec_delegate); ++ GST_DEBUG_OBJECT (self, "gst_mir_image_memory_get_buffer_index (mem): %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ GST_DEBUG_OBJECT (self, "Rendering buffer: %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ GST_DEBUG_OBJECT (self, "Releasing output buffer index: %d", ++ gst_mir_image_memory_get_buffer_index (mem)); ++ ++ /* Render and release the output buffer back to the decoder */ ++ err = ++ media_codec_release_output_buffer (self->codec->codec_delegate, ++ gst_mir_image_memory_get_buffer_index (mem)); ++ if (err < 0) ++ GST_WARNING_OBJECT (self, ++ "Failed to release output buffer. Rendering will probably be affected (err: %d).", ++ err); ++#endif ++ } else { ++ GstMapInfo minfo; ++ ++ gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE); ++ orc_memcpy (minfo.data, buf->data + buffer_info->offset, ++ buffer_info->size); ++ gst_buffer_unmap (outbuf, &minfo); ++ } ++ ret = TRUE; ++ goto done; ++#if 0 ++ } ++#endif ++ ++ /* Disabled for now since this plugin does not support non-hardware accelerated ++ * video rendering at the moment. 
++ */ ++#if 0 ++ GST_DEBUG_OBJECT (self, ++ "Sizes not equal (%d vs %d), doing slow line-by-line copying", ++ buffer_info->size, gst_buffer_get_size (outbuf)); ++ ++ /* Different video format, try to convert */ ++ switch (self->color_format) { ++ case COLOR_FormatYUV420Planar:{ ++ GstVideoFrame vframe; ++ gint i, j, height; ++ guint8 *src, *dest; ++ gint stride, slice_height; ++ gint src_stride, dest_stride; ++ gint row_length; ++ ++ stride = self->stride; ++ if (stride == 0) { ++ GST_ERROR_OBJECT (self, "Stride not set"); ++ goto done; ++ } ++ ++ slice_height = self->slice_height; ++ if (slice_height == 0) { ++ /* NVidia Tegra 3 on Nexus 7 does not set this */ ++ if (g_str_has_prefix (klass->codec_info->name, "OMX.Nvidia.")) { ++ slice_height = GST_ROUND_UP_32 (self->height); ++ } else { ++ GST_ERROR_OBJECT (self, "Slice height not set"); ++ goto done; ++ } ++ } ++ ++ gst_video_frame_map (&vframe, info, outbuf, GST_MAP_WRITE); ++ for (i = 0; i < 3; i++) { ++ if (i == 0) { ++ src_stride = stride; ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } else { ++ src_stride = (stride + 1) / 2; ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } ++ ++ src = buf->data + buffer_info->offset; ++ ++ if (i == 0) { ++ src += self->crop_top * stride; ++ src += self->crop_left; ++ row_length = self->width; ++ } else if (i > 0) { ++ src += slice_height * stride; ++ src += self->crop_top * src_stride; ++ src += self->crop_left / 2; ++ row_length = (self->width + 1) / 2; ++ } ++ if (i == 2) ++ src += ((slice_height + 1) / 2) * ((stride + 1) / 2); ++ ++ dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, i); ++ height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, i); ++ ++ for (j = 0; j < height; j++) { ++ orc_memcpy (dest, src, row_length); ++ src += src_stride; ++ dest += dest_stride; ++ } ++ } ++ gst_video_frame_unmap (&vframe); ++ ret = TRUE; ++ break; ++ } ++ case COLOR_TI_FormatYUV420PackedSemiPlanar: ++ case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:{ ++ gint i, 
j, height; ++ guint8 *src, *dest; ++ gint src_stride, dest_stride; ++ gint row_length; ++ GstVideoFrame vframe; ++ ++ /* This should always be set */ ++ if (self->stride == 0 || self->slice_height == 0) { ++ GST_ERROR_OBJECT (self, "Stride or slice height not set"); ++ goto done; ++ } ++ ++ /* FIXME: This does not work for odd widths or heights ++ * but might as well be a bug in the codec */ ++ gst_video_frame_map (&vframe, info, outbuf, GST_MAP_WRITE); ++ for (i = 0; i < 2; i++) { ++ if (i == 0) { ++ src_stride = self->stride; ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } else { ++ src_stride = GST_ROUND_UP_2 (self->stride); ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } ++ ++ src = buf->data + buffer_info->offset; ++ if (i == 0) { ++ row_length = self->width; ++ } else if (i == 1) { ++ src += (self->slice_height - self->crop_top / 2) * self->stride; ++ row_length = GST_ROUND_UP_2 (self->width); ++ } ++ ++ dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, i); ++ height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, i); ++ ++ for (j = 0; j < height; j++) { ++ orc_memcpy (dest, src, row_length); ++ src += src_stride; ++ dest += dest_stride; ++ } ++ } ++ gst_video_frame_unmap (&vframe); ++ ret = TRUE; ++ break; ++ } ++ case COLOR_QCOM_FormatYUV420SemiPlanar: ++ case COLOR_FormatYUV420SemiPlanar:{ ++ gint i, j, height; ++ guint8 *src, *dest; ++ gint src_stride, dest_stride; ++ gint row_length; ++ GstVideoFrame vframe; ++ ++ /* This should always be set */ ++ if (self->stride == 0 || self->slice_height == 0) { ++ GST_ERROR_OBJECT (self, "Stride or slice height not set"); ++ goto done; ++ } ++ ++ /* FIXME: This is untested! 
*/ ++ gst_video_frame_map (&vframe, info, outbuf, GST_MAP_WRITE); ++ for (i = 0; i < 2; i++) { ++ if (i == 0) { ++ src_stride = self->stride; ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } else { ++ src_stride = self->stride; ++ dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, i); ++ } ++ ++ src = buf->data + buffer_info->offset; ++ if (i == 0) { ++ src += self->crop_top * self->stride; ++ src += self->crop_left; ++ row_length = self->width; ++ } else if (i == 1) { ++ src += self->slice_height * self->stride; ++ src += self->crop_top * self->stride; ++ src += self->crop_left; ++ row_length = self->width; ++ } ++ ++ dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, i); ++ height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, i); ++ ++ for (j = 0; j < height; j++) { ++ orc_memcpy (dest, src, row_length); ++ src += src_stride; ++ dest += dest_stride; ++ } ++ } ++ gst_video_frame_unmap (&vframe); ++ ret = TRUE; ++ break; ++ } ++ case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:{ ++ GstVideoFrame vframe; ++ gint width = self->width; ++ gint height = self->height; ++ gint dest_luma_stride, dest_chroma_stride; ++ guint8 *src = buf->data + buffer_info->offset; ++ guint8 *dest_luma, *dest_chroma; ++ gint y; ++ const size_t tile_w = (width - 1) / TILE_WIDTH + 1; ++ const size_t tile_w_align = (tile_w + 1) & ~1; ++ const size_t tile_h_luma = (height - 1) / TILE_HEIGHT + 1; ++ const size_t tile_h_chroma = (height / 2 - 1) / TILE_HEIGHT + 1; ++ size_t luma_size = tile_w_align * tile_h_luma * TILE_SIZE; ++ ++ gst_video_frame_map (&vframe, info, outbuf, GST_MAP_WRITE); ++ dest_luma = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0); ++ dest_chroma = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1); ++ dest_luma_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0); ++ dest_chroma_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 1); ++ ++ if ((luma_size % TILE_GROUP_SIZE) != 0) ++ luma_size = (((luma_size - 1) / TILE_GROUP_SIZE) + 1) * TILE_GROUP_SIZE; ++ ++ for (y = 0; y < tile_h_luma; 
y++) { ++ size_t row_width = width; ++ gint x; ++ ++ for (x = 0; x < tile_w; x++) { ++ size_t tile_width = row_width; ++ size_t tile_height = height; ++ gint luma_idx; ++ gint chroma_idx; ++ /* luma source pointer for this tile */ ++ const uint8_t *src_luma = src ++ + tile_pos (x, y, tile_w_align, tile_h_luma) * TILE_SIZE; ++ ++ /* chroma source pointer for this tile */ ++ const uint8_t *src_chroma = src + luma_size ++ + tile_pos (x, y / 2, tile_w_align, tile_h_chroma) * TILE_SIZE; ++ if (y & 1) ++ src_chroma += TILE_SIZE / 2; ++ ++ /* account for right columns */ ++ if (tile_width > TILE_WIDTH) ++ tile_width = TILE_WIDTH; ++ ++ /* account for bottom rows */ ++ if (tile_height > TILE_HEIGHT) ++ tile_height = TILE_HEIGHT; ++ ++ /* dest luma memory index for this tile */ ++ luma_idx = y * TILE_HEIGHT * dest_luma_stride + x * TILE_WIDTH; ++ ++ /* dest chroma memory index for this tile */ ++ /* XXX: remove divisions */ ++ chroma_idx = ++ y * TILE_HEIGHT / 2 * dest_chroma_stride + x * TILE_WIDTH; ++ ++ tile_height /= 2; // we copy 2 luma lines at once ++ while (tile_height--) { ++ memcpy (dest_luma + luma_idx, src_luma, tile_width); ++ src_luma += TILE_WIDTH; ++ luma_idx += dest_luma_stride; ++ ++ memcpy (dest_luma + luma_idx, src_luma, tile_width); ++ src_luma += TILE_WIDTH; ++ luma_idx += dest_luma_stride; ++ ++ memcpy (dest_chroma + chroma_idx, src_chroma, tile_width); ++ src_chroma += TILE_WIDTH; ++ chroma_idx += dest_chroma_stride; ++ } ++ row_width -= TILE_WIDTH; ++ } ++ height -= TILE_HEIGHT; ++ } ++ gst_video_frame_unmap (&vframe); ++ ret = TRUE; ++ break; ++ ++ } ++ default: ++ GST_ERROR_OBJECT (self, "Unsupported color format %d", ++ self->color_format); ++ goto done; ++ break; ++ } ++#endif ++ ++done: ++ gst_video_codec_state_unref (state); ++ return ret; ++} ++ ++static gboolean ++gst_amc_video_dec_configure_self (GstAmcVideoDec * self) ++{ ++ /* Configure the hardware decoder */ ++ if (!gst_amc_codec_configure (self->codec, self->dec_format, NULL, 0)) { ++ 
GST_ERROR_OBJECT (self, "Failed to configure codec"); ++ return FALSE; ++ } ++ ++ if (!gst_amc_codec_start (self->codec)) { ++ GST_ERROR_OBJECT (self, "Failed to start codec"); ++ return FALSE; ++ } ++ ++ if (self->input_buffers) ++ gst_amc_codec_free_buffers (self->input_buffers, self->n_input_buffers); ++ self->input_buffers = ++ gst_amc_codec_get_input_buffers (self->codec, &self->n_input_buffers); ++ if (!self->input_buffers) { ++ GST_ERROR_OBJECT (self, "Failed to get input buffers"); ++ return FALSE; ++ } ++ // JH added ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, self->n_output_buffers); ++ self->output_buffers = ++ gst_amc_codec_get_output_buffers (self->codec, &self->n_output_buffers); ++ if (!self->output_buffers) { ++ GST_ERROR_OBJECT (self, "Failed to get output buffers"); ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static void ++gst_amc_video_dec_loop (GstAmcVideoDec * self) ++{ ++ GstVideoCodecFrame *frame; ++ GstFlowReturn flow_ret = GST_FLOW_OK; ++ GstClockTimeDiff deadline; ++ gboolean is_eos; ++ GstAmcBufferInfo buffer_info; ++ gint idx; ++ ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ ++retry: ++ /*if (self->input_state_changed) { ++ idx = INFO_OUTPUT_FORMAT_CHANGED; ++ } else { */ ++ GST_DEBUG_OBJECT (self, "Waiting for available output buffer"); ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ /* Wait at most 100ms here, some codecs don't fail dequeueing if ++ * the codec is flushing, causing deadlocks during shutdown */ ++ idx = gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ /*} */ ++ ++ GST_DEBUG_OBJECT (self, "Tried to dequeue output buffer (idx: %d)", idx); ++ if (idx < 0) { ++ if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) ++ goto flushing; ++ ++ switch (idx) { ++ case INFO_OUTPUT_BUFFERS_CHANGED:{ ++ GST_DEBUG_OBJECT (self, "Output buffers have changed"); ++ if (self->output_buffers) ++ 
gst_amc_codec_free_buffers (self->output_buffers, ++ self->n_output_buffers); ++ self->output_buffers = ++ gst_amc_codec_get_output_buffers (self->codec, ++ &self->n_output_buffers); ++ if (!self->output_buffers) ++ goto get_output_buffers_error; ++ break; ++ } ++ case INFO_OUTPUT_FORMAT_CHANGED:{ ++ GstAmcFormat *format; ++ /* gchar *format_string; */ ++ ++ GST_DEBUG_OBJECT (self, "Output format has changed"); ++ ++ format = gst_amc_codec_get_output_format (self->codec); ++ if (!format) ++ goto format_error; ++ ++ /* ++ format_string = gst_amc_format_to_string (format); ++ GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string); ++ g_free (format_string); ++ */ ++ ++ if (!gst_amc_video_dec_set_src_caps (self, format)) { ++ gst_amc_format_free (format); ++ goto format_error; ++ } ++ gst_amc_format_free (format); ++ ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, ++ self->n_output_buffers); ++ self->output_buffers = ++ gst_amc_codec_get_output_buffers (self->codec, ++ &self->n_output_buffers); ++ if (!self->output_buffers) ++ goto get_output_buffers_error; ++ ++ goto retry; ++ break; ++ } ++ case INFO_TRY_AGAIN_LATER: ++ GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out"); ++ goto retry; ++ break; ++ case G_MININT: ++ GST_ERROR_OBJECT (self, "Failure dequeueing output buffer"); ++ goto dequeue_error; ++ break; ++ default: ++ g_assert_not_reached (); ++ break; ++ } ++ ++ goto retry; ++ } ++ ++ GST_DEBUG_OBJECT (self, ++ "Got output buffer at index %d: size %d time %" G_GINT64_FORMAT ++ " flags 0x%08x", idx, buffer_info.size, buffer_info.presentation_time_us, ++ buffer_info.flags); ++ ++ frame = ++ _find_nearest_frame (self, ++ gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1)); ++ ++ is_eos = ! 
!(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM); ++ ++ if (is_eos) { ++ GST_DEBUG_OBJECT (self, "EOS detected!"); ++ flow_ret = GST_FLOW_EOS; ++ } ++ ++ GST_DEBUG_OBJECT (self, "Checking the max decode time deadline"); ++ if (frame ++ && (deadline = ++ gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self), ++ frame)) < 0) { ++ GST_WARNING_OBJECT (self, ++ "Frame is too late, dropping (deadline %" GST_TIME_FORMAT ")", ++ GST_TIME_ARGS (-deadline)); ++ flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame); ++ } else if (!frame && buffer_info.size > 0) { ++ GstBuffer *outbuf; ++ ++ /* This sometimes happens at EOS or if the input is not properly framed, ++ * let's handle it gracefully by allocating a new buffer for the current ++ * caps and filling it ++ */ ++ GST_ERROR_OBJECT (self, "No corresponding frame found"); ++ ++ outbuf = ++ gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self)); ++ ++ if (!gst_amc_video_dec_fill_buffer (self, idx, &buffer_info, outbuf)) { ++ gst_buffer_unref (outbuf); ++ if (!gst_amc_codec_release_output_buffer (self->codec, idx)) ++ GST_ERROR_OBJECT (self, "Failed to release output buffer index %d", ++ idx); ++ goto invalid_buffer; ++ } ++ ++ GST_BUFFER_PTS (outbuf) = ++ gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, ++ 1); ++ flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf); ++ } else if (buffer_info.size >= 0 && is_eos == FALSE) { ++ GST_DEBUG_OBJECT (self, "Should be allocating buffer from custom pool"); ++ /* Allocate buffer from the GstBufferPool */ ++ if ((flow_ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER ++ (self), frame)) != GST_FLOW_OK) { ++ GST_ERROR_OBJECT (self, "Failed to allocate buffer"); ++ goto flow_error; ++ } ++ ++ GST_DEBUG_OBJECT (self, "Filling the output buffer"); ++ /* Stuff the raw decoded video data into the buffer */ ++ if (!gst_amc_video_dec_fill_buffer (self, idx, &buffer_info, ++ frame->output_buffer)) { ++ 
gst_buffer_replace (&frame->output_buffer, NULL); ++ gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame); ++ if (!gst_amc_codec_release_output_buffer (self->codec, idx)) ++ GST_ERROR_OBJECT (self, "Failed to release output buffer index %d", ++ idx); ++ goto invalid_buffer; ++ } ++ ++ /* Push the frame downstream through the pipeline */ ++ flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame); ++ } else if (frame != NULL) { ++ GST_DEBUG_OBJECT (self, "Dropping frame"); ++ flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame); ++ } ++ ++ if (is_eos || flow_ret == GST_FLOW_EOS) { ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ g_mutex_lock (&self->drain_lock); ++ if (self->draining) { ++ GST_DEBUG_OBJECT (self, "Drained"); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ } else if (flow_ret == GST_FLOW_OK) { ++ GST_DEBUG_OBJECT (self, "Component signalled EOS"); ++ flow_ret = GST_FLOW_EOS; ++ } ++ g_mutex_unlock (&self->drain_lock); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ } else { ++ GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret)); ++ } ++ ++ self->downstream_flow_ret = flow_ret; ++ ++ if (flow_ret != GST_FLOW_OK) ++ goto flow_error; ++ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ ++ return; ++ ++dequeue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to dequeue output buffer")); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++get_output_buffers_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to get output buffers")); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ 
GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++format_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to handle format")); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_ERROR; ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++flushing: ++ { ++ GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++flow_error: ++ { ++ if (flow_ret == GST_FLOW_EOS) { ++ GST_DEBUG_OBJECT (self, "EOS"); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), ++ gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { ++ GST_ELEMENT_ERROR (self, STREAM, FAILED, ++ ("Internal data stream error."), ("stream stopped, reason %s", ++ gst_flow_get_name (flow_ret))); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), ++ gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ } ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++invalid_buffer: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ++ ("Invalid sized input buffer")); ++ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ()); ++ gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self)); ++ self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED; ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ return; ++ } ++} ++ ++static gboolean ++gst_amc_video_dec_start (GstVideoDecoder * decoder) ++{ ++ GstAmcVideoDec *self; ++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ self->last_upstream_ts = 0; ++ self->eos = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ self->started = FALSE; ++ self->flushing = TRUE; ++ ++ return TRUE; ++} ++ 
++static gboolean ++gst_amc_video_dec_stop (GstVideoDecoder * decoder) ++{ ++ GstAmcVideoDec *self; ++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ GST_DEBUG_OBJECT (self, "Stopping decoder"); ++ self->flushing = TRUE; ++ if (self->started) { ++ gst_amc_codec_flush (self->codec); ++ gst_amc_codec_stop (self->codec); ++ self->started = FALSE; ++ if (self->input_buffers) ++ gst_amc_codec_free_buffers (self->input_buffers, self->n_input_buffers); ++ self->input_buffers = NULL; ++ if (self->output_buffers) ++ gst_amc_codec_free_buffers (self->output_buffers, self->n_output_buffers); ++ self->output_buffers = NULL; ++ } ++ gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder)); ++ ++ self->downstream_flow_ret = GST_FLOW_FLUSHING; ++ self->eos = FALSE; ++ g_mutex_lock (&self->drain_lock); ++ self->draining = FALSE; ++ g_cond_broadcast (&self->drain_cond); ++ g_mutex_unlock (&self->drain_lock); ++ g_free (self->codec_data); ++ self->codec_data_size = 0; ++ if (self->input_state) ++ gst_video_codec_state_unref (self->input_state); ++ self->input_state = NULL; ++ GST_DEBUG_OBJECT (self, "Stopped decoder"); ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_video_dec_set_format (GstVideoDecoder * decoder, ++ GstVideoCodecState * state) ++{ ++ GstAmcVideoDec *self; ++ GstAmcFormat *format; ++ gboolean ret = TRUE; ++ const gchar *mime; ++ gboolean is_format_change = FALSE; ++ gboolean needs_disable = FALSE; ++ /* gchar *format_string; */ ++ guint8 *codec_data = NULL; ++ gsize codec_data_size = 0; ++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps); ++ ++ /* Check if the caps change is a real format change or if only irrelevant ++ * parts of the caps have changed or nothing at all. 
++ */ ++ is_format_change |= self->width != state->info.width; ++ is_format_change |= self->height != state->info.height; ++ if (state->codec_data) { ++ GstMapInfo cminfo; ++ ++ GST_DEBUG_OBJECT (self, "state->codec_data present!"); ++ ++ gst_buffer_map (state->codec_data, &cminfo, GST_MAP_READ); ++ codec_data = g_memdup (cminfo.data, cminfo.size); ++ codec_data_size = cminfo.size; ++ ++ is_format_change |= (!self->codec_data ++ || self->codec_data_size != codec_data_size ++ || memcmp (self->codec_data, codec_data, codec_data_size) != 0); ++ gst_buffer_unmap (state->codec_data, &cminfo); ++ } else if (self->codec_data) { ++ GST_DEBUG_OBJECT (self, "format change"); ++ is_format_change |= TRUE; ++ } ++ ++ needs_disable = self->started; ++ ++ /* If the component is not started and a real format change happens ++ * we have to restart the component. If no real format change ++ * happened we can just exit here. ++ */ ++ if (needs_disable && !is_format_change) { ++ g_free (codec_data); ++ codec_data = NULL; ++ codec_data_size = 0; ++ ++ /* Framerate or something minor changed */ ++ self->input_state_changed = TRUE; ++ if (self->input_state) ++ gst_video_codec_state_unref (self->input_state); ++ self->input_state = gst_video_codec_state_ref (state); ++ GST_DEBUG_OBJECT (self, ++ "Already running and caps did not change the format"); ++ return TRUE; ++ } ++ ++ if (needs_disable && is_format_change) { ++ gst_amc_video_dec_drain (self, FALSE); ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ gst_amc_video_dec_stop (GST_VIDEO_DECODER (self)); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ gst_amc_video_dec_close (GST_VIDEO_DECODER (self)); ++ if (!gst_amc_video_dec_open (GST_VIDEO_DECODER (self))) { ++ GST_ERROR_OBJECT (self, "Failed to open codec again"); ++ return FALSE; ++ } ++ ++ if (!gst_amc_video_dec_start (GST_VIDEO_DECODER (self))) { ++ GST_ERROR_OBJECT (self, "Failed to start codec again"); ++ } ++ } ++ /* srcpad task is not running at this point */ ++ if 
(self->input_state) ++ gst_video_codec_state_unref (self->input_state); ++ self->input_state = NULL; ++ ++ g_free (self->codec_data); ++ if (codec_data) ++ GST_DEBUG_OBJECT (self, "codec_data is NOT NULL"); ++ else ++ GST_DEBUG_OBJECT (self, "codec_data is NULL"); ++ self->codec_data = codec_data; ++ self->codec_data_size = codec_data_size; ++ ++ GST_DEBUG_OBJECT (self, "codec_data_size: %d", codec_data_size); ++ ++ mime = caps_to_mime (state->caps); ++ if (!mime) { ++ GST_ERROR_OBJECT (self, "Failed to convert caps to mime"); ++ return FALSE; ++ } ++ ++ GST_DEBUG_OBJECT (self, "mime: '%s', width: %d, height %d", mime, ++ state->info.width, state->info.height); ++ format = ++ gst_amc_format_new_video (mime, state->info.width, state->info.height); ++ if (!format) { ++ GST_ERROR_OBJECT (self, "Failed to create video format"); ++ return FALSE; ++ } ++ /* FIXME: New fix */ ++ self->dec_format = format; ++ ++ /* FIXME: This buffer needs to be valid until the codec is stopped again */ ++ if (self->codec_data) ++ gst_amc_format_set_buffer (format, "csd-0", self->codec_data, ++ self->codec_data_size); ++ ++/* ++ format_string = gst_amc_format_to_string (format); ++ GST_DEBUG_OBJECT (self, "Configuring codec with format: %s", format_string); ++ g_free (format_string); ++*/ ++ ++ /* Configure the hardware codec with format */ ++ ret = gst_amc_video_dec_configure_self (self); ++ GST_DEBUG_OBJECT (self, "gst_amc_video_dec_configure_self returned: %d", ret); ++ ++ gst_amc_format_free (format); ++ ++ self->started = TRUE; ++ self->input_state = gst_video_codec_state_ref (state); ++ self->input_state_changed = TRUE; ++ ++ /* Start the srcpad loop again */ ++ self->flushing = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self), ++ (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_amc_video_dec_reset (GstVideoDecoder * decoder, gboolean hard) ++{ ++ GstAmcVideoDec *self; 
++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Resetting decoder"); ++ ++ if (!self->started) { ++ GST_DEBUG_OBJECT (self, "Codec not started yet"); ++ return TRUE; ++ } ++ ++ self->flushing = TRUE; ++ gst_amc_codec_flush (self->codec); ++ ++ /* Wait until the srcpad loop is finished, ++ * unlock GST_VIDEO_DECODER_STREAM_LOCK to prevent deadlocks ++ * caused by using this lock from inside the loop function */ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ GST_PAD_STREAM_LOCK (GST_VIDEO_DECODER_SRC_PAD (self)); ++ GST_PAD_STREAM_UNLOCK (GST_VIDEO_DECODER_SRC_PAD (self)); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ self->flushing = FALSE; ++ ++ /* Start the srcpad loop again */ ++ self->last_upstream_ts = 0; ++ self->eos = FALSE; ++ self->downstream_flow_ret = GST_FLOW_OK; ++ gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self), ++ (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL); ++ ++ GST_DEBUG_OBJECT (self, "Reset decoder"); ++ ++ return TRUE; ++} ++ ++static GstFlowReturn ++gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame) ++{ ++ GstAmcVideoDec *self; ++ gint idx; ++ GstAmcBuffer *buf; ++ GstAmcBufferInfo buffer_info; ++ guint offset = 0; ++ GstClockTime timestamp, duration, timestamp_offset = 0; ++ GstMapInfo minfo; ++ ++ memset (&minfo, 0, sizeof (minfo)); ++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ ++ GST_DEBUG_OBJECT (self, "Handling frame"); ++ ++ if (!self->started) { ++ GST_ERROR_OBJECT (self, "Codec not started yet"); ++ gst_video_codec_frame_unref (frame); ++ return GST_FLOW_NOT_NEGOTIATED; ++ } ++ ++ if (self->eos) { ++ GST_WARNING_OBJECT (self, "Got frame after EOS"); ++ gst_video_codec_frame_unref (frame); ++ return GST_FLOW_EOS; ++ } ++ ++ if (self->flushing) ++ goto flushing; ++ ++ if (self->downstream_flow_ret != GST_FLOW_OK) ++ goto downstream_error; ++ ++ timestamp = frame->pts; ++ duration = frame->duration; ++ ++ gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ); 
++ ++ while (offset < minfo.size) { ++ /* Make sure to release the base class stream lock, otherwise ++ * _loop() can't call _finish_frame() and we might block forever ++ * because no input buffers are released */ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ /* Wait at most 100ms here, some codecs don't fail dequeueing if ++ * the codec is flushing, causing deadlocks during shutdown */ ++ idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ ++ GST_DEBUG_OBJECT (self, "Tried to dequeue input buffer idx: %d", idx); ++ if (idx < 0) { ++ if (self->flushing) ++ goto flushing; ++ switch (idx) { ++ case INFO_TRY_AGAIN_LATER: ++ GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out"); ++ continue; /* next try */ ++ break; ++ case G_MININT: ++ GST_ERROR_OBJECT (self, "Failed to dequeue input buffer"); ++ goto dequeue_error; ++ default: ++ g_assert_not_reached (); ++ break; ++ } ++ ++ continue; ++ } ++ ++ if (idx >= self->n_input_buffers) ++ goto invalid_buffer_index; ++ ++ if (self->flushing) ++ goto flushing; ++ ++ if (self->downstream_flow_ret != GST_FLOW_OK) { ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info); ++ goto downstream_error; ++ } ++ ++ /* Now handle the frame */ ++ ++ /* Copy the buffer content in chunks of size as requested ++ * by the port */ ++ buf = &self->input_buffers[idx]; ++ ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ buffer_info.offset = 0; ++ buffer_info.size = MIN (minfo.size - offset, buf->size); ++ ++ orc_memcpy (buf->data, minfo.data + offset, buffer_info.size); ++ ++ /* Interpolate timestamps if we're passing the buffer ++ * in multiple chunks */ ++ if (offset != 0 && duration != GST_CLOCK_TIME_NONE) { ++ timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size); ++ } ++ ++ if (timestamp != GST_CLOCK_TIME_NONE) { ++ buffer_info.presentation_time_us = ++ gst_util_uint64_scale (timestamp + 
timestamp_offset, 1, GST_USECOND); ++ self->last_upstream_ts = timestamp + timestamp_offset; ++ } ++ if (duration != GST_CLOCK_TIME_NONE) ++ self->last_upstream_ts += duration; ++ ++ if (offset == 0) { ++ BufferIdentification *id = ++ buffer_identification_new (timestamp + timestamp_offset); ++ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) ++ buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME; ++ gst_video_codec_frame_set_user_data (frame, id, ++ (GDestroyNotify) buffer_identification_free); ++ } ++ ++ offset += buffer_info.size; ++ GST_DEBUG_OBJECT (self, ++ "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x", ++ idx, buffer_info.size, buffer_info.presentation_time_us, ++ buffer_info.flags); ++ if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info)) ++ goto queue_error; ++ } ++ ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref (frame); ++ ++ return self->downstream_flow_ret; ++ ++downstream_error: ++ { ++ GST_ERROR_OBJECT (self, "Downstream returned %s", ++ gst_flow_get_name (self->downstream_flow_ret)); ++ if (minfo.data) ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref (frame); ++ return self->downstream_flow_ret; ++ } ++invalid_buffer_index: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Invalid input buffer index %d of %d", idx, self->n_input_buffers)); ++ if (minfo.data) ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref (frame); ++ return GST_FLOW_ERROR; ++ } ++dequeue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to dequeue input buffer")); ++ if (minfo.data) ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref (frame); ++ return GST_FLOW_ERROR; ++ } ++queue_error: ++ { ++ GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ++ ("Failed to queue input buffer")); ++ if (minfo.data) ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref 
(frame); ++ return GST_FLOW_ERROR; ++ } ++flushing: ++ { ++ GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING"); ++ if (minfo.data) ++ gst_buffer_unmap (frame->input_buffer, &minfo); ++ gst_video_codec_frame_unref (frame); ++ return GST_FLOW_FLUSHING; ++ } ++} ++ ++static GstFlowReturn ++gst_amc_video_dec_finish (GstVideoDecoder * decoder) ++{ ++ GstAmcVideoDec *self; ++ ++ self = GST_AMC_VIDEO_DEC (decoder); ++ ++ return gst_amc_video_dec_drain (self, TRUE); ++} ++ ++static GstFlowReturn ++gst_amc_video_dec_drain (GstAmcVideoDec * self, gboolean at_eos) ++{ ++ GstFlowReturn ret; ++ gint idx; ++ ++ GST_DEBUG_OBJECT (self, "Draining codec"); ++ if (!self->started) { ++ GST_DEBUG_OBJECT (self, "Codec not started yet"); ++ return GST_FLOW_OK; ++ } ++ ++ /* Don't send EOS buffer twice, this doesn't work */ ++ if (self->eos) { ++ GST_DEBUG_OBJECT (self, "Codec is EOS already"); ++ return GST_FLOW_OK; ++ } ++ if (at_eos) ++ self->eos = TRUE; ++ ++ /* Make sure to release the base class stream lock, otherwise ++ * _loop() can't call _finish_frame() and we might block forever ++ * because no input buffers are released */ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ /* Send an EOS buffer to the component and let the base ++ * class drop the EOS event. We will send it later when ++ * the EOS buffer arrives on the output port. ++ * Wait at most 0.5s here. 
*/ ++ idx = gst_amc_codec_dequeue_input_buffer (self->codec, 500000); ++ GST_DEBUG_OBJECT (self, "dequeued input buffer with idx: %d", idx); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ ++ if (idx >= 0 && idx < self->n_input_buffers) { ++ GstAmcBufferInfo buffer_info; ++ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (self); ++ g_mutex_lock (&self->drain_lock); ++ self->draining = TRUE; ++ ++ memset (&buffer_info, 0, sizeof (buffer_info)); ++ buffer_info.size = 0; ++ buffer_info.presentation_time_us = ++ gst_util_uint64_scale (self->last_upstream_ts, 1, GST_USECOND); ++ buffer_info.flags |= BUFFER_FLAG_END_OF_STREAM; ++ ++ if (gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info)) { ++ GST_DEBUG_OBJECT (self, "Waiting until codec is drained"); ++ g_cond_wait (&self->drain_cond, &self->drain_lock); ++ GST_DEBUG_OBJECT (self, "Drained codec"); ++ ret = GST_FLOW_OK; ++ } else { ++ GST_ERROR_OBJECT (self, "Failed to queue input buffer"); ++ ret = GST_FLOW_ERROR; ++ } ++ ++ g_mutex_unlock (&self->drain_lock); ++ GST_VIDEO_DECODER_STREAM_LOCK (self); ++ } else if (idx >= self->n_input_buffers) { ++ GST_ERROR_OBJECT (self, "Invalid input buffer index %d of %d", ++ idx, self->n_input_buffers); ++ ret = GST_FLOW_ERROR; ++ } else { ++ GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", idx); ++ ret = GST_FLOW_ERROR; ++ } ++ ++ return ret; ++} ++ ++static gboolean ++gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query) ++{ ++ GstBufferPool *pool; ++ GstMirBufferPool *m_pool; ++ GstStructure *config; ++ GstCaps *caps; ++ guint i, n; ++ ++ GST_DEBUG_OBJECT (bdec, "%s", __PRETTY_FUNCTION__); ++ GST_DEBUG_OBJECT (bdec, "Deciding ALLOCATION params"); ++ ++ /* Prefer a MirImage allocator if available */ ++ gst_query_parse_allocation (query, &caps, NULL); ++ /* if (caps && gst_video_info_from_caps (&info, caps) && info.finfo->format == GST_VIDEO_FORMAT_RGBA) { */ ++ { ++ GST_DEBUG_OBJECT (bdec, "Considering using the MirImage allocator"); ++ n = 
gst_query_get_n_allocation_params (query); ++ for (i = 0; i < n; i++) { ++ GstAllocator *allocator = NULL; ++ GstAllocationParams params; ++ ++ gst_query_parse_nth_allocation_param (query, i, &allocator, ¶ms); ++ /* Select the MirImage allocator from the ALLOCATION query */ ++ if (allocator ++ && g_strcmp0 (allocator->mem_type, GST_MIR_IMAGE_MEMORY_TYPE) == 0) { ++ GST_DEBUG_OBJECT (bdec, "Found the MirImage allocator!"); ++ g_assert (allocator != NULL); ++ gst_query_set_nth_allocation_param (query, 0, allocator, ¶ms); ++ while (gst_query_get_n_allocation_params (query) > 1) ++ gst_query_remove_nth_allocation_param (query, 1); ++ break; ++ } ++ } ++ } ++ ++ if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query)) ++ return FALSE; ++ ++ g_assert (gst_query_get_n_allocation_pools (query) > 0); ++ gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL); ++ g_assert (pool != NULL); ++ ++ /* Add the codec_delegate instance to the current pool */ ++ m_pool = (GstMirBufferPool *) pool; ++ m_pool->codec_delegate = (GST_AMC_VIDEO_DEC (bdec))->codec->codec_delegate; ++ ++ config = gst_buffer_pool_get_config (pool); ++ if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) { ++ GST_DEBUG_OBJECT (bdec, ++ "ALLOCATION query has GST_VIDEO_META_API_TYPE embedded"); ++ gst_buffer_pool_config_add_option (config, ++ GST_BUFFER_POOL_OPTION_VIDEO_META); ++ } ++ gst_buffer_pool_set_config (pool, config); ++ gst_object_unref (pool); ++ ++ return TRUE; ++} diff -Nru gst-plugins-bad1.0-1.1.4/debian/patches/series gst-plugins-bad1.0-1.1.4/debian/patches/series --- gst-plugins-bad1.0-1.1.4/debian/patches/series 2013-09-10 08:49:25.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/patches/series 2013-09-12 04:22:16.000000000 +0000 @@ -3,3 +3,4 @@ 99_ltmain_as-needed.patch pcfile-requires-plugins-good 0001-modplug-Specify-directory-when-including-stdafx.h.patch +adding-mirsink-and-android-media-over-hybris-support.patch diff -Nru 
gst-plugins-bad1.0-1.1.4/debian/rules gst-plugins-bad1.0-1.1.4/debian/rules --- gst-plugins-bad1.0-1.1.4/debian/rules 2013-08-30 11:02:31.000000000 +0000 +++ gst-plugins-bad1.0-1.1.4/debian/rules 2013-09-17 19:03:39.000000000 +0000 @@ -67,7 +67,7 @@ PLUGINS += plugins-bad $(EXTRA_PLUGINS) ifeq ($(DEB_HOST_ARCH_OS),linux) -PLUGINS += +PLUGINS += hybris endif VERSIONIZE= \ plugins-bad-doc.install \ @@ -80,6 +80,13 @@ real = debian/tmp/usr/lib/$(DEB_HOST_MULTIARCH)/gstreamer-$(gst_abi)/libgstreal.so endif +# miralloc is just enabled for i386 and armhf (need android for the other archs) +android_hybris_archs := i386 armhf +ifeq ($(DEB_HOST_ARCH_CPU), $(findstring $(DEB_HOST_ARCH_CPU), $(android_hybris_archs))) +miralloc = debian/tmp/usr/lib/$(DEB_HOST_MULTIARCH)/libgstmiralloc-$(gst_abi).so.* +mirallocdev = debian/tmp/usr/lib/$(DEB_HOST_MULTIARCH)/libgstmiralloc-$(gst_abi).so +endif + ifeq ($(DEB_HOST_ARCH_OS),linux) dvb = debian/tmp/usr/lib/$(DEB_HOST_MULTIARCH)/gstreamer-$(gst_abi)/libgstdvb.so fbdev = debian/tmp/usr/lib/$(DEB_HOST_MULTIARCH)/gstreamer-$(gst_abi)/libgstfbdevsink.so @@ -141,10 +148,17 @@ > debian/$(gst_pkgname)-$$f; \ done - sed 's/@GST_ABI@/$(gst_abi)/g' debian/libgstreamer-plugins-bad.install \ - > debian/libgstreamer-plugins-bad$(gst_deb_abi).install - sed 's/@GST_ABI@/$(gst_abi)/g' debian/libgstreamer-plugins-bad-dev.install \ - > debian/libgstreamer-plugins-bad$(gst_abi)-dev.install + sed \ + -e 's,@miralloc@,$(miralloc),g' \ + -e 's/@GST_ABI@/$(gst_abi)/g' \ + debian/libgstreamer-plugins-bad.install \ + > debian/libgstreamer-plugins-bad$(gst_deb_abi).install + + sed \ + -e 's,@mirallocdev@,$(mirallocdev),g' \ + -e 's/@GST_ABI@/$(gst_abi)/g' \ + debian/libgstreamer-plugins-bad-dev.install \ + > debian/libgstreamer-plugins-bad$(gst_abi)-dev.install maint: debian/control