author     Reynaldo H. Verdejo Pinochet <reynaldo.verdejo@collabora.co.uk>    2011-04-06 19:01:04 -0400
committer  Reynaldo H. Verdejo Pinochet <reynaldo.verdejo@collabora.co.uk>    2011-04-08 11:20:53 -0400
commit     58fba8990ba5b57f0ae8f53faa924445f8e48a21 (patch)
tree       ddd104917679b90360f5d37c1d25c5a692cf6504
parent     74bf6ff3e9305492db5391364e459bb05fbc97d9 (diff)
gst-indent the whole mess
-rwxr-xr-x  gstplayer/GstDriver.cpp                    127
-rw-r--r--  gstplayer/GstDriver.h                       40
-rw-r--r--  gstplayer/GstMediaRecorder.cpp            2293
-rw-r--r--  gstplayer/GstMediaRecorder.h               241
-rwxr-xr-x  gstplayer/GstMediaScanner.cpp              748
-rwxr-xr-x  gstplayer/GstMediaScanner.h                 55
-rw-r--r--  gstplayer/GstMetadataRetriever.cpp         586
-rw-r--r--  gstplayer/GstMetadataRetriever.h            61
-rw-r--r--  gstplayer/GstMetadataRetrieverDriver.cpp  1260
-rw-r--r--  gstplayer/GstMetadataRetrieverDriver.h     161
-rw-r--r--  gstplayer/GstPlayer.cpp                    379
-rw-r--r--  gstplayer/GstPlayer.h                       92
-rw-r--r--  gstplayer/GsticbAndroid.cpp                 11
-rw-r--r--  gstplayer/GsticbAndroidVideoSink.cpp       234
-rw-r--r--  gstplayer/GsticbAndroidVideoSink.h          32
15 files changed, 3236 insertions, 3084 deletions
diff --git a/gstplayer/GstDriver.cpp b/gstplayer/GstDriver.cpp
index aaf8ac2..469696f 100755
--- a/gstplayer/GstDriver.cpp
+++ b/gstplayer/GstDriver.cpp
@@ -49,7 +49,7 @@ mAudioStreamType (0), mAudioFlingerGstId (0), mAudioFlinger (0),
mAudioLeftVolume (1.0f), mAudioRightVolume (1.0f),
mNbAudioStream (0), mNbAudioStreamError (0),
mDuration (0), mPlaybackType (GSTDRIVER_PLAYBACK_TYPE_UNKNOWN),
-mMainCtx(NULL), mMainLoop(NULL), mMainThread(NULL), mBusWatch(NULL)
+mMainCtx (NULL), mMainLoop (NULL), mMainThread (NULL), mBusWatch (NULL)
{
LOGV ("constructor");
@@ -81,11 +81,12 @@ GstDriver::~GstDriver ()
mState = GSTDRIVER_STATE_END;
}
-GstStateChangeReturn
-GstDriver::wait_for_set_state (int timeout_msec)
+GstStateChangeReturn GstDriver::wait_for_set_state (int timeout_msec)
{
- GstMessage *msg;
- GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
+ GstMessage *
+ msg;
+ GstStateChangeReturn
+ ret = GST_STATE_CHANGE_FAILURE;
/* Wait for state change */
msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (mPlaybin), timeout_msec * GST_MSECOND, /* in nanosec */
@@ -101,8 +102,7 @@ GstDriver::wait_for_set_state (int timeout_msec)
return ret;
}
-gpointer
-GstDriver::do_loop (GstDriver * ed)
+gpointer GstDriver::do_loop (GstDriver * ed)
{
LOGV ("enter main loop");
g_main_loop_run (ed->mMainLoop);
@@ -199,7 +199,7 @@ GstDriver::setDataSource (const char *url)
strcat (uri, url);
LOGV ("set uri to playbin %s", uri);
g_object_set (G_OBJECT (mPlaybin), "uri", uri, (gchar *) NULL);
- mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_LOCAL_FILE;
+ mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_LOCAL_FILE;
} else {
LOGV ("set uri to playbin %s", url);
@@ -209,11 +209,11 @@ GstDriver::setDataSource (const char *url)
(gchar *) NULL);
g_object_set (G_OBJECT (mPlaybin), "buffer-size", (gint) 500 * 1024,
(gchar *) NULL);
- mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_HTTP;
+ mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_HTTP;
+ }
+ if (strncasecmp (url, "rtsp", 4) == 0) {
+ mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_RTSP;
}
- if (strncasecmp (url, "rtsp", 4) == 0) {
- mPlaybackType = GSTDRIVER_PLAYBACK_TYPE_RTSP;
- }
}
}
@@ -252,7 +252,7 @@ GstDriver::need_data (GstAppSrc * src, guint length, gpointer user_data)
}
/*static*/ gboolean
-GstDriver::seek_data (GstAppSrc * src, guint64 offset, gpointer user_data)
+ GstDriver::seek_data (GstAppSrc * src, guint64 offset, gpointer user_data)
{
UNUSED (src);
@@ -370,11 +370,11 @@ GstDriver::setVideoSurface (const sp < ISurface > &surface)
{
LOGV ("set surface to videosink");
mSurface = surface;
- g_object_set (G_OBJECT (mVideoBin), "surface", surface.get(), (gchar *) NULL);
+ g_object_set (G_OBJECT (mVideoBin), "surface", surface.get (),
+ (gchar *) NULL);
}
-bool
-GstDriver::setAudioSink (sp < MediaPlayerInterface::AudioSink > audiosink)
+bool GstDriver::setAudioSink (sp < MediaPlayerInterface::AudioSink > audiosink)
{
if (audiosink == 0) {
LOGE ("Error audio sink %p", audiosink.get ());
@@ -441,7 +441,7 @@ GstDriver::start ()
case GSTDRIVER_STATE_ERROR:
case GSTDRIVER_STATE_END:
{
- LOGD("We are in IDLE/INITIALIZED/STOPPPED/ERROR/END: %d", mState);
+ LOGD ("We are in IDLE/INITIALIZED/STOPPPED/ERROR/END: %d", mState);
GstPlayer *parent = (GstPlayer *) mparent;
if (parent) {
parent->sendEvent (MEDIA_ERROR, 0);
@@ -452,7 +452,7 @@ GstDriver::start ()
case GSTDRIVER_STATE_COMPLETED:
{
- LOGD("We are in GSTDRIVER_STATE_COMPLETED");
+ LOGD ("We are in GSTDRIVER_STATE_COMPLETED");
gint64 duration, position;
duration = getDuration ();
position = getPosition ();
@@ -460,23 +460,23 @@ GstDriver::start ()
if ((duration - position) <= 0) {
seek (0);
}
- }
+ }
case GSTDRIVER_STATE_PREPARED:
case GSTDRIVER_STATE_STARTED:
case GSTDRIVER_STATE_PAUSED:
- LOGD("We are in PREPARED/STARTED/PAUSED: %d", mState);
+ LOGD ("We are in PREPARED/STARTED/PAUSED: %d", mState);
/* FIXME, twi says NOT
HAVING THIS MAKES GENERATING THUMBNAILS DOGSLOW
no track means the sink is an AudioCache instance and the player is
being used to decode in memory
- */
+ */
- if (mAudioOut->getTrack() == NULL) {
+ if (mAudioOut->getTrack () == NULL) {
g_object_set (mAudioBin, "sync", FALSE, NULL);
} else {
g_object_set (mAudioBin, "sync", TRUE, NULL);
}
-
+
mEos = false;
gst_element_set_state (mPlaybin, GST_STATE_PLAYING);
mState = GSTDRIVER_STATE_STARTED;
@@ -497,10 +497,10 @@ GstDriver::seek (gint64 p)
if (!mPlaybin)
goto bail;
-
- if(p < 0) //don't seek to negative time
- goto bail;
+
+ if (p < 0) //don't seek to negative time
+ goto bail;
mLastValidPosition = p;
@@ -519,11 +519,12 @@ bail:
}
}
-gint64
-GstDriver::getPosition ()
+gint64 GstDriver::getPosition ()
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 pos = 0;
+ GstFormat
+ fmt = GST_FORMAT_TIME;
+ gint64
+ pos = 0;
if (!mPlaybin) {
LOGV ("get postion but pipeline has not been created yet");
@@ -546,12 +547,13 @@ GstDriver::getStatus ()
return mState;
}
-gint64
-GstDriver::getDuration ()
+gint64 GstDriver::getDuration ()
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 len;
+ GstFormat
+ fmt = GST_FORMAT_TIME;
+ gint64
+ len;
if (!mPlaybin) {
LOGV ("get duration but pipeline has not been created yet");
@@ -587,7 +589,7 @@ GstDriver::stop ()
{
LOGV ("stop");
gst_element_set_state (mPlaybin, GST_STATE_NULL);
-
+
if (wait_for_set_state (500) != GST_STATE_CHANGE_SUCCESS) {
LOGW ("TIMEOUT on stop request");
}
@@ -635,15 +637,15 @@ GstDriver::quit ()
if (mMainLoop) {
g_source_destroy (mBusWatch);
- g_source_unref(mBusWatch);
- mBusWatch = NULL;
+ g_source_unref (mBusWatch);
+ mBusWatch = NULL;
g_main_loop_quit (mMainLoop);
g_thread_join (mMainThread);
- mMainThread = NULL;
+ mMainThread = NULL;
g_main_loop_unref (mMainLoop);
- mMainLoop = NULL;
+ mMainLoop = NULL;
g_main_context_unref (mMainCtx);
- mMainCtx = NULL;
+ mMainCtx = NULL;
}
mState = GSTDRIVER_STATE_END;
@@ -667,17 +669,17 @@ GstDriver::getStreamsInfo ()
}
if (n_video > 0) {
- LOGV("We Have a video stream");
+ LOGV ("We Have a video stream");
mHaveStreamVideo = TRUE;
- } else
- LOGV("We don't have a video stream");
+ } else
+ LOGV ("We don't have a video stream");
mHaveStreamInfo = TRUE;
}
}
/*static*/ GstBusSyncReply
-GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
+ GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
{
GstDriver *ed = (GstDriver *) data;
GstPlayer *parent = (GstPlayer *) ed->mparent;
@@ -685,25 +687,25 @@ GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
UNUSED (bus);
switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- {
+ case GST_MESSAGE_EOS:
+ {
LOGV ("bus receive message EOS");
/* set state to paused (we want that "isPlaying" fct returns false after eos) */
ed->mState = GSTDRIVER_STATE_COMPLETED;
- gst_element_set_state(ed->mPlaybin, GST_STATE_PAUSED);
-
+ gst_element_set_state (ed->mPlaybin, GST_STATE_PAUSED);
+
if (ed->mAudioOut != 0) {
- ed->mAudioOut->stop();
+ ed->mAudioOut->stop ();
}
- ed->setEos(ed->getDuration());
- LOGV("set position on eos %"GST_TIME_FORMAT,
- GST_TIME_ARGS(ed->getPosition()));
+ ed->setEos (ed->getDuration ());
+ LOGV ("set position on eos %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (ed->getPosition ()));
if (parent)
parent->sendEvent (MEDIA_PLAYBACK_COMPLETE);
-
+
break;
}
@@ -748,14 +750,15 @@ GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
GstState old_state, new_state, pending;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending);
-
-
+
+
/* we only care about pipeline state change messages */
if (GST_MESSAGE_SRC (msg) != GST_OBJECT_CAST (ed->mPlaybin))
break;
-
- LOGV ("bus receive message STATE_CHANGED old %d new %d pending %d", old_state, new_state, pending);
-
+
+ LOGV ("bus receive message STATE_CHANGED old %d new %d pending %d",
+ old_state, new_state, pending);
+
//
if ((new_state == GST_STATE_PLAYING) && (old_state == GST_STATE_PAUSED))
ed->getStreamsInfo ();
@@ -782,10 +785,10 @@ GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
if ((ed->mPlaybackType == GSTDRIVER_PLAYBACK_TYPE_RTSP) && parent) {
LOGV ("bus handler send event MEDIA_PREPARED");
- ed->mState = GSTDRIVER_STATE_PREPARED;
+ ed->mState = GSTDRIVER_STATE_PREPARED;
parent->sendEvent (MEDIA_PREPARED);
}
-
+
if (ed->mVideoBin) {
int width = 0, height = 0;
if (ed->mHaveStreamInfo && ed->mHaveStreamVideo) {
@@ -816,7 +819,7 @@ GstDriver::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
LOGV ("Buffering complete");
if ((ed->mState == GSTDRIVER_STATE_INITIALIZED) && parent) {
LOGV ("Sending MEDIA_PREPARED");
- ed->mState = GSTDRIVER_STATE_PREPARED;
+ ed->mState = GSTDRIVER_STATE_PREPARED;
parent->sendEvent (MEDIA_PREPARED);
} else if (ed->mPausedByUser == FALSE) {
LOGV ("buffer level hit high watermark -> PLAYING");
@@ -1003,7 +1006,7 @@ GstDriver::init_gstreamer ()
// @param[inout] records Parcel where the player appends its metadata.
// @return OK if the call was successful.
status_t
-GstDriver::getMetadata (const SortedVector < media::Metadata::Type > &ids,
+ GstDriver::getMetadata (const SortedVector < media::Metadata::Type > &ids,
Parcel * records)
{
using media::Metadata;
diff --git a/gstplayer/GstDriver.h b/gstplayer/GstDriver.h
index 24a2680..69774d5 100644
--- a/gstplayer/GstDriver.h
+++ b/gstplayer/GstDriver.h
@@ -59,7 +59,7 @@ namespace android
void quit ();
gint64 getPosition ();
gint64 getDuration ();
- void setEos(gint64 position);
+ void setEos (gint64 position);
int getStatus ();
void getVideoSize (int *width, int *height);
void setVolume (float left, float right);
@@ -143,25 +143,25 @@ namespace android
GObject * object, GstDebugMessage * message, gpointer data);
gboolean mPausedByUser; /* false if paused by buffering logic. user pausing takes precedent */
-
- gint64 mDuration;
-
- enum GstDriverPlaybackType
- {
- GSTDRIVER_PLAYBACK_TYPE_UNKNOWN,
- GSTDRIVER_PLAYBACK_TYPE_LOCAL_FILE,
- GSTDRIVER_PLAYBACK_TYPE_RTSP,
- GSTDRIVER_PLAYBACK_TYPE_HTTP
- };
-
- int mPlaybackType;
-
- GMainContext* mMainCtx;
- GMainLoop* mMainLoop;
- static gpointer do_loop (GstDriver *ed);
- GThread* mMainThread;
- GSource* mBusWatch;
- sp<ISurface> mSurface;
+
+ gint64 mDuration;
+
+ enum GstDriverPlaybackType
+ {
+ GSTDRIVER_PLAYBACK_TYPE_UNKNOWN,
+ GSTDRIVER_PLAYBACK_TYPE_LOCAL_FILE,
+ GSTDRIVER_PLAYBACK_TYPE_RTSP,
+ GSTDRIVER_PLAYBACK_TYPE_HTTP
+ };
+
+ int mPlaybackType;
+
+ GMainContext *mMainCtx;
+ GMainLoop *mMainLoop;
+ static gpointer do_loop (GstDriver * ed);
+ GThread *mMainThread;
+ GSource *mBusWatch;
+ sp < ISurface > mSurface;
};
}; // namespace android
diff --git a/gstplayer/GstMediaRecorder.cpp b/gstplayer/GstMediaRecorder.cpp
index 04b3e2b..1695cca 100644
--- a/gstplayer/GstMediaRecorder.cpp
+++ b/gstplayer/GstMediaRecorder.cpp
@@ -45,7 +45,7 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <ui/ISurface.h>
#include <ui/ICamera.h>
#include <ui/Camera.h>
-#else
+#else
#include <camera/CameraParameters.h>
#include <utils/Errors.h>
#include <media/AudioSystem.h>
@@ -62,1276 +62,1325 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
using namespace android;
-GstMediaRecorder::GstMediaRecorder()
+GstMediaRecorder::GstMediaRecorder ()
{
- LOGV("GstMediaRecorder constructor");
- mCamera = NULL;
- mSurface = NULL;
- mFlags = 0;
-
- mVideoBin = NULL;
- mAudioBin = NULL;
- mPipeline = NULL;
-
- mUse_video_src = FALSE;
- mUse_audio_src = FALSE;
-
- mVideoSrc = NULL;
- mAudioSrc = NULL;
-
- mOutFilePath = NULL;
-
- mMaxDuration = -1;
- mMaxFileSize = -1;
- mCurrentFileSize = 0;
- mTimer = NULL;
-
- //default init
- mFps = 15;
- mWidth = 176;
- mHeight = 144;
- mOutput_format = OUTPUT_FORMAT_DEFAULT;
- mVideo_encoder = VIDEO_ENCODER_DEFAULT;
- mAudio_encoder = AUDIO_ENCODER_DEFAULT;
- mAudio_source = AUDIO_SOURCE_MIC;
-
- mAudioSampleRate = 48000;
- mAudioChannels = 2;
- mAudioBitrate = 192000;
- mVideoBitrate = 786432;
- mVTMode = 0;
- mIPeriod = 0;
- mIMBRefreshMode = 0;
-
- if (!g_thread_supported ()) {
- LOGV("GstMediaRecorder GLib thread init");
- g_thread_init (NULL);
- }
-
- // setup callback listener
- mCameraListener = new AndroidGstCameraListener(this);
-
- /* create and init the EOS mutex now */
- mEOSlock = g_mutex_new ();
- g_mutex_lock (mEOSlock);
- mIsEos = FALSE;
-
- if(snd_hwdep_open (&mHwdep_handle,"hw:0,0", O_RDWR) < 0){
- LOGE("Error %d opening hwdep device\n", errno);
- }
+ LOGV ("GstMediaRecorder constructor");
+ mCamera = NULL;
+ mSurface = NULL;
+ mFlags = 0;
+
+ mVideoBin = NULL;
+ mAudioBin = NULL;
+ mPipeline = NULL;
+
+ mUse_video_src = FALSE;
+ mUse_audio_src = FALSE;
+
+ mVideoSrc = NULL;
+ mAudioSrc = NULL;
+
+ mOutFilePath = NULL;
+
+ mMaxDuration = -1;
+ mMaxFileSize = -1;
+ mCurrentFileSize = 0;
+ mTimer = NULL;
+
+ //default init
+ mFps = 15;
+ mWidth = 176;
+ mHeight = 144;
+ mOutput_format = OUTPUT_FORMAT_DEFAULT;
+ mVideo_encoder = VIDEO_ENCODER_DEFAULT;
+ mAudio_encoder = AUDIO_ENCODER_DEFAULT;
+ mAudio_source = AUDIO_SOURCE_MIC;
+
+ mAudioSampleRate = 48000;
+ mAudioChannels = 2;
+ mAudioBitrate = 192000;
+ mVideoBitrate = 786432;
+ mVTMode = 0;
+ mIPeriod = 0;
+ mIMBRefreshMode = 0;
+
+ if (!g_thread_supported ()) {
+ LOGV ("GstMediaRecorder GLib thread init");
+ g_thread_init (NULL);
+ }
+ // setup callback listener
+ mCameraListener = new AndroidGstCameraListener (this);
+
+ /* create and init the EOS mutex now */
+ mEOSlock = g_mutex_new ();
+ g_mutex_lock (mEOSlock);
+ mIsEos = FALSE;
+
+ if (snd_hwdep_open (&mHwdep_handle, "hw:0,0", O_RDWR) < 0) {
+ LOGE ("Error %d opening hwdep device\n", errno);
+ }
}
-GstMediaRecorder::~GstMediaRecorder()
+GstMediaRecorder::~GstMediaRecorder ()
{
- LOGV("GstMediaRecorder destructor");
-
- if (mCamera != NULL) {
- mCamera->setListener(NULL);
- if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
- LOGV("GstMediaRecorder camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- }
- if (mFlags & FLAGS_SET_CAMERA) {
- LOGV("GstMediaRecorder unlocking camera to return to app");
- mCamera->unlock();
- } else {
- LOGV("GstMediaRecorder disconnect from camera");
- mCamera->disconnect();
- }
- mCamera.clear();
- }
-
- mFlags = 0;
-
- // don't send eos but release the pipeline
- release_pipeline();
-
- if(mOutFilePath) {
- g_free(mOutFilePath);
- }
- mCameraListener.clear();
-
- // free mutex
- g_mutex_free (mEOSlock);
- mEOSlock = NULL;
-
- // free timer
- g_timer_destroy(mTimer);
- mTimer = NULL;
-
- if(mHwdep_handle) {
- snd_hwdep_close(mHwdep_handle);
- }
+ LOGV ("GstMediaRecorder destructor");
+
+ if (mCamera != NULL) {
+ mCamera->setListener (NULL);
+ if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
+ LOGV ("GstMediaRecorder camera was cold when we started, stopping preview");
+ mCamera->stopPreview ();
+ }
+ if (mFlags & FLAGS_SET_CAMERA) {
+ LOGV ("GstMediaRecorder unlocking camera to return to app");
+ mCamera->unlock ();
+ } else {
+ LOGV ("GstMediaRecorder disconnect from camera");
+ mCamera->disconnect ();
+ }
+ mCamera.clear ();
+ }
+
+ mFlags = 0;
+
+ // don't send eos but release the pipeline
+ release_pipeline ();
+
+ if (mOutFilePath) {
+ g_free (mOutFilePath);
+ }
+ mCameraListener.clear ();
+
+ // free mutex
+ g_mutex_free (mEOSlock);
+ mEOSlock = NULL;
+
+ // free timer
+ g_timer_destroy (mTimer);
+ mTimer = NULL;
+
+ if (mHwdep_handle) {
+ snd_hwdep_close (mHwdep_handle);
+ }
}
-status_t GstMediaRecorder::init()
+status_t
+GstMediaRecorder::init ()
{
- LOGV("GstMediaRecorder init");
+ LOGV ("GstMediaRecorder init");
- return OK;
+ return OK;
}
-status_t GstMediaRecorder::setAudioSource(audio_source as)
+status_t
+GstMediaRecorder::setAudioSource (audio_source as)
{
- //LOGV("GstMediaRecorder setAudioSource %s", (as==AUDIO_SOURCE_DEFAULT)?"AUDIO_SOURCE_DEFAULT":"AUDIO_SOURCE_MIC");
- mAudio_source = as;
- mUse_audio_src = TRUE;
- switch (as)
- {
- case AUDIO_SOURCE_DEFAULT:
- LOGV("GstMediaRecorder setAudioSource DEFAULT (MIC)");
- //the default value is equal to AUDIO_SOURCE_MIC
- mAudio_source = AUDIO_SOURCE_MIC;
- break;
- case AUDIO_SOURCE_MIC:
- LOGV("GstMediaRecorder setAudioSource MIC");
- break;
- case AUDIO_SOURCE_VOICE_UPLINK:
- LOGV("GstMediaRecorder setAudioSource VOICE_UPLINK");
- break;
- case AUDIO_SOURCE_VOICE_DOWNLINK:
- LOGV("GstMediaRecorder setAudioSource VOICE_DOWNLINK");
- break;
- case AUDIO_SOURCE_CAMCORDER:
- LOGV("GstMediaRecorder setAudioSource CAMCORDER");
- break;
- case AUDIO_SOURCE_VOICE_RECOGNITION:
- LOGV("GstMediaRecorder setAudioSource VOICE_RECOGNITION");
- break;
- case AUDIO_SOURCE_VOICE_CALL:
- LOGV("GstMediaRecorder setAudioSource VOICE_CALL");
- break;
- default:
- break;
- }
- return OK;
+ //LOGV("GstMediaRecorder setAudioSource %s", (as==AUDIO_SOURCE_DEFAULT)?"AUDIO_SOURCE_DEFAULT":"AUDIO_SOURCE_MIC");
+ mAudio_source = as;
+ mUse_audio_src = TRUE;
+ switch (as) {
+ case AUDIO_SOURCE_DEFAULT:
+ LOGV ("GstMediaRecorder setAudioSource DEFAULT (MIC)");
+ //the default value is equal to AUDIO_SOURCE_MIC
+ mAudio_source = AUDIO_SOURCE_MIC;
+ break;
+ case AUDIO_SOURCE_MIC:
+ LOGV ("GstMediaRecorder setAudioSource MIC");
+ break;
+ case AUDIO_SOURCE_VOICE_UPLINK:
+ LOGV ("GstMediaRecorder setAudioSource VOICE_UPLINK");
+ break;
+ case AUDIO_SOURCE_VOICE_DOWNLINK:
+ LOGV ("GstMediaRecorder setAudioSource VOICE_DOWNLINK");
+ break;
+ case AUDIO_SOURCE_CAMCORDER:
+ LOGV ("GstMediaRecorder setAudioSource CAMCORDER");
+ break;
+ case AUDIO_SOURCE_VOICE_RECOGNITION:
+ LOGV ("GstMediaRecorder setAudioSource VOICE_RECOGNITION");
+ break;
+ case AUDIO_SOURCE_VOICE_CALL:
+ LOGV ("GstMediaRecorder setAudioSource VOICE_CALL");
+ break;
+ default:
+ break;
+ }
+ return OK;
}
-status_t GstMediaRecorder::setVideoSource(video_source vs)
+status_t
+GstMediaRecorder::setVideoSource (video_source vs)
{
- LOGV("GstMediaRecorder setVideoSource %s", (vs==VIDEO_SOURCE_DEFAULT)?"VIDEO_SOURCE_DEFAULT":"VIDEO_SOURCE_CAMERA");
- switch (vs)
- {
- case VIDEO_SOURCE_DEFAULT:
- //the default value is equal to VIDEO_SOURCE_CAMERA
- mUse_video_src = TRUE;
- break;
- case VIDEO_SOURCE_CAMERA:
- mUse_video_src = TRUE;
- break;
- default:
- mUse_video_src = FALSE;
- break;
- }
- return OK;
+ LOGV ("GstMediaRecorder setVideoSource %s",
+ (vs ==
+ VIDEO_SOURCE_DEFAULT) ? "VIDEO_SOURCE_DEFAULT" :
+ "VIDEO_SOURCE_CAMERA");
+ switch (vs) {
+ case VIDEO_SOURCE_DEFAULT:
+ //the default value is equal to VIDEO_SOURCE_CAMERA
+ mUse_video_src = TRUE;
+ break;
+ case VIDEO_SOURCE_CAMERA:
+ mUse_video_src = TRUE;
+ break;
+ default:
+ mUse_video_src = FALSE;
+ break;
+ }
+ return OK;
}
-status_t GstMediaRecorder::setOutputFormat(output_format of)
+status_t
+GstMediaRecorder::setOutputFormat (output_format of)
{
- LOGV("GstMediaRecorder setOutputFormat %d", of);
- mOutput_format = of;
-
- switch(of)
- {
- case OUTPUT_FORMAT_DEFAULT:
- LOGV("GstMediaRecorder setOutputFormat DEFAULT (3GPP)");
- mOutput_format = OUTPUT_FORMAT_THREE_GPP;
- break;
- case OUTPUT_FORMAT_THREE_GPP:
- LOGV("GstMediaRecorder setOutputFormat 3GPP");
- break;
- case OUTPUT_FORMAT_MPEG_4:
- LOGV("GstMediaRecorder setOutputFormat MPEG4");
- break;
- case OUTPUT_FORMAT_RAW_AMR:
- LOGV("GstMediaRecorder setOutputFormat RAW AMR (AMR NB)");
- break;
- case OUTPUT_FORMAT_LIST_END:
- break;
- case OUTPUT_FORMAT_AMR_WB:
- LOGV(" AMR WB");
- break;
- case OUTPUT_FORMAT_AAC_ADIF:
- LOGV(" AAC ADIF");
- break;
- case OUTPUT_FORMAT_AAC_ADTS:
- LOGV(" AAC ADTS");
- break;
- }
- return OK;
+ LOGV ("GstMediaRecorder setOutputFormat %d", of);
+ mOutput_format = of;
+
+ switch (of) {
+ case OUTPUT_FORMAT_DEFAULT:
+ LOGV ("GstMediaRecorder setOutputFormat DEFAULT (3GPP)");
+ mOutput_format = OUTPUT_FORMAT_THREE_GPP;
+ break;
+ case OUTPUT_FORMAT_THREE_GPP:
+ LOGV ("GstMediaRecorder setOutputFormat 3GPP");
+ break;
+ case OUTPUT_FORMAT_MPEG_4:
+ LOGV ("GstMediaRecorder setOutputFormat MPEG4");
+ break;
+ case OUTPUT_FORMAT_RAW_AMR:
+ LOGV ("GstMediaRecorder setOutputFormat RAW AMR (AMR NB)");
+ break;
+ case OUTPUT_FORMAT_LIST_END:
+ break;
+ case OUTPUT_FORMAT_AMR_WB:
+ LOGV (" AMR WB");
+ break;
+ case OUTPUT_FORMAT_AAC_ADIF:
+ LOGV (" AAC ADIF");
+ break;
+ case OUTPUT_FORMAT_AAC_ADTS:
+ LOGV (" AAC ADTS");
+ break;
+ }
+ return OK;
}
-status_t GstMediaRecorder::setAudioEncoder(audio_encoder ae)
+status_t
+GstMediaRecorder::setAudioEncoder (audio_encoder ae)
{
- //LOGV("GstMediaRecorder setAudioEncoder %s", (ae==AUDIO_ENCODER_DEFAULT)?"AUDIO_ENCODER_DEFAULT":"AUDIO_ENCODER_AMR_NB");
- mAudio_encoder = ae;
- switch(mAudio_encoder)
- {
- case AUDIO_ENCODER_DEFAULT:
- case AUDIO_ENCODER_AMR_NB:
- LOGV("GstMediaRecorder setAudioEncoder AMR NB");
- mAudio_encoder = AUDIO_ENCODER_AMR_NB;
- break;
- case AUDIO_ENCODER_AMR_WB:
- LOGV("GstMediaRecorder setAudioEncoder AMR WB");
- break;
- case AUDIO_ENCODER_AAC:
- LOGV("GstMediaRecorder setAudioEncoder AAC");
- break;
- case AUDIO_ENCODER_AAC_PLUS:
- LOGV("GstMediaRecorder setAudioEncoder AAC PLUS");
- break;
- case AUDIO_ENCODER_EAAC_PLUS:
- LOGV("GstMediaRecorder setAudioEncoder EAAC PLUS");
- break;
- default:
- LOGV("GstMediaRecorder setAudioEncoder AMR NB");
- mAudio_encoder = AUDIO_ENCODER_AMR_NB;
- break;
- }
- return OK;
+ //LOGV("GstMediaRecorder setAudioEncoder %s", (ae==AUDIO_ENCODER_DEFAULT)?"AUDIO_ENCODER_DEFAULT":"AUDIO_ENCODER_AMR_NB");
+ mAudio_encoder = ae;
+ switch (mAudio_encoder) {
+ case AUDIO_ENCODER_DEFAULT:
+ case AUDIO_ENCODER_AMR_NB:
+ LOGV ("GstMediaRecorder setAudioEncoder AMR NB");
+ mAudio_encoder = AUDIO_ENCODER_AMR_NB;
+ break;
+ case AUDIO_ENCODER_AMR_WB:
+ LOGV ("GstMediaRecorder setAudioEncoder AMR WB");
+ break;
+ case AUDIO_ENCODER_AAC:
+ LOGV ("GstMediaRecorder setAudioEncoder AAC");
+ break;
+ case AUDIO_ENCODER_AAC_PLUS:
+ LOGV ("GstMediaRecorder setAudioEncoder AAC PLUS");
+ break;
+ case AUDIO_ENCODER_EAAC_PLUS:
+ LOGV ("GstMediaRecorder setAudioEncoder EAAC PLUS");
+ break;
+ default:
+ LOGV ("GstMediaRecorder setAudioEncoder AMR NB");
+ mAudio_encoder = AUDIO_ENCODER_AMR_NB;
+ break;
+ }
+ return OK;
}
-status_t GstMediaRecorder::setVideoEncoder(video_encoder ve)
+status_t
+GstMediaRecorder::setVideoEncoder (video_encoder ve)
{
- LOGV("GstMediaRecorder setVideoEncoder %d", ve);
- mVideo_encoder = ve;
- switch(mVideo_encoder)
- {
- case VIDEO_ENCODER_DEFAULT:
- LOGV("GstMediaRecorder setVideoEncoder DEFAULT (MPEG4)");
- mVideo_encoder = VIDEO_ENCODER_MPEG_4_SP;
- break;
- case VIDEO_ENCODER_H263:
- LOGV("GstMediaRecorder setVideoEncoder H263");
- break;
- case VIDEO_ENCODER_H264:
- LOGV("GstMediaRecorder setVideoEncoder H264");
- break;
- case VIDEO_ENCODER_MPEG_4_SP:
- LOGV("GstMediaRecorder setVideoEncoder MPEG4");
- break;
- }
- return OK;
+ LOGV ("GstMediaRecorder setVideoEncoder %d", ve);
+ mVideo_encoder = ve;
+ switch (mVideo_encoder) {
+ case VIDEO_ENCODER_DEFAULT:
+ LOGV ("GstMediaRecorder setVideoEncoder DEFAULT (MPEG4)");
+ mVideo_encoder = VIDEO_ENCODER_MPEG_4_SP;
+ break;
+ case VIDEO_ENCODER_H263:
+ LOGV ("GstMediaRecorder setVideoEncoder H263");
+ break;
+ case VIDEO_ENCODER_H264:
+ LOGV ("GstMediaRecorder setVideoEncoder H264");
+ break;
+ case VIDEO_ENCODER_MPEG_4_SP:
+ LOGV ("GstMediaRecorder setVideoEncoder MPEG4");
+ break;
+ }
+ return OK;
}
-status_t GstMediaRecorder::setVideoSize(int width, int height)
+status_t
+GstMediaRecorder::setVideoSize (int width, int height)
{
- LOGV("GstMediaRecorder setVideoSize width=%d height=%d", width, height);
- mWidth = width;
- mHeight = height;
- return OK;
+ LOGV ("GstMediaRecorder setVideoSize width=%d height=%d", width, height);
+ mWidth = width;
+ mHeight = height;
+ return OK;
}
-status_t GstMediaRecorder::setVideoFrameRate(int frames_per_second)
+status_t
+GstMediaRecorder::setVideoFrameRate (int frames_per_second)
{
- LOGV("GstMediaRecorder setVideoFrameRate %d fps", frames_per_second);
- mFps = frames_per_second;
- return OK;
+ LOGV ("GstMediaRecorder setVideoFrameRate %d fps", frames_per_second);
+ mFps = frames_per_second;
+ return OK;
}
-status_t GstMediaRecorder::setCamera(const sp<ICamera>& camera)
+status_t
+GstMediaRecorder::setCamera (const sp < ICamera > &camera)
{
- LOGV("GstMediaRecorder setCamera");
-
- mFlags &= ~ FLAGS_SET_CAMERA | FLAGS_HOT_CAMERA;
- if (camera == NULL) {
- LOGV("camera is NULL");
- return OK;
- }
-
- // Connect our client to the camera remote
- mCamera = Camera::create(camera);
- if (mCamera == NULL) {
- LOGV("Unable to connect to camera");
- return OK;
- }
-
- LOGV("Connected to camera");
- mFlags |= FLAGS_SET_CAMERA;
- if (mCamera->previewEnabled()) {
- mFlags |= FLAGS_HOT_CAMERA;
- LOGV("camera is hot");
- }
- mUse_video_src = TRUE;
- return OK;
+ LOGV ("GstMediaRecorder setCamera");
+
+ mFlags &= ~FLAGS_SET_CAMERA | FLAGS_HOT_CAMERA;
+ if (camera == NULL) {
+ LOGV ("camera is NULL");
+ return OK;
+ }
+ // Connect our client to the camera remote
+ mCamera = Camera::create (camera);
+ if (mCamera == NULL) {
+ LOGV ("Unable to connect to camera");
+ return OK;
+ }
+
+ LOGV ("Connected to camera");
+ mFlags |= FLAGS_SET_CAMERA;
+ if (mCamera->previewEnabled ()) {
+ mFlags |= FLAGS_HOT_CAMERA;
+ LOGV ("camera is hot");
+ }
+ mUse_video_src = TRUE;
+ return OK;
}
-status_t GstMediaRecorder::setPreviewSurface(const sp<ISurface>& surface)
+status_t
+GstMediaRecorder::setPreviewSurface (const sp < ISurface > &surface)
{
- LOGV("GstMediaRecorder setPreviewSurface");
- mSurface = surface;
- return OK;
+ LOGV ("GstMediaRecorder setPreviewSurface");
+ mSurface = surface;
+ return OK;
}
-status_t GstMediaRecorder::setOutputFile(const char *path)
+status_t
+GstMediaRecorder::setOutputFile (const char *path)
{
- LOGV("GstMediaRecorder setOutputFile %s", path);
- mOutFilePath = g_strdup_printf("file://%s", path);
- mOutFilePath_fd = -1;
- return OK;
+ LOGV ("GstMediaRecorder setOutputFile %s", path);
+ mOutFilePath = g_strdup_printf ("file://%s", path);
+ mOutFilePath_fd = -1;
+ return OK;
}
-status_t GstMediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
+
+status_t
+GstMediaRecorder::setOutputFile (int fd, int64_t offset, int64_t length)
{
- LOGV("GstMediaRecorder setOutputFile for fd : fd=%d offset=%lld length=%lld", fd, offset, length);
- GST_UNUSED(offset);
- GST_UNUSED(length);
- mOutFilePath = g_strdup_printf("fd://%d",fd);
- mOutFilePath_fd = fd;
- return OK;
+ LOGV ("GstMediaRecorder setOutputFile for fd : fd=%d offset=%lld length=%lld",
+ fd, offset, length);
+ GST_UNUSED (offset);
+ GST_UNUSED (length);
+ mOutFilePath = g_strdup_printf ("fd://%d", fd);
+ mOutFilePath_fd = fd;
+ return OK;
}
-status_t GstMediaRecorder::setParameters(const String8& params)
+status_t
+GstMediaRecorder::setParameters (const String8 & params)
{
- LOGV("GstMediaRecorder setParameters");
-
- if(strstr(params, "max-duration") != NULL) {
- sscanf(params,"max-duration=%lld", &mMaxDuration);
- }
- if(strstr(params, "max-filesize") != NULL) {
- sscanf(params,"max-filesize=%lld", &mMaxFileSize);
- }
- if(strstr(params, "audio-param-sampling-rate") != NULL) {
- sscanf(params,"audio-param-sampling-rate=%lld", &mAudioSampleRate);
- if ( (mAudioSampleRate < 8000) || (mAudioSampleRate > 48000) )
- mAudioSampleRate = 48000;
-
- }
- if(strstr(params, "audio-param-number-of-channels") != NULL) {
- sscanf(params,"audio-param-number-of-channels=%lld", &mAudioChannels);
- if ( (mAudioChannels < 0) || (mAudioChannels > 2) )
- mAudioChannels = 2;
- }
- if(strstr(params, "audio-param-encoding-bitrate") != NULL) {
- sscanf(params,"audio-param-encoding-bitrate=%lld", &mAudioBitrate);
- if ( (mAudioBitrate < 0) || (mAudioBitrate > 192000) )
- mAudioBitrate = 128000;
- }
- if(strstr(params, "video-param-encoding-bitrate") != NULL) {
- sscanf(params,"video-param-encoding-bitrate=%lld", &mVideoBitrate);
- if ( (mVideoBitrate < 0) || (mVideoBitrate > 786432) )
- mVideoBitrate = 360000;
- }
- if(strstr(params, "vt-mode") != NULL) {
- sscanf(params,"vt-mode=%d", &mVTMode);
- }
- if(strstr(params, "i-mb-refresh") != NULL) {
- sscanf(params,"i-mb-refresh=%d", &mIMBRefreshMode);
- }
- if(strstr(params, "i-period") != NULL) {
- sscanf(params,"i-period=%d", &mIPeriod);
- }
- if(strstr(params, "video-bitrate") != NULL) {
- sscanf(params,"video-bitrate=%lld", &mVideoBitrate);
- }
- //if (mCamera != NULL) {
- //send the parameters to the camera to set specific effect or others parameters
- // mCamera->setParameters(params);
- //}
- LOGV("GstMediaRecorder max duration %lld max file size %lld", mMaxDuration, mMaxFileSize);
- return OK;
+ LOGV ("GstMediaRecorder setParameters");
+
+ if (strstr (params, "max-duration") != NULL) {
+ sscanf (params, "max-duration=%lld", &mMaxDuration);
+ }
+ if (strstr (params, "max-filesize") != NULL) {
+ sscanf (params, "max-filesize=%lld", &mMaxFileSize);
+ }
+ if (strstr (params, "audio-param-sampling-rate") != NULL) {
+ sscanf (params, "audio-param-sampling-rate=%lld", &mAudioSampleRate);
+ if ((mAudioSampleRate < 8000) || (mAudioSampleRate > 48000))
+ mAudioSampleRate = 48000;
+
+ }
+ if (strstr (params, "audio-param-number-of-channels") != NULL) {
+ sscanf (params, "audio-param-number-of-channels=%lld", &mAudioChannels);
+ if ((mAudioChannels < 0) || (mAudioChannels > 2))
+ mAudioChannels = 2;
+ }
+ if (strstr (params, "audio-param-encoding-bitrate") != NULL) {
+ sscanf (params, "audio-param-encoding-bitrate=%lld", &mAudioBitrate);
+ if ((mAudioBitrate < 0) || (mAudioBitrate > 192000))
+ mAudioBitrate = 128000;
+ }
+ if (strstr (params, "video-param-encoding-bitrate") != NULL) {
+ sscanf (params, "video-param-encoding-bitrate=%lld", &mVideoBitrate);
+ if ((mVideoBitrate < 0) || (mVideoBitrate > 786432))
+ mVideoBitrate = 360000;
+ }
+ if (strstr (params, "vt-mode") != NULL) {
+ sscanf (params, "vt-mode=%d", &mVTMode);
+ }
+ if (strstr (params, "i-mb-refresh") != NULL) {
+ sscanf (params, "i-mb-refresh=%d", &mIMBRefreshMode);
+ }
+ if (strstr (params, "i-period") != NULL) {
+ sscanf (params, "i-period=%d", &mIPeriod);
+ }
+ if (strstr (params, "video-bitrate") != NULL) {
+ sscanf (params, "video-bitrate=%lld", &mVideoBitrate);
+ }
+ //if (mCamera != NULL) {
+ //send the parameters to the camera to set specific effect or others parameters
+ // mCamera->setParameters(params);
+ //}
+ LOGV ("GstMediaRecorder max duration %lld max file size %lld", mMaxDuration,
+ mMaxFileSize);
+ return OK;
}
-status_t GstMediaRecorder::setListener(const sp<IMediaPlayerClient>& listener)
+
+status_t
+GstMediaRecorder::setListener (const sp < IMediaPlayerClient > &listener)
{
- LOGV("GstMediaRecorder setListener");
- mListener = listener;
- return OK;
+ LOGV ("GstMediaRecorder setListener");
+ mListener = listener;
+ return OK;
}
-status_t GstMediaRecorder::prepare()
+status_t
+GstMediaRecorder::prepare ()
{
- LOGV("GstMediaRecorder prepare");
-
- // create a camera if the app didn't supply one
- if ((mCamera == 0) && (mUse_video_src == TRUE)) {
- mCamera = Camera::connect();
- }
-
- if (mCamera != NULL && mSurface != NULL) {
- LOGV("GstMediaRecorder set preview display surface");
- mCamera->setPreviewDisplay(mSurface);
- }
-
- if (mCamera != NULL) {
- LOGV("GstMediaRecorder set camera parameters width=%d height=%d fps=%d", mWidth, mHeight, mFps);
- String8 s = mCamera->getParameters();
- CameraParameters p(s);
- p.setPreviewSize(mWidth, mHeight);
-
- if (mCamera->previewEnabled()) {
- s = p.flatten();
- mCamera->setParameters(s);
- mFlags |= FLAGS_HOT_CAMERA;
- LOGV("GstMediaRecorder preview camera already enabled");
- }else {
- p.setPreviewFrameRate(mFps);
- s = p.flatten();
- mCamera->setParameters(s);
- mCamera->startPreview();
- mFlags &= ~FLAGS_HOT_CAMERA;
- }
- }
-
- return build_record_graph();
+ LOGV ("GstMediaRecorder prepare");
+
+ // create a camera if the app didn't supply one
+ if ((mCamera == 0) && (mUse_video_src == TRUE)) {
+ mCamera = Camera::connect ();
+ }
+
+ if (mCamera != NULL && mSurface != NULL) {
+ LOGV ("GstMediaRecorder set preview display surface");
+ mCamera->setPreviewDisplay (mSurface);
+ }
+
+ if (mCamera != NULL) {
+ LOGV ("GstMediaRecorder set camera parameters width=%d height=%d fps=%d",
+ mWidth, mHeight, mFps);
+ String8 s = mCamera->getParameters ();
+ CameraParameters p (s);
+ p.setPreviewSize (mWidth, mHeight);
+
+ if (mCamera->previewEnabled ()) {
+ s = p.flatten ();
+ mCamera->setParameters (s);
+ mFlags |= FLAGS_HOT_CAMERA;
+ LOGV ("GstMediaRecorder preview camera already enabled");
+ } else {
+ p.setPreviewFrameRate (mFps);
+ s = p.flatten ();
+ mCamera->setParameters (s);
+ mCamera->startPreview ();
+ mFlags &= ~FLAGS_HOT_CAMERA;
+ }
+ }
+
+ return build_record_graph ();
}
-typedef struct {
- sp<IMemory> frame;
- sp<Camera> camera;
-} record_callback_cookie;
+typedef struct
+{
+ sp < IMemory > frame;
+ sp < Camera > camera;
+} record_callback_cookie;
-static void video_frame_release(GstICBVideoBuffer* buffer)
+static void
+video_frame_release (GstICBVideoBuffer * buffer)
{
- //LOGE("GstMediaRecorder video frame release");
+ //LOGE("GstMediaRecorder video frame release");
- record_callback_cookie* cookie = (record_callback_cookie*)(buffer->ctx);
+ record_callback_cookie *cookie = (record_callback_cookie *) (buffer->ctx);
- cookie->camera->releaseRecordingFrame(cookie->frame);
+ cookie->camera->releaseRecordingFrame (cookie->frame);
- cookie->frame.clear();
+ cookie->frame.clear ();
- g_free(cookie);
+ g_free (cookie);
}
-/*static*/ void GstMediaRecorder::record_callback(const sp<IMemory>& frame, void *cookie)
+/*static*/ void
+GstMediaRecorder::record_callback (const sp < IMemory > &frame, void *cookie)
{
- ssize_t offset = 0;
- size_t size = 0;
- video_frame_t video_frame = VIDEO_FRAME_INIT;
- GstBuffer* buffer;
- GstClockTime duration;
-
- //LOGE("GstMediaRecorder record callback");
- record_callback_cookie * lcookie = g_new0 (record_callback_cookie, 1);
- sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-
- GstMediaRecorder* mediarecorder = (GstMediaRecorder*) cookie;
- if(mediarecorder->mVideoSrc == NULL) {
- LOGV("GstMediaRecorder record_callback the videosrc don't exist");
- mediarecorder->mCamera->stopRecording();
- return ;
- }
-
- video_frame.pmem_fd = heap->getHeapID();
- video_frame.pmem_offset = offset;
- video_frame.pmem_size = size;
-
- lcookie->frame = frame;
- lcookie->camera = mediarecorder->mCamera;
-
- buffer = gst_icbvideo_buffer_new(&video_frame, (GstMiniObjectFinalizeFunction) video_frame_release,
- lcookie,
- GST_ELEMENT(mediarecorder->mVideoSrc));
-
- GST_BUFFER_SIZE(buffer) = size; //needed to build correct timestamp in basesrc
-
- duration = gst_util_uint64_scale_int (GST_SECOND, 1, mediarecorder->mFps);
- GST_BUFFER_DURATION(buffer) = duration; //needed to build correct duration in basesrc
-
- gst_app_src_push_buffer(GST_APP_SRC(mediarecorder->mVideoSrc), buffer);
+ ssize_t offset = 0;
+ size_t size = 0;
+ video_frame_t video_frame = VIDEO_FRAME_INIT;
+ GstBuffer *buffer;
+ GstClockTime duration;
+
+ //LOGE("GstMediaRecorder record callback");
+ record_callback_cookie *lcookie = g_new0 (record_callback_cookie, 1);
+ sp < IMemoryHeap > heap = frame->getMemory (&offset, &size);
+
+ GstMediaRecorder *mediarecorder = (GstMediaRecorder *) cookie;
+ if (mediarecorder->mVideoSrc == NULL) {
+ LOGV ("GstMediaRecorder record_callback the videosrc don't exist");
+ mediarecorder->mCamera->stopRecording ();
+ return;
+ }
+
+ video_frame.pmem_fd = heap->getHeapID ();
+ video_frame.pmem_offset = offset;
+ video_frame.pmem_size = size;
+
+ lcookie->frame = frame;
+ lcookie->camera = mediarecorder->mCamera;
+
+ buffer =
+ gst_icbvideo_buffer_new (&video_frame,
+ (GstMiniObjectFinalizeFunction) video_frame_release, lcookie,
+ GST_ELEMENT (mediarecorder->mVideoSrc));
+
+ GST_BUFFER_SIZE (buffer) = size; //needed to build correct timestamp in basesrc
+
+ duration = gst_util_uint64_scale_int (GST_SECOND, 1, mediarecorder->mFps);
+ GST_BUFFER_DURATION (buffer) = duration; //needed to build correct duration in basesrc
+
+ gst_app_src_push_buffer (GST_APP_SRC (mediarecorder->mVideoSrc), buffer);
}
-GstStateChangeReturn GstMediaRecorder::wait_for_set_state(int timeout_msec)
+GstStateChangeReturn
+GstMediaRecorder::wait_for_set_state (int timeout_msec)
{
- GstMessage *msg;
- GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
+ GstMessage *msg;
+ GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
- /* Wait for state change */
- msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS(mPipeline),
- timeout_msec * GST_MSECOND, /* in nanosec */
- (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE));
+ /* Wait for state change */
+ msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (mPipeline), timeout_msec * GST_MSECOND, /* in nanosec */
+ (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE));
- if (msg) {
- if ((GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ASYNC_DONE))
- ret = GST_STATE_CHANGE_SUCCESS;
+ if (msg) {
+ if ((GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ASYNC_DONE))
+ ret = GST_STATE_CHANGE_SUCCESS;
- gst_message_unref(msg);
- }
+ gst_message_unref (msg);
+ }
- return ret;
+ return ret;
}
-status_t GstMediaRecorder::start()
+status_t
+GstMediaRecorder::start ()
{
- GstStateChangeReturn ret;
- LOGV("GstMediaRecorder start recording");
-
- if(mPipeline == NULL) {
- LOGV("GstMediaRecorder start pipeline not created");
- return OK;
- }
-
- ret = gst_element_set_state (mPipeline, GST_STATE_PLAYING);
-
- // set the audio source device, open micro
- const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
- if (audioFlinger != 0) {
- LOGV("GstMediaRecorder start recording: unmute the microphone");
- audioFlinger->setMicMute(FALSE);
- }
-
- if (mCamera != NULL) {
- mCamera->setListener(mCameraListener);
- mCamera->startRecording();
- }
-
- if( ret == GST_STATE_CHANGE_ASYNC) {
- ret = wait_for_set_state(2000); // wait 2 second for state change
- }
-
- if(ret != GST_STATE_CHANGE_SUCCESS) {
- goto bail;
- }
-
- LOGV("GstMediaRecorder pipeline is in playing state");
- return OK;
+ GstStateChangeReturn ret;
+ LOGV ("GstMediaRecorder start recording");
+
+ if (mPipeline == NULL) {
+ LOGV ("GstMediaRecorder start pipeline not created");
+ return OK;
+ }
+
+ ret = gst_element_set_state (mPipeline, GST_STATE_PLAYING);
+
+ // set the audio source device, open micro
+ const sp < IAudioFlinger > &audioFlinger = AudioSystem::get_audio_flinger ();
+ if (audioFlinger != 0) {
+ LOGV ("GstMediaRecorder start recording: unmute the microphone");
+ audioFlinger->setMicMute (FALSE);
+ }
+
+ if (mCamera != NULL) {
+ mCamera->setListener (mCameraListener);
+ mCamera->startRecording ();
+ }
+
+ if (ret == GST_STATE_CHANGE_ASYNC) {
+ ret = wait_for_set_state (2000); // wait 2 second for state change
+ }
+
+ if (ret != GST_STATE_CHANGE_SUCCESS) {
+ goto bail;
+ }
+
+ LOGV ("GstMediaRecorder pipeline is in playing state");
+ return OK;
bail:
- LOGV("GstMediaRecorder start failed");
+ LOGV ("GstMediaRecorder start failed");
- if (mCamera != NULL) {
- mCamera->stopRecording();
- }
+ if (mCamera != NULL) {
+ mCamera->stopRecording ();
+ }
- release_pipeline();
+ release_pipeline ();
- return OK; // return OK to avoid execption in java
+ return OK; // return OK to avoid execption in java
}
-status_t GstMediaRecorder::stop()
+status_t
+GstMediaRecorder::stop ()
{
- LOGV("GstMediaRecorder stop recording");
-
- if(mPipeline == NULL) {
- LOGV("GstMediaRecorder stop pipeline not created");
- return OK;
- }
-
- if (mCamera != NULL) {
- mCamera->stopRecording();
- mCamera->setListener(NULL);
- }
-
- /* Send audio & video Eos */
- sendEos();
-
- if (mIsEos)
- g_mutex_lock (mEOSlock);
-
- // EOS has been receive now release the pipeline
- return release_pipeline();
+ LOGV ("GstMediaRecorder stop recording");
+
+ if (mPipeline == NULL) {
+ LOGV ("GstMediaRecorder stop pipeline not created");
+ return OK;
+ }
+
+ if (mCamera != NULL) {
+ mCamera->stopRecording ();
+ mCamera->setListener (NULL);
+ }
+
+ /* Send audio & video Eos */
+ sendEos ();
+
+ if (mIsEos)
+ g_mutex_lock (mEOSlock);
+
+ // EOS has been receive now release the pipeline
+ return release_pipeline ();
}
-status_t GstMediaRecorder::release_pipeline()
+status_t
+GstMediaRecorder::release_pipeline ()
{
- if(mPipeline == NULL) {
- return OK;
- }
+ if (mPipeline == NULL) {
+ return OK;
+ }
- LOGV("GstMediaRecorder change pipeline state to NULL");
- gst_element_set_state (mPipeline, GST_STATE_NULL);
- gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
- LOGV("GstMediaRecorder unref pipeline");
- gst_object_unref(mPipeline);
- mPipeline = NULL;
- mVideoBin = NULL;
- mAudioBin = NULL;
- mVideoSrc = NULL;
+ LOGV ("GstMediaRecorder change pipeline state to NULL");
+ gst_element_set_state (mPipeline, GST_STATE_NULL);
+ gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+ LOGV ("GstMediaRecorder unref pipeline");
+ gst_object_unref (mPipeline);
+ mPipeline = NULL;
+ mVideoBin = NULL;
+ mAudioBin = NULL;
+ mVideoSrc = NULL;
- if (mOutFilePath_fd > -1) {
- ::close(mOutFilePath_fd);
- mOutFilePath_fd = -1;
- }
+ if (mOutFilePath_fd > -1) {
+ ::close (mOutFilePath_fd);
+ mOutFilePath_fd = -1;
+ }
- LOGV("GstMediaRecorder stop exit");
+ LOGV ("GstMediaRecorder stop exit");
- return OK;
+ return OK;
}
-status_t GstMediaRecorder::close()
+status_t
+GstMediaRecorder::close ()
{
- LOGV("GstMediaRecorder close");
-
- return OK;
+ LOGV ("GstMediaRecorder close");
+
+ return OK;
}
-status_t GstMediaRecorder::reset()
+status_t
+GstMediaRecorder::reset ()
{
- LOGV("GstMediaRecorder reset");
- release_pipeline();
+ LOGV ("GstMediaRecorder reset");
+ release_pipeline ();
- return OK;
+ return OK;
}
-status_t GstMediaRecorder::getMaxAmplitude(int *max)
+status_t
+GstMediaRecorder::getMaxAmplitude (int *max)
{
- int ioParam;
-
- LOGV("GstMediaRecorder getMaxAmplitude");
-
- ioParam = 5; // device C0
- if(snd_hwdep_ioctl(mHwdep_handle,ASND_HWDEP_IOCTL_GET_MAX_AMP, (void*)&ioParam)<0) {
- LOGE("error : get max amplitude returned %d\n", errno);
- *max = 0;
- return OK;
- }
- *max = ioParam;
-
- return OK;
+ int ioParam;
+
+ LOGV ("GstMediaRecorder getMaxAmplitude");
+
+ ioParam = 5; // device C0
+ if (snd_hwdep_ioctl (mHwdep_handle, ASND_HWDEP_IOCTL_GET_MAX_AMP,
+ (void *) &ioParam) < 0) {
+ LOGE ("error : get max amplitude returned %d\n", errno);
+ *max = 0;
+ return OK;
+ }
+ *max = ioParam;
+
+ return OK;
}
// create a video bin appsrc->icbvideoenc->capsfilter
-GstElement* GstMediaRecorder::create_video_bin()
+GstElement *
+GstMediaRecorder::create_video_bin ()
{
- GstElement *vbin;
- GstElement *video_src;
- GstElement *video_encoder, *video_format_filter;
- GstElement *video_queue;
- GstPad *pad;
-
- video_queue = NULL;
-
- if(mUse_video_src == FALSE) {
- // nothing the create in this case
- return NULL;
- }
-
- LOGV("GstMediaRecorder create_video_bin");
-
- LOGV("GstMediaRecorder create video appsrc");
- video_src = gst_element_factory_make("appsrc", "videosrc");
- if(!video_src) {
- LOGV("GstMediaRecorder can't create video src");
- return NULL;
- }
-
- g_object_set(G_OBJECT(video_src),"is-live", TRUE, NULL); // it is a pseudo live source
- g_object_set(G_OBJECT(video_src),"max-bytes", (guint64)mWidth*mHeight*3, NULL); // max byte limit equal to 2 frames
- g_object_set(G_OBJECT(video_src),"format", 2, NULL); // byte format
- g_object_set(G_OBJECT(video_src),"block", true, NULL); // Block push-buffer when max-bytes are queued
-
- g_object_set(G_OBJECT(video_src) ,"caps",
- gst_caps_new_simple ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('N','V','1','2'),
- "width", G_TYPE_INT, mWidth,
- "height", G_TYPE_INT, mHeight,
- "framerate", GST_TYPE_FRACTION, mFps, 1,
- NULL),
- NULL);
-
-
- video_encoder = gst_element_factory_make("icbvideoenc",NULL);
-
- if(!video_encoder) {
- LOGE("GstMediaRecorder can't create video encoder");
- goto remove_video_src;
- }
-
- video_format_filter = gst_element_factory_make("capsfilter",NULL);
-
- if(!video_format_filter) {
- LOGE("GstMediaRecorder can't create video format filter");
- goto remove_video_encoder;
- }
-
- switch(mVideo_encoder)
- {
- case VIDEO_ENCODER_DEFAULT:
- case VIDEO_ENCODER_MPEG_4_SP:
- LOGV("GstMediaRecorder set video caps: video/mpeg, width=%d, height=%d, framerate=%d/1", mWidth, mHeight, mFps);
- g_object_set(G_OBJECT(video_format_filter) , "caps",
- gst_caps_new_simple ("video/mpeg",
- "width", G_TYPE_INT, mWidth,
- "height", G_TYPE_INT, mHeight,
-//VT "framerate", GST_TYPE_FRACTION, mFps, 1,
- "mpegversion", G_TYPE_INT, 4,
- NULL),
- NULL);
- break;
- case VIDEO_ENCODER_H264:
- LOGV("GstMediaRecorder can't encode in h264");
- goto remove_video_format_filter;
- case VIDEO_ENCODER_H263:
- default:
- LOGV("GstMediaRecorder set video caps: video/x-h263, width=%d, height=%d, framerate=%d/1", mWidth, mHeight, mFps);
- g_object_set( G_OBJECT(video_format_filter) , "caps",
- gst_caps_new_simple ( "video/x-h263",
- "width", G_TYPE_INT, mWidth,
- "height", G_TYPE_INT, mHeight,
-//VT "framerate", GST_TYPE_FRACTION, mFps, 1,
- NULL ),
- NULL );
- break;
- }
-
-
- /* VT support */
- {
- GValue framerate = { 0 };
- int framerate_num = mFps;
- int framerate_denom = 1;
- int bitrate = mVideoBitrate;
- int i_period = mIPeriod;
- int i_mb_refresh = mIMBRefreshMode;
- int vt_mode = mVTMode;
-
- if (vt_mode) {
- g_object_set(G_OBJECT(video_encoder), "vt-mode", vt_mode, NULL);
- }
- if (bitrate) {
- g_object_set(G_OBJECT(video_encoder), "bitrate", bitrate, NULL);
- }
- if (i_period) {
- /* in seconds, convert to nb of frames */
- i_period = i_period*framerate_num/framerate_denom;
- g_object_set(G_OBJECT(video_encoder), "i-period", i_period, NULL);
- }
- if (i_mb_refresh) {
- g_object_set(G_OBJECT(video_encoder), "i-mb-refresh", i_mb_refresh, NULL);
- }
-
- /* ! take care of framerate because of fraction type,
- use g_object_set_property with a gvalue instead g_object_set */
- g_value_init (&framerate, GST_TYPE_FRACTION);
- gst_value_set_fraction (&framerate, framerate_num, framerate_denom);
- g_object_set_property(G_OBJECT(video_encoder), "framerate", &framerate);
- g_value_unset(&framerate);
- }
-
- video_queue = gst_element_factory_make("queue", NULL);
- g_object_set(G_OBJECT(video_queue), "max-size-time", 2000000000, NULL);
-
-
- LOGV("GstMediaRecorder create vbin");
- vbin = gst_bin_new("vbin");
- if(!vbin) {
- LOGE("GstMediaRecorder can't create vbin");
- goto remove_video_format_filter;
- }
-
- gst_bin_add_many (GST_BIN_CAST(vbin), video_src, video_encoder, video_format_filter, video_queue, NULL);
-
- LOGV("GstMediaRecorder link video_src->->queue->video_encoder->video_format_filter->queue");
- if(!gst_element_link_many(video_src, video_encoder, video_format_filter, video_queue, NULL)) {
- LOGE("GstMediaRecorder can't link elements");
- goto remove_vbin;
- }
-
- LOGV("GstMediaRecorder create src ghost pad in vbin");
- pad = gst_element_get_static_pad (video_queue, "src");
- gst_element_add_pad (vbin, gst_ghost_pad_new ("src", pad));
- gst_object_unref (pad);
-
- mVideoSrc = video_src;
-
- return vbin;
+ GstElement *vbin;
+ GstElement *video_src;
+ GstElement *video_encoder, *video_format_filter;
+ GstElement *video_queue;
+ GstPad *pad;
+
+ video_queue = NULL;
+
+ if (mUse_video_src == FALSE) {
+ // nothing the create in this case
+ return NULL;
+ }
+
+ LOGV ("GstMediaRecorder create_video_bin");
+
+ LOGV ("GstMediaRecorder create video appsrc");
+ video_src = gst_element_factory_make ("appsrc", "videosrc");
+ if (!video_src) {
+ LOGV ("GstMediaRecorder can't create video src");
+ return NULL;
+ }
+
+ g_object_set (G_OBJECT (video_src), "is-live", TRUE, NULL); // it is a pseudo live source
+ g_object_set (G_OBJECT (video_src), "max-bytes", (guint64) mWidth * mHeight * 3, NULL); // max byte limit equal to 2 frames
+ g_object_set (G_OBJECT (video_src), "format", 2, NULL); // byte format
+ g_object_set (G_OBJECT (video_src), "block", true, NULL); // Block push-buffer when max-bytes are queued
+
+ g_object_set (G_OBJECT (video_src), "caps",
+ gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'),
+ "width", G_TYPE_INT, mWidth,
+ "height", G_TYPE_INT, mHeight,
+ "framerate", GST_TYPE_FRACTION, mFps, 1, NULL), NULL);
+
+
+ video_encoder = gst_element_factory_make ("icbvideoenc", NULL);
+
+ if (!video_encoder) {
+ LOGE ("GstMediaRecorder can't create video encoder");
+ goto remove_video_src;
+ }
+
+ video_format_filter = gst_element_factory_make ("capsfilter", NULL);
+
+ if (!video_format_filter) {
+ LOGE ("GstMediaRecorder can't create video format filter");
+ goto remove_video_encoder;
+ }
+
+ switch (mVideo_encoder) {
+ case VIDEO_ENCODER_DEFAULT:
+ case VIDEO_ENCODER_MPEG_4_SP:
+ LOGV ("GstMediaRecorder set video caps: video/mpeg, width=%d, height=%d, framerate=%d/1", mWidth, mHeight, mFps);
+ g_object_set (G_OBJECT (video_format_filter), "caps",
+ gst_caps_new_simple ("video/mpeg",
+ "width", G_TYPE_INT, mWidth, "height", G_TYPE_INT, mHeight,
+//VT "framerate", GST_TYPE_FRACTION, mFps, 1,
+ "mpegversion", G_TYPE_INT, 4, NULL), NULL);
+ break;
+ case VIDEO_ENCODER_H264:
+ LOGV ("GstMediaRecorder can't encode in h264");
+ goto remove_video_format_filter;
+ case VIDEO_ENCODER_H263:
+ default:
+ LOGV ("GstMediaRecorder set video caps: video/x-h263, width=%d, height=%d, framerate=%d/1", mWidth, mHeight, mFps);
+ g_object_set (G_OBJECT (video_format_filter), "caps",
+ gst_caps_new_simple ("video/x-h263",
+ "width", G_TYPE_INT, mWidth, "height", G_TYPE_INT, mHeight,
+//VT "framerate", GST_TYPE_FRACTION, mFps, 1,
+ NULL), NULL);
+ break;
+ }
+
+
+ /* VT support */
+ {
+ GValue framerate = { 0 };
+ int framerate_num = mFps;
+ int framerate_denom = 1;
+ int bitrate = mVideoBitrate;
+ int i_period = mIPeriod;
+ int i_mb_refresh = mIMBRefreshMode;
+ int vt_mode = mVTMode;
+
+ if (vt_mode) {
+ g_object_set (G_OBJECT (video_encoder), "vt-mode", vt_mode, NULL);
+ }
+ if (bitrate) {
+ g_object_set (G_OBJECT (video_encoder), "bitrate", bitrate, NULL);
+ }
+ if (i_period) {
+ /* in seconds, convert to nb of frames */
+ i_period = i_period * framerate_num / framerate_denom;
+ g_object_set (G_OBJECT (video_encoder), "i-period", i_period, NULL);
+ }
+ if (i_mb_refresh) {
+ g_object_set (G_OBJECT (video_encoder), "i-mb-refresh", i_mb_refresh,
+ NULL);
+ }
+
+ /* ! take care of framerate because of fraction type,
+ use g_object_set_property with a gvalue instead g_object_set */
+ g_value_init (&framerate, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&framerate, framerate_num, framerate_denom);
+ g_object_set_property (G_OBJECT (video_encoder), "framerate", &framerate);
+ g_value_unset (&framerate);
+ }
+
+ video_queue = gst_element_factory_make ("queue", NULL);
+ g_object_set (G_OBJECT (video_queue), "max-size-time", 2000000000, NULL);
+
+
+ LOGV ("GstMediaRecorder create vbin");
+ vbin = gst_bin_new ("vbin");
+ if (!vbin) {
+ LOGE ("GstMediaRecorder can't create vbin");
+ goto remove_video_format_filter;
+ }
+
+ gst_bin_add_many (GST_BIN_CAST (vbin), video_src, video_encoder,
+ video_format_filter, video_queue, NULL);
+
+ LOGV ("GstMediaRecorder link video_src->->queue->video_encoder->video_format_filter->queue");
+ if (!gst_element_link_many (video_src, video_encoder, video_format_filter,
+ video_queue, NULL)) {
+ LOGE ("GstMediaRecorder can't link elements");
+ goto remove_vbin;
+ }
+
+ LOGV ("GstMediaRecorder create src ghost pad in vbin");
+ pad = gst_element_get_static_pad (video_queue, "src");
+ gst_element_add_pad (vbin, gst_ghost_pad_new ("src", pad));
+ gst_object_unref (pad);
+
+ mVideoSrc = video_src;
+
+ return vbin;
remove_vbin:
- gst_object_unref(vbin);
+ gst_object_unref (vbin);
remove_video_format_filter:
- gst_object_unref(video_format_filter);
- gst_object_unref(video_queue);
+ gst_object_unref (video_format_filter);
+ gst_object_unref (video_queue);
remove_video_encoder:
- gst_object_unref(video_encoder);
+ gst_object_unref (video_encoder);
remove_video_src:
- gst_object_unref(video_src);
- return NULL;
+ gst_object_unref (video_src);
+ return NULL;
}
// create a audio bin icbaudiosrc->icbaudioenc->capsfilter
-GstElement* GstMediaRecorder::create_audio_bin()
+GstElement *
+GstMediaRecorder::create_audio_bin ()
{
- GstElement *abin;
- GstElement *audio_src, *audio_enc, *audio_format_filter;
- GstElement *audio_queue;
- GstPad *pad;
- gint recordsrc;
-
- if(mUse_audio_src == FALSE) {
- return NULL;
- }
- LOGV("GstMediaRecorder create_audio_bin");
-
- LOGV("GstMediaRecorder create audio src");
- audio_src = gst_element_factory_make("icbaudiosrc","icbaudiosrc0"); // do not change the element name
-
- if(!audio_src) {
- LOGE("GstMediaRecorder can't create audio source");
- return NULL;
- }
-
- // set the audio source device
- LOGV("GstMediaRecorder set device to audio src");
- g_object_set(G_OBJECT(audio_src), "device", "C0", NULL);
-
- // set the record source
- LOGV("GstMediaRecorder set record src");
- recordsrc = mAudio_source;
- if (recordsrc > 0 ) recordsrc--;
- g_object_set(G_OBJECT(audio_src), "recordsrc", recordsrc, NULL);
-
- LOGV("GstMediaRecorder create audio encoder");
- audio_enc = gst_element_factory_make("icbaudioenc", "icbaudioenc0"); // do not change the element name
-
- if(!audio_enc) {
- LOGE("GstMediaRecorder can't create audio encoder");
- goto remove_audio_src;
- }
-
-// g_object_set(G_OBJECT(audio_enc),"bitrate", mAudioBitrate, NULL);
-// g_object_set(G_OBJECT(audio_enc),"channel", mAudioChannels, NULL);
-// g_object_set(G_OBJECT(audio_enc),"freq", mAudioSampleRate, NULL);
-
- // configure audio encoder
- LOGV("GstMediaRecorder set properties to audio encoder");
- switch(mAudio_encoder)
- {
- case AUDIO_ENCODER_AMR_WB:
- // configure audio encoder for AMR-WB
- LOGV("GstMediaRecorder set properties to audio encoder for AMR_WB");
- if((mOutput_format == OUTPUT_FORMAT_RAW_AMR) && (mUse_video_src == FALSE)) {
- // in AMR RAW format we will not have muxer after audio encoder so use the amr storage format
- g_object_set(G_OBJECT(audio_enc), "stream-type", 2, "format", 2, "bitrate", (gint64)(23850), "freq", (gint)16000, "channel", 1, (gchar*)NULL);
- }else {
- g_object_set(G_OBJECT(audio_enc), "stream-type", 2, "format", 3, "bitrate", (gint64)(23850), "freq", (gint)16000, "channel", 1, (gchar*)NULL);
- }
- audio_format_filter = gst_element_factory_make("capsfilter",NULL);
- g_object_set(G_OBJECT(audio_format_filter) , "caps",
- gst_caps_new_simple ("audio/AMR-WB",
- "rate", G_TYPE_INT, 16000,
- "channels", G_TYPE_INT, 1,
- NULL),
- NULL);
- break;
- case AUDIO_ENCODER_AAC:
- case AUDIO_ENCODER_AAC_PLUS:
- case AUDIO_ENCODER_EAAC_PLUS:
- // configure audio encoder for AAC
- LOGV("GstMediaRecorder set properties to audio encoder for AAC");
- g_object_set(G_OBJECT(audio_enc), "stream-type", 3, "format", 1, "bitrate", (gint64)(16000), "freq", (gint)32000, "channel", 2, (gchar*)NULL);
- audio_format_filter = gst_element_factory_make("capsfilter",NULL);
- g_object_set(G_OBJECT(audio_format_filter) , "caps",
- gst_caps_new_simple ("audio/mpeg",
- "mpegversion", G_TYPE_INT, 4,
- "rate", G_TYPE_INT, 32000,
- "channels", G_TYPE_INT, 2,
- NULL),
- NULL);
- break;
- case AUDIO_ENCODER_DEFAULT:
- case AUDIO_ENCODER_AMR_NB:
- default:
- // configure audio encoder for AMR-NB
- LOGV("GstMediaRecorder set properties to audio encoder for AMR_NB");
- if((mOutput_format == OUTPUT_FORMAT_RAW_AMR) && (mUse_video_src == FALSE)) {
- // in AMR RAW format we will not have muxer after audio encoder so use the amr storage format
- g_object_set(G_OBJECT(audio_enc), "stream-type", 1, "format", 2, "bitrate", (gint64)(12200), "freq", (gint)8000, "channel", 1, (gchar*)NULL);
- }else {
- g_object_set(G_OBJECT(audio_enc), "stream-type", 1, "format", 3, "bitrate", (gint64)(12200), "freq", (gint)8000, "channel", 1, (gchar*)NULL);
- }
- audio_format_filter = gst_element_factory_make("capsfilter",NULL);
- g_object_set(G_OBJECT(audio_format_filter) , "caps",
- gst_caps_new_simple ("audio/AMR",
- "rate", G_TYPE_INT, 8000,
- "channels", G_TYPE_INT, 1,
- NULL),
- NULL);
- break;
- }
-
- audio_queue = gst_element_factory_make("queue", "audio_queue");
- g_object_set(G_OBJECT(audio_queue), "max-size-time", 2000000000, NULL);
-
- LOGV("GstMediaRecorder create audio bin");
- abin = gst_bin_new("abin");
-
- if(!abin) {
- LOGE("GstMediaRecorder can't create abin");
- goto remove_audio_enc;
- }
-
- LOGV("GstMediaRecorder add element to audio bin");
- gst_bin_add_many (GST_BIN_CAST(abin), audio_src, audio_enc, audio_format_filter, audio_queue, NULL);
-
- LOGV("GstMediaRecorder link audio_src->audio_enc");
- if(!gst_element_link_many(audio_src, audio_enc, audio_format_filter, audio_queue, NULL)) {
- LOGE("GstMediaRecorder can't link audio_src->audio_enc");
- goto remove_abin;
- }
-
- LOGV("GstMediaRecorder create src ghost pad in abin");
- pad = gst_element_get_static_pad (audio_queue, "src");
- gst_element_add_pad (abin, gst_ghost_pad_new ("src", pad));
- gst_object_unref (pad);
-
- mAudioSrc = audio_src;
- return abin;
+ GstElement *abin;
+ GstElement *audio_src, *audio_enc, *audio_format_filter;
+ GstElement *audio_queue;
+ GstPad *pad;
+ gint recordsrc;
+
+ if (mUse_audio_src == FALSE) {
+ return NULL;
+ }
+ LOGV ("GstMediaRecorder create_audio_bin");
+
+ LOGV ("GstMediaRecorder create audio src");
+ audio_src = gst_element_factory_make ("icbaudiosrc", "icbaudiosrc0"); // do not change the element name
+
+ if (!audio_src) {
+ LOGE ("GstMediaRecorder can't create audio source");
+ return NULL;
+ }
+ // set the audio source device
+ LOGV ("GstMediaRecorder set device to audio src");
+ g_object_set (G_OBJECT (audio_src), "device", "C0", NULL);
+
+ // set the record source
+ LOGV ("GstMediaRecorder set record src");
+ recordsrc = mAudio_source;
+ if (recordsrc > 0)
+ recordsrc--;
+ g_object_set (G_OBJECT (audio_src), "recordsrc", recordsrc, NULL);
+
+ LOGV ("GstMediaRecorder create audio encoder");
+ audio_enc = gst_element_factory_make ("icbaudioenc", "icbaudioenc0"); // do not change the element name
+
+ if (!audio_enc) {
+ LOGE ("GstMediaRecorder can't create audio encoder");
+ goto remove_audio_src;
+ }
+// g_object_set(G_OBJECT(audio_enc),"bitrate", mAudioBitrate, NULL);
+// g_object_set(G_OBJECT(audio_enc),"channel", mAudioChannels, NULL);
+// g_object_set(G_OBJECT(audio_enc),"freq", mAudioSampleRate, NULL);
+
+ // configure audio encoder
+ LOGV ("GstMediaRecorder set properties to audio encoder");
+ switch (mAudio_encoder) {
+ case AUDIO_ENCODER_AMR_WB:
+ // configure audio encoder for AMR-WB
+ LOGV ("GstMediaRecorder set properties to audio encoder for AMR_WB");
+ if ((mOutput_format == OUTPUT_FORMAT_RAW_AMR)
+ && (mUse_video_src == FALSE)) {
+ // in AMR RAW format we will not have muxer after audio encoder so use the amr storage format
+ g_object_set (G_OBJECT (audio_enc), "stream-type", 2, "format", 2,
+ "bitrate", (gint64) (23850), "freq", (gint) 16000, "channel", 1,
+ (gchar *) NULL);
+ } else {
+ g_object_set (G_OBJECT (audio_enc), "stream-type", 2, "format", 3,
+ "bitrate", (gint64) (23850), "freq", (gint) 16000, "channel", 1,
+ (gchar *) NULL);
+ }
+ audio_format_filter = gst_element_factory_make ("capsfilter", NULL);
+ g_object_set (G_OBJECT (audio_format_filter), "caps",
+ gst_caps_new_simple ("audio/AMR-WB",
+ "rate", G_TYPE_INT, 16000,
+ "channels", G_TYPE_INT, 1, NULL), NULL);
+ break;
+ case AUDIO_ENCODER_AAC:
+ case AUDIO_ENCODER_AAC_PLUS:
+ case AUDIO_ENCODER_EAAC_PLUS:
+ // configure audio encoder for AAC
+ LOGV ("GstMediaRecorder set properties to audio encoder for AAC");
+ g_object_set (G_OBJECT (audio_enc), "stream-type", 3, "format", 1,
+ "bitrate", (gint64) (16000), "freq", (gint) 32000, "channel", 2,
+ (gchar *) NULL);
+ audio_format_filter = gst_element_factory_make ("capsfilter", NULL);
+ g_object_set (G_OBJECT (audio_format_filter), "caps",
+ gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "rate", G_TYPE_INT, 32000,
+ "channels", G_TYPE_INT, 2, NULL), NULL);
+ break;
+ case AUDIO_ENCODER_DEFAULT:
+ case AUDIO_ENCODER_AMR_NB:
+ default:
+ // configure audio encoder for AMR-NB
+ LOGV ("GstMediaRecorder set properties to audio encoder for AMR_NB");
+ if ((mOutput_format == OUTPUT_FORMAT_RAW_AMR)
+ && (mUse_video_src == FALSE)) {
+ // in AMR RAW format we will not have muxer after audio encoder so use the amr storage format
+ g_object_set (G_OBJECT (audio_enc), "stream-type", 1, "format", 2,
+ "bitrate", (gint64) (12200), "freq", (gint) 8000, "channel", 1,
+ (gchar *) NULL);
+ } else {
+ g_object_set (G_OBJECT (audio_enc), "stream-type", 1, "format", 3,
+ "bitrate", (gint64) (12200), "freq", (gint) 8000, "channel", 1,
+ (gchar *) NULL);
+ }
+ audio_format_filter = gst_element_factory_make ("capsfilter", NULL);
+ g_object_set (G_OBJECT (audio_format_filter), "caps",
+ gst_caps_new_simple ("audio/AMR",
+ "rate", G_TYPE_INT, 8000, "channels", G_TYPE_INT, 1, NULL), NULL);
+ break;
+ }
+
+ audio_queue = gst_element_factory_make ("queue", "audio_queue");
+ g_object_set (G_OBJECT (audio_queue), "max-size-time", 2000000000, NULL);
+
+ LOGV ("GstMediaRecorder create audio bin");
+ abin = gst_bin_new ("abin");
+
+ if (!abin) {
+ LOGE ("GstMediaRecorder can't create abin");
+ goto remove_audio_enc;
+ }
+
+ LOGV ("GstMediaRecorder add element to audio bin");
+ gst_bin_add_many (GST_BIN_CAST (abin), audio_src, audio_enc,
+ audio_format_filter, audio_queue, NULL);
+
+ LOGV ("GstMediaRecorder link audio_src->audio_enc");
+ if (!gst_element_link_many (audio_src, audio_enc, audio_format_filter,
+ audio_queue, NULL)) {
+ LOGE ("GstMediaRecorder can't link audio_src->audio_enc");
+ goto remove_abin;
+ }
+
+ LOGV ("GstMediaRecorder create src ghost pad in abin");
+ pad = gst_element_get_static_pad (audio_queue, "src");
+ gst_element_add_pad (abin, gst_ghost_pad_new ("src", pad));
+ gst_object_unref (pad);
+
+ mAudioSrc = audio_src;
+ return abin;
remove_abin:
- gst_object_unref(abin);
+ gst_object_unref (abin);
remove_audio_enc:
- gst_object_unref(audio_format_filter);
- gst_object_unref(audio_queue);
- gst_object_unref(audio_enc);
+ gst_object_unref (audio_format_filter);
+ gst_object_unref (audio_queue);
+ gst_object_unref (audio_enc);
remove_audio_src:
- gst_object_unref(audio_src);
- return NULL;
+ gst_object_unref (audio_src);
+ return NULL;
}
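The function above follows GStreamer's standard bin-with-ghost-pad pattern: create the elements, add and link them inside a bin, then expose the last element's src pad as the bin's own "src" pad. For reference only (this is not part of the commit), a minimal standalone sketch of that pattern is given below; it substitutes stock elements (audiotestsrc, queue) for the platform-specific icbaudiosrc/icbaudioenc and assumes the GStreamer 0.10-era API used throughout this tree.

/* sketch only: bin + ghost-pad pattern, requires <gst/gst.h> */
static GstElement *
make_simple_audio_bin (void)
{
  GstElement *bin, *src, *queue;
  GstPad *pad;

  bin = gst_bin_new ("abin");
  src = gst_element_factory_make ("audiotestsrc", NULL);
  queue = gst_element_factory_make ("queue", NULL);
  if (!bin || !src || !queue) {
    if (src)
      gst_object_unref (src);
    if (queue)
      gst_object_unref (queue);
    if (bin)
      gst_object_unref (bin);
    return NULL;
  }

  gst_bin_add_many (GST_BIN_CAST (bin), src, queue, NULL);
  if (!gst_element_link (src, queue)) {
    /* the bin owns src and queue now, dropping it drops them too */
    gst_object_unref (bin);
    return NULL;
  }

  /* expose the queue's src pad as the bin's own "src" pad */
  pad = gst_element_get_static_pad (queue, "src");
  gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad));
  gst_object_unref (pad);

  return bin;
}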
-/*static*/ GstBusSyncReply GstMediaRecorder::bus_message(GstBus *bus, GstMessage * msg, gpointer data)
+/*static*/ GstBusSyncReply
+GstMediaRecorder::bus_message (GstBus * bus, GstMessage * msg, gpointer data)
{
- GstMediaRecorder *mediarecorder = (GstMediaRecorder*)data;
- if(bus) {
- // do nothing except remove compilation warning
- }
-
-
- switch(GST_MESSAGE_TYPE(msg)) {
- case GST_MESSAGE_EOS: {
- LOGV("GstMediaRecorder bus receive message EOS");
- /* unlock mutex */
- g_mutex_unlock (mediarecorder->mEOSlock);
- break;
- }
- case GST_MESSAGE_ERROR: {
- GError* err;
- gchar* debug;
-
- gst_message_parse_error(msg, &err, &debug);
- LOGE("GstMediaRecorder bus receive message ERROR %d: %s from %s", err->code, err->message, debug);
-
- if (mediarecorder->mListener != NULL) {
- mediarecorder->mListener->notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN,err->code);
- }
- g_error_free(err);
- g_free(debug);
- break;
- }
- default:
- // do nothing
- break;
- }
-
- return GST_BUS_PASS;
+ GstMediaRecorder *mediarecorder = (GstMediaRecorder *) data;
+ if (bus) {
+ // do nothing except remove compilation warning
+ }
+
+
+ switch (GST_MESSAGE_TYPE (msg)) {
+ case GST_MESSAGE_EOS:{
+ LOGV ("GstMediaRecorder bus receive message EOS");
+ /* unlock mutex */
+ g_mutex_unlock (mediarecorder->mEOSlock);
+ break;
+ }
+ case GST_MESSAGE_ERROR:{
+ GError *err;
+ gchar *debug;
+
+ gst_message_parse_error (msg, &err, &debug);
+ LOGE ("GstMediaRecorder bus receive message ERROR %d: %s from %s",
+ err->code, err->message, debug);
+
+ if (mediarecorder->mListener != NULL) {
+ mediarecorder->mListener->notify (MEDIA_RECORDER_EVENT_ERROR,
+ MEDIA_RECORDER_ERROR_UNKNOWN, err->code);
+ }
+ g_error_free (err);
+ g_free (debug);
+ break;
+ }
+ default:
+ // do nothing
+ break;
+ }
+
+ return GST_BUS_PASS;
}
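bus_message() above runs as a synchronous bus handler, i.e. in the streaming thread that posts each message; the EOS branch simply unblocks the thread waiting on mEOSlock. The reduced sketch below shows the same pattern on its own. It is not part of the commit, MyState/eos_lock/my_bus_sync_handler are hypothetical names, and the registration call uses the GStreamer 0.10 signature of gst_bus_set_sync_handler (the 1.0 API adds a GDestroyNotify argument).

/* sketch only: synchronous bus handler, requires <gst/gst.h> */
typedef struct
{
  GMutex *eos_lock;             /* held by the thread waiting for EOS */
} MyState;

static GstBusSyncReply
my_bus_sync_handler (GstBus * bus, GstMessage * msg, gpointer data)
{
  MyState *state = (MyState *) data;

  (void) bus;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      /* wake up the thread blocked on eos_lock */
      g_mutex_unlock (state->eos_lock);
      break;
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *dbg = NULL;

      gst_message_parse_error (msg, &err, &dbg);
      g_printerr ("pipeline error %d: %s (%s)\n", err->code, err->message, dbg);
      g_error_free (err);
      g_free (dbg);
      break;
    }
    default:
      break;
  }
  /* GST_BUS_PASS keeps the message available to any async bus watch too */
  return GST_BUS_PASS;
}

/* registration, as done in build_record_graph() below:
 *   GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
 *   gst_bus_set_sync_handler (bus, my_bus_sync_handler, &state);
 *   gst_object_unref (bus);
 */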
-void GstMediaRecorder::sendEos()
-{
- if(!mIsEos) {
- LOGV("GstMediaRecorder : forcing EOS");
-
- // only sen EOS once
- mIsEos = TRUE;
-
- /* stop audio recording */
- if (mAudioSrc != NULL) {
- /* send EOS */
- g_object_set(G_OBJECT(mAudioSrc), "eos", TRUE, NULL);
-
- /* reset mAudioSrc (will avoid to send another eos upon stop request */
- mAudioSrc = NULL;
- }
-
- /* stop video recording */
- if (mVideoSrc != NULL) {
- /* send EOS */
- gst_app_src_end_of_stream(GST_APP_SRC(mVideoSrc));
-
- /* reset mVideoSrc (will avoid to send another eos upon stop request */
- mVideoSrc = NULL;
- }
- }
+void
+GstMediaRecorder::sendEos ()
+{
+ if (!mIsEos) {
+ LOGV ("GstMediaRecorder : forcing EOS");
+
+ // only send EOS once
+ mIsEos = TRUE;
+
+ /* stop audio recording */
+ if (mAudioSrc != NULL) {
+ /* send EOS */
+ g_object_set (G_OBJECT (mAudioSrc), "eos", TRUE, NULL);
+
+ /* reset mAudioSrc (avoids sending another EOS upon a stop request) */
+ mAudioSrc = NULL;
+ }
+
+ /* stop video recording */
+ if (mVideoSrc != NULL) {
+ /* send EOS */
+ gst_app_src_end_of_stream (GST_APP_SRC (mVideoSrc));
+
+ /* reset mVideoSrc (avoids sending another EOS upon a stop request) */
+ mVideoSrc = NULL;
+ }
+ }
}
-/*static*/ void GstMediaRecorder::handoff(GstElement* object, GstBuffer* buffer, gpointer user_data)
+/*static*/ void
+GstMediaRecorder::handoff (GstElement * object, GstBuffer * buffer,
+ gpointer user_data)
{
- GstMediaRecorder *mediarecorder = (GstMediaRecorder*)user_data;
- gulong microsecond;
- int sizeMargin=0;
- mediarecorder->mCurrentFileSize += GST_BUFFER_SIZE(buffer);
-
- if(mediarecorder->mTimer == NULL) {
- mediarecorder->mTimer = g_timer_new();
- }
-
- //LOGE("GstMediaRecorder handoff current file size %lld duration %lld", mediarecorder->mCurrentFileSize, (gint64)g_timer_elapsed(mediarecorder->mTimer, &microsecond)*1000);
-
- if((mediarecorder->mMaxDuration != -1) && (mediarecorder->mMaxDuration <= (gint64)(g_timer_elapsed(mediarecorder->mTimer, &microsecond)*1000) )) {
- LOGV("GstMediaRecorder reached recording time limit");
- if(mediarecorder->mListener != NULL) {
- mediarecorder->mListener->notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
- }
- /* Send audio & video Eos */
- mediarecorder->sendEos();
-
- g_object_set(object, "signal-handoffs", FALSE, NULL);
- return;
- }
-
- /* consider a margin before stopping (because we will still get data to flush the pipeline */
- if (mediarecorder->mAudioSrc != NULL)
- sizeMargin+=3000; /* 3kB for Audio recording */
-
- if (mediarecorder->mVideoSrc != NULL)
- sizeMargin+=50000; /* 50kB for video recording */
-
- if((mediarecorder->mMaxFileSize != -1) && (mediarecorder->mMaxFileSize <= mediarecorder->mCurrentFileSize + sizeMargin)) {
- LOGV("GstMediaRecorder reached recording size limit");
- if(mediarecorder->mListener != NULL) {
- mediarecorder->mListener->notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
- }
- /* Send audio & video Eos */
- mediarecorder->sendEos();
-
- g_object_set(object, "signal-handoffs", FALSE, NULL);
- return;
- }
+ GstMediaRecorder *mediarecorder = (GstMediaRecorder *) user_data;
+ gulong microsecond;
+ int sizeMargin = 0;
+ mediarecorder->mCurrentFileSize += GST_BUFFER_SIZE (buffer);
+
+ if (mediarecorder->mTimer == NULL) {
+ mediarecorder->mTimer = g_timer_new ();
+ }
+ //LOGE("GstMediaRecorder handoff current file size %lld duration %lld", mediarecorder->mCurrentFileSize, (gint64)g_timer_elapsed(mediarecorder->mTimer, &microsecond)*1000);
+
+ if ((mediarecorder->mMaxDuration != -1)
+ && (mediarecorder->mMaxDuration <=
+ (gint64) (g_timer_elapsed (mediarecorder->mTimer,
+ &microsecond) * 1000))) {
+ LOGV ("GstMediaRecorder reached recording time limit");
+ if (mediarecorder->mListener != NULL) {
+ mediarecorder->mListener->notify (MEDIA_RECORDER_EVENT_INFO,
+ MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
+ }
+ /* Send audio & video Eos */
+ mediarecorder->sendEos ();
+
+ g_object_set (object, "signal-handoffs", FALSE, NULL);
+ return;
+ }
+
+ /* consider a margin before stopping (because we will still get data while the pipeline flushes) */
+ if (mediarecorder->mAudioSrc != NULL)
+ sizeMargin += 3000; /* 3kB for Audio recording */
+
+ if (mediarecorder->mVideoSrc != NULL)
+ sizeMargin += 50000; /* 50kB for video recording */
+
+ if ((mediarecorder->mMaxFileSize != -1)
+ && (mediarecorder->mMaxFileSize <=
+ mediarecorder->mCurrentFileSize + sizeMargin)) {
+ LOGV ("GstMediaRecorder reached recording size limit");
+ if (mediarecorder->mListener != NULL) {
+ mediarecorder->mListener->notify (MEDIA_RECORDER_EVENT_INFO,
+ MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
+ }
+ /* Send audio & video Eos */
+ mediarecorder->sendEos ();
+
+ g_object_set (object, "signal-handoffs", FALSE, NULL);
+ return;
+ }
}
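handoff() above enforces the two recording limits: wall-clock time from a GTimer against mMaxDuration and bytes written (plus a flush margin) against mMaxFileSize, with -1 meaning unlimited. The sketch below isolates that check; it is not part of the commit, and the millisecond unit for the duration limit is an assumption inferred from the *1000 scaling above.

/* sketch only: limit check as used by handoff(), requires <glib.h> */
static gboolean
limits_reached (GTimer * timer, gint64 max_duration_ms, gint64 max_size,
    gint64 current_size, gint64 size_margin)
{
  gulong usec;
  gint64 elapsed_ms = (gint64) (g_timer_elapsed (timer, &usec) * 1000);

  if (max_duration_ms != -1 && elapsed_ms >= max_duration_ms)
    return TRUE;                /* recording time limit reached */

  if (max_size != -1 && current_size + size_margin >= max_size)
    return TRUE;                /* recording size limit reached */

  return FALSE;
}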
-/*static*/ void GstMediaRecorder::debug_log (GstDebugCategory * category, GstDebugLevel level,
- const gchar * file, const gchar * function, gint line,
- GObject * object, GstDebugMessage * message, gpointer data)
+/*static*/ void
+GstMediaRecorder::debug_log (GstDebugCategory * category, GstDebugLevel level,
+ const gchar * file, const gchar * function, gint line,
+ GObject * object, GstDebugMessage * message, gpointer data)
{
- gint pid;
- GstClockTime elapsed;
- GstMediaRecorder* mediarecorder = (GstMediaRecorder*)data;
+ gint pid;
+ GstClockTime elapsed;
+ GstMediaRecorder *mediarecorder = (GstMediaRecorder *) data;
- GST_UNUSED(file);
- GST_UNUSED(object);
+ GST_UNUSED (file);
+ GST_UNUSED (object);
- if (level > gst_debug_category_get_threshold (category))
- return;
+ if (level > gst_debug_category_get_threshold (category))
+ return;
- pid = getpid ();
+ pid = getpid ();
- elapsed = GST_CLOCK_DIFF (mediarecorder->mGst_info_start_time,
- gst_util_get_timestamp ());
+ elapsed = GST_CLOCK_DIFF (mediarecorder->mGst_info_start_time,
+ gst_util_get_timestamp ());
- g_printerr ("%" GST_TIME_FORMAT " %5d %s %s %s:%d %s\r\n",
- GST_TIME_ARGS (elapsed),
- pid,
- gst_debug_level_get_name (level),
- gst_debug_category_get_name (category), function, line,
- gst_debug_message_get (message));
+ g_printerr ("%" GST_TIME_FORMAT " %5d %s %s %s:%d %s\r\n",
+ GST_TIME_ARGS (elapsed),
+ pid,
+ gst_debug_level_get_name (level),
+ gst_debug_category_get_name (category), function, line,
+ gst_debug_message_get (message));
}
-status_t GstMediaRecorder::build_record_graph ()
+status_t
+GstMediaRecorder::build_record_graph ()
{
- GstElement *muxer, *identity, *sink;
- GstBus *bus;
- GError *err = NULL;
- int argc=3;
- char **argv;
- char str0[] = "";
- //char str1[] = "";
- char str2[] = "";
- char trace[PROPERTY_VALUE_MAX];
-
- argv = (char **)malloc(sizeof(char *) * argc);
- argv[0] = (char *) malloc( sizeof(char) * (strlen(str0) + 1));
- argv[2] = (char *) malloc( sizeof(char) * (strlen(str2) + 1));
- strcpy( argv[0], str0);
- strcpy( argv[2], str2);
-
- char value[PROPERTY_VALUE_MAX];
- property_get("persist.gst.debug", value, "0");
- LOGV("persist.gst.debug property %s", value);
- argv[1] = (char *) malloc( sizeof(char) * (strlen(value) + 1));
- strcpy( argv[1], value);
-
- property_get("persist.gst.trace", trace, "/dev/console");
- LOGV("persist.gst.trace property %s", trace);
- LOGV("route the trace to %s", trace);
- int fd_trace = open(trace, O_RDWR);
- if(fd_trace != 1) {
- dup2(fd_trace, 0);
- dup2(fd_trace, 1);
- dup2(fd_trace, 2);
- ::close(fd_trace);
- }
-
- mGst_info_start_time = gst_util_get_timestamp ();
- gst_debug_remove_log_function(debug_log);
- gst_debug_add_log_function(debug_log, this);
- gst_debug_remove_log_function(gst_debug_log_default);
-
- LOGV("GstMediaRecorder gstreamer init check");
- // init gstreamer
- if(!gst_init_check (&argc, &argv, &err)) {
- LOGE ("GstMediaRecorder Could not initialize GStreamer: %s\n", err ? err->message : "unknown error occurred");
- if (err) {
- g_error_free (err);
- }
- }
-
- LOGV("GstMediaRecorder create pipeline");
- mPipeline = gst_pipeline_new (NULL);
- if(!mPipeline) {
- LOGE("GstMediaRecorder can't create pipeline");
- goto bail;
- }
-
- // verbose info (as gst-launch -v)
- // Activate the trace with the command: "setprop persist.gst.verbose 1"
- property_get("persist.gst.verbose", value, "0");
- LOGV("persist.gst.verbose property = %s", value);
- if (value[0] == '1') {
- LOGV("Activate deep_notify");
- g_signal_connect (mPipeline, "deep_notify",
- G_CALLBACK (gst_object_default_deep_notify), NULL);
- }
-
- LOGV("GstMediaRecorder register bus callback");
- bus = gst_pipeline_get_bus(GST_PIPELINE (mPipeline));
- gst_bus_set_sync_handler (bus, bus_message, this);
- gst_object_unref (bus);
-
- if((mOutput_format == OUTPUT_FORMAT_RAW_AMR) && (mUse_video_src == FALSE) ) {
- // in RAW AMR format don't use any muxer
- LOGV("GstMediaRecorder use identity as muxer in RAW_AMR format");
- muxer = gst_element_factory_make("identity", NULL);
- }
- else {
- LOGV("GstMediaRecorder use gppmux");
- muxer = gst_element_factory_make("gppmux", NULL);
- }
-
- if(!muxer) {
- LOGE("GstMediaRecorder can't create muxer");
- goto bail1;
- }
-
- gst_bin_add (GST_BIN_CAST(mPipeline), muxer);
-
- LOGV("GstMediaRecorder create sink from uri %s", mOutFilePath);
- sink = gst_element_make_from_uri(GST_URI_SINK, mOutFilePath, NULL);
- if(!sink) {
- LOGE("GstMediaRecorder can't create sink %s", mOutFilePath);
- goto bail1;
- }
-
- g_object_set(G_OBJECT(sink), "async", FALSE, NULL);
-
- gst_bin_add (GST_BIN_CAST(mPipeline), sink);
-
- LOGV("GstMediaRecorder create identity");
- identity = gst_element_factory_make("identity", NULL);
- if(!identity) {
- LOGE("GstMediaRecorder can't identity element");
- goto bail1;
- }
- gst_bin_add (GST_BIN_CAST(mPipeline), identity);
-
- mCurrentFileSize = 0;
- g_signal_connect (identity, "handoff", G_CALLBACK (handoff), this);
- g_object_set(G_OBJECT(identity), "signal-handoffs", TRUE, NULL);
-
- mVideoBin = create_video_bin();
- if(mVideoBin) {
- gst_bin_add (GST_BIN_CAST(mPipeline),mVideoBin);
- LOGV("GstMediaRecorder link vbin to muxer");
- if(!gst_element_link(mVideoBin, muxer)) {
- LOGE("GstMediaRecorder can't link vbin to muxer");
- }
- }
-
- mAudioBin = create_audio_bin();
- if(mAudioBin) {
- gst_bin_add (GST_BIN_CAST(mPipeline),mAudioBin);
- LOGV("GstMediaRecorder link abin to muxer");
- if(!gst_element_link(mAudioBin, muxer)) {
- LOGE("GstMediaRecorder can't link abin to muxer");
- }
- }
-
- if(!mAudioBin && !mVideoBin)
- {
- LOGE("GstMediaRecorder both audiobin and videobin are NULL !!!!!");
- goto bail1;
- }
- LOGV("GstMediaRecorder link muxer->identity->sink");
- if(!gst_element_link_many(muxer, identity, sink, NULL)) {
- LOGE("GstMediaRecorder can't link muxer->identity->sink");
- }
-
-
- gst_element_set_state (mPipeline, GST_STATE_READY);
- gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
- return OK;
+ GstElement *muxer, *identity, *sink;
+ GstBus *bus;
+ GError *err = NULL;
+ int argc = 3;
+ char **argv;
+ char str0[] = "";
+ //char str1[] = "";
+ char str2[] = "";
+ char trace[PROPERTY_VALUE_MAX];
+
+ argv = (char **) malloc (sizeof (char *) * argc);
+ argv[0] = (char *) malloc (sizeof (char) * (strlen (str0) + 1));
+ argv[2] = (char *) malloc (sizeof (char) * (strlen (str2) + 1));
+ strcpy (argv[0], str0);
+ strcpy (argv[2], str2);
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get ("persist.gst.debug", value, "0");
+ LOGV ("persist.gst.debug property %s", value);
+ argv[1] = (char *) malloc (sizeof (char) * (strlen (value) + 1));
+ strcpy (argv[1], value);
+
+ property_get ("persist.gst.trace", trace, "/dev/console");
+ LOGV ("persist.gst.trace property %s", trace);
+ LOGV ("route the trace to %s", trace);
+ int fd_trace = open (trace, O_RDWR);
+ if (fd_trace != -1) {
+ dup2 (fd_trace, 0);
+ dup2 (fd_trace, 1);
+ dup2 (fd_trace, 2);
+ ::close (fd_trace);
+ }
+
+ mGst_info_start_time = gst_util_get_timestamp ();
+ gst_debug_remove_log_function (debug_log);
+ gst_debug_add_log_function (debug_log, this);
+ gst_debug_remove_log_function (gst_debug_log_default);
+
+ LOGV ("GstMediaRecorder gstreamer init check");
+ // init gstreamer
+ if (!gst_init_check (&argc, &argv, &err)) {
+ LOGE ("GstMediaRecorder Could not initialize GStreamer: %s\n",
+ err ? err->message : "unknown error occurred");
+ if (err) {
+ g_error_free (err);
+ }
+ }
+
+ LOGV ("GstMediaRecorder create pipeline");
+ mPipeline = gst_pipeline_new (NULL);
+ if (!mPipeline) {
+ LOGE ("GstMediaRecorder can't create pipeline");
+ goto bail;
+ }
+ // verbose info (as gst-launch -v)
+ // Activate the trace with the command: "setprop persist.gst.verbose 1"
+ property_get ("persist.gst.verbose", value, "0");
+ LOGV ("persist.gst.verbose property = %s", value);
+ if (value[0] == '1') {
+ LOGV ("Activate deep_notify");
+ g_signal_connect (mPipeline, "deep_notify",
+ G_CALLBACK (gst_object_default_deep_notify), NULL);
+ }
+
+ LOGV ("GstMediaRecorder register bus callback");
+ bus = gst_pipeline_get_bus (GST_PIPELINE (mPipeline));
+ gst_bus_set_sync_handler (bus, bus_message, this);
+ gst_object_unref (bus);
+
+ if ((mOutput_format == OUTPUT_FORMAT_RAW_AMR) && (mUse_video_src == FALSE)) {
+ // in RAW AMR format don't use any muxer
+ LOGV ("GstMediaRecorder use identity as muxer in RAW_AMR format");
+ muxer = gst_element_factory_make ("identity", NULL);
+ } else {
+ LOGV ("GstMediaRecorder use gppmux");
+ muxer = gst_element_factory_make ("gppmux", NULL);
+ }
+
+ if (!muxer) {
+ LOGE ("GstMediaRecorder can't create muxer");
+ goto bail1;
+ }
+
+ gst_bin_add (GST_BIN_CAST (mPipeline), muxer);
+
+ LOGV ("GstMediaRecorder create sink from uri %s", mOutFilePath);
+ sink = gst_element_make_from_uri (GST_URI_SINK, mOutFilePath, NULL);
+ if (!sink) {
+ LOGE ("GstMediaRecorder can't create sink %s", mOutFilePath);
+ goto bail1;
+ }
+
+ g_object_set (G_OBJECT (sink), "async", FALSE, NULL);
+
+ gst_bin_add (GST_BIN_CAST (mPipeline), sink);
+
+ LOGV ("GstMediaRecorder create identity");
+ identity = gst_element_factory_make ("identity", NULL);
+ if (!identity) {
+ LOGE ("GstMediaRecorder can't create identity element");
+ goto bail1;
+ }
+ gst_bin_add (GST_BIN_CAST (mPipeline), identity);
+
+ mCurrentFileSize = 0;
+ g_signal_connect (identity, "handoff", G_CALLBACK (handoff), this);
+ g_object_set (G_OBJECT (identity), "signal-handoffs", TRUE, NULL);
+
+ mVideoBin = create_video_bin ();
+ if (mVideoBin) {
+ gst_bin_add (GST_BIN_CAST (mPipeline), mVideoBin);
+ LOGV ("GstMediaRecorder link vbin to muxer");
+ if (!gst_element_link (mVideoBin, muxer)) {
+ LOGE ("GstMediaRecorder can't link vbin to muxer");
+ }
+ }
+
+ mAudioBin = create_audio_bin ();
+ if (mAudioBin) {
+ gst_bin_add (GST_BIN_CAST (mPipeline), mAudioBin);
+ LOGV ("GstMediaRecorder link abin to muxer");
+ if (!gst_element_link (mAudioBin, muxer)) {
+ LOGE ("GstMediaRecorder can't link abin to muxer");
+ }
+ }
+
+ if (!mAudioBin && !mVideoBin) {
+ LOGE ("GstMediaRecorder both audiobin and videobin are NULL !!!!!");
+ goto bail1;
+ }
+ LOGV ("GstMediaRecorder link muxer->identity->sink");
+ if (!gst_element_link_many (muxer, identity, sink, NULL)) {
+ LOGE ("GstMediaRecorder can't link muxer->identity->sink");
+ }
+
+
+ gst_element_set_state (mPipeline, GST_STATE_READY);
+ gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+ return OK;
bail1:
- LOGV("GstMediaRecorder change pipeline state to NULL");
- gst_element_set_state (mPipeline, GST_STATE_NULL);
- gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
- LOGV("GstMediaRecorder unref pipeline");
- gst_object_unref(mPipeline);
+ LOGV ("GstMediaRecorder change pipeline state to NULL");
+ gst_element_set_state (mPipeline, GST_STATE_NULL);
+ gst_element_get_state (mPipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+ LOGV ("GstMediaRecorder unref pipeline");
+ gst_object_unref (mPipeline);
bail:
- mPipeline = NULL;
- mVideoBin = NULL;
- mAudioBin = NULL;
- mVideoSrc = NULL;
- return UNKNOWN_ERROR;
+ mPipeline = NULL;
+ mVideoBin = NULL;
+ mAudioBin = NULL;
+ mVideoSrc = NULL;
+ return UNKNOWN_ERROR;
}
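build_record_graph() above always ends the pipeline with the same tail, muxer -> identity -> sink, and uses the identity element's "handoff" signal to count the bytes that reach the file. A standalone sketch of that tail follows; it is not part of the commit, attach_output/on_handoff are hypothetical names, error handling is simplified, and it assumes the GStreamer 0.10 API.

/* sketch only: muxer -> identity -> filesink tail with byte counting */
static void
on_handoff (GstElement * identity, GstBuffer * buffer, gpointer user_data)
{
  gint64 *written = (gint64 *) user_data;

  (void) identity;
  *written += GST_BUFFER_SIZE (buffer);
}

static gboolean
attach_output (GstElement * pipeline, GstElement * muxer, const gchar * path,
    gint64 * written)
{
  GstElement *identity, *sink;

  identity = gst_element_factory_make ("identity", NULL);
  sink = gst_element_factory_make ("filesink", NULL);
  if (!identity || !sink)
    return FALSE;               /* simplified: leaks whichever was created */

  g_object_set (G_OBJECT (sink), "location", path, NULL);
  gst_bin_add_many (GST_BIN_CAST (pipeline), identity, sink, NULL);

  /* count every buffer that passes through identity */
  g_signal_connect (identity, "handoff", G_CALLBACK (on_handoff), written);
  g_object_set (G_OBJECT (identity), "signal-handoffs", TRUE, NULL);

  return gst_element_link_many (muxer, identity, sink, NULL);
}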
-void GstMediaRecorder::postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
+void
+GstMediaRecorder::postDataTimestamp (nsecs_t timestamp, int32_t msgType,
+ const sp < IMemory > &dataPtr)
{
- ssize_t offset = 0;
- size_t size = 0;
- video_frame_t video_frame = VIDEO_FRAME_INIT;
- GstBuffer* buffer;
- GstClockTime duration;
- GST_UNUSED(timestamp);
- GST_UNUSED(msgType);
-
- //LOGV("postDataTimestamp");
- record_callback_cookie * lcookie = g_new0 (record_callback_cookie, 1);
- sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size);
-
- if(mVideoSrc == NULL) {
- LOGE(" record_callback the videosrc don't exist");
- mCamera->stopRecording();
- return ;
- }
-
- video_frame.pmem_fd = heap->getHeapID();
- video_frame.pmem_offset = offset;
- video_frame.pmem_size = size;
-
- lcookie->frame = dataPtr;
- lcookie->camera = mCamera;
-
- buffer = gst_icbvideo_buffer_new(&video_frame, (GstMiniObjectFinalizeFunction) video_frame_release, lcookie
- ,GST_ELEMENT(mVideoSrc) );
- GST_BUFFER_SIZE(buffer) = size; //needed to build correct timestamp in basesrc
- GST_BUFFER_TIMESTAMP(buffer) = timestamp;
-
- /* Last frame of video not encoded */
- duration = gst_util_uint64_scale_int (GST_SECOND, 1, mFps );
- GST_BUFFER_DURATION(buffer) = duration; //needed to build correct duration in basesrc
-
- gst_app_src_push_buffer(GST_APP_SRC(mVideoSrc), buffer);
+ ssize_t offset = 0;
+ size_t size = 0;
+ video_frame_t video_frame = VIDEO_FRAME_INIT;
+ GstBuffer *buffer;
+ GstClockTime duration;
+ GST_UNUSED (timestamp);
+ GST_UNUSED (msgType);
+
+ //LOGV("postDataTimestamp");
+ record_callback_cookie *lcookie = g_new0 (record_callback_cookie, 1);
+ sp < IMemoryHeap > heap = dataPtr->getMemory (&offset, &size);
+
+ if (mVideoSrc == NULL) {
+ LOGE (" record_callback the videosrc doesn't exist");
+ mCamera->stopRecording ();
+ return;
+ }
+
+ video_frame.pmem_fd = heap->getHeapID ();
+ video_frame.pmem_offset = offset;
+ video_frame.pmem_size = size;
+
+ lcookie->frame = dataPtr;
+ lcookie->camera = mCamera;
+
+ buffer =
+ gst_icbvideo_buffer_new (&video_frame,
+ (GstMiniObjectFinalizeFunction) video_frame_release, lcookie,
+ GST_ELEMENT (mVideoSrc));
+ GST_BUFFER_SIZE (buffer) = size; //needed to build correct timestamp in basesrc
+ GST_BUFFER_TIMESTAMP (buffer) = timestamp;
+
+ /* Last frame of video not encoded */
+ duration = gst_util_uint64_scale_int (GST_SECOND, 1, mFps);
+ GST_BUFFER_DURATION (buffer) = duration; //needed to build correct duration in basesrc
+
+ gst_app_src_push_buffer (GST_APP_SRC (mVideoSrc), buffer);
}
// camera callback interface
-void AndroidGstCameraListener::postData(int32_t msgType, const sp<IMemory>& dataPtr)
+void
+AndroidGstCameraListener::postData (int32_t msgType,
+ const sp < IMemory > &dataPtr)
{
- GST_UNUSED(msgType); GST_UNUSED(dataPtr);
+ GST_UNUSED (msgType);
+ GST_UNUSED (dataPtr);
}
-void AndroidGstCameraListener::postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
-{
- //LOGI("AndroidGstCameraListener::postDataTimestamp %lld ns", timestamp);
- if ((mRecorder != NULL) && (msgType == CAMERA_MSG_VIDEO_FRAME)) {
- mRecorder->postDataTimestamp(timestamp, msgType, dataPtr);
- }
+void
+AndroidGstCameraListener::postDataTimestamp (nsecs_t timestamp, int32_t msgType,
+ const sp < IMemory > &dataPtr)
+{
+ //LOGI("AndroidGstCameraListener::postDataTimestamp %lld ns", timestamp);
+ if ((mRecorder != NULL) && (msgType == CAMERA_MSG_VIDEO_FRAME)) {
+ mRecorder->postDataTimestamp (timestamp, msgType, dataPtr);
+ }
}
-
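postDataTimestamp() above hands every camera frame to the appsrc element as a GstBuffer stamped with the camera timestamp and a 1/fps duration, and sendEos() later drains the source with gst_app_src_end_of_stream(). The simplified sketch below shows that feeding pattern without the pmem/ICB buffer wrapping; it is not part of the commit and assumes the GStreamer 0.10 buffer API (GST_BUFFER_DATA, GST_BUFFER_TIMESTAMP) used in this file.

/* sketch only: feeding appsrc from a capture callback
 * requires <string.h>, <gst/gst.h>, <gst/app/gstappsrc.h> */
static void
push_camera_frame (GstElement * video_appsrc, gconstpointer data, guint size,
    GstClockTime timestamp, gint fps)
{
  GstBuffer *buf = gst_buffer_new_and_alloc (size);

  memcpy (GST_BUFFER_DATA (buf), data, size);
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  /* one frame worth of duration, as in postDataTimestamp() */
  GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (GST_SECOND, 1, fps);

  /* push_buffer takes ownership of buf */
  gst_app_src_push_buffer (GST_APP_SRC (video_appsrc), buf);
}

/* when recording stops, the source is drained with
 *   gst_app_src_end_of_stream (GST_APP_SRC (video_appsrc));
 * exactly as sendEos() does above. */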
diff --git a/gstplayer/GstMediaRecorder.h b/gstplayer/GstMediaRecorder.h
index 7db6d36..260dcbc 100644
--- a/gstplayer/GstMediaRecorder.h
+++ b/gstplayer/GstMediaRecorder.h
@@ -39,7 +39,7 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <ui/ISurface.h>
#include <ui/ICamera.h>
#include <ui/Camera.h>
-#else
+#else
#include <camera/CameraParameters.h>
#include <utils/Errors.h>
#include <surfaceflinger/ISurface.h>
@@ -53,122 +53,137 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#define GST_UNUSED(x) (void)x;
-namespace android {
+namespace android
+{
-class AndroidGstCameraListener;
+ class AndroidGstCameraListener;
#ifndef STECONF_ANDROID_VERSION_FROYO
-class GstMediaRecorder
-#else
-class GstMediaRecorder : public MediaRecorderBase
+ class GstMediaRecorder
+#else
+ class GstMediaRecorder:public MediaRecorderBase
#endif
-{
-public:
- GstMediaRecorder();
- ~GstMediaRecorder();
-
- status_t init();
- status_t setAudioSource(audio_source as);
- status_t setVideoSource(video_source vs);
- status_t setOutputFormat(output_format of);
- status_t setAudioEncoder(audio_encoder ae);
- status_t setVideoEncoder(video_encoder ve);
- status_t setVideoSize(int width, int height);
- status_t setVideoFrameRate(int frames_per_second);
- status_t setCamera(const sp<ICamera>& camera);
- status_t setPreviewSurface(const sp<ISurface>& surface);
- status_t setOutputFile(const char *path);
- status_t setOutputFile(int fd, int64_t offset, int64_t length);
- status_t setParameters(const String8& params);
- status_t setListener(const sp<IMediaPlayerClient>& listener);
- status_t prepare();
- status_t start();
- status_t stop();
- status_t close();
- status_t reset();
- status_t getMaxAmplitude(int *max);
-
- void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-private:
- enum AndroidCameraFlags {
- FLAGS_SET_CAMERA = 1L << 0,
- FLAGS_HOT_CAMERA = 1L << 1,
- FLAGS_HOT_MICRO = 1L << 2,
+ {
+ public:
+ GstMediaRecorder ();
+ ~GstMediaRecorder ();
+
+ status_t init ();
+ status_t setAudioSource (audio_source as);
+ status_t setVideoSource (video_source vs);
+ status_t setOutputFormat (output_format of);
+ status_t setAudioEncoder (audio_encoder ae);
+ status_t setVideoEncoder (video_encoder ve);
+ status_t setVideoSize (int width, int height);
+ status_t setVideoFrameRate (int frames_per_second);
+ status_t setCamera (const sp < ICamera > &camera);
+ status_t setPreviewSurface (const sp < ISurface > &surface);
+ status_t setOutputFile (const char *path);
+ status_t setOutputFile (int fd, int64_t offset, int64_t length);
+ status_t setParameters (const String8 & params);
+ status_t setListener (const sp < IMediaPlayerClient > &listener);
+ status_t prepare ();
+ status_t start ();
+ status_t stop ();
+ status_t close ();
+ status_t reset ();
+ status_t getMaxAmplitude (int *max);
+
+ void postDataTimestamp (nsecs_t timestamp, int32_t msgType,
+ const sp < IMemory > &dataPtr);
+
+ private:
+ enum AndroidCameraFlags
+ {
+ FLAGS_SET_CAMERA = 1L << 0,
+ FLAGS_HOT_CAMERA = 1L << 1,
+ FLAGS_HOT_MICRO = 1L << 2,
};
- sp<IMediaPlayerClient> mListener;
- sp<Camera> mCamera;
- sp<ISurface> mSurface;
- int mFlags;
- gchar* mOutFilePath;
- int mWidth;
- int mHeight;
- int mFps;
- int mOutput_format;
- int mVideo_encoder;
- int mAudio_encoder;
- int mAudio_source;
- int mUse_video_src; // set to TRUE when the appli has set a video src
- int mUse_audio_src; // set to TRUE when the appli has set a audio src
- int mOutFilePath_fd;
-
- int mVTMode;
- int mIPeriod;
- int mIMBRefreshMode;
- gboolean mIsEos;
-
- static GstBusSyncReply bus_message(GstBus *bus, GstMessage * msg, gpointer data);
- static void record_callback(const sp<IMemory>& frame, void *cookie);
- static void handoff(GstElement* object, GstBuffer* arg0, gpointer user_data);
- status_t build_record_graph();
- GstStateChangeReturn wait_for_set_state(int timeout_msec);
-
- void sendEos();
- status_t release_pipeline();
-
- GstElement* create_video_bin();
- GstElement* create_audio_bin();
- GstElement *mVideoBin,*mAudioBin;
- GstElement *mPipeline;
- GstElement *mVideoSrc;
- GstElement *mAudioSrc;
- GMutex *mEOSlock;
-
- gint64 mMaxDuration;
- GTimer *mTimer;
- gint64 mMaxFileSize;
- gint64 mCurrentFileSize;
- gint64 mAudioSampleRate;
- gint64 mAudioChannels;
- gint64 mAudioBitrate;
- gint64 mVideoBitrate;
-
- GstClockTime mGst_info_start_time;
- static void debug_log (GstDebugCategory * category, GstDebugLevel level,
- const gchar * file, const gchar * function, gint line,
- GObject * object, GstDebugMessage * message, gpointer data);
-
- snd_hwdep_t * mHwdep_handle;
-
- // callback interface
- sp<AndroidGstCameraListener> mCameraListener;
-};
-
-class AndroidGstCameraListener : public CameraListener
-{
-public:
- AndroidGstCameraListener(GstMediaRecorder* recorder) { mRecorder = recorder; }
- virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) {
- GST_UNUSED(msgType); GST_UNUSED(ext1); GST_UNUSED(ext2);
- }
- virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr);
- virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
- void release() { mRecorder = NULL; }
-private:
- AndroidGstCameraListener();
- GstMediaRecorder* mRecorder;
-};
-
-}; // namespace android
+ sp < IMediaPlayerClient > mListener;
+ sp < Camera > mCamera;
+ sp < ISurface > mSurface;
+ int mFlags;
+ gchar *mOutFilePath;
+ int mWidth;
+ int mHeight;
+ int mFps;
+ int mOutput_format;
+ int mVideo_encoder;
+ int mAudio_encoder;
+ int mAudio_source;
+ int mUse_video_src; // set to TRUE when the application has set a video src
+ int mUse_audio_src; // set to TRUE when the application has set an audio src
+ int mOutFilePath_fd;
+
+ int mVTMode;
+ int mIPeriod;
+ int mIMBRefreshMode;
+ gboolean mIsEos;
+
+ static GstBusSyncReply bus_message (GstBus * bus, GstMessage * msg,
+ gpointer data);
+ static void record_callback (const sp < IMemory > &frame, void *cookie);
+ static void handoff (GstElement * object, GstBuffer * arg0,
+ gpointer user_data);
+ status_t build_record_graph ();
+ GstStateChangeReturn wait_for_set_state (int timeout_msec);
+
+ void sendEos ();
+ status_t release_pipeline ();
+
+ GstElement *create_video_bin ();
+ GstElement *create_audio_bin ();
+ GstElement *mVideoBin, *mAudioBin;
+ GstElement *mPipeline;
+ GstElement *mVideoSrc;
+ GstElement *mAudioSrc;
+ GMutex *mEOSlock;
+
+ gint64 mMaxDuration;
+ GTimer *mTimer;
+ gint64 mMaxFileSize;
+ gint64 mCurrentFileSize;
+ gint64 mAudioSampleRate;
+ gint64 mAudioChannels;
+ gint64 mAudioBitrate;
+ gint64 mVideoBitrate;
+
+ GstClockTime mGst_info_start_time;
+ static void debug_log (GstDebugCategory * category, GstDebugLevel level,
+ const gchar * file, const gchar * function, gint line,
+ GObject * object, GstDebugMessage * message, gpointer data);
+
+ snd_hwdep_t *mHwdep_handle;
+
+ // callback interface
+ sp < AndroidGstCameraListener > mCameraListener;
+ };
+
+ class AndroidGstCameraListener:public CameraListener
+ {
+ public:
+ AndroidGstCameraListener (GstMediaRecorder * recorder)
+ {
+ mRecorder = recorder;
+ }
+ virtual void notify (int32_t msgType, int32_t ext1, int32_t ext2)
+ {
+ GST_UNUSED (msgType);
+ GST_UNUSED (ext1);
+ GST_UNUSED (ext2);
+ }
+ virtual void postData (int32_t msgType, const sp < IMemory > &dataPtr);
+ virtual void postDataTimestamp (nsecs_t timestamp, int32_t msgType,
+ const sp < IMemory > &dataPtr);
+ void release ()
+ {
+ mRecorder = NULL;
+ }
+ private:
+ AndroidGstCameraListener ();
+ GstMediaRecorder *mRecorder;
+ };
+
+}; // namespace android
#endif //GSTMEDIARECODER_H
diff --git a/gstplayer/GstMediaScanner.cpp b/gstplayer/GstMediaScanner.cpp
index e03bb33..b61d59b 100755
--- a/gstplayer/GstMediaScanner.cpp
+++ b/gstplayer/GstMediaScanner.cpp
@@ -44,435 +44,445 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#define UNUSED(x) (void)x
// flags used for native encoding detection
-enum {
- kEncodingNone = 0,
- kEncodingShiftJIS = (1 << 0),
- kEncodingGBK = (1 << 1),
- kEncodingBig5 = (1 << 2),
- kEncodingEUCKR = (1 << 3),
- kEncodingAll = (kEncodingShiftJIS | kEncodingGBK | kEncodingBig5 | kEncodingEUCKR),
+enum
+{
+ kEncodingNone = 0,
+ kEncodingShiftJIS = (1 << 0),
+ kEncodingGBK = (1 << 1),
+ kEncodingBig5 = (1 << 2),
+ kEncodingEUCKR = (1 << 3),
+ kEncodingAll =
+ (kEncodingShiftJIS | kEncodingGBK | kEncodingBig5 | kEncodingEUCKR),
};
-namespace android {
-#ifndef STECONF_ANDROID_VERSION_FROYO
-MediaScanner::MediaScanner()
- : mLocale(NULL)
-{
-}
-
-MediaScanner::~MediaScanner()
+namespace android
{
- if(mLocale)
- free(mLocale);
-}
+#ifndef STECONF_ANDROID_VERSION_FROYO
+ MediaScanner::MediaScanner ()
+ :mLocale (NULL)
+ {
+ }
+
+ MediaScanner::~MediaScanner ()
+ {
+ if (mLocale)
+ free (mLocale);
+ }
#else
-GstMediaScanner::GstMediaScanner() { }
+ GstMediaScanner::GstMediaScanner ()
+ {
+ }
-GstMediaScanner::~GstMediaScanner() {}
+ GstMediaScanner::~GstMediaScanner ()
+ {
+ }
#endif
#ifndef STECONF_ANDROID_VERSION_FROYO
-status_t MediaScanner::processFile(const char *path, const char* mimeType, MediaScannerClient& client)
-#else
-status_t GstMediaScanner::processFile(const char *path, const char* mimeType, MediaScannerClient& client)
+ status_t MediaScanner::processFile (const char *path, const char *mimeType,
+ MediaScannerClient & client)
+#else
+ status_t GstMediaScanner::processFile (const char *path, const char *mimeType,
+ MediaScannerClient & client)
#endif
-{
- const char* extension = strrchr(path, '.');
-
- UNUSED(mimeType);
-
- LOGV("MediaScanner processFile %s", path);
-
- //first check if extension is supported
- if( !extension) {
- return -1;
- }
-
- if(
- //audio file
- !strcasecmp(extension, ".mp3") ||
- !strcasecmp(extension, ".ogg") ||
- !strcasecmp(extension, ".oga") ||
- !strcasecmp(extension, ".mid") ||
- !strcasecmp(extension, ".midi") ||
- !strcasecmp(extension, ".smf") ||
- !strcasecmp(extension, ".xmf") ||
- !strcasecmp(extension, ".rtttl") ||
- !strcasecmp(extension, ".imy") ||
- !strcasecmp(extension, ".rtx") ||
- !strcasecmp(extension, ".ota") ||
- !strcasecmp(extension, ".wma") ||
- !strcasecmp(extension, ".m4a") ||
- !strcasecmp(extension, ".amr") ||
- !strcasecmp(extension, ".awb") ||
- !strcasecmp(extension, ".wav") ||
- !strcasecmp(extension, ".aac") ||
- //video file
- !strcasecmp(extension, ".mp4") ||
- !strcasecmp(extension, ".m4v") ||
- !strcasecmp(extension, ".3gp") ||
- !strcasecmp(extension, ".3gpp") ||
- !strcasecmp(extension, ".3gp2") ||
- !strcasecmp(extension, ".3gpp2") ||
- !strcasecmp(extension, ".3g2") ||
- !strcasecmp(extension, ".avi") ||
- !strcasecmp(extension, ".mov") ||
- !strcasecmp(extension, ".wmv") ||
- !strcasecmp(extension, ".asf") ||
- !strcasecmp(extension, ".divx")
- )
- {
- // extension of multimedia file supported
- }
- else
- {
- LOGV("MediaScanner processFile extension %s not supported", extension);
- return -1;
- }
-
- sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
- retriever->setMode(METADATA_MODE_METADATA_RETRIEVAL_ONLY);
- status_t status = retriever->setDataSource(path);
-
- if (status != NO_ERROR) {
- LOGE("MediaScanner setDataSource failed (%d)", status);
- retriever->disconnect();
- return status;
+ {
+ const char *extension = strrchr (path, '.');
+
+ UNUSED (mimeType);
+
+ LOGV ("MediaScanner processFile %s", path);
+
+ //first check if extension is supported
+ if (!extension) {
+ return -1;
}
-
- // init client
+
+ if (
+ //audio file
+ !strcasecmp (extension, ".mp3") ||
+ !strcasecmp (extension, ".ogg") ||
+ !strcasecmp (extension, ".oga") ||
+ !strcasecmp (extension, ".mid") ||
+ !strcasecmp (extension, ".midi") ||
+ !strcasecmp (extension, ".smf") ||
+ !strcasecmp (extension, ".xmf") ||
+ !strcasecmp (extension, ".rtttl") ||
+ !strcasecmp (extension, ".imy") ||
+ !strcasecmp (extension, ".rtx") ||
+ !strcasecmp (extension, ".ota") ||
+ !strcasecmp (extension, ".wma") ||
+ !strcasecmp (extension, ".m4a") ||
+ !strcasecmp (extension, ".amr") ||
+ !strcasecmp (extension, ".awb") ||
+ !strcasecmp (extension, ".wav") || !strcasecmp (extension, ".aac") ||
+ //video file
+ !strcasecmp (extension, ".mp4") ||
+ !strcasecmp (extension, ".m4v") ||
+ !strcasecmp (extension, ".3gp") ||
+ !strcasecmp (extension, ".3gpp") ||
+ !strcasecmp (extension, ".3gp2") ||
+ !strcasecmp (extension, ".3gpp2") ||
+ !strcasecmp (extension, ".3g2") ||
+ !strcasecmp (extension, ".avi") ||
+ !strcasecmp (extension, ".mov") ||
+ !strcasecmp (extension, ".wmv") ||
+ !strcasecmp (extension, ".asf") || !strcasecmp (extension, ".divx")
+ ) {
+ // extension of multimedia file supported
+ } else {
+ LOGV ("MediaScanner processFile extension %s not supported", extension);
+ return -1;
+ }
+
+ sp < MediaMetadataRetriever > retriever = new MediaMetadataRetriever ();
+ retriever->setMode (METADATA_MODE_METADATA_RETRIEVAL_ONLY);
+ status_t status = retriever->setDataSource (path);
+
+ if (status != NO_ERROR) {
+ LOGE ("MediaScanner setDataSource failed (%d)", status);
+ retriever->disconnect ();
+ return status;
+ }
+ // init client
#ifndef STECONF_ANDROID_VERSION_FROYO
- client.setLocale(mLocale);
-#else
- client.setLocale(locale());
+ client.setLocale (mLocale);
+#else
+ client.setLocale (locale ());
#endif
- client.beginFile();
+ client.beginFile ();
- const char* value;
+ const char *value;
- // extract metadata from the file
- value = retriever->extractMetadata(METADATA_KEY_IS_DRM_CRIPPLED);
- if (value && strcmp(value, "true") == 0) {
- // we don't support WMDRM currently
- // setting this invalid mimetype will make the java side ignore this file
- client.setMimeType("audio/x-wma-drm");
+ // extract metadata from the file
+ value = retriever->extractMetadata (METADATA_KEY_IS_DRM_CRIPPLED);
+ if (value && strcmp (value, "true") == 0) {
+ // we don't support WMDRM currently
+ // setting this invalid mimetype will make the java side ignore this file
+ client.setMimeType ("audio/x-wma-drm");
}
- value = retriever->extractMetadata(METADATA_KEY_CODEC);
- if (value && strcmp(value, "Windows Media Audio 10 Professional") == 0) {
- // we don't support WM 10 Professional currently
- // setting this invalid mimetype will make the java side ignore this file
- client.setMimeType("audio/x-wma-10-professional");
+ value = retriever->extractMetadata (METADATA_KEY_CODEC);
+ if (value && strcmp (value, "Windows Media Audio 10 Professional") == 0) {
+ // we don't support WM 10 Professional currently
+ // setting this invalid mimetype will make the java side ignore this file
+ client.setMimeType ("audio/x-wma-10-professional");
}
- value = retriever->extractMetadata(METADATA_KEY_ALBUM);
+ value = retriever->extractMetadata (METADATA_KEY_ALBUM);
if (value)
- client.addStringTag("album", value);
+ client.addStringTag ("album", value);
// Look for "author" tag first, if it is not found, try "artist" tag
- value = retriever->extractMetadata(METADATA_KEY_AUTHOR);
+ value = retriever->extractMetadata (METADATA_KEY_AUTHOR);
if (!value) {
- value = retriever->extractMetadata(METADATA_KEY_ARTIST);
+ value = retriever->extractMetadata (METADATA_KEY_ARTIST);
}
if (value)
- client.addStringTag("artist", value);
- value = retriever->extractMetadata(METADATA_KEY_COMPOSER);
+ client.addStringTag ("artist", value);
+ value = retriever->extractMetadata (METADATA_KEY_COMPOSER);
if (value)
- client.addStringTag("composer", value);
- value = retriever->extractMetadata(METADATA_KEY_GENRE);
+ client.addStringTag ("composer", value);
+ value = retriever->extractMetadata (METADATA_KEY_GENRE);
if (value)
- client.addStringTag("genre", value);
- value = retriever->extractMetadata(METADATA_KEY_TITLE);
+ client.addStringTag ("genre", value);
+ value = retriever->extractMetadata (METADATA_KEY_TITLE);
if (value)
- client.addStringTag("title", value);
- value = retriever->extractMetadata(METADATA_KEY_YEAR);
+ client.addStringTag ("title", value);
+ value = retriever->extractMetadata (METADATA_KEY_YEAR);
if (value)
- client.addStringTag("year", value);
- value = retriever->extractMetadata(METADATA_KEY_CD_TRACK_NUMBER);
+ client.addStringTag ("year", value);
+ value = retriever->extractMetadata (METADATA_KEY_CD_TRACK_NUMBER);
if (value)
- client.addStringTag("tracknumber", value);
+ client.addStringTag ("tracknumber", value);
- retriever->disconnect();
- // send info to java layer
- client.endFile();
- return status;
-}
+ retriever->disconnect ();
+ // send info to java layer
+ client.endFile ();
+ return status;
+ }
#ifndef STECONF_ANDROID_VERSION_FROYO
-status_t MediaScanner::processDirectory(const char *path, const char* extensions, MediaScannerClient& client, ExceptionCheck exceptionCheck, void* exceptionEnv)
-{
- LOGV("MediaScanner processDirectory %s", path);
- // do not process directories with .nomedia file
- char* nomedia = new char[strlen(path) + strlen("/.nomedia") + 2];
- strcpy(nomedia, path);
- strcat(nomedia, "/.nomedia");
-
- if(access(nomedia, F_OK) == 0) {
- LOGV("MediaScanner %s found don't process this directory", nomedia);
- delete nomedia;
- return OK;
- }
- delete nomedia;
-
- struct dirent* entry;
- DIR* dir = opendir(path);
- if(!dir) {
- LOGV("MediaScanner can't open directory %s", path);
- return -1;
- }
-
- // now check all entries in this directory
- while ((entry = readdir(dir))) {
- const char* name = entry->d_name;
- LOGV("MediaScanner entry name %s/%s", path, name);
- if(name == NULL) {
- continue;
- }
-
- // ignore "." and ".."
- if (name[0] == '.') {
- continue;
- }
+ status_t MediaScanner::processDirectory (const char *path,
+ const char *extensions, MediaScannerClient & client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv)
+ {
+ LOGV ("MediaScanner processDirectory %s", path);
+ // do not process directories with .nomedia file
+ char *nomedia = new char[strlen (path) + strlen ("/.nomedia") + 2];
+ strcpy (nomedia, path);
+ strcat (nomedia, "/.nomedia");
+
+ if (access (nomedia, F_OK) == 0) {
+ LOGV ("MediaScanner %s found, don't process this directory", nomedia);
+ delete[] nomedia;
+ return OK;
+ }
+ delete[] nomedia;
- int type = entry->d_type;
- if (type == DT_UNKNOWN) {
- // If the type is unknown, stat() the file instead.
- // This is sometimes necessary when accessing NFS mounted filesystems, but
- // could be needed in other cases well.
- struct stat statbuf;
- if (stat(path, &statbuf) == 0) {
- if (S_ISREG(statbuf.st_mode)) {
- type = DT_REG;
- } else if (S_ISDIR(statbuf.st_mode)) {
- type = DT_DIR;
- }
- } else {
- LOGD("stat() failed for %s: %s", path, strerror(errno) );
- }
+ struct dirent *entry;
+ DIR *dir = opendir (path);
+ if (!dir) {
+ LOGV ("MediaScanner can't open directory %s", path);
+ return -1;
+ }
+ // now check all entries in this directory
+ while ((entry = readdir (dir))) {
+ const char *name = entry->d_name;
+ LOGV ("MediaScanner entry name %s/%s", path, name);
+ if (name == NULL) {
+ continue;
+ }
+ // ignore "." and ".."
+ if (name[0] == '.') {
+ continue;
+ }
+
+ int type = entry->d_type;
+ if (type == DT_UNKNOWN) {
+ // If the type is unknown, stat() the file instead.
+ // This is sometimes necessary when accessing NFS mounted filesystems, but
+ // could be needed in other cases as well.
+ struct stat statbuf;
+ if (stat (path, &statbuf) == 0) {
+ if (S_ISREG (statbuf.st_mode)) {
+ type = DT_REG;
+ } else if (S_ISDIR (statbuf.st_mode)) {
+ type = DT_DIR;
+ }
+ } else {
+ LOGD ("stat() failed for %s: %s", path, strerror (errno));
}
-
- if (type == DT_REG || type == DT_DIR) {
- bool isDirectory = (type == DT_DIR);
-
- char* nextPath = new char[strlen(path) + strlen(name) + 2];
- strcpy(nextPath, path);
- strcat(nextPath, "/");
- strcat(nextPath, name);
-
- if (isDirectory) {
- int err = processDirectory(nextPath, extensions, client, exceptionCheck, exceptionEnv);
- if (err) {
- LOGV("Error processing '%s' - skipping\n", path);
- continue;
- }
- } else {
- struct stat statbuf;
- stat(nextPath, &statbuf);
- if (statbuf.st_size > 0) {
- client.scanFile(nextPath, statbuf.st_mtime, statbuf.st_size);
- }
- }
- delete nextPath;
+ }
+
+ if (type == DT_REG || type == DT_DIR) {
+ bool isDirectory = (type == DT_DIR);
+
+ char *nextPath = new char[strlen (path) + strlen (name) + 2];
+ strcpy (nextPath, path);
+ strcat (nextPath, "/");
+ strcat (nextPath, name);
+
+ if (isDirectory) {
+ int err =
+ processDirectory (nextPath, extensions, client, exceptionCheck,
+ exceptionEnv);
+ if (err) {
+ LOGV ("Error processing '%s' - skipping\n", path);
+ continue;
+ }
+ } else {
+ struct stat statbuf;
+ stat (nextPath, &statbuf);
+ if (statbuf.st_size > 0) {
+ client.scanFile (nextPath, statbuf.st_mtime, statbuf.st_size);
+ }
}
+ delete[] nextPath;
+ }
- }
+ }
- closedir(dir);
- return OK;
-}
+ closedir (dir);
+ return OK;
+ }
-void MediaScanner::setLocale(const char* locale)
-{
- //LOGE("MediaScanner set locale %s", locale);
- if (mLocale) {
- free(mLocale);
- mLocale = NULL;
+ void MediaScanner::setLocale (const char *locale)
+ {
+ //LOGE("MediaScanner set locale %s", locale);
+ if (mLocale) {
+ free (mLocale);
+ mLocale = NULL;
}
if (locale) {
- mLocale = strdup(locale);
+ mLocale = strdup (locale);
}
-}
+ }
#endif
-
+
// extracts album art as a block of data
// output: is a jpeg + 4 bytes of header to give jpeg size
#ifndef STECONF_ANDROID_VERSION_FROYO
-char* MediaScanner::extractAlbumArt(int fd)
-#else
-char* GstMediaScanner::extractAlbumArt(int fd)
+ char *MediaScanner::extractAlbumArt (int fd)
+#else
+ char *GstMediaScanner::extractAlbumArt (int fd)
#endif
-{
- LOGV("MediaScanner extractAlbumArt %d", fd);
- struct stat statbuf;
- char *data = NULL;
-
- sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
- retriever->setMode(METADATA_MODE_METADATA_RETRIEVAL_ONLY);
- // make stat to get fd size
- fstat(fd, &statbuf);
- retriever->setDataSource(fd, 0, statbuf.st_size);
-
- sp<IMemory> albumArt = retriever->extractAlbumArt();
-
- if(albumArt != NULL) {
- MediaAlbumArt *albumArtCopy = static_cast<MediaAlbumArt *>(albumArt->pointer());
-
- data = (char*)malloc(albumArtCopy->mSize + 4);
- if (data) {
- long *len = (long*)data;
- *len = albumArtCopy->mSize;
- memcpy(data + 4, (char*) albumArtCopy + sizeof(MediaAlbumArt), *len);
- }
- }
- retriever->disconnect();
- return data;
-}
-
-#ifndef STECONF_ANDROID_VERSION_FROYO
-void MediaScanner::uninitializeForThread()
-{
-}
-
-status_t MediaScanner::doProcessDirectory(char *path, int pathRemaining, const char* extensions, MediaScannerClient& client, ExceptionCheck exceptionCheck, void* exceptionEnv)
-{
- //LOGE("MediaScanner doProcessDirectory %s", path);
- // empty function: need to keep compatibility with mediascanner interface
- UNUSED(path);
- UNUSED(pathRemaining);
- UNUSED(extensions);
- UNUSED(client);
- UNUSED(exceptionCheck);
- UNUSED(exceptionEnv);
-
- return OK;
-}
-
-void MediaScanner::initializeForThread()
-{
-}
-
-MediaScannerClient::MediaScannerClient()
- : mNames(NULL),
- mValues(NULL),
- mLocaleEncoding(kEncodingNone)
-{
- LOGV("MediaScannerClient construtor");
-}
-
-MediaScannerClient::~MediaScannerClient()
-{
- LOGV("MediaScannerClient destructor");
- if(mNames != NULL )
- delete mNames;
-
- if(mValues != NULL)
- delete mValues;
-}
-
-void MediaScannerClient::setLocale(const char* locale)
-{
- LOGV("MediaScannerClient set locale %s", locale);
-}
+ {
+ LOGV ("MediaScanner extractAlbumArt %d", fd);
+ struct stat statbuf;
+ char *data = NULL;
+
+ sp < MediaMetadataRetriever > retriever = new MediaMetadataRetriever ();
+ retriever->setMode (METADATA_MODE_METADATA_RETRIEVAL_ONLY);
+ // make stat to get fd size
+ fstat (fd, &statbuf);
+ retriever->setDataSource (fd, 0, statbuf.st_size);
+
+ sp < IMemory > albumArt = retriever->extractAlbumArt ();
+
+ if (albumArt != NULL) {
+ MediaAlbumArt *albumArtCopy =
+ static_cast < MediaAlbumArt * >(albumArt->pointer ());
+
+ data = (char *) malloc (albumArtCopy->mSize + 4);
+ if (data) {
+ long *len = (long *) data;
+ *len = albumArtCopy->mSize;
+ memcpy (data + 4, (char *) albumArtCopy + sizeof (MediaAlbumArt), *len);
+ }
+ }
+ retriever->disconnect ();
+ return data;
+ }
-void MediaScannerClient::beginFile()
-{
- LOGV("MediaScannerClient beginFile");
- mNames = new StringArray;
+#ifndef STECONF_ANDROID_VERSION_FROYO
+ void MediaScanner::uninitializeForThread ()
+ {
+ }
+
+ status_t MediaScanner::doProcessDirectory (char *path, int pathRemaining,
+ const char *extensions, MediaScannerClient & client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv)
+ {
+ //LOGE("MediaScanner doProcessDirectory %s", path);
+ // empty function: need to keep compatibility with mediascanner interface
+ UNUSED (path);
+ UNUSED (pathRemaining);
+ UNUSED (extensions);
+ UNUSED (client);
+ UNUSED (exceptionCheck);
+ UNUSED (exceptionEnv);
+
+ return OK;
+ }
+
+ void MediaScanner::initializeForThread ()
+ {
+ }
+
+ MediaScannerClient::MediaScannerClient ()
+: mNames (NULL), mValues (NULL), mLocaleEncoding (kEncodingNone) {
+ LOGV ("MediaScannerClient constructor");
+ }
+
+ MediaScannerClient::~MediaScannerClient () {
+ LOGV ("MediaScannerClient destructor");
+ if (mNames != NULL)
+ delete mNames;
+
+ if (mValues != NULL)
+ delete mValues;
+ }
+
+ void MediaScannerClient::setLocale (const char *locale)
+ {
+ LOGV ("MediaScannerClient set locale %s", locale);
+ }
+
+ void MediaScannerClient::beginFile ()
+ {
+ LOGV ("MediaScannerClient beginFile");
+ mNames = new StringArray;
mValues = new StringArray;
-}
+ }
+
+ bool MediaScannerClient::addStringTag (const char *name, const char *value)
+ {
+ LOGV ("MediaScannerClient addStringTag %s : %s", name, value);
+ mNames->push_back (name);
+ mValues->push_back (value);
+ return true;
+ }
+
+ void MediaScannerClient::endFile ()
+ {
+ LOGV ("MediaScannerClient endFile");
+ // finally, push all name/value pairs to the client
+
+ if (mNames) {
+ for (int i = 0; i < mNames->size (); i++) {
+ if (!handleStringTag (mNames->getEntry (i), mValues->getEntry (i))) {
+ break;
+ }
+ }
+ }
-bool MediaScannerClient::addStringTag(const char* name, const char* value)
-{
- LOGV("MediaScannerClient addStringTag %s : %s", name, value);
- mNames->push_back(name);
- mValues->push_back(value);
- return true;
-}
-
-void MediaScannerClient::endFile()
-{
- LOGV("MediaScannerClient endFile");
- // finally, push all name/value pairs to the client
-
- if(mNames) {
- for (int i = 0; i < mNames->size(); i++) {
- if (!handleStringTag(mNames->getEntry(i), mValues->getEntry(i))) {
- break;
- }
- }
- }
-
- if(mNames != NULL)
- delete mNames;
-
- if(mValues != NULL)
- delete mValues;
+ if (mNames != NULL)
+ delete mNames;
+
+ if (mValues != NULL)
+ delete mValues;
mNames = NULL;
mValues = NULL;
-}
-
-void MediaScannerClient::convertValues(uint32_t encoding)
-{
- LOGV("MediaScannerClient convertValues %d", encoding);
-}
-#else
-void GstMediaScannerClient::setLocale(const char* locale)
-{
- LOGV("GstMediaScannerClient set locale %s", locale);
- MediaScannerClient::setLocale (locale);
-}
-
-void GstMediaScannerClient::beginFile()
-{
- LOGV("GstMediaScannerClient beginFile");
- MediaScannerClient::beginFile();
-}
+ }
-bool GstMediaScannerClient::addStringTag(const char* name, const char* value)
-{
- LOGV("GstMediaScannerClient addStringTag %s : %s", name, value);
- return MediaScannerClient::addStringTag (name, value);
-}
-
-void GstMediaScannerClient::endFile()
-{
- LOGV("GstMediaScannerClient endFile");
- MediaScannerClient::endFile();
-}
-
-void GstMediaScannerClient::convertValues(uint32_t encoding)
-{
- LOGV("GstMediaScannerClient convertValues %d", encoding);
- MediaScannerClient::convertValues (encoding);
-}
+ void MediaScannerClient::convertValues (uint32_t encoding)
+ {
+ LOGV ("MediaScannerClient convertValues %d", encoding);
+ }
+#else
+ void GstMediaScannerClient::setLocale (const char *locale)
+ {
+ LOGV ("GstMediaScannerClient set locale %s", locale);
+ MediaScannerClient::setLocale (locale);
+ }
+
+ void GstMediaScannerClient::beginFile ()
+ {
+ LOGV ("GstMediaScannerClient beginFile");
+ MediaScannerClient::beginFile ();
+ }
+
+ bool GstMediaScannerClient::addStringTag (const char *name, const char *value)
+ {
+ LOGV ("GstMediaScannerClient addStringTag %s : %s", name, value);
+ return MediaScannerClient::addStringTag (name, value);
+ }
+
+ void GstMediaScannerClient::endFile ()
+ {
+ LOGV ("GstMediaScannerClient endFile");
+ MediaScannerClient::endFile ();
+ }
+
+ void GstMediaScannerClient::convertValues (uint32_t encoding)
+ {
+ LOGV ("GstMediaScannerClient convertValues %d", encoding);
+ MediaScannerClient::convertValues (encoding);
+ }
/*
* place holder for functions
*/
-bool GstMediaScannerClient::scanFile(const char* path, long long lastModified, long long fileSize) {
- LOGV("GstMediaScannerClient scanFile");
- return false;
-}
-
-bool GstMediaScannerClient::handleStringTag(const char* name, const char* value) {
- LOGV("GstMediaScannerClient handleStringTag");
- return false;
-}
-
-bool GstMediaScannerClient::setMimeType(const char* mimeType) {
- LOGV("GstMediaScannerClient setMimeType");
- return false;
-}
-
-bool GstMediaScannerClient::addNoMediaFolder(const char* path) {
- LOGV("GstMediaScannerClient addNoMediaFolder");
- return false;
-}
+ bool GstMediaScannerClient::scanFile (const char *path,
+ long long lastModified, long long fileSize)
+ {
+ LOGV ("GstMediaScannerClient scanFile");
+ return false;
+ }
+
+ bool GstMediaScannerClient::handleStringTag (const char *name,
+ const char *value)
+ {
+ LOGV ("GstMediaScannerClient handleStringTag");
+ return false;
+ }
+
+ bool GstMediaScannerClient::setMimeType (const char *mimeType)
+ {
+ LOGV ("GstMediaScannerClient setMimeType");
+ return false;
+ }
+
+ bool GstMediaScannerClient::addNoMediaFolder (const char *path)
+ {
+ LOGV ("GstMediaScannerClient addNoMediaFolder");
+ return false;
+ }
#endif
-}; //namespace android
+}; //namespace android
#endif // GSTMEDIASCANNER
diff --git a/gstplayer/GstMediaScanner.h b/gstplayer/GstMediaScanner.h
index b431105..1d07ea8 100755
--- a/gstplayer/GstMediaScanner.h
+++ b/gstplayer/GstMediaScanner.h
@@ -22,43 +22,48 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <media/mediascanner.h>
-namespace android {
+namespace android
+{
-struct GstMediaScanner : public MediaScanner {
- GstMediaScanner();
- virtual ~GstMediaScanner();
+ struct GstMediaScanner:public MediaScanner
+ {
+ GstMediaScanner ();
+ virtual ~ GstMediaScanner ();
- virtual status_t processFile(
- const char *path, const char *mimeType,
- MediaScannerClient &client);
+ virtual status_t processFile (const char *path, const char *mimeType,
+ MediaScannerClient & client);
- virtual char *extractAlbumArt(int fd);
+ virtual char *extractAlbumArt (int fd);
-};
+ };
-struct GstMediaScannerClient : public MediaScannerClient {
- GstMediaScannerClient() { // call MediaScannerClient::MediaScannerClient
- LOGV("GstMediaScannerClient construtor");
+ struct GstMediaScannerClient:public MediaScannerClient
+ {
+ GstMediaScannerClient ()
+ { // call MediaScannerClient::MediaScannerClient
+      LOGV ("GstMediaScannerClient constructor");
}
- ~GstMediaScannerClient() { // call MediaScanner::~MediaScanner
- LOGV("GstMediaScannerClient destructor");
+ ~GstMediaScannerClient ()
+ { // call MediaScanner::~MediaScanner
+ LOGV ("GstMediaScannerClient destructor");
}
// non-virtual functions
- void setLocale(const char* locale);
- void beginFile();
- bool addStringTag(const char* name, const char* value);
- void endFile();
- void convertValues(uint32_t encoding);
+ void setLocale (const char *locale);
+ void beginFile ();
+ bool addStringTag (const char *name, const char *value);
+ void endFile ();
+ void convertValues (uint32_t encoding);
// pure virtual functions
- bool scanFile(const char* path, long long lastModified, long long fileSize);
- bool handleStringTag(const char* name, const char* value);
- bool setMimeType(const char* mimeType);
- bool addNoMediaFolder(const char* path);
-};
+ bool scanFile (const char *path, long long lastModified,
+ long long fileSize);
+ bool handleStringTag (const char *name, const char *value);
+ bool setMimeType (const char *mimeType);
+ bool addNoMediaFolder (const char *path);
+ };
-} // namespace android
+} // namespace android
#endif // GSTMEDIASCANNER_H
diff --git a/gstplayer/GstMetadataRetriever.cpp b/gstplayer/GstMetadataRetriever.cpp
index 78b6f8f..83ed36e 100644
--- a/gstplayer/GstMetadataRetriever.cpp
+++ b/gstplayer/GstMetadataRetriever.cpp
@@ -25,304 +25,310 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include "GstMetadataRetriever.h"
#include <utils/Log.h>
-namespace android {
-
-static GStaticMutex GstMetadataRetriever_mutex = G_STATIC_MUTEX_INIT;
-
-GstMetadataRetriever::GstMetadataRetriever()
+namespace android
{
- mMode = METADATA_MODE_METADATA_RETRIEVAL_ONLY | METADATA_MODE_FRAME_CAPTURE_ONLY;
- mLocked = 0 ;
-
- LOGV("GstMetadataRetriever constructor");
- //g_static_mutex_lock (&GstMetadataRetriever_mutex);
- mGstDriver = new GstMetadataRetrieverDriver();
- LOGV("GstMetadataRetriever constructor exit");
-}
-GstMetadataRetriever::~GstMetadataRetriever()
-{
- LOGV("GstMetadataRetriever destructor");
- mGstDriver->quit();
- if(mGstDriver) {
- delete mGstDriver;
- }
- mGstDriver = NULL;
- if (mLocked) {
- LOGV("GstMetadataRetriever destructor deactivate video protection");
- g_static_mutex_unlock(&GstMetadataRetriever_mutex);
- }
-}
-
-
-status_t GstMetadataRetriever::setDataSource(const char *url)
-{
- status_t ret = OK;
- int status = 0;
+ static GStaticMutex GstMetadataRetriever_mutex = G_STATIC_MUTEX_INIT;
+
+ GstMetadataRetriever::GstMetadataRetriever ()
+ {
+ mMode =
+ METADATA_MODE_METADATA_RETRIEVAL_ONLY |
+ METADATA_MODE_FRAME_CAPTURE_ONLY;
+ mLocked = 0;
+
+ LOGV ("GstMetadataRetriever constructor");
+ //g_static_mutex_lock (&GstMetadataRetriever_mutex);
+ mGstDriver = new GstMetadataRetrieverDriver ();
+ LOGV ("GstMetadataRetriever constructor exit");
+ }
+
+ GstMetadataRetriever::~GstMetadataRetriever ()
+ {
+ LOGV ("GstMetadataRetriever destructor");
+ mGstDriver->quit ();
+ if (mGstDriver) {
+ delete mGstDriver;
+ }
+ mGstDriver = NULL;
+ if (mLocked) {
+ LOGV ("GstMetadataRetriever destructor deactivate video protection");
+ g_static_mutex_unlock (&GstMetadataRetriever_mutex);
+ }
+ }
+
+
+ status_t GstMetadataRetriever::setDataSource (const char *url)
+ {
+ status_t ret = OK;
+ int status = 0;
+
+ LOGV ("GstMetadataRetriever setDataSource %s", url);
+
+ mGstDriver->setDataSource (url);
+ mGstDriver->setup (mMode);
+ mGstDriver->prepareSync ();
+
+ status = mGstDriver->getStatus ();
+
+ LOGV ("GstMetadataRetriever setDataSource %s",
+ (status == GST_STATE_PAUSED) ? "OK" : "not correct state");
+ if (status != GST_STATE_PAUSED)
+ ret = UNKNOWN_ERROR;
- LOGV("GstMetadataRetriever setDataSource %s", url);
+ return ret;
+ }
+
+ status_t GstMetadataRetriever::setDataSource (int fd, int64_t offset,
+ int64_t length)
+ {
+ status_t ret = OK;
+ int status = 0;
+
+    LOGV ("GstMetadataRetriever setDataSource fd=%d offset=%lld length=%lld",
+ fd, offset, length);
+ mGstDriver->setFdDataSource (fd, offset, length);
+ mGstDriver->setup (mMode);
+ mGstDriver->prepareSync ();
+
+ status = mGstDriver->getStatus ();
+ LOGV ("GstMetadataRetriever setDataSource %s:%d",
+ (status == GST_STATE_PAUSED) ? "OK" : "not correct state", status);
+ if (status != GST_STATE_PAUSED)
+ return UNKNOWN_ERROR;
- mGstDriver->setDataSource(url);
- mGstDriver->setup(mMode);
- mGstDriver->prepareSync();
+ return ret;
+ }
+
+
+ status_t GstMetadataRetriever::setMode (int mode)
+ {
+ LOGV ("GstMetadataRetriever setMode mode=%d", mode);
+ if (mode < METADATA_MODE_NOOP ||
+ mode > METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL) {
+ LOGE ("set to invalid mode (%d)", mode);
+ return BAD_VALUE;
+ }
+
+ if (mode & METADATA_MODE_FRAME_CAPTURE_ONLY) {
+ if (!mLocked) {
+ LOGV ("GstMetadataRetriever setMode activate video protection");
+ g_static_mutex_lock (&GstMetadataRetriever_mutex);
+ LOGV ("Lock on GstMetadataRetriever acquired");
+ mLocked = 1;
+ } else {
+ LOGV ("GstMetadataRetriever::setMode video protection already activated");
+ }
+ } else { /* ! mode & METADATA_MODE_FRAME_CAPTURE_ONLY */
+ if (mLocked) {
+ LOGV ("GstMetadataRetriever::setMode deactivate video protection");
+ g_static_mutex_unlock (&GstMetadataRetriever_mutex);
+ mLocked = 0;
+ } else {
+ LOGV ("GstMetadataRetriever::setMode video protection already deactivated");
+ }
+ }
+ mMode = mode;
+ return OK;
+ }
+
+ status_t GstMetadataRetriever::getMode (int *mode) const
+ {
+ *mode = mMode;
+    LOGV ("GstMetadataRetriever getMode mode %d", *mode);
+ return OK;
+ }
+
+ VideoFrame *GstMetadataRetriever::captureFrame ()
+ {
+ int width, height, size;
+ VideoFrame *vFrame = NULL;
+ gint64 duration;
+
+ LOGV ("GstMetadataRetriever captureFrame");
+
+ if (!mLocked) {
+ LOGE ("GstMetadataRetriever captureFrame video protection not activated => ERROR");
+ return (NULL);
+ }
+ mGstDriver->getVideoSize (&width, &height);
+
+ LOGV ("GstMetadataRetriever captureFrame get video size %d x %d", width,
+ height);
+ // compute data size
+ // FIXME: Check the Framebuffer color depth (if != RGB565)
+ size = width * height * 2; // RGB565
+
+ duration = mGstDriver->getDuration ();
+ if (duration) {
+ mGstDriver->seekSync (duration / 20);
+ }
+
+ if (size > 0) {
+ vFrame = new VideoFrame ();
+
+ vFrame->mWidth = width;
+ vFrame->mHeight = height;
+ vFrame->mDisplayWidth = width;
+ vFrame->mDisplayHeight = height;
+ vFrame->mSize = size;
+ vFrame->mData = 0;
+
+ mGstDriver->getCaptureFrame (&(vFrame->mData));
+
+ if (vFrame->mData == 0) {
+        LOGV ("GstMetadataRetriever can't allocate memory for video frame");
+ delete vFrame;
+ vFrame = NULL;
+ }
+ }
+
+ return vFrame;
+ }
+
+ MediaAlbumArt *GstMetadataRetriever::extractAlbumArt ()
+ {
+ LOGV ("GstMetadataRetriever extractAlbumArt");
+ guint8 *data = NULL;
+ guint64 size = 0;
+
+ mGstDriver->getAlbumArt (&data, &size);
+
+    LOGV ("From extract AlbumArt: Data:%p Size:%llu\n", data, size);
+ if (data && size) {
+ MediaAlbumArt *albumArt = new MediaAlbumArt ();
+ albumArt->mSize = size;
+ albumArt->mData = new uint8_t[size];
+ memcpy (albumArt->mData, data, size);
+      return albumArt;          // must be freed by caller
+ }
+
+ if (mLocked) {
+ LOGV ("No AlbumArt data, releasing video protection lock");
+ g_static_mutex_unlock (&GstMetadataRetriever_mutex);
+ mLocked = 0;
+ }
+
+ return NULL;
+ }
+
+ const char *GstMetadataRetriever::extractMetadata (int keyCode)
+ {
+ char *tag;
+ char *ret;
+ int msec;
+ char *duration;
+
+ LOGV ("GstMetadataRetriever keyCode=%d", keyCode);
+
+ switch (keyCode) {
+ case METADATA_KEY_CD_TRACK_NUMBER:
+ tag = strdup (GST_TAG_TRACK_NUMBER);
+ break;
+ case METADATA_KEY_ALBUM:
+ tag = strdup (GST_TAG_ALBUM);
+ break;
+ case METADATA_KEY_AUTHOR:
+ case METADATA_KEY_ARTIST:
+ tag = strdup (GST_TAG_ARTIST);
+ break;
+ case METADATA_KEY_COMPOSER:
+ tag = strdup (GST_TAG_COMPOSER);
+ break;
+ case METADATA_KEY_YEAR:
+ case METADATA_KEY_DATE:
+ tag = strdup (GST_TAG_DATE);
+ break;
+ case METADATA_KEY_GENRE:
+ tag = strdup (GST_TAG_GENRE);
+ break;
+ case METADATA_KEY_TITLE:
+ tag = strdup (GST_TAG_TITLE);
+ break;
+ case METADATA_KEY_DURATION:
+ // Use Gst GetDuration instead of Tag one.
+ msec = mGstDriver->getDuration ();
+ duration = (char *) malloc (sizeof (char *) * 55);
+ sprintf (duration, "%d", msec);
+ return duration;
+ // tag = strdup(GST_TAG_DURATION);
+ break;
+ case METADATA_KEY_NUM_TRACKS:
+ tag = strdup (GST_TAG_TRACK_COUNT);
+ break;
+ case METADATA_KEY_COMMENT:
+ tag = strdup (GST_TAG_COMMENT);
+ break;
+ case METADATA_KEY_COPYRIGHT:
+ tag = strdup (GST_TAG_COPYRIGHT);
+ break;
+ case METADATA_KEY_CODEC:
+ tag = strdup (GST_TAG_CODEC);
+ break;
+ case METADATA_KEY_BIT_RATE:
+ tag = strdup (GST_TAG_BITRATE);
+ break;
+ case METADATA_KEY_VIDEO_HEIGHT:
+ {
+ int width, height;
+ char *res;
+ mGstDriver->getVideoSize (&width, &height);
+ res = (char *) malloc (sizeof (char) * 55);
+ sprintf (res, "%d", height);
+ return res;
+ }
+ case METADATA_KEY_VIDEO_WIDTH:
+ {
+ int width, height;
+ char *res;
+ mGstDriver->getVideoSize (&width, &height);
+ res = (char *) malloc (sizeof (char) * 55);
+ sprintf (res, "%d", width);
+ return res;
+ }
+ case METADATA_KEY_VIDEO_FORMAT:
+ tag = strdup (GST_TAG_VIDEO_CODEC);
+ break;
+
+ case METADATA_KEY_FRAME_RATE:
+ {
+ int framerate;
+ char *res;
+ mGstDriver->getFrameRate (&framerate);
+ res = (char *) malloc (sizeof (char) * 55);
+ sprintf (res, "%d", framerate);
+ return res;
+ }
+#ifdef STECONF_ANDROID_VERSION_FROYO
+ case METADATA_KEY_WRITER:
+ tag = strdup (GST_TAG_COMPOSER);
+ break;
+ case METADATA_KEY_MIMETYPE:
+ tag = strdup (GST_TAG_CODEC);
+ break;
+ case METADATA_KEY_DISC_NUMBER:
+ tag = strdup (GST_TAG_ALBUM_VOLUME_NUMBER);
+ break;
+ case METADATA_KEY_ALBUMARTIST:
+ tag = strdup (GST_TAG_ALBUM_ARTIST);
+ break;
+#endif
+ case METADATA_KEY_IS_DRM_CRIPPLED:
+ case METADATA_KEY_RATING:
+ default:
+ LOGV ("unsupported metadata keycode %d", keyCode);
+ return NULL;
+ }
- status = mGstDriver->getStatus();
+ LOGV ("GstMetadataRetriever send request for |%s| ", tag);
- LOGV("GstMetadataRetriever setDataSource %s", (status == GST_STATE_PAUSED)? "OK": "not correct state");
- if(status != GST_STATE_PAUSED)
- ret = UNKNOWN_ERROR;
- return ret;
-}
+ ret = mGstDriver->getMetadata (tag);
-status_t GstMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
-{
- status_t ret = OK;
- int status = 0;
-
- LOGV("GstMetadataRetriever setDataSource fd=%d offset=%lld lenght=%lld", fd, offset, length);
- mGstDriver->setFdDataSource(fd, offset, length);
- mGstDriver->setup(mMode);
- mGstDriver->prepareSync();
-
- status = mGstDriver->getStatus();
- LOGV("GstMetadataRetriever setDataSource %s:%d", (status == GST_STATE_PAUSED)? "OK": "not correct state", status);
- if(status != GST_STATE_PAUSED)
- return UNKNOWN_ERROR;
+ LOGV ("GstMetadataRetriever tag %s metadata %s", tag, ret);
+ g_free (tag);
return ret;
-}
+ }
-
-status_t GstMetadataRetriever::setMode(int mode)
-{
- LOGV("GstMetadataRetriever setMode mode=%d", mode);
- if (mode < METADATA_MODE_NOOP ||
- mode > METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL)
- {
- LOGE("set to invalid mode (%d)", mode);
- return BAD_VALUE;
- }
-
- if (mode & METADATA_MODE_FRAME_CAPTURE_ONLY) {
- if (!mLocked) {
- LOGV("GstMetadataRetriever setMode activate video protection");
- g_static_mutex_lock (&GstMetadataRetriever_mutex);
- LOGV("Lock on GstMetadataRetriever acquired");
- mLocked = 1 ;
- } else {
- LOGV("GstMetadataRetriever::setMode video protection already activated");
- }
- } else { /* ! mode & METADATA_MODE_FRAME_CAPTURE_ONLY */
- if (mLocked) {
- LOGV("GstMetadataRetriever::setMode deactivate video protection");
- g_static_mutex_unlock (&GstMetadataRetriever_mutex);
- mLocked = 0 ;
- } else {
- LOGV("GstMetadataRetriever::setMode video protection already deactivated");
- }
- }
- mMode = mode;
- return OK;
-}
-
-status_t GstMetadataRetriever::getMode(int* mode) const
-{
- *mode = mMode;
- LOGV("GstMetadataRetriever getMode mode%d", *mode);
- return OK;
-}
-
-VideoFrame* GstMetadataRetriever::captureFrame()
-{
- int width, height, size;
- VideoFrame *vFrame = NULL;
- gint64 duration;
-
- LOGV("GstMetadataRetriever captureFrame");
-
- if (!mLocked) {
- LOGE("GstMetadataRetriever captureFrame video protection not activated => ERROR");
- return (NULL);
- }
- mGstDriver->getVideoSize(&width, &height);
-
- LOGV("GstMetadataRetriever captureFrame get video size %d x %d", width, height);
- // compute data size
- // FIXME: Check the Framebuffer color depth (if != RGB565)
- size = width * height * 2; // RGB565
-
- duration = mGstDriver->getDuration();
- if(duration) {
- mGstDriver->seekSync(duration/20);
- }
-
- if(size > 0) {
- vFrame = new VideoFrame();
-
- vFrame->mWidth = width;
- vFrame->mHeight = height;
- vFrame->mDisplayWidth = width;
- vFrame->mDisplayHeight = height;
- vFrame->mSize = size;
- vFrame->mData = 0;
-
- mGstDriver->getCaptureFrame(&(vFrame->mData));
-
- if(vFrame->mData == 0) {
- LOGV("GstMetadataRetriever cant' allocate memory for video frame");
- delete vFrame;
- vFrame = NULL;
- }
- }
-
- return vFrame;
-}
-
-MediaAlbumArt* GstMetadataRetriever::extractAlbumArt()
-{
- LOGV("GstMetadataRetriever extractAlbumArt");
- guint8* data = NULL;
- guint64 size = 0;
-
- mGstDriver->getAlbumArt(&data, &size);
-
- LOGV("From extract AlbumArt: Data:%d Size:%d\n", data, size);
- if(data && size) {
- MediaAlbumArt* albumArt = new MediaAlbumArt();
- albumArt->mSize = size;
- albumArt->mData = new uint8_t[size];
- memcpy(albumArt->mData, data, size);
- return albumArt; // must free by caller
- }
-
- if (mLocked) {
- LOGV("No AlbumArt data, releasing video protection lock");
- g_static_mutex_unlock(&GstMetadataRetriever_mutex);
- mLocked = 0;
- }
-
- return NULL;
-}
-
-const char* GstMetadataRetriever::extractMetadata(int keyCode)
-{
- char * tag;
- char * ret;
- int msec;
- char* duration;
-
- LOGV("GstMetadataRetriever keyCode=%d", keyCode);
-
- switch (keyCode)
- {
- case METADATA_KEY_CD_TRACK_NUMBER:
- tag = strdup(GST_TAG_TRACK_NUMBER);
- break;
- case METADATA_KEY_ALBUM:
- tag = strdup(GST_TAG_ALBUM);
- break;
- case METADATA_KEY_AUTHOR:
- case METADATA_KEY_ARTIST:
- tag = strdup(GST_TAG_ARTIST);
- break;
- case METADATA_KEY_COMPOSER:
- tag = strdup(GST_TAG_COMPOSER);
- break;
- case METADATA_KEY_YEAR:
- case METADATA_KEY_DATE:
- tag = strdup(GST_TAG_DATE);
- break;
- case METADATA_KEY_GENRE:
- tag = strdup(GST_TAG_GENRE);
- break;
- case METADATA_KEY_TITLE:
- tag = strdup(GST_TAG_TITLE);
- break;
- case METADATA_KEY_DURATION:
- // Use Gst GetDuration instead of Tag one.
- msec = mGstDriver->getDuration();
- duration = (char *)malloc(sizeof(char *)*55);
- sprintf(duration,"%d",msec);
- return duration;
- // tag = strdup(GST_TAG_DURATION);
- break;
- case METADATA_KEY_NUM_TRACKS:
- tag = strdup(GST_TAG_TRACK_COUNT);
- break;
- case METADATA_KEY_COMMENT:
- tag = strdup(GST_TAG_COMMENT);
- break;
- case METADATA_KEY_COPYRIGHT:
- tag = strdup(GST_TAG_COPYRIGHT);
- break;
- case METADATA_KEY_CODEC:
- tag = strdup(GST_TAG_CODEC);
- break;
- case METADATA_KEY_BIT_RATE:
- tag = strdup(GST_TAG_BITRATE);
- break;
- case METADATA_KEY_VIDEO_HEIGHT:
- {
- int width, height;
- char *res;
- mGstDriver->getVideoSize(&width, &height);
- res = (char *)malloc(sizeof(char)*55);
- sprintf(res,"%d",height);
- return res;
- }
- case METADATA_KEY_VIDEO_WIDTH:
- {
- int width, height;
- char *res;
- mGstDriver->getVideoSize(&width, &height);
- res = (char *)malloc(sizeof(char)*55);
- sprintf(res,"%d",width);
- return res;
- }
- case METADATA_KEY_VIDEO_FORMAT:
- tag = strdup(GST_TAG_VIDEO_CODEC);
- break;
-
- case METADATA_KEY_FRAME_RATE:
- {
- int framerate;
- char *res;
- mGstDriver->getFrameRate(&framerate);
- res = (char *)malloc(sizeof(char)*55);
- sprintf(res,"%d",framerate);
- return res;
- }
-#ifdef STECONF_ANDROID_VERSION_FROYO
- case METADATA_KEY_WRITER:
- tag = strdup(GST_TAG_COMPOSER);
- break;
- case METADATA_KEY_MIMETYPE:
- tag = strdup(GST_TAG_CODEC);
- break;
- case METADATA_KEY_DISC_NUMBER:
- tag = strdup(GST_TAG_ALBUM_VOLUME_NUMBER);
- break;
- case METADATA_KEY_ALBUMARTIST:
- tag = strdup(GST_TAG_ALBUM_ARTIST);
- break;
-#endif
- case METADATA_KEY_IS_DRM_CRIPPLED:
- case METADATA_KEY_RATING:
- default:
- LOGV("unsupported metadata keycode %d", keyCode);
- return NULL;
- }
-
- LOGV("GstMetadataRetriever send request for |%s| ", tag);
-
-
- ret = mGstDriver->getMetadata(tag);
-
- LOGV("GstMetadataRetriever tag %s metadata %s", tag, ret);
- g_free(tag);
-
- return ret;
-}
-
-}; // namespace android
+}; // namespace android
diff --git a/gstplayer/GstMetadataRetriever.h b/gstplayer/GstMetadataRetriever.h
index c54df1a..7ed5f2b 100644
--- a/gstplayer/GstMetadataRetriever.h
+++ b/gstplayer/GstMetadataRetriever.h
@@ -25,31 +25,42 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <media/MediaMetadataRetrieverInterface.h>
#include "GstMetadataRetrieverDriver.h"
-namespace android {
-
-class GstMetadataRetriever : public MediaMetadataRetrieverInterface
+namespace android
{
-public:
- GstMetadataRetriever();
- virtual ~GstMetadataRetriever();
-
- virtual status_t setDataSource(const char *url);
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setMode(int mode);
- virtual status_t getMode(int* mode) const;
- virtual VideoFrame* captureFrame();
- virtual MediaAlbumArt* extractAlbumArt();
- virtual const char* extractMetadata(int keyCode);
-
-private:
- static void do_nothing(status_t s, void *cookie, bool cancelled) { if(s) { /* warning removal*/ } if(cookie) { /* warning removal*/ } if(cancelled) { /* warning removal*/ } }
-
- GstMetadataRetrieverDriver* mGstDriver;
- int mMode;
- int mLocked ;
-
-};
-
-}; // namespace android
+
+ class GstMetadataRetriever:public MediaMetadataRetrieverInterface
+ {
+ public:
+ GstMetadataRetriever ();
+ virtual ~ GstMetadataRetriever ();
+
+ virtual status_t setDataSource (const char *url);
+ virtual status_t setDataSource (int fd, int64_t offset, int64_t length);
+ virtual status_t setMode (int mode);
+ virtual status_t getMode (int *mode) const;
+ virtual VideoFrame *captureFrame ();
+ virtual MediaAlbumArt *extractAlbumArt ();
+ virtual const char *extractMetadata (int keyCode);
+
+ private:
+ static void do_nothing (status_t s, void *cookie, bool cancelled)
+ {
+ if (s) { /* warning removal */
+ }
+ if (cookie)
+ { /* warning removal */
+ }
+ if (cancelled)
+ { /* warning removal */
+ }
+ }
+
+ GstMetadataRetrieverDriver *mGstDriver;
+ int mMode;
+ int mLocked;
+
+ };
+
+}; // namespace android
#endif // GST_METADATARETRIEVER_H
diff --git a/gstplayer/GstMetadataRetrieverDriver.cpp b/gstplayer/GstMetadataRetrieverDriver.cpp
index 274d3b2..5098187 100644
--- a/gstplayer/GstMetadataRetrieverDriver.cpp
+++ b/gstplayer/GstMetadataRetrieverDriver.cpp
@@ -33,717 +33,759 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
using namespace android;
-static GstStaticCaps static_audio_caps = GST_STATIC_CAPS ("audio/mpeg,framed=true;audio/mpeg,parsed=true;audio/AMR;audio/AMR-WB;audio/x-wma;audio/midi;audio/mobile-xmf");
-static GstStaticCaps static_video_caps = GST_STATIC_CAPS ("video/mpeg;video/x-h263;video/x-h264;video/x-divx;video/x-wmv");
-
-static GstStaticCaps static_have_video_caps = GST_STATIC_CAPS ("video/x-raw-yuv;video/x-raw-rgb");
-
-
-GstMetadataRetrieverDriver::GstMetadataRetrieverDriver():
- mPipeline(NULL),
- mAppsrc(NULL),
- mColorTransform(NULL),
- mScaler(NULL),
- mPlayBin(NULL),
- mAppSink(NULL),
- mAudioSink(NULL),
- mUri(NULL),
- mTag_list(NULL),
- mFdSrcOffset_min(0), mFdSrcOffset_max(0),
- mFdSrcOffset_current(0), mFd(-1),
- mState(GST_STATE_NULL),
- mAlbumArt(NULL)
+static GstStaticCaps static_audio_caps =
+ GST_STATIC_CAPS
+ ("audio/mpeg,framed=true;audio/mpeg,parsed=true;audio/AMR;audio/AMR-WB;audio/x-wma;audio/midi;audio/mobile-xmf");
+static GstStaticCaps static_video_caps =
+ GST_STATIC_CAPS
+ ("video/mpeg;video/x-h263;video/x-h264;video/x-divx;video/x-wmv");
+
+static GstStaticCaps static_have_video_caps =
+ GST_STATIC_CAPS ("video/x-raw-yuv;video/x-raw-rgb");
+
+
+GstMetadataRetrieverDriver::GstMetadataRetrieverDriver ():
+mPipeline (NULL),
+mAppsrc (NULL),
+mColorTransform (NULL),
+mScaler (NULL),
+mPlayBin (NULL),
+mAppSink (NULL),
+mAudioSink (NULL),
+mUri (NULL),
+mTag_list (NULL),
+mFdSrcOffset_min (0), mFdSrcOffset_max (0),
+mFdSrcOffset_current (0), mFd (-1), mState (GST_STATE_NULL), mAlbumArt (NULL)
{
- LOGV("constructor");
+ LOGV ("constructor");
- mHaveStreamVideo = false;
+ mHaveStreamVideo = false;
- init_gstreamer();
+ init_gstreamer ();
}
-GstMetadataRetrieverDriver::~GstMetadataRetrieverDriver()
+GstMetadataRetrieverDriver::~GstMetadataRetrieverDriver ()
{
- LOGV("destructor");
-
- if(mTag_list) {
- LOGV("free tag list");
- gst_tag_list_free (mTag_list);
- mTag_list = NULL;
- }
-
- if(mAlbumArt) {
- gst_buffer_unref(mAlbumArt);
- mAlbumArt = NULL;
- }
-
- if(mPipeline) {
- LOGV("free pipeline %s", GST_ELEMENT_NAME(mPipeline));
- gst_element_set_state(mPipeline, GST_STATE_NULL);
- gst_object_unref (mPipeline);
- mPipeline = NULL;
- }
-
- if(mFd != -1) {
- close(mFd);
- }
-
- if(mUri) {
- g_free(mUri);
- }
+ LOGV ("destructor");
+
+ if (mTag_list) {
+ LOGV ("free tag list");
+ gst_tag_list_free (mTag_list);
+ mTag_list = NULL;
+ }
+
+ if (mAlbumArt) {
+ gst_buffer_unref (mAlbumArt);
+ mAlbumArt = NULL;
+ }
+
+ if (mPipeline) {
+ LOGV ("free pipeline %s", GST_ELEMENT_NAME (mPipeline));
+ gst_element_set_state (mPipeline, GST_STATE_NULL);
+ gst_object_unref (mPipeline);
+ mPipeline = NULL;
+ }
+
+ if (mFd != -1) {
+ close (mFd);
+ }
+
+ if (mUri) {
+ g_free (mUri);
+ }
}
void
-GstMetadataRetrieverDriver::cb_newpad(GstElement *mPlayBin, GstPad *pad,
- GstMetadataRetrieverDriver *data)
+GstMetadataRetrieverDriver::cb_newpad (GstElement * mPlayBin, GstPad * pad,
+ GstMetadataRetrieverDriver * data)
{
- GstCaps *caps;
- GstStructure *str;
- gboolean err = true;
-
- caps = gst_pad_get_caps (pad);
- str = gst_caps_get_structure (caps, 0);
- if (g_strrstr (gst_structure_get_name (str), "audio")) {
- LOGI ("cb_newpad Called for an audio pad");
- err = gst_element_link (data->mPlayBin, data->mAudioSink);
- }
- else
- if (g_strrstr (gst_structure_get_name (str), "video")) {
- LOGI ("cb_newpad Called for a video pad");
- err = gst_element_link (data->mPlayBin, data->mColorTransform);
- }
-
- if (!err)
- LOGE ("Could not link %s with %s", GST_ELEMENT_NAME (data->mPlayBin),
- GST_ELEMENT_NAME (data->mAudioSink?
- data->mAudioSink : data->mColorTransform));
-
- gst_caps_unref (caps);
-
- return;
+ GstCaps *caps;
+ GstStructure *str;
+ gboolean err = true;
+
+ caps = gst_pad_get_caps (pad);
+ str = gst_caps_get_structure (caps, 0);
+ if (g_strrstr (gst_structure_get_name (str), "audio")) {
+ LOGI ("cb_newpad Called for an audio pad");
+ err = gst_element_link (data->mPlayBin, data->mAudioSink);
+ } else if (g_strrstr (gst_structure_get_name (str), "video")) {
+ LOGI ("cb_newpad Called for a video pad");
+ err = gst_element_link (data->mPlayBin, data->mColorTransform);
+ }
+
+ if (!err)
+ LOGE ("Could not link %s with %s", GST_ELEMENT_NAME (data->mPlayBin),
+ GST_ELEMENT_NAME (data->mAudioSink ?
+ data->mAudioSink : data->mColorTransform));
+
+ gst_caps_unref (caps);
+
+ return;
}
-void GstMetadataRetrieverDriver::setup(int mode)
+void
+GstMetadataRetrieverDriver::setup (int mode)
{
- gchar *description = NULL;
- GError *error = NULL;
- mMode = mode;
-
- if(mMode & METADATA_MODE_FRAME_CAPTURE_ONLY) {
- LOGI("Called in METADATA_MODE_FRAME_CAPTURE_ONLY mode");
- LOGI("For URI:%s", mUri);
- mPipeline = gst_pipeline_new ("pipeline");
- mColorTransform = gst_element_factory_make ("ffmpegcolorspace", NULL);
- mScaler = gst_element_factory_make ("videoscale", NULL);
- mPlayBin = gst_element_factory_make ("uridecodebin", "src");
- mAppSink = gst_element_factory_make ("appsink", "sink");
- mAudioSink = gst_element_factory_make ("fakesink", NULL);
-
- g_object_set (G_OBJECT (mPlayBin), "uri", mUri, NULL);
- g_object_set (G_OBJECT (mAppSink), "enable-last-buffer", "true", NULL);
-
- gst_bin_add_many (GST_BIN (mPipeline), mPlayBin, mColorTransform,
- mAudioSink, mScaler, mAppSink, NULL);
-
- if (!gst_element_link (mColorTransform, mScaler))
- LOGE("Failed to link %s to %s",
- GST_ELEMENT_NAME (mColorTransform),
- GST_ELEMENT_NAME (mScaler));
-
- if (!gst_element_link (mScaler, mAppSink))
- LOGE("Failed to link %s to %s",
- GST_ELEMENT_NAME (mScaler),
- GST_ELEMENT_NAME (mAppSink));
-
- g_signal_connect (mPlayBin, "pad-added", G_CALLBACK (cb_newpad), this);
- } else {
- description = g_strdup_printf("uridecodebin uri=%s name=src ! fakesink name=sink", mUri);
- mPipeline = gst_parse_launch(description, &error);
- }
-
- if(!mPipeline) {
- LOGE("can't create pipeline");
- return;
- }
- LOGV("pipeline creation: %s", GST_ELEMENT_NAME (mPipeline));
-
-
- // verbose info (as gst-launch -v)
- // Activate the trace with the command: "setprop persist.gst.verbose 1"
- char value[PROPERTY_VALUE_MAX];
- property_get("persist.gst.verbose", value, "0");
- LOGV("persist.gst.verbose property = %s", value);
- if (value[0] == '1') {
- LOGV("Activate deep_notify");
- g_signal_connect (mPipeline, "deep_notify",
- G_CALLBACK (gst_object_default_deep_notify), NULL);
- }
-
- mState = GST_STATE_NULL;
+ gchar *description = NULL;
+ GError *error = NULL;
+ mMode = mode;
+
+ if (mMode & METADATA_MODE_FRAME_CAPTURE_ONLY) {
+ LOGI ("Called in METADATA_MODE_FRAME_CAPTURE_ONLY mode");
+ LOGI ("For URI:%s", mUri);
+ mPipeline = gst_pipeline_new ("pipeline");
+ mColorTransform = gst_element_factory_make ("ffmpegcolorspace", NULL);
+ mScaler = gst_element_factory_make ("videoscale", NULL);
+ mPlayBin = gst_element_factory_make ("uridecodebin", "src");
+ mAppSink = gst_element_factory_make ("appsink", "sink");
+ mAudioSink = gst_element_factory_make ("fakesink", NULL);
+
+ g_object_set (G_OBJECT (mPlayBin), "uri", mUri, NULL);
+ g_object_set (G_OBJECT (mAppSink), "enable-last-buffer", "true", NULL);
+
+ gst_bin_add_many (GST_BIN (mPipeline), mPlayBin, mColorTransform,
+ mAudioSink, mScaler, mAppSink, NULL);
+
+ if (!gst_element_link (mColorTransform, mScaler))
+ LOGE ("Failed to link %s to %s",
+ GST_ELEMENT_NAME (mColorTransform), GST_ELEMENT_NAME (mScaler));
+
+ if (!gst_element_link (mScaler, mAppSink))
+ LOGE ("Failed to link %s to %s",
+ GST_ELEMENT_NAME (mScaler), GST_ELEMENT_NAME (mAppSink));
+
+ g_signal_connect (mPlayBin, "pad-added", G_CALLBACK (cb_newpad), this);
+ } else {
+ description =
+ g_strdup_printf ("uridecodebin uri=%s name=src ! fakesink name=sink",
+ mUri);
+ mPipeline = gst_parse_launch (description, &error);
+ }
+
+ if (!mPipeline) {
+ LOGE ("can't create pipeline");
+ return;
+ }
+ LOGV ("pipeline creation: %s", GST_ELEMENT_NAME (mPipeline));
+
+
+ // verbose info (as gst-launch -v)
+ // Activate the trace with the command: "setprop persist.gst.verbose 1"
+ char value[PROPERTY_VALUE_MAX];
+ property_get ("persist.gst.verbose", value, "0");
+ LOGV ("persist.gst.verbose property = %s", value);
+ if (value[0] == '1') {
+ LOGV ("Activate deep_notify");
+ g_signal_connect (mPipeline, "deep_notify",
+ G_CALLBACK (gst_object_default_deep_notify), NULL);
+ }
+
+ mState = GST_STATE_NULL;
}
-void GstMetadataRetrieverDriver::setDataSource(const char* url)
+void
+GstMetadataRetrieverDriver::setDataSource (const char *url)
{
- LOGI("create source from uri %s", url);
-
- if(!gst_uri_is_valid(url)) {
- gchar *uri_file = g_filename_to_uri (url, NULL, NULL);
- // add \" to avoid issues with space charactere in filename/filepath
- mUri = g_strdup_printf("%s", uri_file);
- g_free (uri_file);
- }
- else {
- mUri = g_strdup_printf("%s", url);
- }
-
- LOGV("set uri %s to src", mUri);
+ LOGI ("create source from uri %s", url);
+
+ if (!gst_uri_is_valid (url)) {
+ gchar *uri_file = g_filename_to_uri (url, NULL, NULL);
+    // add \" to avoid issues with space character in filename/filepath
+ mUri = g_strdup_printf ("%s", uri_file);
+ g_free (uri_file);
+ } else {
+ mUri = g_strdup_printf ("%s", url);
+ }
+
+ LOGV ("set uri %s to src", mUri);
}
-/*static*/ gboolean GstMetadataRetrieverDriver::have_video_caps (GstElement * uridecodebin, GstCaps * caps)
+/*static*/ gboolean
+GstMetadataRetrieverDriver::have_video_caps (GstElement * uridecodebin,
+ GstCaps * caps)
{
- GstCaps *intersection, *video_caps;
-
- gboolean res;
-
- video_caps = gst_static_caps_get(&static_have_video_caps);
- GST_OBJECT_LOCK(uridecodebin);
- intersection = gst_caps_intersect (caps, video_caps);
- GST_OBJECT_UNLOCK(uridecodebin);
-
- res = !(gst_caps_is_empty (intersection));
-
- gst_caps_unref (intersection);
- gst_caps_unref (video_caps);
- return res;
+ GstCaps *intersection, *video_caps;
+
+ gboolean res;
+
+ video_caps = gst_static_caps_get (&static_have_video_caps);
+ GST_OBJECT_LOCK (uridecodebin);
+ intersection = gst_caps_intersect (caps, video_caps);
+ GST_OBJECT_UNLOCK (uridecodebin);
+
+ res = !(gst_caps_is_empty (intersection));
+
+ gst_caps_unref (intersection);
+ gst_caps_unref (video_caps);
+ return res;
}
-/*static*/ gboolean GstMetadataRetrieverDriver::are_audio_caps (GstElement * uridecodebin, GstCaps * caps)
+/*static*/ gboolean
+GstMetadataRetrieverDriver::are_audio_caps (GstElement * uridecodebin,
+ GstCaps * caps)
{
- GstCaps *intersection, *end_caps;
-
- gboolean res;
-
- end_caps = gst_static_caps_get(&static_audio_caps);
- GST_OBJECT_LOCK(uridecodebin);
- intersection = gst_caps_intersect (caps, end_caps);
- GST_OBJECT_UNLOCK(uridecodebin);
-
- res = (gst_caps_is_empty (intersection));
-
- gst_caps_unref (intersection);
- gst_caps_unref (end_caps);
- return res;
+ GstCaps *intersection, *end_caps;
+
+ gboolean res;
+
+ end_caps = gst_static_caps_get (&static_audio_caps);
+ GST_OBJECT_LOCK (uridecodebin);
+ intersection = gst_caps_intersect (caps, end_caps);
+ GST_OBJECT_UNLOCK (uridecodebin);
+
+ res = (gst_caps_is_empty (intersection));
+
+ gst_caps_unref (intersection);
+ gst_caps_unref (end_caps);
+ return res;
}
-/*static*/ gboolean GstMetadataRetrieverDriver::are_video_caps (GstElement * uridecodebin, GstCaps * caps)
+/*static*/ gboolean
+GstMetadataRetrieverDriver::are_video_caps (GstElement * uridecodebin,
+ GstCaps * caps)
{
- GstCaps *intersection, *end_caps;
-
- gboolean res;
-
- end_caps = gst_static_caps_get(&static_video_caps);
- GST_OBJECT_LOCK(uridecodebin);
- intersection = gst_caps_intersect (caps, end_caps);
- GST_OBJECT_UNLOCK(uridecodebin);
-
- res = (gst_caps_is_empty (intersection));
-
- gst_caps_unref (intersection);
- gst_caps_unref (end_caps);
- return res;
+ GstCaps *intersection, *end_caps;
+
+ gboolean res;
+
+ end_caps = gst_static_caps_get (&static_video_caps);
+ GST_OBJECT_LOCK (uridecodebin);
+ intersection = gst_caps_intersect (caps, end_caps);
+ GST_OBJECT_UNLOCK (uridecodebin);
+
+ res = (gst_caps_is_empty (intersection));
+
+ gst_caps_unref (intersection);
+ gst_caps_unref (end_caps);
+ return res;
}
 /* return TRUE if we continue to build the graph, FALSE otherwise */
-/*static */ gboolean GstMetadataRetrieverDriver::autoplug_continue (GstElement* object,
- GstPad* pad,
- GstCaps* caps,
- GstMetadataRetrieverDriver* ed)
+/*static */ gboolean
+GstMetadataRetrieverDriver::autoplug_continue (GstElement * object,
+ GstPad * pad, GstCaps * caps, GstMetadataRetrieverDriver * ed)
{
- GstStructure *structure = NULL;
- structure = gst_caps_get_structure (caps, 0);
- gboolean res;
-
-
- UNUSED(pad);
-
- //LOGV("autoplug_continue %s" ,gst_structure_get_name(structure));
- if(are_video_caps(object, caps)) {
- //LOGV("\nfound video caps %" GST_PTR_FORMAT, caps);
- ed->mHaveStreamVideo = TRUE;
- }
-
- res = are_audio_caps(object, caps);
-
- if(res && (ed->mMode & METADATA_MODE_METADATA_RETRIEVAL_ONLY)) {
- res &= are_video_caps(object, caps);
- }
-
- return res;
+ GstStructure *structure = NULL;
+ structure = gst_caps_get_structure (caps, 0);
+ gboolean res;
+
+
+ UNUSED (pad);
+
+ //LOGV("autoplug_continue %s" ,gst_structure_get_name(structure));
+ if (are_video_caps (object, caps)) {
+ //LOGV("\nfound video caps %" GST_PTR_FORMAT, caps);
+ ed->mHaveStreamVideo = TRUE;
+ }
+
+ res = are_audio_caps (object, caps);
+
+ if (res && (ed->mMode & METADATA_MODE_METADATA_RETRIEVAL_ONLY)) {
+ res &= are_video_caps (object, caps);
+ }
+
+ return res;
}
-/*static*/ void GstMetadataRetrieverDriver::need_data (GstElement * object, guint size, GstMetadataRetrieverDriver* ed)
+/*static*/ void
+GstMetadataRetrieverDriver::need_data (GstElement * object, guint size,
+ GstMetadataRetrieverDriver * ed)
{
- GstFlowReturn ret;
- GstBuffer *buffer;
- UNUSED(object);
-
- if(ed->mFdSrcOffset_current >= ed->mFdSrcOffset_max) {
- LOGV("appsrc send eos");
- g_signal_emit_by_name (ed->mAppsrc, "end-of-stream", &ret);
- return;
- }
-
- if((ed->mFdSrcOffset_current + size) > ed->mFdSrcOffset_max) {
- size = ed->mFdSrcOffset_max - ed->mFdSrcOffset_current;
- }
-
- buffer = gst_buffer_new_and_alloc(size);
-
- if(buffer == NULL) {
- LOGV("appsrc can't allocate buffer of size %d", size);
- return;
- }
- size = read(ed->mFd, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
-
- GST_BUFFER_SIZE(buffer) = size;
- /* we need to set an offset for random access */
- GST_BUFFER_OFFSET (buffer) = ed->mFdSrcOffset_current - ed->mFdSrcOffset_min;
- ed->mFdSrcOffset_current += size;
- GST_BUFFER_OFFSET_END (buffer) = ed->mFdSrcOffset_current - ed->mFdSrcOffset_min;
-
- g_signal_emit_by_name (ed->mAppsrc, "push-buffer", buffer, &ret);
- gst_buffer_unref (buffer);
+ GstFlowReturn ret;
+ GstBuffer *buffer;
+ UNUSED (object);
+
+ if (ed->mFdSrcOffset_current >= ed->mFdSrcOffset_max) {
+ LOGV ("appsrc send eos");
+ g_signal_emit_by_name (ed->mAppsrc, "end-of-stream", &ret);
+ return;
+ }
+
+ if ((ed->mFdSrcOffset_current + size) > ed->mFdSrcOffset_max) {
+ size = ed->mFdSrcOffset_max - ed->mFdSrcOffset_current;
+ }
+
+ buffer = gst_buffer_new_and_alloc (size);
+
+ if (buffer == NULL) {
+ LOGV ("appsrc can't allocate buffer of size %d", size);
+ return;
+ }
+ size = read (ed->mFd, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
+
+ GST_BUFFER_SIZE (buffer) = size;
+ /* we need to set an offset for random access */
+ GST_BUFFER_OFFSET (buffer) = ed->mFdSrcOffset_current - ed->mFdSrcOffset_min;
+ ed->mFdSrcOffset_current += size;
+ GST_BUFFER_OFFSET_END (buffer) =
+ ed->mFdSrcOffset_current - ed->mFdSrcOffset_min;
+
+ g_signal_emit_by_name (ed->mAppsrc, "push-buffer", buffer, &ret);
+ gst_buffer_unref (buffer);
}
-/*static*/ gboolean GstMetadataRetrieverDriver::seek_data(GstElement * object, guint64 offset, GstMetadataRetrieverDriver* ed)
+/*static*/ gboolean
+GstMetadataRetrieverDriver::seek_data (GstElement * object, guint64 offset,
+ GstMetadataRetrieverDriver * ed)
{
- UNUSED(object);
+ UNUSED (object);
- if((ed->mFdSrcOffset_min + offset) <= ed->mFdSrcOffset_max) {
- lseek (ed->mFd, ed->mFdSrcOffset_min + offset, SEEK_SET);
- ed->mFdSrcOffset_current = ed->mFdSrcOffset_min + offset;
- }
+ if ((ed->mFdSrcOffset_min + offset) <= ed->mFdSrcOffset_max) {
+ lseek (ed->mFd, ed->mFdSrcOffset_min + offset, SEEK_SET);
+ ed->mFdSrcOffset_current = ed->mFdSrcOffset_min + offset;
+ }
- return TRUE;
+ return TRUE;
}
-/*static*/ void GstMetadataRetrieverDriver::source_changed_cb (GObject *obj, GParamSpec *pspec, GstMetadataRetrieverDriver* ed)
+/*static*/ void
+GstMetadataRetrieverDriver::source_changed_cb (GObject * obj,
+ GParamSpec * pspec, GstMetadataRetrieverDriver * ed)
{
- UNUSED(pspec);
+ UNUSED (pspec);
- // get the newly created source element
- g_object_get (obj, "source", &(ed->mAppsrc), (gchar*)NULL);
+ // get the newly created source element
+ g_object_get (obj, "source", &(ed->mAppsrc), (gchar *) NULL);
- if(ed->mAppsrc != NULL) {
- lseek (ed->mFd, ed->mFdSrcOffset_min, SEEK_SET);
+ if (ed->mAppsrc != NULL) {
+ lseek (ed->mFd, ed->mFdSrcOffset_min, SEEK_SET);
- g_object_set(ed->mAppsrc, "format" , GST_FORMAT_BYTES, NULL);
- g_object_set(ed->mAppsrc, "stream-type" , 2 /*"random-access"*/ , NULL);
- g_object_set(ed->mAppsrc, "size", (gint64) (ed->mFdSrcOffset_max - ed->mFdSrcOffset_min), NULL);
- g_signal_connect (ed->mAppsrc, "need-data", G_CALLBACK (need_data), ed);
- g_signal_connect (ed->mAppsrc, "seek-data", G_CALLBACK (seek_data), ed);
- }
+ g_object_set (ed->mAppsrc, "format", GST_FORMAT_BYTES, NULL);
+ g_object_set (ed->mAppsrc, "stream-type", 2 /*"random-access" */ , NULL);
+ g_object_set (ed->mAppsrc, "size",
+ (gint64) (ed->mFdSrcOffset_max - ed->mFdSrcOffset_min), NULL);
+ g_signal_connect (ed->mAppsrc, "need-data", G_CALLBACK (need_data), ed);
+ g_signal_connect (ed->mAppsrc, "seek-data", G_CALLBACK (seek_data), ed);
+ }
}
-void GstMetadataRetrieverDriver::setFdDataSource(int fd, gint64 offset, gint64 length)
+void
+GstMetadataRetrieverDriver::setFdDataSource (int fd, gint64 offset,
+ gint64 length)
{
- LOGI("create source from fd %d offset %lld lenght %lld", fd, offset, length);
-
- // duplicate the fd because it should be close in java layers before we can use it
- mFd = dup(fd);
- LOGV("dup(fd) old %d new %d", fd, mFd);
- // create the uri string with the new fd
- mUri = g_strdup_printf ("appsrc://");
- mFdSrcOffset_min = offset;
- mFdSrcOffset_current = mFdSrcOffset_min;
- mFdSrcOffset_max = mFdSrcOffset_min + length;
+  LOGI ("create source from fd %d offset %lld length %lld", fd, offset, length);
+
+  // duplicate the fd because it may be closed in the Java layers before we can use it
+ mFd = dup (fd);
+ LOGV ("dup(fd) old %d new %d", fd, mFd);
+ // create the uri string with the new fd
+ mUri = g_strdup_printf ("appsrc://");
+ mFdSrcOffset_min = offset;
+ mFdSrcOffset_current = mFdSrcOffset_min;
+ mFdSrcOffset_max = mFdSrcOffset_min + length;
}
-void GstMetadataRetrieverDriver::getVideoSize(int* width, int* height)
+void
+GstMetadataRetrieverDriver::getVideoSize (int *width, int *height)
{
- *width = 0;
- *height = 0;
-
- if (mHaveStreamVideo) {
- GstElement * sink = gst_bin_get_by_name (GST_BIN(mPipeline), "sink");
- if (GstPad* pad = gst_element_get_static_pad(sink, "sink")) {
- gst_video_get_size(GST_PAD(pad), width, height);
- gst_object_unref(GST_OBJECT(pad));
- }
- LOGV("video width %d height %d", *width, *height);
- }
+ *width = 0;
+ *height = 0;
+
+ if (mHaveStreamVideo) {
+ GstElement *sink = gst_bin_get_by_name (GST_BIN (mPipeline), "sink");
+ if (GstPad * pad = gst_element_get_static_pad (sink, "sink")) {
+ gst_video_get_size (GST_PAD (pad), width, height);
+ gst_object_unref (GST_OBJECT (pad));
+ }
+ LOGV ("video width %d height %d", *width, *height);
+ }
}
-void GstMetadataRetrieverDriver::getFrameRate(int* framerate)
+void
+GstMetadataRetrieverDriver::getFrameRate (int *framerate)
{
- *framerate = 0;
-
- if (mHaveStreamVideo) {
- const GValue* fps = NULL;
- GstElement * sink = gst_bin_get_by_name (GST_BIN(mPipeline), "sink");
- if (GstPad* pad = gst_element_get_static_pad(sink, "sink")) {
- fps = gst_video_frame_rate(GST_PAD(pad));
- if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
- *framerate = gst_value_get_fraction_numerator (fps) / gst_value_get_fraction_denominator (fps);
- }
- gst_object_unref(GST_OBJECT(pad));
- }
- LOGV("framerate %d", *framerate);
- }
+ *framerate = 0;
+
+ if (mHaveStreamVideo) {
+ const GValue *fps = NULL;
+ GstElement *sink = gst_bin_get_by_name (GST_BIN (mPipeline), "sink");
+ if (GstPad * pad = gst_element_get_static_pad (sink, "sink")) {
+ fps = gst_video_frame_rate (GST_PAD (pad));
+ if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
+ *framerate =
+ gst_value_get_fraction_numerator (fps) /
+ gst_value_get_fraction_denominator (fps);
+ }
+ gst_object_unref (GST_OBJECT (pad));
+ }
+ LOGV ("framerate %d", *framerate);
+ }
}
#define PREPARE_SYNC_TIMEOUT 5000 * GST_MSECOND
-void GstMetadataRetrieverDriver::prepareSync()
+void
+GstMetadataRetrieverDriver::prepareSync ()
{
- GstBus * bus = NULL;
- GstMessage *message = NULL;
- GstElement * src = NULL;
- GstMessageType message_filter = (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE);
-
- if(mMode & METADATA_MODE_METADATA_RETRIEVAL_ONLY) {
- message_filter = (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_TAG);
- }
-
- LOGV("prepareSync");
- src = gst_bin_get_by_name (GST_BIN(mPipeline), "src");
-
- if(src == NULL) {
- LOGV("prepareSync no src found");
- mState = GST_STATE_NULL;
- return;
- }
-
- g_signal_connect (src, "autoplug-continue", G_CALLBACK (autoplug_continue),this);
-
- if(mFdSrcOffset_max) {
- g_signal_connect (src, "notify::source", G_CALLBACK (source_changed_cb),this);
- }
-
- bus = gst_pipeline_get_bus (GST_PIPELINE(mPipeline));
- gst_element_set_state(mPipeline, GST_STATE_PAUSED);
-
- message = gst_bus_timed_pop_filtered(bus, PREPARE_SYNC_TIMEOUT, message_filter);
-
- mState = GST_STATE_PAUSED;
-
- while(message != NULL) {
- switch(GST_MESSAGE_TYPE(message)) {
- case GST_MESSAGE_TAG:
- {
- GstTagList *tag_list, *result;
-
- LOGV("receive TAGS from the stream");
- gst_message_parse_tag (message, &tag_list);
-
- /* all tags (replace previous tags, title/artist/etc. might change
- * in the middle of a stream, e.g. with radio streams) */
- result = gst_tag_list_merge (mTag_list, tag_list, GST_TAG_MERGE_REPLACE);
- if (mTag_list)
- gst_tag_list_free (mTag_list);
- mTag_list = result;
-
- /* clean up */
- gst_tag_list_free (tag_list);
- gst_message_unref(message);
- break;
- }
-
- case GST_MESSAGE_ASYNC_DONE:
- {
- mState = GST_STATE_PAUSED;
- LOGV("receive GST_MESSAGE_ASYNC_DONE");
- gst_message_unref(message);
- goto bail;
- }
-
- case GST_MESSAGE_ERROR:
- {
- GError* err;
- gchar* debug;
-
- mState = GST_STATE_NULL;
- gst_message_parse_error(message, &err, &debug);
- LOGV("receive GST_MESSAGE_ERROR : %d, %s (EXTRA INFO=%s)",
- err->code,
- err->message,
- (debug != NULL) ? debug : "none");
- gst_message_unref(message);
- if (debug) {
- g_free (debug);
- }
- goto bail;
- }
-
- default:
- // do nothing
- break;
- }
- message = gst_bus_timed_pop_filtered(bus, 50*GST_MSECOND, message_filter);
- }
+ GstBus *bus = NULL;
+ GstMessage *message = NULL;
+ GstElement *src = NULL;
+ GstMessageType message_filter =
+ (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE);
+
+ if (mMode & METADATA_MODE_METADATA_RETRIEVAL_ONLY) {
+ message_filter =
+ (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE |
+ GST_MESSAGE_TAG);
+ }
+
+ LOGV ("prepareSync");
+ src = gst_bin_get_by_name (GST_BIN (mPipeline), "src");
+
+ if (src == NULL) {
+ LOGV ("prepareSync no src found");
+ mState = GST_STATE_NULL;
+ return;
+ }
+
+ g_signal_connect (src, "autoplug-continue", G_CALLBACK (autoplug_continue),
+ this);
+
+ if (mFdSrcOffset_max) {
+ g_signal_connect (src, "notify::source", G_CALLBACK (source_changed_cb),
+ this);
+ }
+
+ bus = gst_pipeline_get_bus (GST_PIPELINE (mPipeline));
+ gst_element_set_state (mPipeline, GST_STATE_PAUSED);
+
+ message =
+ gst_bus_timed_pop_filtered (bus, PREPARE_SYNC_TIMEOUT, message_filter);
+
+ mState = GST_STATE_PAUSED;
+
+ while (message != NULL) {
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_TAG:
+ {
+ GstTagList *tag_list, *result;
+
+ LOGV ("receive TAGS from the stream");
+ gst_message_parse_tag (message, &tag_list);
+
+ /* all tags (replace previous tags, title/artist/etc. might change
+ * in the middle of a stream, e.g. with radio streams) */
+ result =
+ gst_tag_list_merge (mTag_list, tag_list, GST_TAG_MERGE_REPLACE);
+ if (mTag_list)
+ gst_tag_list_free (mTag_list);
+ mTag_list = result;
+
+ /* clean up */
+ gst_tag_list_free (tag_list);
+ gst_message_unref (message);
+ break;
+ }
+
+ case GST_MESSAGE_ASYNC_DONE:
+ {
+ mState = GST_STATE_PAUSED;
+ LOGV ("receive GST_MESSAGE_ASYNC_DONE");
+ gst_message_unref (message);
+ goto bail;
+ }
+
+ case GST_MESSAGE_ERROR:
+ {
+ GError *err;
+ gchar *debug;
+
+ mState = GST_STATE_NULL;
+ gst_message_parse_error (message, &err, &debug);
+ LOGV ("receive GST_MESSAGE_ERROR : %d, %s (EXTRA INFO=%s)",
+ err->code, err->message, (debug != NULL) ? debug : "none");
+ gst_message_unref (message);
+ if (debug) {
+ g_free (debug);
+ }
+ goto bail;
+ }
+
+ default:
+ // do nothing
+ break;
+ }
+ message =
+ gst_bus_timed_pop_filtered (bus, 50 * GST_MSECOND, message_filter);
+ }
bail:
- gst_object_unref(bus);
+ gst_object_unref (bus);
}
-void GstMetadataRetrieverDriver::seekSync(gint64 p)
+void
+GstMetadataRetrieverDriver::seekSync (gint64 p)
{
- gint64 position;
- GstMessage *message = NULL;
- GstBus * bus = NULL;
-
- if(!mPipeline)
- return;
-
- position = ((gint64)p) * 1000000;
- LOGV("Seek to %lld ms (%lld ns)", p, position);
- if(!gst_element_seek_simple(mPipeline, GST_FORMAT_TIME, (GstSeekFlags)((int)GST_SEEK_FLAG_FLUSH | (int)GST_SEEK_FLAG_KEY_UNIT), position)) {
- LOGE("Can't perfom seek for %lld ms", p);
- }
-
- bus = gst_pipeline_get_bus (GST_PIPELINE(mPipeline));
-
- message = gst_bus_timed_pop_filtered(bus, PREPARE_SYNC_TIMEOUT, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE));
-
- if( message != NULL) {
- switch(GST_MESSAGE_TYPE(message)) {
-
- case GST_MESSAGE_ASYNC_DONE:
- {
- mState = GST_STATE_PAUSED;
- break;
- }
-
- case GST_MESSAGE_ERROR:
- {
- mState = GST_STATE_NULL;
- break;
- }
- default:
- // do nothing
- break;
- }
- gst_message_unref(message);
- }
- gst_object_unref(bus);
+ gint64 position;
+ GstMessage *message = NULL;
+ GstBus *bus = NULL;
+
+ if (!mPipeline)
+ return;
+
+ position = ((gint64) p) * 1000000;
+ LOGV ("Seek to %lld ms (%lld ns)", p, position);
+ if (!gst_element_seek_simple (mPipeline, GST_FORMAT_TIME,
+ (GstSeekFlags) ((int) GST_SEEK_FLAG_FLUSH | (int)
+ GST_SEEK_FLAG_KEY_UNIT), position)) {
+    LOGE ("Can't perform seek for %lld ms", p);
+ }
+
+ bus = gst_pipeline_get_bus (GST_PIPELINE (mPipeline));
+
+ message =
+ gst_bus_timed_pop_filtered (bus, PREPARE_SYNC_TIMEOUT,
+ (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE));
+
+ if (message != NULL) {
+ switch (GST_MESSAGE_TYPE (message)) {
+
+ case GST_MESSAGE_ASYNC_DONE:
+ {
+ mState = GST_STATE_PAUSED;
+ break;
+ }
+
+ case GST_MESSAGE_ERROR:
+ {
+ mState = GST_STATE_NULL;
+ break;
+ }
+ default:
+ // do nothing
+ break;
+ }
+ gst_message_unref (message);
+ }
+ gst_object_unref (bus);
}
-gint64 GstMetadataRetrieverDriver::getPosition()
+gint64
+GstMetadataRetrieverDriver::getPosition ()
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 pos = 0;
-
- if(!mPipeline) {
- LOGV("get postion but pipeline has not been created yet");
- return 0;
- }
-
- LOGV("getPosition");
- gst_element_query_position (mPipeline, &fmt, &pos);
- LOGV("Stream position %lld ms", pos / 1000000);
- return (pos / 1000000);
+ GstFormat fmt = GST_FORMAT_TIME;
+ gint64 pos = 0;
+
+ if (!mPipeline) {
+    LOGV ("get position but pipeline has not been created yet");
+ return 0;
+ }
+
+ LOGV ("getPosition");
+ gst_element_query_position (mPipeline, &fmt, &pos);
+ LOGV ("Stream position %lld ms", pos / 1000000);
+ return (pos / 1000000);
}
-int GstMetadataRetrieverDriver::getStatus()
+int
+GstMetadataRetrieverDriver::getStatus ()
{
- return mState;
+ return mState;
}
-gint64 GstMetadataRetrieverDriver::getDuration()
+gint64
+GstMetadataRetrieverDriver::getDuration ()
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 len;
-
- if(!mPipeline) {
- LOGV("get duration but pipeline has not been created yet");
- return 0;
- }
-
- // the duration given by gstreamer is in nanosecond
- // so we need to transform it in millisecond
- LOGV("getDuration");
- if(gst_element_query_duration(mPipeline, &fmt, &len)) {
- LOGE("Stream duration %lld ms", len / 1000000 );
- }
- else {
- LOGV("Query duration failed");
- len = 0;
- }
-
- if ((GstClockTime)len == GST_CLOCK_TIME_NONE) {
- LOGV("Query duration return GST_CLOCK_TIME_NONE");
- len = 0;
- }
-
- return (len / 1000000);
+ GstFormat fmt = GST_FORMAT_TIME;
+ gint64 len;
+
+ if (!mPipeline) {
+ LOGV ("get duration but pipeline has not been created yet");
+ return 0;
+ }
+  // the duration given by gstreamer is in nanoseconds
+  // so we need to convert it to milliseconds
+ LOGV ("getDuration");
+ if (gst_element_query_duration (mPipeline, &fmt, &len)) {
+ LOGE ("Stream duration %lld ms", len / 1000000);
+ } else {
+ LOGV ("Query duration failed");
+ len = 0;
+ }
+
+ if ((GstClockTime) len == GST_CLOCK_TIME_NONE) {
+    LOGV ("Query duration returned GST_CLOCK_TIME_NONE");
+ len = 0;
+ }
+
+ return (len / 1000000);
}
-void GstMetadataRetrieverDriver::quit()
-{
- int state = -1;
-
- LOGV("quit");
-
-
- if(mTag_list) {
- LOGV("free tag list");
- gst_tag_list_free (mTag_list);
- mTag_list = NULL;
- }
-
- if(mPipeline) {
- GstBus *bus;
- bus = gst_pipeline_get_bus(GST_PIPELINE (mPipeline));
- LOGV("flush bus messages");
- if (bus != NULL) {
- gst_bus_set_flushing(bus, TRUE);
- gst_object_unref (bus);
- }
- LOGV("free pipeline %s", GST_ELEMENT_NAME(mPipeline));
- state = gst_element_set_state(mPipeline, GST_STATE_NULL);
- LOGV("set pipeline state to NULL: %d (0:Failure, 1:Success, 2:Async, 3:NO_PREROLL)", state);
- gst_object_unref (mPipeline);
- mPipeline = NULL;
- }
-
- mState = GST_STATE_NULL;
+void
+GstMetadataRetrieverDriver::quit ()
+{
+ int state = -1;
+
+ LOGV ("quit");
+
+
+ if (mTag_list) {
+ LOGV ("free tag list");
+ gst_tag_list_free (mTag_list);
+ mTag_list = NULL;
+ }
+
+ if (mPipeline) {
+ GstBus *bus;
+ bus = gst_pipeline_get_bus (GST_PIPELINE (mPipeline));
+ LOGV ("flush bus messages");
+ if (bus != NULL) {
+ gst_bus_set_flushing (bus, TRUE);
+ gst_object_unref (bus);
+ }
+ LOGV ("free pipeline %s", GST_ELEMENT_NAME (mPipeline));
+ state = gst_element_set_state (mPipeline, GST_STATE_NULL);
+ LOGV ("set pipeline state to NULL: %d (0:Failure, 1:Success, 2:Async, 3:NO_PREROLL)", state);
+ gst_object_unref (mPipeline);
+ mPipeline = NULL;
+ }
+
+ mState = GST_STATE_NULL;
}
-void GstMetadataRetrieverDriver::getCaptureFrame(guint8** data)
+void
+GstMetadataRetrieverDriver::getCaptureFrame (guint8 ** data)
{
- LOGV("getCaptureFrame");
-
- if(mPipeline != NULL) {
- GstBuffer* frame = NULL;
- GstElement * sink = gst_bin_get_by_name (GST_BIN(mPipeline), "sink");
-
- g_object_get(G_OBJECT(sink), "last-buffer" , &frame, NULL);
-
- if(frame != NULL) {
- if(*data) {
- delete [] *data;
- }
- *data = new guint8[GST_BUFFER_SIZE(frame)];
- memcpy(*data, GST_BUFFER_DATA(frame), GST_BUFFER_SIZE(frame));
- gst_object_unref(frame);
- }
- }
+ LOGV ("getCaptureFrame");
+
+ if (mPipeline != NULL) {
+ GstBuffer *frame = NULL;
+ GstElement *sink = gst_bin_get_by_name (GST_BIN (mPipeline), "sink");
+
+ g_object_get (G_OBJECT (sink), "last-buffer", &frame, NULL);
+
+ if (frame != NULL) {
+ if (*data) {
+ delete[] * data;
+ }
+ *data = new guint8[GST_BUFFER_SIZE (frame)];
+ memcpy (*data, GST_BUFFER_DATA (frame), GST_BUFFER_SIZE (frame));
+ gst_object_unref (frame);
+ }
+ }
}
-gchar* GstMetadataRetrieverDriver::getMetadata(gchar *tag)
+gchar *
+GstMetadataRetrieverDriver::getMetadata (gchar * tag)
{
- LOGV("get metadata tag %s", tag);
-
- gchar *str;
- gint count;
-
- if(!mTag_list) // no tag list nothing do to
- {
- LOGV("No taglist => Nothing to do");
- return NULL;
- }
-
- count = gst_tag_list_get_tag_size (mTag_list, tag);
- if(count) {
-
- if (gst_tag_get_type (tag) == G_TYPE_STRING) {
- if (!gst_tag_list_get_string_index (mTag_list, tag, 0, &str)) {
- g_assert_not_reached ();
- }
- } else {
- str = g_strdup_value_contents (gst_tag_list_get_value_index (mTag_list, tag, 0));
- }
- LOGV("for tag %s have metadata %s", tag, str);
- return str;
- }
- else {
- LOGV(" No Tag : %s ! ",tag);
- }
- return NULL;
+ LOGV ("get metadata tag %s", tag);
+
+ gchar *str;
+ gint count;
+
+  if (!mTag_list)               // no tag list, nothing to do
+ {
+ LOGV ("No taglist => Nothing to do");
+ return NULL;
+ }
+
+ count = gst_tag_list_get_tag_size (mTag_list, tag);
+ if (count) {
+
+ if (gst_tag_get_type (tag) == G_TYPE_STRING) {
+ if (!gst_tag_list_get_string_index (mTag_list, tag, 0, &str)) {
+ g_assert_not_reached ();
+ }
+ } else {
+ str =
+ g_strdup_value_contents (gst_tag_list_get_value_index (mTag_list, tag,
+ 0));
+ }
+ LOGV ("for tag %s have metadata %s", tag, str);
+ return str;
+ } else {
+ LOGV (" No Tag : %s ! ", tag);
+ }
+ return NULL;
}
-void GstMetadataRetrieverDriver::getAlbumArt(guint8 **data, guint64 *size)
- {
- if(mAlbumArt == NULL) {
- LOGV("getAlbumArt try to get image from tags");
- if(mTag_list)
- {
- gboolean res = FALSE;
- res = gst_tag_list_get_buffer (mTag_list, GST_TAG_IMAGE, &mAlbumArt);
- if(!res)
- res = gst_tag_list_get_buffer (mTag_list, GST_TAG_PREVIEW_IMAGE, &mAlbumArt);
-
- if(!res)
- LOGV("no album art found");
- }
- }
-
- if(mAlbumArt) {
- *data = GST_BUFFER_DATA(mAlbumArt);
- *size= GST_BUFFER_SIZE(mAlbumArt);
- }
+void
+GstMetadataRetrieverDriver::getAlbumArt (guint8 ** data, guint64 * size)
+{
+ if (mAlbumArt == NULL) {
+ LOGV ("getAlbumArt try to get image from tags");
+ if (mTag_list) {
+ gboolean res = FALSE;
+ res = gst_tag_list_get_buffer (mTag_list, GST_TAG_IMAGE, &mAlbumArt);
+ if (!res)
+ res =
+ gst_tag_list_get_buffer (mTag_list, GST_TAG_PREVIEW_IMAGE,
+ &mAlbumArt);
+
+ if (!res)
+ LOGV ("no album art found");
+ }
+ }
+
+ if (mAlbumArt) {
+ *data = GST_BUFFER_DATA (mAlbumArt);
+ *size = GST_BUFFER_SIZE (mAlbumArt);
+ }
}
-/*static*/ void GstMetadataRetrieverDriver::debug_log (GstDebugCategory * category, GstDebugLevel level,
- const gchar * file, const gchar * function, gint line,
- GObject * object, GstDebugMessage * message, gpointer data)
+/*static*/ void
+GstMetadataRetrieverDriver::debug_log (GstDebugCategory * category,
+ GstDebugLevel level, const gchar * file, const gchar * function, gint line,
+ GObject * object, GstDebugMessage * message, gpointer data)
{
- gint pid;
- GstClockTime elapsed;
- GstMetadataRetrieverDriver* ed = (GstMetadataRetrieverDriver*)data;
+ gint pid;
+ GstClockTime elapsed;
+ GstMetadataRetrieverDriver *ed = (GstMetadataRetrieverDriver *) data;
- UNUSED(file);
- UNUSED(object);
+ UNUSED (file);
+ UNUSED (object);
- if (level > gst_debug_category_get_threshold (category))
- return;
+ if (level > gst_debug_category_get_threshold (category))
+ return;
- pid = getpid ();
+ pid = getpid ();
- elapsed = GST_CLOCK_DIFF (ed->mGst_info_start_time,
+ elapsed = GST_CLOCK_DIFF (ed->mGst_info_start_time,
gst_util_get_timestamp ());
- g_printerr ("%" GST_TIME_FORMAT " %5d %s %s %s:%d %s\r\n",
- GST_TIME_ARGS (elapsed),
- pid,
- gst_debug_level_get_name (level),
- gst_debug_category_get_name (category), function, line,
- gst_debug_message_get (message));
+ g_printerr ("%" GST_TIME_FORMAT " %5d %s %s %s:%d %s\r\n",
+ GST_TIME_ARGS (elapsed),
+ pid,
+ gst_debug_level_get_name (level),
+ gst_debug_category_get_name (category), function, line,
+ gst_debug_message_get (message));
}
-void GstMetadataRetrieverDriver::init_gstreamer()
+void
+GstMetadataRetrieverDriver::init_gstreamer ()
{
- GError *err = NULL;
- char debug[PROPERTY_VALUE_MAX];
- char trace[PROPERTY_VALUE_MAX];
-
- property_get("persist.gst.debug", debug, "0");
- LOGV("persist.gst.debug property %s", debug);
- setenv ("GST_DEBUG", debug, true);
-
- property_get("persist.gst.trace", trace, "/dev/console");
- LOGV("persist.gst.trace property %s", trace);
- LOGV("route the trace to %s", trace);
- setenv ("GST_DEBUG_FILE", trace, true);
-
- setenv ("GST_REGISTRY", "/data/data/gstreamer/registry.bin", 0);
- LOGV("gstreamer init check");
-
- if(!gst_init_check (NULL, NULL, &err))
- {
- LOGE ("Could not initialize GStreamer: %s\n", err ? err->message : "unknown error occurred");
- if (err) {
- g_error_free (err);
- }
- }
+ GError *err = NULL;
+ char debug[PROPERTY_VALUE_MAX];
+ char trace[PROPERTY_VALUE_MAX];
+
+ property_get ("persist.gst.debug", debug, "0");
+ LOGV ("persist.gst.debug property %s", debug);
+ setenv ("GST_DEBUG", debug, true);
+
+ property_get ("persist.gst.trace", trace, "/dev/console");
+ LOGV ("persist.gst.trace property %s", trace);
+ LOGV ("route the trace to %s", trace);
+ setenv ("GST_DEBUG_FILE", trace, true);
+
+ setenv ("GST_REGISTRY", "/data/data/gstreamer/registry.bin", 0);
+ LOGV ("gstreamer init check");
+
+ if (!gst_init_check (NULL, NULL, &err)) {
+ LOGE ("Could not initialize GStreamer: %s\n",
+ err ? err->message : "unknown error occurred");
+ if (err) {
+ g_error_free (err);
+ }
+ }
}
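
The debug_log() handler above matches GStreamer 0.10's GstLogFunction prototype; the patch does not show where it is installed, but the usual wiring is a gst_debug_add_log_function() call made alongside the GST_DEBUG setup, e.g. from init_gstreamer(). A minimal sketch of that wiring, under the assumption that this is how GstMetadataRetrieverDriver hooks it in (the placement is not shown in this diff):

  /* sketch: route GStreamer's own logging through the driver's handler
   * (assumed wiring; only debug_log itself appears in this patch) */
  mGst_info_start_time = gst_util_get_timestamp ();      /* reference point for the elapsed column */
  gst_debug_remove_log_function (gst_debug_log_default); /* drop the stock stderr handler */
  gst_debug_add_log_function (GstMetadataRetrieverDriver::debug_log, this);
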
diff --git a/gstplayer/GstMetadataRetrieverDriver.h b/gstplayer/GstMetadataRetrieverDriver.h
index a3b790d..062b057 100644
--- a/gstplayer/GstMetadataRetrieverDriver.h
+++ b/gstplayer/GstMetadataRetrieverDriver.h
@@ -25,84 +25,89 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <utils/Log.h>
#include <media/mediametadataretriever.h>
-namespace android {
+namespace android
+{
-class GstMetadataRetrieverDriver
-{
-public:
- GstMetadataRetrieverDriver();
- ~GstMetadataRetrieverDriver();
-
- void setup(int mode);
- void setDataSource(const char* url);
- void setFdDataSource(int fd, gint64 offset, gint64 length);
- void prepareSync();
- void seekSync(gint64 p);
- void quit();
- gint64 getPosition();
- gint64 getDuration();
- int getStatus();
- void getVideoSize(int* width, int* height);
- void endOfData();
- gchar* getMetadata(gchar* tag);
- void getCaptureFrame(guint8 **data);
- void getAlbumArt(guint8 **data, guint64 *size);
- void getFrameRate(int* framerate);
-
- static
- void cb_newpad(GstElement *mPlayBin, GstPad *pad,
- GstMetadataRetrieverDriver *data);
-
-private:
- GstElement* mPipeline;
- GstElement* mAppsrc;
- GstElement* mColorTransform;
- GstElement* mScaler;
- GstElement* mPlayBin;
- GstElement* mAppSink;
- GstElement* mAudioSink;
-
- gchar* mUri;
-
- static GstBusSyncReply bus_message(GstBus *bus, GstMessage * msg, gpointer data);
-
- GstTagList * mTag_list;
-
- void parseMetadataInfo();
-
- guint64 mFdSrcOffset_min;
- guint64 mFdSrcOffset_max;
- guint64 mFdSrcOffset_current;
- gint mFd;
-
-
- static gboolean have_video_caps (GstElement * uridecodebin, GstCaps * caps);
- static gboolean are_audio_caps (GstElement * uridecodebin, GstCaps * caps);
- static gboolean are_video_caps (GstElement * uridecodebin, GstCaps * caps);
-
- static gboolean autoplug_continue (GstElement* object, GstPad* pad, GstCaps* caps, GstMetadataRetrieverDriver* ed);
-
- static void source_changed_cb (GObject *obj, GParamSpec *pspec, GstMetadataRetrieverDriver* ed);
- static void need_data (GstElement * object, guint size, GstMetadataRetrieverDriver* ed);
- static gboolean seek_data (GstElement * object, guint64 offset, GstMetadataRetrieverDriver* ed);
-
- int mState;
-
- gboolean mHaveStreamVideo;
-
- GstBuffer *mAlbumArt;
-
- void init_gstreamer();
- GstClockTime mGst_info_start_time;
- static void debug_log (GstDebugCategory * category, GstDebugLevel level,
- const gchar * file, const gchar * function, gint line,
- GObject * object, GstDebugMessage * message, gpointer data);
-
- int mMode;
-};
-
-}; // namespace android
-
-#endif /* ----- #ifndef GST_METADATARETRIEVER_DRIVER_INC ----- */
+ class GstMetadataRetrieverDriver
+ {
+ public:
+ GstMetadataRetrieverDriver ();
+ ~GstMetadataRetrieverDriver ();
+
+ void setup (int mode);
+ void setDataSource (const char *url);
+ void setFdDataSource (int fd, gint64 offset, gint64 length);
+ void prepareSync ();
+ void seekSync (gint64 p);
+ void quit ();
+ gint64 getPosition ();
+ gint64 getDuration ();
+ int getStatus ();
+ void getVideoSize (int *width, int *height);
+ void endOfData ();
+ gchar *getMetadata (gchar * tag);
+ void getCaptureFrame (guint8 ** data);
+ void getAlbumArt (guint8 ** data, guint64 * size);
+ void getFrameRate (int *framerate);
+
+ static
+ void cb_newpad (GstElement * mPlayBin, GstPad * pad,
+ GstMetadataRetrieverDriver * data);
+
+ private:
+ GstElement * mPipeline;
+ GstElement *mAppsrc;
+ GstElement *mColorTransform;
+ GstElement *mScaler;
+ GstElement *mPlayBin;
+ GstElement *mAppSink;
+ GstElement *mAudioSink;
+
+ gchar *mUri;
+
+ static GstBusSyncReply bus_message (GstBus * bus, GstMessage * msg,
+ gpointer data);
+
+ GstTagList *mTag_list;
+
+ void parseMetadataInfo ();
+
+ guint64 mFdSrcOffset_min;
+ guint64 mFdSrcOffset_max;
+ guint64 mFdSrcOffset_current;
+ gint mFd;
+
+
+ static gboolean have_video_caps (GstElement * uridecodebin, GstCaps * caps);
+ static gboolean are_audio_caps (GstElement * uridecodebin, GstCaps * caps);
+ static gboolean are_video_caps (GstElement * uridecodebin, GstCaps * caps);
+
+ static gboolean autoplug_continue (GstElement * object, GstPad * pad,
+ GstCaps * caps, GstMetadataRetrieverDriver * ed);
+
+ static void source_changed_cb (GObject * obj, GParamSpec * pspec,
+ GstMetadataRetrieverDriver * ed);
+ static void need_data (GstElement * object, guint size,
+ GstMetadataRetrieverDriver * ed);
+ static gboolean seek_data (GstElement * object, guint64 offset,
+ GstMetadataRetrieverDriver * ed);
+
+ int mState;
+
+ gboolean mHaveStreamVideo;
+
+ GstBuffer *mAlbumArt;
+
+ void init_gstreamer ();
+ GstClockTime mGst_info_start_time;
+ static void debug_log (GstDebugCategory * category, GstDebugLevel level,
+ const gchar * file, const gchar * function, gint line,
+ GObject * object, GstDebugMessage * message, gpointer data);
+
+ int mMode;
+ };
+
+}; // namespace android
+#endif /* ----- #ifndef GST_METADATARETRIEVER_DRIVER_INC ----- */
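
For orientation, the class declared above is driven in the order setup() / setDataSource() / prepareSync() before any of the getters are meaningful. A hedged usage sketch against these declarations (the mode value, file path and freeing of the returned tag string are illustrative assumptions; the album-art pointer stays owned by the driver's internal GstBuffer, as getAlbumArt() in the .cpp above shows):

  android::GstMetadataRetrieverDriver retriever;

  retriever.setup (0);                          /* mode constant is not shown in this patch */
  retriever.setDataSource ("/sdcard/clip.mp4"); /* illustrative path */
  retriever.prepareSync ();

  gchar *title = retriever.getMetadata ((gchar *) GST_TAG_TITLE);
  if (title) {
    LOGV ("title: %s", title);
    g_free (title);                             /* getMetadata() hands back a newly allocated string */
  }

  guint8 *art = NULL;
  guint64 art_size = 0;
  retriever.getAlbumArt (&art, &art_size);      /* points into the driver-owned GstBuffer */

  retriever.quit ();
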
diff --git a/gstplayer/GstPlayer.cpp b/gstplayer/GstPlayer.cpp
index f20562e..f49ad25 100644
--- a/gstplayer/GstPlayer.cpp
+++ b/gstplayer/GstPlayer.cpp
@@ -26,186 +26,189 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#define UNUSED(x) (void)x
-namespace android {
-
-GstPlayer::GstPlayer()
-{
- LOGV("GstPlayer constructor");
- mGstDriver = new GstDriver((MediaPlayerInterface*)this);
- mSetupDone = false;
- checkSetup();
- //LOGV("GstPlayer constructor exit");
-}
-
-void GstPlayer::checkSetup()
-{
- if(mSetupDone) {
- return;
- }
- mSetupDone = true;
- LOGV("GstPlayer send GstDriver Setup");
- mGstDriver->setup();
-}
-status_t GstPlayer::initCheck()
-{
- //LOGV("GstPlayer initCheck");
- return OK;
-}
-
-GstPlayer::~GstPlayer()
-{
- LOGV("GstPlayer destructor");
- mGstDriver->quit();
- if(mGstDriver) {
- delete mGstDriver;
- }
- mGstDriver = NULL;
-}
-
-status_t GstPlayer::setSigBusHandlerStructTLSKey(pthread_key_t key)
-{
- //LOGV("GstPlayer setSigBusHandlerStructTLSKey");
- UNUSED(key);
- return OK;
-}
-
-status_t GstPlayer::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
- LOGI("setDataSource('%s')", url);
- return setDataSource(url);
-}
-
-status_t GstPlayer::setDataSource(const char *url)
-{
- LOGI("GstPlayer setDataSource(%s)", url);
- checkSetup();
- mGstDriver->setDataSource(url);
- mGstDriver->setAudioSink(mAudioSink);
- return OK;
-}
-
-status_t GstPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
-
- LOGI("GstPlayer setDataSource(%d, %lld, %lld)", fd, offset, length);
- checkSetup();
-
- mGstDriver->setFdDataSource(fd, offset, length);
- mGstDriver->setAudioSink(mAudioSink);
- return OK;
-}
-
-status_t GstPlayer::setVideoSurface(const sp<ISurface>& surface)
-{
- LOGV("GstPlayer setVideoSurface(%p)", surface.get());
- checkSetup();
- mGstDriver->setVideoSurface(surface);
- return OK;
-}
-
-status_t GstPlayer::prepare()
-{
- // prepare
- LOGV("GstPlayer prepare");
- checkSetup();
- mGstDriver->prepareSync();
- {
- int width;
- int height;
- mGstDriver->getVideoSize(&width, &height);
- sendEvent(MEDIA_SET_VIDEO_SIZE, width, height);
- }
- return OK;
-}
-
-status_t GstPlayer::prepareAsync()
-{
- LOGV("GstPlayer prepareAsync");
- checkSetup();
- // No need to run a sequence of commands.
- // The only command needed to run is PLAYER_PREPARE.
- mGstDriver->prepareAsync();
- return OK;
-}
-
-status_t GstPlayer::start()
-{
- LOGV("GstPlayer start");
- mGstDriver->start();
- return OK;
-}
-
-status_t GstPlayer::stop()
-{
- LOGV("GstPlayer stop");
- mGstDriver->stop();
- return OK;
-}
-
-status_t GstPlayer::pause()
-{
- LOGV("GstPlayer pause");
- mGstDriver->pause();
- return OK;
-}
-
-bool GstPlayer::isPlaying()
-{
- int status = mGstDriver->getStatus();
- LOGI("GstPlayer %s playing", (status == GstDriver::GSTDRIVER_STATE_STARTED)? "is": "isn't");
- return (status == GstDriver::GSTDRIVER_STATE_STARTED);
-}
-
-status_t GstPlayer::getCurrentPosition(int *msec)
-{
- LOGV("GstPlayer getCurrentPosition");
- *msec = mGstDriver->getPosition();
- return OK;
-}
-
-status_t GstPlayer::getDuration(int *msec)
-{
- LOGV("GstPlayer getDuration");
- *msec = mGstDriver->getDuration();
- return OK;
-}
-
-status_t GstPlayer::seekTo(int msec)
-{
- int tmpduration;
- int seek_position = msec;
- LOGV("GstPlayer seekTo(%d)", msec);
- getDuration(&tmpduration);
- if (msec >= tmpduration)
- seek_position = tmpduration - 1000;
- mGstDriver->seek(seek_position);
- return OK;
-}
-
-status_t GstPlayer::reset()
-{
- LOGV("GstPlayer reset");
- mGstDriver->quit();
- delete mGstDriver;
-
- mGstDriver = new GstDriver((MediaPlayerInterface*)this);
- mSetupDone = false;
-
- return NO_ERROR;
-}
-
-status_t GstPlayer::setLooping(int loop)
-{
- LOGV("GstPlayer setLooping(%d)", loop);
- return OK;
-}
+namespace android
+{
+
+ GstPlayer::GstPlayer ()
+ {
+ LOGV ("GstPlayer constructor");
+ mGstDriver = new GstDriver ((MediaPlayerInterface *) this);
+ mSetupDone = false;
+ checkSetup ();
+ //LOGV("GstPlayer constructor exit");
+ }
+
+ void GstPlayer::checkSetup ()
+ {
+ if (mSetupDone) {
+ return;
+ }
+ mSetupDone = true;
+ LOGV ("GstPlayer send GstDriver Setup");
+ mGstDriver->setup ();
+ }
+ status_t GstPlayer::initCheck ()
+ {
+ //LOGV("GstPlayer initCheck");
+ return OK;
+ }
+
+ GstPlayer::~GstPlayer () {
+ LOGV ("GstPlayer destructor");
+ mGstDriver->quit ();
+ if (mGstDriver) {
+ delete mGstDriver;
+ }
+ mGstDriver = NULL;
+ }
+
+ status_t GstPlayer::setSigBusHandlerStructTLSKey (pthread_key_t key)
+ {
+ //LOGV("GstPlayer setSigBusHandlerStructTLSKey");
+ UNUSED (key);
+ return OK;
+ }
+
+ status_t GstPlayer::setDataSource (const char *url,
+ const KeyedVector < String8, String8 > *headers)
+ {
+ LOGI ("setDataSource('%s')", url);
+ return setDataSource (url);
+ }
+
+ status_t GstPlayer::setDataSource (const char *url)
+ {
+ LOGI ("GstPlayer setDataSource(%s)", url);
+ checkSetup ();
+ mGstDriver->setDataSource (url);
+ mGstDriver->setAudioSink (mAudioSink);
+ return OK;
+ }
+
+ status_t GstPlayer::setDataSource (int fd, int64_t offset, int64_t length)
+ {
+
+ LOGI ("GstPlayer setDataSource(%d, %lld, %lld)", fd, offset, length);
+ checkSetup ();
+
+ mGstDriver->setFdDataSource (fd, offset, length);
+ mGstDriver->setAudioSink (mAudioSink);
+ return OK;
+ }
+
+ status_t GstPlayer::setVideoSurface (const sp < ISurface > &surface)
+ {
+ LOGV ("GstPlayer setVideoSurface(%p)", surface.get ());
+ checkSetup ();
+ mGstDriver->setVideoSurface (surface);
+ return OK;
+ }
+
+ status_t GstPlayer::prepare ()
+ {
+ // prepare
+ LOGV ("GstPlayer prepare");
+ checkSetup ();
+ mGstDriver->prepareSync ();
+ {
+ int width;
+ int height;
+ mGstDriver->getVideoSize (&width, &height);
+ sendEvent (MEDIA_SET_VIDEO_SIZE, width, height);
+ }
+ return OK;
+ }
+
+ status_t GstPlayer::prepareAsync ()
+ {
+ LOGV ("GstPlayer prepareAsync");
+ checkSetup ();
+ // No need to run a sequence of commands.
+ // The only command needed to run is PLAYER_PREPARE.
+ mGstDriver->prepareAsync ();
+ return OK;
+ }
+
+ status_t GstPlayer::start ()
+ {
+ LOGV ("GstPlayer start");
+ mGstDriver->start ();
+ return OK;
+ }
+
+ status_t GstPlayer::stop ()
+ {
+ LOGV ("GstPlayer stop");
+ mGstDriver->stop ();
+ return OK;
+ }
+
+ status_t GstPlayer::pause ()
+ {
+ LOGV ("GstPlayer pause");
+ mGstDriver->pause ();
+ return OK;
+ }
+
+ bool GstPlayer::isPlaying ()
+ {
+ int status = mGstDriver->getStatus ();
+ LOGI ("GstPlayer %s playing",
+ (status == GstDriver::GSTDRIVER_STATE_STARTED) ? "is" : "isn't");
+ return (status == GstDriver::GSTDRIVER_STATE_STARTED);
+ }
+
+ status_t GstPlayer::getCurrentPosition (int *msec)
+ {
+ LOGV ("GstPlayer getCurrentPosition");
+ *msec = mGstDriver->getPosition ();
+ return OK;
+ }
+
+ status_t GstPlayer::getDuration (int *msec)
+ {
+ LOGV ("GstPlayer getDuration");
+ *msec = mGstDriver->getDuration ();
+ return OK;
+ }
+
+ status_t GstPlayer::seekTo (int msec)
+ {
+ int tmpduration;
+ int seek_position = msec;
+ LOGV ("GstPlayer seekTo(%d)", msec);
+ getDuration (&tmpduration);
+ if (msec >= tmpduration)
+ seek_position = tmpduration - 1000;
+ mGstDriver->seek (seek_position);
+ return OK;
+ }
+
+ status_t GstPlayer::reset ()
+ {
+ LOGV ("GstPlayer reset");
+ mGstDriver->quit ();
+ delete mGstDriver;
+
+ mGstDriver = new GstDriver ((MediaPlayerInterface *) this);
+ mSetupDone = false;
+
+ return NO_ERROR;
+ }
+
+ status_t GstPlayer::setLooping (int loop)
+ {
+ LOGV ("GstPlayer setLooping(%d)", loop);
+ return OK;
+ }
//eclair
-status_t GstPlayer::invoke(const Parcel &request, Parcel *reply)
-{
- GST_UNUSED(request)
- GST_UNUSED(reply)
- return INVALID_OPERATION;
-}
+ status_t GstPlayer::invoke (const Parcel & request, Parcel * reply)
+ {
+ GST_UNUSED (request)
+ GST_UNUSED (reply)
+ return INVALID_OPERATION;
+ }
// The Client in the MetadataPlayerService calls this method on
// the native player to retrieve all or a subset of metadata.
@@ -214,13 +217,13 @@ status_t GstPlayer::invoke(const Parcel &request, Parcel *reply)
// the known metadata should be returned.
// @param[inout] records Parcel where the player appends its metadata.
// @return OK if the call was successful.
-status_t GstPlayer::getMetadata(const SortedVector<media::Metadata::Type>& ids, Parcel *records)
-{
- if (!mSetupDone || !mGstDriver) {
- return INVALID_OPERATION;
- }
- return mGstDriver->getMetadata(ids, records);
-}
-
-}; // namespace android
-
+ status_t GstPlayer::getMetadata (const SortedVector < media::Metadata::Type >
+ &ids, Parcel * records)
+ {
+ if (!mSetupDone || !mGstDriver) {
+ return INVALID_OPERATION;
+ }
+ return mGstDriver->getMetadata (ids, records);
+ }
+
+}; // namespace android
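
One behavioural detail worth calling out in GstPlayer.cpp: seekTo() never forwards a position at or beyond the reported duration, it backs the target off by one second first. A standalone restatement of that clamp (the extra lower bound for clips shorter than one second is an assumption, not something this patch adds):

static int
clamp_seek_target (int msec, int duration_msec)
{
  int pos = msec;
  if (msec >= duration_msec)
    pos = duration_msec - 1000;   /* same 1 s back-off as GstPlayer::seekTo() */
  if (pos < 0)
    pos = 0;                      /* assumed guard; seekTo() itself has no such check */
  return pos;
}
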
diff --git a/gstplayer/GstPlayer.h b/gstplayer/GstPlayer.h
index 930a467..18b29ed 100644
--- a/gstplayer/GstPlayer.h
+++ b/gstplayer/GstPlayer.h
@@ -23,69 +23,75 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#include <media/MediaPlayerInterface.h>
#include "GstDriver.h"
-namespace android {
-class GstPlayer : public MediaPlayerInterface
+namespace android
{
-public:
- GstPlayer();
- virtual ~GstPlayer();
+ class GstPlayer:public MediaPlayerInterface
+ {
+ public:
+ GstPlayer ();
+ virtual ~ GstPlayer ();
- virtual status_t initCheck();
- virtual status_t setSigBusHandlerStructTLSKey(pthread_key_t key);
- virtual status_t setDataSource(const char *url);
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
- virtual status_t prepare();
- virtual status_t prepareAsync();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t pause();
- virtual bool isPlaying();
- virtual status_t seekTo(int msec);
- virtual status_t getCurrentPosition(int *msec);
- virtual status_t getDuration(int *msec);
- virtual status_t reset();
- virtual status_t setLooping(int loop);
- virtual player_type playerType() { return GST_PLAYER; }
+ virtual status_t initCheck ();
+ virtual status_t setSigBusHandlerStructTLSKey (pthread_key_t key);
+ virtual status_t setDataSource (const char *url);
+ virtual status_t setDataSource (int fd, int64_t offset, int64_t length);
+ virtual status_t setVideoSurface (const sp < ISurface > &surface);
+ virtual status_t prepare ();
+ virtual status_t prepareAsync ();
+ virtual status_t start ();
+ virtual status_t stop ();
+ virtual status_t pause ();
+ virtual bool isPlaying ();
+ virtual status_t seekTo (int msec);
+ virtual status_t getCurrentPosition (int *msec);
+ virtual status_t getDuration (int *msec);
+ virtual status_t reset ();
+ virtual status_t setLooping (int loop);
+ virtual player_type playerType ()
+ {
+ return GST_PLAYER;
+ }
#ifdef ANDROID_1_6
- virtual status_t getVideoWidth(int *w);
- virtual status_t getVideoHeight(int *h);
- virtual status_t setSigBusHandlerStructTLSKey(pthread_key_t key);
-#endif
-
- //new on eclair
- // Invoke a generic method on the player by using opaque parcels
+ virtual status_t getVideoWidth (int *w);
+ virtual status_t getVideoHeight (int *h);
+ virtual status_t setSigBusHandlerStructTLSKey (pthread_key_t key);
+#endif
+
+ //new on eclair
+ // Invoke a generic method on the player by using opaque parcels
// for the request and reply.
//
// @param request Parcel that is positioned at the start of the
// data sent by the java layer.
// @param[out] reply Parcel to hold the reply data. Cannot be null.
// @return OK if the call was successful.
- virtual status_t invoke(const Parcel& request, Parcel *reply);
- // The Client in the MetadataPlayerService calls this method on
+ virtual status_t invoke (const Parcel & request, Parcel * reply);
+ // The Client in the MetadataPlayerService calls this method on
// the native player to retrieve all or a subset of metadata.
//
// @param ids SortedList of metadata ID to be fetch. If empty, all
// the known metadata should be returned.
// @param[inout] records Parcel where the player appends its metadata.
// @return OK if the call was successful.
- virtual status_t getMetadata(const SortedVector<media::Metadata::Type>& ids,
- Parcel *records);
+ virtual status_t getMetadata (const SortedVector < media::Metadata::Type >
+ &ids, Parcel * records);
// make available to GstDriver
- void sendEvent(int msg, int ext1=0, int ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); }
- virtual status_t setDataSource(const char *url, const KeyedVector<String8, String8> *headers);
-private:
+ void sendEvent (int msg, int ext1 = 0, int ext2 = 0) {
+ MediaPlayerBase::sendEvent (msg, ext1, ext2);
+ }
+ virtual status_t setDataSource (const char *url,
+ const KeyedVector < String8, String8 > *headers);
+ private:
//static void do_nothing(status_t s, void *cookie, bool cancelled) { if(s) { /* warning removal*/ } if(cookie) { /* warning removal*/ } if(cancelled) { /* warning removal*/ } }
- void checkSetup();
-
- GstDriver* mGstDriver;
- bool mSetupDone;
-};
+ void checkSetup ();
-}; // namespace android
+ GstDriver *mGstDriver;
+ bool mSetupDone;
+ };
-#endif /* ----- #ifndef GSTPLAYER_INC ----- */
+}; // namespace android
+#endif /* ----- #ifndef GSTPLAYER_INC ----- */
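
GstPlayer.h makes MediaPlayerBase::sendEvent() publicly reachable specifically so GstDriver can post notifications upward; GstPlayer::prepare() already uses that path for MEDIA_SET_VIDEO_SIZE. A sketch of how the driver side would typically raise an event through the pointer it receives in its constructor (storing it as a GstPlayer* and the choice of MEDIA_PLAYBACK_COMPLETE are assumptions; the driver's internals are not part of this section):

  /* inside GstDriver, assuming the constructor argument was kept as mPlayer */
  GstPlayer *player = (GstPlayer *) mPlayer;          /* assumed back-cast */
  if (player)
    player->sendEvent (MEDIA_PLAYBACK_COMPLETE);      /* e.g. when the bus reports EOS */
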
diff --git a/gstplayer/GsticbAndroid.cpp b/gstplayer/GsticbAndroid.cpp
index 7b8c274..012828f 100644
--- a/gstplayer/GsticbAndroid.cpp
+++ b/gstplayer/GsticbAndroid.cpp
@@ -23,11 +23,12 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
/* Object header */
#include "GsticbAndroidVideoSink.h"
-
-static gboolean plugin_init (GstPlugin * plugin)
+
+static gboolean
+plugin_init (GstPlugin * plugin)
{
gboolean ret = TRUE;
-
+
ret &= gst_icbandroidvideosink_plugin_init (plugin);
return ret;
@@ -43,5 +44,5 @@ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"icbandroid",
"Icebird android library",
- plugin_init, VERSION, "Proprietary", "libgsticbandroidvideo.so", "http://www.stericsson.com")
-
+ plugin_init, VERSION, "Proprietary", "libgsticbandroidvideo.so",
+ "http://www.stericsson.com")
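
plugin_init() in GsticbAndroid.cpp just ANDs the per-element registration helpers together, and each helper reduces to one gst_element_register() call (see gst_icbandroidvideosink_plugin_init() further down). A sketch of how a second element would be folded into the same GST_PLUGIN_DEFINE, with a hypothetical element name and GType:

static gboolean
gst_icbandroidaudiosink_plugin_init (GstPlugin * plugin)   /* hypothetical helper */
{
  return gst_element_register (plugin, "icbandroidaudiosink", GST_RANK_PRIMARY,
      GST_TYPE_ICB_ANDROID_AUDIO_SINK);                     /* hypothetical GType */
}

static gboolean
plugin_init (GstPlugin * plugin)
{
  gboolean ret = TRUE;

  ret &= gst_icbandroidvideosink_plugin_init (plugin);
  ret &= gst_icbandroidaudiosink_plugin_init (plugin);      /* the added element */
  return ret;
}
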
diff --git a/gstplayer/GsticbAndroidVideoSink.cpp b/gstplayer/GsticbAndroidVideoSink.cpp
index 6e7cd86..3a77e9e 100644
--- a/gstplayer/GsticbAndroidVideoSink.cpp
+++ b/gstplayer/GsticbAndroidVideoSink.cpp
@@ -30,8 +30,8 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* See cutils/log.h for more information */
/*#define LOG_NDEBUG 0 */
-/***** Android includes *****/
-#include <binder/MemoryHeapBase.h> /* DONT_MAP_LOCALLY */
+/***** Android includes *****/
+#include <binder/MemoryHeapBase.h> /* DONT_MAP_LOCALLY */
#include <utils/Log.h>
/***** Gstreamer includes *****/
@@ -50,19 +50,16 @@ GST_DEBUG_CATEGORY_STATIC (gst_debug_gsticbandroidvideosink);
/* ElementFactory information */
static const GstElementDetails gst_icbandroidvideosink_details =
-GST_ELEMENT_DETAILS ((gchar*)"Icebird android video sink",
- (gchar*)"Sink/Video",
- (gchar*)"Icebird android video sink",
- (gchar*)"Benjamin Gaignard <Benjamin.Gaignard@stericsson.com>");
+GST_ELEMENT_DETAILS ((gchar *) "Icebird android video sink",
+ (gchar *) "Sink/Video",
+ (gchar *) "Icebird android video sink",
+ (gchar *) "Benjamin Gaignard <Benjamin.Gaignard@stericsson.com>");
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("video/x-raw-yuv, "
- "format = (fourcc) NV12,"
- "width = (int) [16, 400],"
- "height = (int) [16, 352]," /* to allow CIFp 288x352 */
- "framerate = (fraction) [0, 10000]"));
+ GST_STATIC_CAPS ("video/x-raw-yuv, " "format = (fourcc) NV12," "width = (int) [16, 400]," "height = (int) [16, 352]," /* to allow CIFp 288x352 */
+ "framerate = (fraction) [0, 10000]"));
enum
{
@@ -75,33 +72,35 @@ enum
static GstVideoSinkClass *parent_class = NULL;
-static void
-gst_icbandroidvideosink_buffers_add(GstIcbAndroidVideoSink *sink, GstBuffer *newBuf)
+static void
+gst_icbandroidvideosink_buffers_add (GstIcbAndroidVideoSink * sink,
+ GstBuffer * newBuf)
{
- LOGV("sink->mGstBuffers[%d]=%p", sink->mGstBufferIndex, sink->mGstBuffers[sink->mGstBufferIndex]);
+ LOGV ("sink->mGstBuffers[%d]=%p", sink->mGstBufferIndex,
+ sink->mGstBuffers[sink->mGstBufferIndex]);
/* check if there is an empty buffer in the array ? */
- if (sink->mGstBuffers[sink->mGstBufferIndex]){
+ if (sink->mGstBuffers[sink->mGstBufferIndex]) {
/* unref the old buffer */
- gst_buffer_unref(sink->mGstBuffers[sink->mGstBufferIndex]);
+ gst_buffer_unref (sink->mGstBuffers[sink->mGstBufferIndex]);
}
/* save & ref the new buffer */
sink->mGstBuffers[sink->mGstBufferIndex] = newBuf;
- gst_buffer_ref(sink->mGstBuffers[sink->mGstBufferIndex]);
+ gst_buffer_ref (sink->mGstBuffers[sink->mGstBufferIndex]);
/* go to the next array item (cyclic array) */
- sink->mGstBufferIndex ++;
+ sink->mGstBufferIndex++;
if (sink->mGstBufferIndex == sink->mGstBuffersCount) {
sink->mGstBufferIndex = 0;
}
}
-static void
-gst_icbandroidvideosink_buffers_clean(GstIcbAndroidVideoSink *sink)
+static void
+gst_icbandroidvideosink_buffers_clean (GstIcbAndroidVideoSink * sink)
{
- for (int i=0; i < sink->mGstBuffersCount; i++) {
+ for (int i = 0; i < sink->mGstBuffersCount; i++) {
if (sink->mGstBuffers[i]) {
- gst_buffer_unref(sink->mGstBuffers[i]);
+ gst_buffer_unref (sink->mGstBuffers[i]);
sink->mGstBuffers[i] = NULL;
}
}
@@ -111,7 +110,7 @@ gst_icbandroidvideosink_buffers_clean(GstIcbAndroidVideoSink *sink)
static gboolean
gst_icbandroidvideosink_setcaps (GstBaseSink * bsink, GstCaps * caps)
{
- return TRUE;
+ return TRUE;
}
static GstFlowReturn
@@ -120,37 +119,37 @@ gst_icbandroidvideosink_show_frame (GstBaseSink * bsink, GstBuffer * inbuf)
GstIcbAndroidVideoSink *sink = GST_ICB_ANDROID_VIDEO_SINK (bsink);
if (sink->mSurface == NULL) {
- LOGD("mSurface not yet initialized");
+ LOGD ("mSurface not yet initialized");
return GST_FLOW_OK;
}
/* Initialization */
if (!sink->mInitialized) {
- LOGV("android video sink initialization");
+ LOGV ("android video sink initialization");
int frameSize;
/* test width and height */
- GstCaps * caps = GST_BUFFER_CAPS(inbuf);
+ GstCaps *caps = GST_BUFFER_CAPS (inbuf);
GstStructure *structure = NULL;
structure = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (structure, "width", &sink->mFrameWidth) ||
+ if (!gst_structure_get_int (structure, "width", &sink->mFrameWidth) ||
!gst_structure_get_int (structure, "height", &sink->mFrameHeight)) {
- LOGE("Can't get width and height");
+ LOGE ("Can't get width and height");
sink->mFrameWidth = 240;
sink->mFrameHeight = 160;
}
- LOGV("Icebird android video sink width %d height %d",
+ LOGV ("Icebird android video sink width %d height %d",
sink->mFrameWidth, sink->mFrameHeight);
- if (GST_IS_ICBVIDEO_BUFFER(inbuf)) {
+ if (GST_IS_ICBVIDEO_BUFFER (inbuf)) {
/***** Hardware *****/
- LOGV("Hardware video sink (pmem and copybit)");
+ LOGV ("Hardware video sink (pmem and copybit)");
- video_frame_t *frame = &(GST_ICBVIDEO_BUFFER(inbuf)->frame);
- LOGV("Video frame pmem_fd %d, pmem_size %lu",
+ video_frame_t *frame = &(GST_ICBVIDEO_BUFFER (inbuf)->frame);
+ LOGV ("Video frame pmem_fd %d, pmem_size %lu",
frame->pmem_fd, frame->pmem_size);
/* Compute the "real" video size according to the woi */
@@ -161,59 +160,51 @@ gst_icbandroidvideosink_show_frame (GstBaseSink * bsink, GstBuffer * inbuf)
sink->mFrameWidth = frame->desc.dimensions.width;
sink->mFrameHeight = frame->desc.dimensions.height;
}
-
- sink->mFrameHeap = new MemoryHeapBase(frame->pmem_fd,
+
+ sink->mFrameHeap = new MemoryHeapBase (frame->pmem_fd,
frame->pmem_size, MemoryHeapBase::DONT_MAP_LOCALLY);
- if (sink->mFrameHeap->heapID() < 0) {
- LOGE("Error creating pmem heap");
+ if (sink->mFrameHeap->heapID () < 0) {
+ LOGE ("Error creating pmem heap");
return GST_FLOW_OK;
}
- LOGV("Create pmem heap");
- sink->mFrameHeap->setDevice("/dev/pmem");
- sink->mFrameHeapPmem = new MemoryHeapPmem(sink->mFrameHeap, 0);
- sink->mFrameHeapPmem->slap();
- sink->mFrameHeap.clear();
-
- LOGV("registerBuffers");
- ISurface::BufferHeap buffers(sink->mFrameWidth, sink->mFrameHeight,
- ALIGN_FRAME_WIDTH(frame->desc.dimensions.width),
- ALIGN_FRAME_HEIGHT(frame->desc.dimensions.height),
- PIXEL_FORMAT_YCbCr_420_SP,
- 0,
- 0,
- sink->mFrameHeapPmem);
- sink->mSurface->registerBuffers(buffers);
-
- }
- else
- {
+ LOGV ("Create pmem heap");
+ sink->mFrameHeap->setDevice ("/dev/pmem");
+ sink->mFrameHeapPmem = new MemoryHeapPmem (sink->mFrameHeap, 0);
+ sink->mFrameHeapPmem->slap ();
+ sink->mFrameHeap.clear ();
+
+ LOGV ("registerBuffers");
+ ISurface::BufferHeap buffers (sink->mFrameWidth, sink->mFrameHeight,
+ ALIGN_FRAME_WIDTH (frame->desc.dimensions.width),
+ ALIGN_FRAME_HEIGHT (frame->desc.dimensions.height),
+ PIXEL_FORMAT_YCbCr_420_SP, 0, 0, sink->mFrameHeapPmem);
+ sink->mSurface->registerBuffers (buffers);
+
+ } else {
/***** Software *****/
- LOGV("Software video sink (memcpy)");
+ LOGV ("Software video sink (memcpy)");
/* FIXME check if color format is RGB565! */
- frameSize = sink->mFrameWidth * sink->mFrameHeight * 2; /* w*h*rgb565 size */
+ frameSize = sink->mFrameWidth * sink->mFrameHeight * 2; /* w*h*rgb565 size */
/* create frame buffer heap and register with surfaceflinger */
- sink->mFrameHeap = new MemoryHeapBase(frameSize * sink->kBufferCount);
- if (sink->mFrameHeap->heapID() < 0) {
- LOGE("Error creating frame buffer heap");
+ sink->mFrameHeap = new MemoryHeapBase (frameSize * sink->kBufferCount);
+ if (sink->mFrameHeap->heapID () < 0) {
+ LOGE ("Error creating frame buffer heap");
return GST_FLOW_OK;
}
-
- ISurface::BufferHeap buffers(sink->mFrameWidth, sink->mFrameHeight,
- sink->mFrameWidth, sink->mFrameHeight,
- PIXEL_FORMAT_RGB_565,
- 0,
- 0,
- sink->mFrameHeap);
- sink->mSurface->registerBuffers(buffers);
+
+ ISurface::BufferHeap buffers (sink->mFrameWidth, sink->mFrameHeight,
+ sink->mFrameWidth, sink->mFrameHeight,
+ PIXEL_FORMAT_RGB_565, 0, 0, sink->mFrameHeap);
+ sink->mSurface->registerBuffers (buffers);
/* create frame buffers */
for (int i = 0; i < sink->kBufferCount; i++) {
sink->mFrameBuffers[i] = i * frameSize;
}
-
+
sink->mFrameBufferIndex = 0;
}
@@ -221,29 +212,30 @@ gst_icbandroidvideosink_show_frame (GstBaseSink * bsink, GstBuffer * inbuf)
}
/* Frame sink */
- if (GST_IS_ICBVIDEO_BUFFER(inbuf)) {
+ if (GST_IS_ICBVIDEO_BUFFER (inbuf)) {
/***** Hardware *****/
- LOGV("Hardware video sink (pmem and copybit)");
+ LOGV ("Hardware video sink (pmem and copybit)");
- video_frame_t *frame = &(GST_ICBVIDEO_BUFFER(inbuf)->frame);
+ video_frame_t *frame = &(GST_ICBVIDEO_BUFFER (inbuf)->frame);
/* Insert this video buffer in the buffers array and
- * "ref" it to postpone its recycle to give more time
- * for the UI to use the video buffer */
- gst_icbandroidvideosink_buffers_add(sink, inbuf);
+ * "ref" it to postpone its recycle to give more time
+ * for the UI to use the video buffer */
+ gst_icbandroidvideosink_buffers_add (sink, inbuf);
- LOGV("post buffer: pmem_offset=%lu, cts=%d", frame->pmem_offset, frame->cts);
- sink->mSurface->postBuffer(frame->pmem_offset);
+ LOGV ("post buffer: pmem_offset=%lu, cts=%d", frame->pmem_offset,
+ frame->cts);
+ sink->mSurface->postBuffer (frame->pmem_offset);
} else {
/***** Software *****/
- LOGV("Software video sink (memcpy)");
+ LOGV ("Software video sink (memcpy)");
- memcpy(static_cast<char*>(sink->mFrameHeap->base()) +
- sink->mFrameBuffers[sink->mFrameBufferIndex], GST_BUFFER_DATA(inbuf),
- GST_BUFFER_SIZE(inbuf));
+ memcpy (static_cast < char *>(sink->mFrameHeap->base ()) +
+ sink->mFrameBuffers[sink->mFrameBufferIndex], GST_BUFFER_DATA (inbuf),
+ GST_BUFFER_SIZE (inbuf));
- LOGV("post buffer");
- sink->mSurface->postBuffer(sink->mFrameBuffers[sink->mFrameBufferIndex]);
+ LOGV ("post buffer");
+ sink->mSurface->postBuffer (sink->mFrameBuffers[sink->mFrameBufferIndex]);
/* Prepare next buffer */
sink->mFrameBufferIndex++;
@@ -272,13 +264,14 @@ gst_icbandroidvideosink_set_property (GObject * object, guint prop_id,
GST_OBJECT_LOCK (sink);
switch (prop_id) {
- case PROP_SURFACE: {
- LOGV("Icebird Android video sink: set surface from void* to sp<ISurface>");
- ISurface * tmp_ptr = static_cast<ISurface*>(g_value_get_pointer (value));
+ case PROP_SURFACE:{
+ LOGV ("Icebird Android video sink: set surface from void* to sp<ISurface>");
+ ISurface *tmp_ptr =
+ static_cast < ISurface * >(g_value_get_pointer (value));
sink->mSurface = tmp_ptr;
break;
}
-
+
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -319,29 +312,29 @@ static void
gst_icbandroidvideosink_finalize (GObject * object)
{
GstIcbAndroidVideoSink *sink = GST_ICB_ANDROID_VIDEO_SINK (object);
-
+
sink->mInitialized = FALSE;
/* clean buffers list */
- gst_icbandroidvideosink_buffers_clean(sink);
+ gst_icbandroidvideosink_buffers_clean (sink);
- if(sink->mSurface != NULL ) {
- if (sink->mSurface.get()) {
- LOGV("unregisterBuffers");
- sink->mSurface->unregisterBuffers();
- sink->mSurface.clear();
+ if (sink->mSurface != NULL) {
+ if (sink->mSurface.get ()) {
+ LOGV ("unregisterBuffers");
+ sink->mSurface->unregisterBuffers ();
+ sink->mSurface.clear ();
}
/* free frame buffers */
- LOGV("free frame buffers");
- for (int i = 0; i < sink->kBufferCount; i++)
+ LOGV ("free frame buffers");
+ for (int i = 0; i < sink->kBufferCount; i++)
sink->mFrameBuffers[i] = 0;
/* free heaps */
- LOGV("free mFrameHeap");
- sink->mFrameHeap.clear();
- LOGV("free mFrameHeapPmem");
- sink->mFrameHeapPmem.clear();
+ LOGV ("free mFrameHeap");
+ sink->mFrameHeap.clear ();
+ LOGV ("free mFrameHeapPmem");
+ sink->mFrameHeapPmem.clear ();
}
@@ -351,7 +344,8 @@ gst_icbandroidvideosink_finalize (GObject * object)
static gboolean
gst_icbandroidvideosink_sink_event (GstPad * pad, GstEvent * event)
{
- GstIcbAndroidVideoSink *sink = (GstIcbAndroidVideoSink *) (GST_OBJECT_PARENT (pad));
+ GstIcbAndroidVideoSink *sink =
+ (GstIcbAndroidVideoSink *) (GST_OBJECT_PARENT (pad));
GstBaseSinkClass *bclass;
GstBaseSink *bsink;
GstPadEventFunction event_base_sink;
@@ -360,12 +354,12 @@ gst_icbandroidvideosink_sink_event (GstPad * pad, GstEvent * event)
bsink = GST_BASE_SINK (gst_pad_get_parent (pad));
bclass = GST_BASE_SINK_GET_CLASS (bsink);
- event_base_sink = GST_PAD_EVENTFUNC (bsink->sinkpad);
+ event_base_sink = GST_PAD_EVENTFUNC (bsink->sinkpad);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
/* flush pending ref-ed buffers */
- gst_icbandroidvideosink_buffers_clean(sink);
+ gst_icbandroidvideosink_buffers_clean (sink);
break;
default:
@@ -375,7 +369,7 @@ gst_icbandroidvideosink_sink_event (GstPad * pad, GstEvent * event)
/* call base sink (we just add some
specific actions and wanted
to keep base sink way...) */
- sink->bsink_event(pad, event);
+ sink->bsink_event (pad, event);
gst_object_unref (bsink);
@@ -400,17 +394,17 @@ gst_icbandroidvideosink_init (GstIcbAndroidVideoSink * sink)
sink->mSurface = NULL;
/* basesink event callback surcharge
- to intercept FLUSH event in order
- to flush pending ref-ed buffers when
- seeking */
- sink->bsink_event = GST_PAD_EVENTFUNC (bsink->sinkpad);
+ to intercept FLUSH event in order
+ to flush pending ref-ed buffers when
+ seeking */
+ sink->bsink_event = GST_PAD_EVENTFUNC (bsink->sinkpad);
gst_pad_set_event_function (bsink->sinkpad,
GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_sink_event));
/* initialize buffers array */
sink->mGstBufferIndex = 0;
- for (int i = 0; i < sink->mGstBuffersCount; i++)
- sink->mGstBuffers[i] = 0;
+ for (int i = 0; i < sink->mGstBuffersCount; i++)
+ sink->mGstBuffers[i] = 0;
}
static void
@@ -421,7 +415,8 @@ gst_icbandroidvideosink_base_init (gpointer g_class)
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
- gst_element_class_set_details (element_class, &gst_icbandroidvideosink_details);
+ gst_element_class_set_details (element_class,
+ &gst_icbandroidvideosink_details);
}
static void
@@ -437,9 +432,12 @@ gst_icbandroidvideosink_class_init (GstIcbAndroidVideoSinkClass * klass)
gobject_class->get_property = gst_icbandroidvideosink_get_property;
gobject_class->finalize = gst_icbandroidvideosink_finalize;
- gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_setcaps);
- gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_show_frame);
- gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_show_frame);
+ gstbasesink_class->set_caps =
+ GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_setcaps);
+ gstbasesink_class->render =
+ GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_show_frame);
+ gstbasesink_class->preroll =
+ GST_DEBUG_FUNCPTR (gst_icbandroidvideosink_show_frame);
/* install properties */
@@ -478,8 +476,8 @@ gst_icbandroidvideosink_get_type (void)
icbandroidvideosink_type =
g_type_register_static (GST_TYPE_VIDEO_SINK, "icbandroidvideosink",
- &icbandroidvideosink_info, (GTypeFlags)0);
- GST_DEBUG_CATEGORY_INIT (gst_debug_gsticbandroidvideosink,
+ &icbandroidvideosink_info, (GTypeFlags) 0);
+ GST_DEBUG_CATEGORY_INIT (gst_debug_gsticbandroidvideosink,
"icbandroidvideosink", 0, "Icebird android video sink");
}
@@ -491,7 +489,7 @@ gboolean
gst_icbandroidvideosink_plugin_init (GstPlugin * plugin)
{
if (!gst_element_register (plugin, "icbandroidvideosink", GST_RANK_PRIMARY,
- GST_TYPE_ICB_ANDROID_VIDEO_SINK))
+ GST_TYPE_ICB_ANDROID_VIDEO_SINK))
return FALSE;
return TRUE;
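
The mGstBuffers handling above is the interesting part of the sink: show_frame() keeps an extra ref on the last mGstBuffersCount pmem-backed buffers so SurfaceFlinger can still scan them out after render() has returned, and the ring is drained again on FLUSH_STOP and in finalize(). The same logic, restated as a free-standing helper (names are generic; the sink keeps the ring in mGstBuffers/mGstBufferIndex):

/* hold a ref on the newest buffer, dropping the ref on the entry it evicts */
static void
ring_push (GstBuffer ** ring, gint * index, gint count, GstBuffer * newbuf)
{
  if (ring[*index])
    gst_buffer_unref (ring[*index]);        /* oldest slot can be recycled now */
  ring[*index] = gst_buffer_ref (newbuf);   /* postpone recycling of the new frame */
  *index = (*index + 1) % count;            /* cyclic advance, as buffers_add() does */
}
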
diff --git a/gstplayer/GsticbAndroidVideoSink.h b/gstplayer/GsticbAndroidVideoSink.h
index 2d77d2a..4837f2a 100644
--- a/gstplayer/GsticbAndroidVideoSink.h
+++ b/gstplayer/GsticbAndroidVideoSink.h
@@ -27,16 +27,16 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#ifndef __GST_ICBANDROIDVIDEOSINK_H__
#define __GST_ICBANDROIDVIDEOSINK_H__
-/***** Gstreamer includes *****/
+/***** Gstreamer includes *****/
#include <gst/video/gstvideosink.h>
-/***** Android includes *****/
+/***** Android includes *****/
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapPmem.h>
#ifndef STECONF_ANDROID_VERSION_FROYO
#include <ui/ISurface.h>
-#else
+#else
#include <surfaceflinger/ISurface.h>
#endif
@@ -54,7 +54,6 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_ICB_ANDROID_VIDEO_SINK))
#define GST_IS_ICB_ANDROID_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_ICB_ANDROID_VIDEO_SINK))
-
typedef struct _GstIcbAndroidVideoSink GstIcbAndroidVideoSink;
typedef struct _GstIcbAndroidVideoSinkClass GstIcbAndroidVideoSinkClass;
@@ -63,25 +62,25 @@ struct _GstIcbAndroidVideoSink
/* Our element stuff */
GstVideoSink videosink;
- sp<ISurface> mSurface;
+ sp < ISurface > mSurface;
- sp<MemoryHeapBase> mFrameHeap;
- sp<MemoryHeapPmem> mFrameHeapPmem;
+ sp < MemoryHeapBase > mFrameHeap;
+ sp < MemoryHeapPmem > mFrameHeapPmem;
/* Frame buffer support */
- static const int kBufferCount = 2;
- size_t mFrameBuffers[kBufferCount];
- int mFrameBufferIndex;
+ static const int kBufferCount = 2;
+ size_t mFrameBuffers[kBufferCount];
+ int mFrameBufferIndex;
- gboolean mInitialized;
+ gboolean mInitialized;
- int mFrameWidth;
- int mFrameHeight;
+ int mFrameWidth;
+ int mFrameHeight;
// GstBuffer used to avoid buffer release while used by the UI
- int mGstBufferIndex;
- static const int mGstBuffersCount = 3;
- GstBuffer *mGstBuffers[mGstBuffersCount];
+ int mGstBufferIndex;
+ static const int mGstBuffersCount = 3;
+ GstBuffer *mGstBuffers[mGstBuffersCount];
GstPadEventFunction bsink_event;
};
@@ -95,5 +94,4 @@ GType gst_icbandroidvideosink_get_type (void);
gboolean gst_icbandroidvideosink_plugin_init (GstPlugin * plugin);
G_END_DECLS
-
#endif /*__GST_ICBANDROIDVIDEOSINK_H__*/
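
The sink takes its ISurface as a plain pointer-typed GObject property (the PROP_SURFACE case in set_property() above), so the player side can hand the surface over with an ordinary g_object_set() once it has found the element. A hedged sketch of that hand-off; the property name "surface" and the element lookup by name are assumptions, since the g_param_spec installation and the driver code are not part of this section:

  /* assumed hand-off from the driver side; only the PROP_SURFACE handling is visible here */
  GstElement *vsink =
      gst_bin_get_by_name (GST_BIN (pipeline), "icbandroidvideosink0");  /* assumed name */
  if (vsink) {
    g_object_set (G_OBJECT (vsink), "surface", (gpointer) surface.get (),
        (gchar *) NULL);
    gst_object_unref (vsink);
  }
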