source: trunk/src/3rdparty/phonon/gstreamer/mediaobject.cpp

Last change on this file was 846, checked in by Dmitry A. Kuminov, 15 years ago

trunk: Merged in qt 4.7.2 sources from branches/vendor/nokia/qt.

File size: 55.1 KB
Line 
1/* This file is part of the KDE project.
2
3 Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
4
5 This library is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Lesser General Public License as published by
7 the Free Software Foundation, either version 2.1 or 3 of the License.
8
9 This library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Lesser General Public License for more details.
13
14 You should have received a copy of the GNU Lesser General Public License
15 along with this library. If not, see <http://www.gnu.org/licenses/>.
16*/
17#include <cmath>
18#include <gst/interfaces/propertyprobe.h>
19#include <gst/pbutils/install-plugins.h>
20#include "common.h"
21#include "mediaobject.h"
22#include "videowidget.h"
23#include "message.h"
24#include "backend.h"
25#include "streamreader.h"
26#include "phononsrc.h"
27#include <QtCore>
28#include <QtCore/QTimer>
29#include <QtCore/QVector>
30#include <QtCore/QFile>
31#include <QtCore/QByteRef>
32#include <QtCore/QStringList>
33#include <QtCore/QEvent>
34#include <QApplication>
35
// Window (in ms) before end-of-stream in which aboutToFinish() is emitted.
// NOTE: the misspelling ("FINNISH") is preserved because the macro is
// referenced elsewhere in this file.
#define ABOUT_TO_FINNISH_TIME 2000
// Maximum buffering time for the audio/video queue elements. Parenthesized
// so the expression survives expansion in any surrounding context.
#define MAX_QUEUE_TIME (20 * GST_SECOND)
38
39QT_BEGIN_NAMESPACE
40
41namespace Phonon
42{
43namespace Gstreamer
44{
45
/*
 * Constructs a media object bound to the given backend.
 *
 * All members are initialized to their "nothing loaded" defaults. The
 * GStreamer pipeline is only created when the backend reports itself as
 * valid; otherwise the object goes straight into a fatal error state.
 */
MediaObject::MediaObject(Backend *backend, QObject *parent)
        : QObject(parent)
        , MediaNode(backend, AudioSource | VideoSource)
        , m_resumeState(false)
        , m_oldState(Phonon::LoadingState)
        , m_oldPos(0)
        , m_state(Phonon::LoadingState)
        , m_pendingState(Phonon::LoadingState)
        , m_tickTimer(new QTimer(this))
        , m_prefinishMark(0)
        , m_transitionTime(0)
        , m_isStream(false)
        , m_posAtSeek(-1)
        , m_prefinishMarkReachedNotEmitted(true)
        , m_aboutToFinishEmitted(false)
        , m_loading(false)
        , m_capsHandler(0)
        , m_datasource(0)
        , m_decodebin(0)
        , m_audioPipe(0)
        , m_videoPipe(0)
        , m_totalTime(-1)
        , m_bufferPercent(0)
        , m_hasVideo(false)
        , m_videoStreamFound(false)
        , m_hasAudio(false)
        , m_seekable(false)
        , m_atEndOfStream(false)
        , m_atStartOfStream(false)
        , m_error(Phonon::NoError)
        , m_pipeline(0)
        , m_audioGraph(0)
        , m_videoGraph(0)
        , m_previousTickTime(-1)
        , m_resetNeeded(false)
        , m_autoplayTitles(true)
        , m_availableTitles(0)
        , m_currentTitle(1)
        , m_pendingTitle(1)
{
    // Register the types used in queued cross-thread invocations
    // (GStreamer callbacks arrive on streaming threads).
    qRegisterMetaType<GstCaps*>("GstCaps*");
    qRegisterMetaType<State>("State");

    // Give every media object a unique, human-readable name for log output.
    static int count = 0;
    m_name = "MediaObject" + QString::number(count++);

    if (!m_backend->isValid()) {
        setError(tr("Cannot start playback. \n\nCheck your GStreamer installation and make sure you "
                    "\nhave libgstreamer-plugins-base installed."), Phonon::FatalError);
    } else {
        m_root = this;
        createPipeline();
        m_backend->addBusWatcher(this);
        connect(m_tickTimer, SIGNAL(timeout()), SLOT(emitTick()));
    }
    connect(this, SIGNAL(stateChanged(Phonon::State, Phonon::State)),
            this, SLOT(notifyStateChange(Phonon::State, Phonon::State)));

}
105
106MediaObject::~MediaObject()
107{
108 m_backend->removeBusWatcher(this);
109 if (m_pipeline) {
110 gst_element_set_state(m_pipeline, GST_STATE_NULL);
111 gst_object_unref(m_pipeline);
112 }
113 if (m_audioGraph) {
114 gst_element_set_state(m_audioGraph, GST_STATE_NULL);
115 gst_object_unref(m_audioGraph);
116 }
117 if (m_videoGraph) {
118 gst_element_set_state(m_videoGraph, GST_STATE_NULL);
119 gst_object_unref(m_videoGraph);
120 }
121}
122
123QString stateString(const Phonon::State &state)
124{
125 switch (state) {
126 case Phonon::LoadingState:
127 return QString("LoadingState");
128 case Phonon::StoppedState:
129 return QString("StoppedState");
130 case Phonon::PlayingState:
131 return QString("PlayingState");
132 case Phonon::BufferingState:
133 return QString("BufferingState");
134 case Phonon::PausedState:
135 return QString("PausedState");
136 case Phonon::ErrorState:
137 return QString("ErrorState");
138 }
139 return QString();
140}
141
/*
 * Completion callback for gst_install_plugins_async().
 * Intentionally a no-op: installation results are not acted upon yet.
 */
void
pluginInstallationDone( GstInstallPluginsReturn res, gpointer userData )
{
    // Nothing inside yet
    Q_UNUSED(res);
    Q_UNUSED(userData);
}
149
150void MediaObject::saveState()
151{
152 //Only first resumeState is respected
153 if (m_resumeState)
154 return;
155
156 if (m_pendingState == Phonon::PlayingState || m_pendingState == Phonon::PausedState) {
157 m_resumeState = true;
158 m_oldState = m_pendingState;
159 m_oldPos = getPipelinePos();
160 }
161}
162
/*
 * Re-applies the state captured by saveState(). The request is queued so it
 * executes after any state transitions currently in flight.
 */
void MediaObject::resumeState()
{
    if (m_resumeState)
        QMetaObject::invokeMethod(this, "setState", Qt::QueuedConnection, Q_ARG(State, m_oldState));
}
168
169void MediaObject::newPadAvailable (GstPad *pad)
170{
171 GstCaps *caps;
172 GstStructure *str;
173 caps = gst_pad_get_caps (pad);
174 if (caps) {
175 str = gst_caps_get_structure (caps, 0);
176 QString mediaString(gst_structure_get_name (str));
177
178 if (mediaString.startsWith("video")) {
179 connectVideo(pad);
180 } else if (mediaString.startsWith("audio")) {
181 connectAudio(pad);
182 } else {
183 m_backend->logMessage("Could not connect pad", Backend::Warning);
184 }
185 gst_caps_unref (caps);
186 }
187}
188
189void MediaObject::cb_newpad (GstElement *decodebin,
190 GstPad *pad,
191 gboolean last,
192 gpointer data)
193{
194 Q_UNUSED(decodebin);
195 Q_UNUSED(pad);
196 Q_UNUSED(last);
197 Q_UNUSED(data);
198
199 MediaObject *media = static_cast<MediaObject*>(data);
200 Q_ASSERT(media);
201 media->newPadAvailable(pad);
202}
203
204void MediaObject::noMorePadsAvailable ()
205{
206 if (m_missingCodecs.size() > 0) {
207 bool canPlay = (m_hasAudio || m_videoStreamFound);
208 Phonon::ErrorType error = canPlay ? Phonon::NormalError : Phonon::FatalError;
209#ifdef PLUGIN_INSTALL_API
210 GstInstallPluginsContext *ctx = gst_install_plugins_context_new ();
211 gchar *details[2];
212 details[0] = m_missingCodecs[0].toLocal8Bit().data();
213 details[1] = NULL;
214 GstInstallPluginsReturn status;
215
216 status = gst_install_plugins_async( details, ctx, pluginInstallationDone, NULL );
217 gst_install_plugins_context_free ( ctx );
218
219 if ( status != GST_INSTALL_PLUGINS_STARTED_OK )
220 {
221 if( status == GST_INSTALL_PLUGINS_HELPER_MISSING )
222 setError(tr("Missing codec helper script assistant."), Phonon::FatalError );
223 else
224 setError(tr("Plugin codec installation failed for codec: %0")
225 .arg(m_missingCodecs[0].split("|")[3]), error);
226 }
227 m_missingCodecs.clear();
228#else
229 QString codecs = m_missingCodecs.join(", ");
230
231 if (error == Phonon::NormalError && m_hasVideo && !m_videoStreamFound) {
232 m_hasVideo = false;
233 emit hasVideoChanged(false);
234 }
235 setError(tr("A required codec is missing. You need to install the following codec(s) to play this content: %0").arg(codecs), error);
236 m_missingCodecs.clear();
237#endif
238 }
239}
240
241void MediaObject::cb_no_more_pads (GstElement * decodebin, gpointer data)
242{
243 Q_UNUSED(decodebin);
244 MediaObject *media = static_cast<MediaObject*>(data);
245 Q_ASSERT(media);
246 QMetaObject::invokeMethod(media, "noMorePadsAvailable", Qt::QueuedConnection);
247}
248
249typedef void (*Ptr_gst_pb_utils_init)();
250typedef gchar* (*Ptr_gst_pb_utils_get_codec_description)(const GstCaps *);
251
252void MediaObject::cb_unknown_type (GstElement *decodebin, GstPad *pad, GstCaps *caps, gpointer data)
253{
254 Q_UNUSED(decodebin);
255 Q_UNUSED(pad);
256 MediaObject *media = static_cast<MediaObject*>(data);
257 Q_ASSERT(media);
258
259 QString value = "unknown codec";
260
261 // These functions require GStreamer > 0.10.12
262#ifndef QT_NO_LIBRARY
263 static Ptr_gst_pb_utils_init p_gst_pb_utils_init = 0;
264 static Ptr_gst_pb_utils_get_codec_description p_gst_pb_utils_get_codec_description = 0;
265 if (!p_gst_pb_utils_init) {
266 p_gst_pb_utils_init = (Ptr_gst_pb_utils_init)QLibrary::resolve(QLatin1String("gstpbutils-0.10"), 0, "gst_pb_utils_init");
267 p_gst_pb_utils_get_codec_description = (Ptr_gst_pb_utils_get_codec_description)QLibrary::resolve(QLatin1String("gstpbutils-0.10"), 0, "gst_pb_utils_get_codec_description");
268 if (p_gst_pb_utils_init)
269 p_gst_pb_utils_init();
270 }
271 if (p_gst_pb_utils_get_codec_description) {
272 gchar *codecName = NULL;
273 codecName = p_gst_pb_utils_get_codec_description (caps);
274 value = QString::fromUtf8(codecName);
275 g_free (codecName);
276 } else
277#endif //QT_NO_LIBRARY
278 {
279 // For GStreamer versions < 0.10.12
280 GstStructure *str = gst_caps_get_structure (caps, 0);
281 value = QString::fromUtf8(gst_structure_get_name (str));
282
283 }
284
285#ifdef PLUGIN_INSTALL_API
286 QString plugins = QString("gstreamer|0.10|%0|%1|decoder-%2")
287 .arg( qApp->applicationName() )
288 .arg( value )
289 .arg( QString::fromUtf8(gst_caps_to_string (caps) ) );
290 media->addMissingCodecName( plugins );
291#else
292 media->addMissingCodecName( value );
293#endif
294}
295
/*
 * "notify::caps" handler for the connected video pad. Runs on a GStreamer
 * streaming thread, so the caps are forwarded to setVideoCaps() through a
 * queued invocation; the caps reference is released there.
 */
static void notifyVideoCaps(GObject *obj, GParamSpec *, gpointer data)
{
    GstPad *pad = GST_PAD(obj);
    GstCaps *caps = gst_pad_get_caps (pad);
    Q_ASSERT(caps);
    MediaObject *media = static_cast<MediaObject*>(data);

    // We do not want any more notifications until the source changes
    g_signal_handler_disconnect(pad, media->capsHandler());

    // setVideoCaps calls loadingComplete(), meaning we cannot call it from
    // the streaming thread
    QMetaObject::invokeMethod(media, "setVideoCaps", Qt::QueuedConnection, Q_ARG(GstCaps *, caps));
}
310
311void MediaObject::setVideoCaps(GstCaps *caps)
312{
313 GstStructure *str;
314 gint width, height;
315
316 if ((str = gst_caps_get_structure (caps, 0))) {
317 if (gst_structure_get_int (str, "width", &width) && gst_structure_get_int (str, "height", &height)) {
318 gint aspectNum = 0;
319 gint aspectDenum = 0;
320 if (gst_structure_get_fraction(str, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
321 if (aspectDenum > 0)
322 width = width*aspectNum/aspectDenum;
323 }
324 // Let child nodes know about our new video size
325 QSize size(width, height);
326 MediaNodeEvent event(MediaNodeEvent::VideoSizeChanged, &size);
327 notify(&event);
328 }
329 }
330 gst_caps_unref(caps);
331}
332
333// Adds an element to the pipeline if not previously added
334bool MediaObject::addToPipeline(GstElement *elem)
335{
336 bool success = true;
337 if (!GST_ELEMENT_PARENT(elem)) { // If not already in pipeline
338 success = gst_bin_add(GST_BIN(m_pipeline), elem);
339 }
340 return success;
341}
342
/*
 * Links a decoded video pad into the video graph, brings the graph up to
 * the pipeline's state and installs the caps notification used to learn
 * the video size.
 */
void MediaObject::connectVideo(GstPad *pad)
{
    GstState currentState = GST_STATE(m_pipeline);
    if (addToPipeline(m_videoGraph)) {
        GstPad *videopad = gst_element_get_pad (m_videoGraph, "sink");
        if (!GST_PAD_IS_LINKED (videopad) && (gst_pad_link (pad, videopad) == GST_PAD_LINK_OK)) {
            // Follow the pipeline: play if it plays, otherwise hold in paused.
            gst_element_set_state(m_videoGraph, currentState == GST_STATE_PLAYING ? GST_STATE_PLAYING : GST_STATE_PAUSED);
            m_videoStreamFound = true;
            m_backend->logMessage("Video track connected", Backend::Info, this);
            // Note that the notify::caps _must_ be installed after linking to work with Dapper
            m_capsHandler = g_signal_connect(pad, "notify::caps", G_CALLBACK(notifyVideoCaps), this);

            // During loading, hasVideoChanged is emitted by loadingComplete()
            // instead, so only emit here for mid-playback discoveries.
            if (!m_loading && !m_hasVideo) {
                m_hasVideo = m_videoStreamFound;
                emit hasVideoChanged(m_hasVideo);
            }
        }
        gst_object_unref (videopad);
    } else {
        m_backend->logMessage("The video stream could not be plugged.", Backend::Info, this);
    }
}
365
366void MediaObject::connectAudio(GstPad *pad)
367{
368 GstState currentState = GST_STATE(m_pipeline);
369 if (addToPipeline(m_audioGraph)) {
370 GstPad *audiopad = gst_element_get_pad (m_audioGraph, "sink");
371 if (!GST_PAD_IS_LINKED (audiopad) && (gst_pad_link (pad, audiopad)==GST_PAD_LINK_OK)) {
372 gst_element_set_state(m_audioGraph, currentState == GST_STATE_PLAYING ? GST_STATE_PLAYING : GST_STATE_PAUSED);
373 m_hasAudio = true;
374 m_backend->logMessage("Audio track connected", Backend::Info, this);
375 }
376 gst_object_unref (audiopad);
377 } else {
378 m_backend->logMessage("The audio stream could not be plugged.", Backend::Info, this);
379 }
380}
381
/*
 * "pad-added" handler for sources with dynamic pads (e.g. rtspsrc): links
 * the newly created source pad to the decodebin sink pad that was passed
 * as user data when the handler was connected in createPipefromURL().
 */
void MediaObject::cb_pad_added(GstElement *decodebin,
                               GstPad *pad,
                               gpointer data)
{
    Q_UNUSED(decodebin);
    GstPad *decodepad = static_cast<GstPad*>(data);
    gst_pad_link (pad, decodepad);
    //gst_object_unref (decodepad);
}
391
392/**
393 * Create a media source from a given URL.
394 *
395 * returns true if successful
396 */
397bool MediaObject::createPipefromURL(const QUrl &url)
398{
399 // Remove any existing data source
400 if (m_datasource) {
401 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
402 // m_pipeline has the only ref to datasource
403 m_datasource = 0;
404 }
405
406 // Verify that the uri can be parsed
407 if (!url.isValid()) {
408 m_backend->logMessage(QString("%1 is not a valid URI").arg(url.toString()));
409 return false;
410 }
411
412 // Create a new datasource based on the input URL
413 // add the 'file' scheme if it's missing; the double '/' is needed!
414 QByteArray encoded_cstr_url = (url.scheme() == QLatin1String("") ?
415 "file://" + url.toEncoded() :
416 url.toEncoded());
417 m_datasource = gst_element_make_from_uri(GST_URI_SRC, encoded_cstr_url.constData(), (const char*)NULL);
418 if (!m_datasource)
419 return false;
420
421 // Set the device for MediaSource::Disc
422 if (m_source.type() == MediaSource::Disc) {
423
424 if (g_object_class_find_property (G_OBJECT_GET_CLASS (m_datasource), "device")) {
425 QByteArray mediaDevice = QFile::encodeName(m_source.deviceName());
426 if (!mediaDevice.isEmpty())
427 g_object_set (G_OBJECT (m_datasource), "device", mediaDevice.constData(), (const char*)NULL);
428 }
429
430 // Also Set optical disc speed to 2X for Audio CD
431 if (m_source.discType() == Phonon::Cd
432 && (g_object_class_find_property (G_OBJECT_GET_CLASS (m_datasource), "read-speed"))) {
433 g_object_set (G_OBJECT (m_datasource), "read-speed", 2, (const char*)NULL);
434 m_backend->logMessage(QString("new device speed : 2X"), Backend::Info, this);
435 }
436 }
437
438 /* make HTTP sources send extra headers so we get icecast
439 * metadata in case the stream is an icecast stream */
440 if (encoded_cstr_url.startsWith("http://")
441 && g_object_class_find_property (G_OBJECT_GET_CLASS (m_datasource), "iradio-mode")) {
442 g_object_set (m_datasource, "iradio-mode", TRUE, NULL);
443 m_isStream = true;
444 }
445
446 // Link data source into pipeline
447 gst_bin_add(GST_BIN(m_pipeline), m_datasource);
448 if (!gst_element_link(m_datasource, m_decodebin)) {
449 // For sources with dynamic pads (such as RtspSrc) we need to connect dynamically
450 GstPad *decodepad = gst_element_get_pad (m_decodebin, "sink");
451 g_signal_connect (m_datasource, "pad-added", G_CALLBACK (&cb_pad_added), decodepad);
452 }
453
454 return true;
455}
456
457/**
458 * Create a media source from a media stream
459 *
460 * returns true if successful
461 */
462bool MediaObject::createPipefromStream(const MediaSource &source)
463{
464#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
465 // Remove any existing data source
466 if (m_datasource) {
467 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
468 // m_pipeline has the only ref to datasource
469 m_datasource = 0;
470 }
471
472 m_datasource = GST_ELEMENT(g_object_new(phonon_src_get_type(), NULL));
473 if (!m_datasource)
474 return false;
475
476 StreamReader *streamReader = new StreamReader(source);
477 g_object_set (G_OBJECT (m_datasource), "iodevice", streamReader, (const char*)NULL);
478
479 // Link data source into pipeline
480 gst_bin_add(GST_BIN(m_pipeline), m_datasource);
481 if (!gst_element_link(m_datasource, m_decodebin)) {
482 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
483 return false;
484 }
485 return true;
486#else //QT_NO_PHONON_ABSTRACTMEDIASTREAM
487 Q_UNUSED(source);
488 return false;
489#endif
490}
491
/*
 * Builds the static part of the pipeline: a decodebin2 plus one bin each
 * for audio and video. Each bin contains a queue element whose sink is
 * exposed as a ghost pad, ready for newPadAvailable() to link decoded
 * streams into. Sets m_isValid when every element was created.
 */
void MediaObject::createPipeline()
{
    m_pipeline = gst_pipeline_new (NULL);
    // Take a real reference and sink the floating one so we keep ownership
    // independent of any future parent.
    gst_object_ref (GST_OBJECT (m_pipeline));
    gst_object_sink (GST_OBJECT (m_pipeline));

    m_decodebin = gst_element_factory_make ("decodebin2", NULL);
    g_signal_connect (m_decodebin, "new-decoded-pad", G_CALLBACK (&cb_newpad), this);
    g_signal_connect (m_decodebin, "unknown-type", G_CALLBACK (&cb_unknown_type), this);
    g_signal_connect (m_decodebin, "no-more-pads", G_CALLBACK (&cb_no_more_pads), this);

    gst_bin_add(GST_BIN(m_pipeline), m_decodebin);

    // Create a bin to contain the gst elements for this medianode

    // Set up audio graph
    m_audioGraph = gst_bin_new(NULL);
    gst_object_ref (GST_OBJECT (m_audioGraph));
    gst_object_sink (GST_OBJECT (m_audioGraph));

    // Note that these queues are only required for streaming content
    // And should ideally be created on demand as they will disable
    // pull-mode access. Also note that the max-size-time are increased to
    // reduce buffer overruns as these are not gracefully handled at the moment.
    m_audioPipe = gst_element_factory_make("queue", NULL);
    g_object_set(G_OBJECT(m_audioPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
    gst_bin_add(GST_BIN(m_audioGraph), m_audioPipe);
    GstPad *audiopad = gst_element_get_pad (m_audioPipe, "sink");
    // Expose the queue's sink pad on the bin via a ghost pad.
    gst_element_add_pad (m_audioGraph, gst_ghost_pad_new ("sink", audiopad));
    gst_object_unref (audiopad);

    // Set up video graph
    m_videoGraph = gst_bin_new(NULL);
    gst_object_ref (GST_OBJECT (m_videoGraph));
    gst_object_sink (GST_OBJECT (m_videoGraph));

    m_videoPipe = gst_element_factory_make("queue", NULL);
    g_object_set(G_OBJECT(m_videoPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
    gst_bin_add(GST_BIN(m_videoGraph), m_videoPipe);
    GstPad *videopad = gst_element_get_pad (m_videoPipe, "sink");
    gst_element_add_pad (m_videoGraph, gst_ghost_pad_new ("sink", videopad));
    gst_object_unref (videopad);

    if (m_pipeline && m_decodebin && m_audioGraph && m_videoGraph && m_audioPipe && m_videoPipe)
        m_isValid = true;
    else
        m_backend->logMessage("Could not create pipeline for media object", Backend::Warning);
}
540
/**
 * !reimp
 * Returns the Phonon state currently tracked by this backend object.
 */
State MediaObject::state() const
{
    return m_state;
}
548
/**
 * !reimp
 * Returns whether a video stream has been detected in the current source.
 */
bool MediaObject::hasVideo() const
{
    return m_hasVideo;
}
556
/**
 * !reimp
 * Returns the seekability reported by the pipeline (see updateSeekable()).
 */
bool MediaObject::isSeekable() const
{
    return m_seekable;
}
564
565/**
566 * !reimp
567 */
568qint64 MediaObject::currentTime() const
569{
570 if (m_resumeState)
571 return m_oldPos;
572
573 switch (state()) {
574 case Phonon::PausedState:
575 case Phonon::BufferingState:
576 case Phonon::PlayingState:
577 return getPipelinePos();
578 case Phonon::StoppedState:
579 case Phonon::LoadingState:
580 return 0;
581 case Phonon::ErrorState:
582 break;
583 }
584 return -1;
585}
586
/**
 * !reimp
 * Returns the tick interval requested via setTickInterval(), in ms.
 */
qint32 MediaObject::tickInterval() const
{
    return m_tickInterval;
}
594
595/**
596 * !reimp
597 */
598void MediaObject::setTickInterval(qint32 newTickInterval)
599{
600 m_tickInterval = newTickInterval;
601 if (m_tickInterval <= 0)
602 m_tickTimer->setInterval(50);
603 else
604 m_tickTimer->setInterval(newTickInterval);
605}
606
/**
 * !reimp
 * Requests the Playing state and discards any pending resume request.
 */
void MediaObject::play()
{
    setState(Phonon::PlayingState);
    m_resumeState = false;
}
615
/**
 * !reimp
 * Returns the message recorded by the most recent setError() call.
 */
QString MediaObject::errorString() const
{
    return m_errorString;
}
623
/**
 * !reimp
 * Returns the severity recorded by the most recent setError() call.
 */
Phonon::ErrorType MediaObject::errorType() const
{
    return m_error;
}
631
/**
 * Set the current state of the mediaObject.
 *
 * !### Note that both Playing and Paused states are set immediately
 *     This should obviously be done in response to actual gstreamer state changes
 */
void MediaObject::setState(State newstate)
{
    if (!isValid())
        return;

    if (m_state == newstate)
        return;

    if (m_loading) {
        // We are still loading. The state will be requested
        // when loading has completed.
        m_pendingState = newstate;
        return;
    }

    // Near-non-blocking query of the pipeline state (1000 ns timeout).
    GstState currentState;
    gst_element_get_state (m_pipeline, &currentState, NULL, 1000);

    switch (newstate) {
    case Phonon::BufferingState:
        m_backend->logMessage("phonon state request: buffering", Backend::Info, this);
        break;

    case Phonon::PausedState:
        m_backend->logMessage("phonon state request: paused", Backend::Info, this);
        if (currentState == GST_STATE_PAUSED) {
            // Already there: just report the change.
            changeState(Phonon::PausedState);
        } else if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) != GST_STATE_CHANGE_FAILURE) {
            // Transition started; completion is reported asynchronously.
            m_pendingState = Phonon::PausedState;
        } else {
            m_backend->logMessage("phonon state request failed", Backend::Info, this);
        }
        break;

    case Phonon::StoppedState:
        m_backend->logMessage("phonon state request: Stopped", Backend::Info, this);
        if (currentState == GST_STATE_READY) {
            changeState(Phonon::StoppedState);
        } else if (gst_element_set_state(m_pipeline, GST_STATE_READY) != GST_STATE_CHANGE_FAILURE) {
            m_pendingState = Phonon::StoppedState;
        } else {
            m_backend->logMessage("phonon state request failed", Backend::Info, this);
        }
        m_atEndOfStream = false;
        break;

    case Phonon::PlayingState:
        if (m_resetNeeded) {
            // ### Note this is a workaround and it should really be gracefully
            // handled by medianode when we implement live connections.
            // This generally happens if medianodes have been connected after the MediaSource was set
            // Note that a side-effect of this is that we resend all meta data.
            gst_element_set_state(m_pipeline, GST_STATE_NULL);
            m_resetNeeded = false;
            // Send a source change so the X11 renderer
            // will re-set the overlay
            MediaNodeEvent event(MediaNodeEvent::SourceChanged);
            notify(&event);
        }
        m_backend->logMessage("phonon state request: Playing", Backend::Info, this);
        if (m_atEndOfStream) {
            // Nothing left to play; ignore the request.
            m_backend->logMessage("EOS already reached", Backend::Info, this);
        } else if (currentState == GST_STATE_PLAYING) {
            changeState(Phonon::PlayingState);
        } else if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE) {
            m_pendingState = Phonon::PlayingState;
        } else {
            m_backend->logMessage("phonon state request failed", Backend::Info, this);
        }
        break;

    case Phonon::ErrorState:
        m_backend->logMessage("phonon state request : Error", Backend::Warning, this);
        m_backend->logMessage(QString("Last error : %0").arg(errorString()) , Backend::Warning, this);
        changeState(Phonon::ErrorState); //immediately set error state
        break;

    case Phonon::LoadingState:
        m_backend->logMessage("phonon state request: Loading", Backend::Info, this);
        changeState(Phonon::LoadingState);
        break;
    }
}
721
/*
 * Signals that the requested state has completed
 * by emitting stateChanged and updates the internal state.
 */
void MediaObject::changeState(State newstate)
{
    if (newstate == m_state)
        return;

    Phonon::State oldState = m_state;
    m_state = newstate; // m_state must be set before emitting, since
                        // Error state requires that state() will return the new value
    m_pendingState = newstate;
    emit stateChanged(newstate, oldState);

    switch (newstate) {
    case Phonon::PausedState:
        m_backend->logMessage("phonon state changed: paused", Backend::Info, this);
        break;

    case Phonon::BufferingState:
        m_backend->logMessage("phonon state changed: buffering", Backend::Info, this);
        break;

    case Phonon::PlayingState:
        m_backend->logMessage("phonon state changed: Playing", Backend::Info, this);
        break;

    case Phonon::StoppedState:
        m_backend->logMessage("phonon state changed: Stopped", Backend::Info, this);
        // We must reset the pipeline when playing again
        m_resetNeeded = true;
        m_tickTimer->stop();
        break;

    case Phonon::ErrorState:
        // Abort any in-progress load; errorString() was set by setError().
        m_loading = false;
        m_backend->logMessage("phonon state changed : Error", Backend::Info, this);
        m_backend->logMessage(errorString(), Backend::Warning, this);
        break;

    case Phonon::LoadingState:
        m_backend->logMessage("phonon state changed: Loading", Backend::Info, this);
        break;
    }
}
768
769void MediaObject::setError(const QString &errorString, Phonon::ErrorType error)
770{
771 m_errorString = errorString;
772 m_error = error;
773 m_tickTimer->stop();
774
775 if (error == Phonon::FatalError) {
776 m_hasVideo = false;
777 emit hasVideoChanged(false);
778 gst_element_set_state(m_pipeline, GST_STATE_READY);
779 changeState(Phonon::ErrorState);
780 } else {
781 if (m_loading) //Flag error only after loading has completed
782 m_pendingState = Phonon::ErrorState;
783 else
784 changeState(Phonon::ErrorState);
785 }
786}
787
/*
 * !reimp
 * Returns the cached total duration in ms, or -1 when not yet known.
 */
qint64 MediaObject::totalTime() const
{
    return m_totalTime;
}
792
/*
 * !reimp
 * Returns the prefinish mark (ms before end) set via setPrefinishMark().
 */
qint32 MediaObject::prefinishMark() const
{
    return m_prefinishMark;
}
797
/*
 * !reimp
 * Returns the source transition time set via setTransitionTime(), in ms.
 */
qint32 MediaObject::transitionTime() const
{
    return m_transitionTime;
}
802
/*
 * !reimp
 * Stores the requested transition time (ms) between sources.
 */
void MediaObject::setTransitionTime(qint32 time)
{
    m_transitionTime = time;
}
807
/*
 * !reimp
 * Returns the remaining playback time in ms (total minus current).
 */
qint64 MediaObject::remainingTime() const
{
    return totalTime() - currentTime();
}
812
/*
 * !reimp
 * Returns the media source currently set on this object.
 */
MediaSource MediaObject::source() const
{
    return m_source;
}
817
818void MediaObject::setNextSource(const MediaSource &source)
819{
820 if (source.type() == MediaSource::Invalid &&
821 source.type() == MediaSource::Empty)
822 return;
823 m_nextSource = source;
824}
825
826/**
827 * Update total time value from the pipeline
828 */
829bool MediaObject::updateTotalTime()
830{
831 GstFormat format = GST_FORMAT_TIME;
832 gint64 duration = 0;
833 if (gst_element_query_duration (GST_ELEMENT(m_pipeline), &format, &duration)) {
834 setTotalTime(duration / GST_MSECOND);
835 return true;
836 }
837 return false;
838}
839
840/**
841 * Checks if the current source is seekable
842 */
843void MediaObject::updateSeekable()
844{
845 if (!isValid())
846 return;
847
848 GstQuery *query;
849 gboolean result;
850 gint64 start, stop;
851 query = gst_query_new_seeking(GST_FORMAT_TIME);
852 result = gst_element_query (m_pipeline, query);
853 if (result) {
854 gboolean seekable;
855 GstFormat format;
856 gst_query_parse_seeking (query, &format, &seekable, &start, &stop);
857
858 if (m_seekable != seekable) {
859 m_seekable = seekable;
860 emit seekableChanged(m_seekable);
861 }
862
863 if (m_seekable)
864 m_backend->logMessage("Stream is seekable", Backend::Info, this);
865 else
866 m_backend->logMessage("Stream is non-seekable", Backend::Info, this);
867 } else {
868 m_backend->logMessage("updateSeekable query failed", Backend::Info, this);
869 }
870 gst_query_unref (query);
871}
872
873qint64 MediaObject::getPipelinePos() const
874{
875 Q_ASSERT(m_pipeline);
876
877 // Note some formats (usually mpeg) do not allow us to accurately seek to the
878 // beginning or end of the file so we 'fake' it here rather than exposing the front end to potential issues.
879 if (m_atEndOfStream)
880 return totalTime();
881 if (m_atStartOfStream)
882 return 0;
883 if (m_posAtSeek >= 0)
884 return m_posAtSeek;
885
886 gint64 pos = 0;
887 GstFormat format = GST_FORMAT_TIME;
888 gst_element_query_position (GST_ELEMENT(m_pipeline), &format, &pos);
889 return (pos / GST_MSECOND);
890}
891
892/*
893 * Internal method to set a new total time for the media object
894 */
895void MediaObject::setTotalTime(qint64 newTime)
896{
897
898 if (newTime == m_totalTime)
899 return;
900
901 m_totalTime = newTime;
902
903 emit totalTimeChanged(m_totalTime);
904}
905
/*
 * !reimp
 *
 * Switches to a new media source: fully resets the pipeline and all
 * per-source bookkeeping, builds a new source element appropriate for the
 * source type (URL, file, stream, disc), then links the node graph and
 * starts loading.
 */
void MediaObject::setSource(const MediaSource &source)
{
    if (!isValid())
        return;

    // We have to reset the state completely here, otherwise
    // remnants of the old pipeline can result in strangenes
    // such as failing duration queries etc
    GstState state;
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    // Wait (up to 2000 ns) for the NULL transition to settle.
    gst_element_get_state(m_pipeline, &state, NULL, 2000);

    m_source = source;
    emit currentSourceChanged(m_source);
    m_previousTickTime = -1;
    m_missingCodecs.clear();

    // Go into to loading state
    changeState(Phonon::LoadingState);
    m_loading = true;
    // IMPORTANT: Honor the m_resetNeeded flag as it currently stands.
    // See https://qa.mandriva.com/show_bug.cgi?id=56807
    //m_resetNeeded = false;
    m_resumeState = false;
    m_pendingState = Phonon::StoppedState;

    // Make sure we start out unconnected
    if (GST_ELEMENT_PARENT(m_audioGraph))
        gst_bin_remove(GST_BIN(m_pipeline), m_audioGraph);
    if (GST_ELEMENT_PARENT(m_videoGraph))
        gst_bin_remove(GST_BIN(m_pipeline), m_videoGraph);

    // Clear any existing errors
    m_aboutToFinishEmitted = false;
    m_error = NoError;
    m_errorString.clear();

    // Reset per-source stream bookkeeping.
    m_bufferPercent = 0;
    m_prefinishMarkReachedNotEmitted = true;
    m_aboutToFinishEmitted = false;
    m_hasAudio = false;
    m_videoStreamFound = false;
    setTotalTime(-1);
    m_atEndOfStream = false;

    // Reset title (track/chapter) bookkeeping.
    m_availableTitles = 0;
    m_pendingTitle = 1;
    m_currentTitle = 1;

    // Clear existing meta tags
    m_metaData.clear();
    m_isStream = false;

    // Build the data source element for the new source.
    switch (source.type()) {
    case MediaSource::Url: {
            if (!createPipefromURL(source.url()))
                setError(tr("Could not open media source."));
        }
        break;

    case MediaSource::LocalFile: {
            if (!createPipefromURL(QUrl::fromLocalFile(source.fileName())))
                setError(tr("Could not open media source."));
        }
        break;

    case MediaSource::Invalid:
        setError(tr("Invalid source type."), Phonon::NormalError);
        break;

    case MediaSource::Empty:
        break;

    case MediaSource::Stream:
        if (!createPipefromStream(source))
            setError(tr("Could not open media source."));
        break;

    case MediaSource::Disc:
        {
            // Map the disc type onto the matching GStreamer URI scheme.
            QString mediaUrl;
            switch (source.discType()) {
            case Phonon::NoDisc:
                qWarning() << "I should never get to see a MediaSource that is a disc but doesn't specify which one";
                return;
            case Phonon::Cd:  // CD tracks can be specified by setting the url in the following way uri=cdda:4
                mediaUrl = QLatin1String("cdda://");
                break;
            case Phonon::Dvd:
                mediaUrl = QLatin1String("dvd://");
                break;
            case Phonon::Vcd:
                mediaUrl = QLatin1String("vcd://");
                break;
            default:
                qWarning() << "media " << source.discType() << " not implemented";
                return;
            }
            if (mediaUrl.isEmpty() || !createPipefromURL(QUrl(mediaUrl)))
                setError(tr("Could not open media source."));
        }
        break;

    default:
        m_backend->logMessage("Source type not currently supported", Backend::Warning, this);
        setError(tr("Could not open media source."), Phonon::NormalError);
        break;
    }

    MediaNodeEvent event(MediaNodeEvent::SourceChanged);
    notify(&event);

    // We need to link this node to ensure that fake sinks are connected
    // before loading, otherwise the stream will be blocked
    link();
    beginLoad();
}
1026
1027void MediaObject::beginLoad()
1028{
1029 if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) != GST_STATE_CHANGE_FAILURE) {
1030 m_backend->logMessage("Begin source load", Backend::Info, this);
1031 } else {
1032 setError(tr("Could not open media source."));
1033 }
1034}
1035
// Called when we are ready to leave the loading state: announces video
// availability, gathers stream info, applies the state that was requested
// while loading, and publishes the collected meta data.
void MediaObject::loadingComplete()
{
    if (m_videoStreamFound) {
        MediaNodeEvent event(MediaNodeEvent::VideoAvailable);
        notify(&event);
    }
    getStreamInfo();
    m_loading = false;

    // Apply the state requested while we were still loading.
    setState(m_pendingState);
    emit metaDataChanged(m_metaData);
}
1049
// Queries the pipeline for stream properties once they are available:
// seekability, total duration, video presence, and — for audio CDs —
// the number of tracks (emitting availableTitlesChanged on change).
void MediaObject::getStreamInfo()
{
    updateSeekable();
    updateTotalTime();

    // Only emit when the video flag actually flips
    if (m_videoStreamFound != m_hasVideo) {
        m_hasVideo = m_videoStreamFound;
        emit hasVideoChanged(m_hasVideo);
    }

    if (m_source.discType() == Phonon::Cd) {
        gint64 titleCount;
        // "track" format: duration query then returns the track count.
        // Note: format is an in/out parameter — GStreamer may change it.
        GstFormat format = gst_format_get_by_nick("track");
        if (gst_element_query_duration (m_pipeline, &format, &titleCount)) {
            //check if returned format is still "track",
            //gstreamer sometimes returns the total time, if tracks information is not available.
            if (qstrcmp(gst_format_get_name(format), "track") == 0) {
                int oldAvailableTitles = m_availableTitles;
                m_availableTitles = (int)titleCount;
                if (m_availableTitles != oldAvailableTitles) {
                    emit availableTitlesChanged(m_availableTitles);
                    m_backend->logMessage(QString("Available titles changed: %0").arg(m_availableTitles), Backend::Info, this);
                }
            }
        }
    }
}
1077
1078void MediaObject::setPrefinishMark(qint32 newPrefinishMark)
1079{
1080 m_prefinishMark = newPrefinishMark;
1081 if (currentTime() < totalTime() - m_prefinishMark) // not about to finish
1082 m_prefinishMarkReachedNotEmitted = true;
1083}
1084
1085void MediaObject::pause()
1086{
1087 m_backend->logMessage("pause()", Backend::Info, this);
1088 if (state() != Phonon::PausedState)
1089 setState(Phonon::PausedState);
1090 m_resumeState = false;
1091}
1092
1093void MediaObject::stop()
1094{
1095 if (state() != Phonon::StoppedState) {
1096 setState(Phonon::StoppedState);
1097 m_prefinishMarkReachedNotEmitted = true;
1098 }
1099 m_resumeState = false;
1100}
1101
// Seeks to the given position (in ms). Only acts when the media is valid
// and seekable and playback is in a state where seeking makes sense;
// afterwards re-arms the prefinish/aboutToFinish notifications if the new
// position is far enough from the end.
void MediaObject::seek(qint64 time)
{
    if (!isValid())
        return;

    if (isSeekable()) {
        switch (state()) {
        case Phonon::PlayingState:
        case Phonon::StoppedState:
        case Phonon::PausedState:
        case Phonon::BufferingState:
            m_backend->logMessage(QString("Seek to pos %0").arg(time), Backend::Info, this);

            if (time <= 0)
                m_atStartOfStream = true;
            else
                m_atStartOfStream = false;

            // Remember the pre-seek position so ticks stay consistent
            // while the (asynchronous) flushing seek is in flight.
            m_posAtSeek = getPipelinePos();
            m_tickTimer->stop();

            if (gst_element_seek(m_pipeline, 1.0, GST_FORMAT_TIME,
                                 GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET,
                                 time * GST_MSECOND, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE))
                break;
            // NOTE: deliberate fall-through when gst_element_seek() fails —
            // we bail out just like in the non-seekable states below.
        case Phonon::LoadingState:
        case Phonon::ErrorState:
            return;
        }

        quint64 current = currentTime();
        quint64 total = totalTime();

        // Re-arm end-of-track notifications if we are no longer near the end
        if (current < total - m_prefinishMark)
            m_prefinishMarkReachedNotEmitted = true;
        if (current < total - ABOUT_TO_FINNISH_TIME)
            m_aboutToFinishEmitted = false;
        m_atEndOfStream = false;
    }
}
1142
1143void MediaObject::emitTick()
1144{
1145 if (m_resumeState) {
1146 return;
1147 }
1148
1149 qint64 currentTime = getPipelinePos();
1150 qint64 totalTime = m_totalTime;
1151
1152 if (m_tickInterval > 0 && currentTime != m_previousTickTime) {
1153 emit tick(currentTime);
1154 m_previousTickTime = currentTime;
1155 }
1156 if (m_state == Phonon::PlayingState) {
1157 if (currentTime >= totalTime - m_prefinishMark) {
1158 if (m_prefinishMarkReachedNotEmitted) {
1159 m_prefinishMarkReachedNotEmitted = false;
1160 emit prefinishMarkReached(totalTime - currentTime);
1161 }
1162 }
1163 // Prepare load of next source
1164 if (currentTime >= totalTime - ABOUT_TO_FINNISH_TIME) {
1165 if (m_source.type() == MediaSource::Disc &&
1166 m_autoplayTitles &&
1167 m_availableTitles > 1 &&
1168 m_currentTitle < m_availableTitles) {
1169 m_aboutToFinishEmitted = false;
1170 } else if (!m_aboutToFinishEmitted) {
1171 m_aboutToFinishEmitted = true; // track is about to finish
1172 emit aboutToFinish();
1173 }
1174 }
1175 }
1176}
1177
1178
1179/*
1180 * Used to iterate through the gst_tag_list and extract values
1181 */
1182void foreach_tag_function(const GstTagList *list, const gchar *tag, gpointer user_data)
1183{
1184 TagMap *newData = static_cast<TagMap *>(user_data);
1185 QString value;
1186 GType type = gst_tag_get_type(tag);
1187 switch (type) {
1188 case G_TYPE_STRING: {
1189 char *str = 0;
1190 gst_tag_list_get_string(list, tag, &str);
1191 value = QString::fromUtf8(str);
1192 g_free(str);
1193 }
1194 break;
1195
1196 case G_TYPE_BOOLEAN: {
1197 int bval;
1198 gst_tag_list_get_boolean(list, tag, &bval);
1199 value = QString::number(bval);
1200 }
1201 break;
1202
1203 case G_TYPE_INT: {
1204 int ival;
1205 gst_tag_list_get_int(list, tag, &ival);
1206 value = QString::number(ival);
1207 }
1208 break;
1209
1210 case G_TYPE_UINT: {
1211 unsigned int uival;
1212 gst_tag_list_get_uint(list, tag, &uival);
1213 value = QString::number(uival);
1214 }
1215 break;
1216
1217 case G_TYPE_FLOAT: {
1218 float fval;
1219 gst_tag_list_get_float(list, tag, &fval);
1220 value = QString::number(fval);
1221 }
1222 break;
1223
1224 case G_TYPE_DOUBLE: {
1225 double dval;
1226 gst_tag_list_get_double(list, tag, &dval);
1227 value = QString::number(dval);
1228 }
1229 break;
1230
1231 default:
1232 //qDebug("Unsupported tag type: %s", g_type_name(type));
1233 break;
1234 }
1235
1236 QString key = QString(tag).toUpper();
1237 QString currVal = newData->value(key);
1238 if (!value.isEmpty() && !(newData->contains(key) && currVal == value))
1239 newData->insert(key, value);
1240}
1241
/**
 * Triggers playback after a song has completed in the current media queue.
 * Swaps the queued source in via setSource(), clears the queue slot, and
 * requests the playing state (applied asynchronously once loading finishes).
 */
void MediaObject::beginPlay()
{
    setSource(m_nextSource);
    m_nextSource = MediaSource();
    m_pendingState = Phonon::PlayingState;
}
1251
/**
 * Handle GStreamer bus messages.
 *
 * Central dispatch for everything the pipeline reports: end-of-stream,
 * metadata tags, pipeline state changes, errors/warnings, element messages
 * (video overlay requests), duration updates and buffering progress.
 */
void MediaObject::handleBusMessage(const Message &message)
{

    if (!isValid())
        return;

    GstMessage *gstMessage = message.rawMessage();
    Q_ASSERT(m_pipeline);

    if (m_backend->debugLevel() >= Backend::Debug) {
        int type = GST_MESSAGE_TYPE(gstMessage);
        gchar* name = gst_element_get_name(gstMessage->src);
        QString msgString = QString("Bus: %0 (%1)").arg(gst_message_type_get_name ((GstMessageType)type)).arg(name);
        g_free(name);
        m_backend->logMessage(msgString, Backend::Debug, this);
    }

    switch (GST_MESSAGE_TYPE (gstMessage)) {

    case GST_MESSAGE_EOS:
        m_backend->logMessage("EOS received", Backend::Info, this);
        handleEndOfStream();
        break;

    case GST_MESSAGE_TAG: {
        GstTagList* tag_list = 0;
        gst_message_parse_tag(gstMessage, &tag_list);
        if (tag_list) {
            TagMap newTags;
            gst_tag_list_foreach (tag_list, &foreach_tag_function, &newTags);
            gst_tag_list_free(tag_list);

            // Determine if we should now fake the album/artist tags.
            // This is a little confusing as we want to fake it on initial
            // connection where title, album and artist are all missing.
            // There are however times when we get just other information,
            // e.g. codec, and so we want to only do clever stuff if we
            // have a commonly available tag (ORGANIZATION) or we have a
            // change in title
            bool fake_it =
                (m_isStream
                 && ((!newTags.contains("TITLE")
                      && newTags.contains("ORGANIZATION"))
                     || (newTags.contains("TITLE")
                         && m_metaData.value("TITLE") != newTags.value("TITLE")))
                 && !newTags.contains("ALBUM")
                 && !newTags.contains("ARTIST"));

            TagMap oldMap = m_metaData; // Keep a copy of the old one for reference

            // Now we've checked the new data, append any new meta tags to the existing tag list
            // We cannot use TagMap::iterator as this is a multimap and when streaming data
            // could in theory be lost.
            QList<QString> keys = newTags.keys();
            for (QList<QString>::iterator i = keys.begin(); i != keys.end(); ++i) {
                QString key = *i;
                if (m_isStream) {
                    // If we're streaming, we need to remove data in m_metaData
                    // in order to stop it filling up indefinitely (as it's a multimap)
                    m_metaData.remove(key);
                }
                QList<QString> values = newTags.values(key);
                for (QList<QString>::iterator j = values.begin(); j != values.end(); ++j) {
                    QString value = *j;
                    QString currVal = m_metaData.value(key);
                    if (!m_metaData.contains(key) || currVal != value) {
                        m_metaData.insert(key, value);
                    }
                }
            }

            m_backend->logMessage("Meta tags found", Backend::Info, this);
            if (oldMap != m_metaData) {
                // This is a bit of a hack to ensure that stream metadata is
                // returned. We get as much as we can from the Shoutcast server's
                // StreamTitle= header. If further info is decoded from the stream
                // itself later, then it will overwrite this info.
                if (m_isStream && fake_it) {
                    m_metaData.remove("ALBUM");
                    m_metaData.remove("ARTIST");

                    // Detect whether we want to "fill in the blanks"
                    QString str;
                    if (m_metaData.contains("TITLE"))
                    {
                        str = m_metaData.value("TITLE");
                        int splitpoint;
                        // Check to see if our title matches "%s - %s"
                        // Where neither %s are empty...
                        if ((splitpoint = str.indexOf(" - ")) > 0
                            && str.size() > (splitpoint+3)) {
                            m_metaData.insert("ARTIST", str.left(splitpoint));
                            m_metaData.replace("TITLE", str.mid(splitpoint+3));
                        }
                    } else {
                        str = m_metaData.value("GENRE");
                        if (!str.isEmpty())
                            m_metaData.insert("TITLE", str);
                        else
                            m_metaData.insert("TITLE", "Streaming Data");
                    }
                    if (!m_metaData.contains("ARTIST")) {
                        str = m_metaData.value("LOCATION");
                        if (!str.isEmpty())
                            m_metaData.insert("ARTIST", str);
                        else
                            m_metaData.insert("ARTIST", "Streaming Data");
                    }
                    str = m_metaData.value("ORGANIZATION");
                    if (!str.isEmpty())
                        m_metaData.insert("ALBUM", str);
                    else
                        m_metaData.insert("ALBUM", "Streaming Data");
                }
                // As we manipulate the title, we need to recompare
                // oldMap and m_metaData here...
                if (oldMap != m_metaData && !m_loading)
                    emit metaDataChanged(m_metaData);
            }
        }
    }
    break;

    case GST_MESSAGE_STATE_CHANGED : {

        // Only react to state changes of the top-level pipeline,
        // not of its child elements
        if (gstMessage->src != GST_OBJECT(m_pipeline))
            return;

        GstState oldState;
        GstState newState;
        GstState pendingState;
        gst_message_parse_state_changed (gstMessage, &oldState, &newState, &pendingState);

        // Ignore intermediate transitions; wait for the final state
        if (newState == pendingState)
            return;

        m_posAtSeek = -1;

        switch (newState) {

        case GST_STATE_PLAYING :
            m_atStartOfStream = false;
            m_backend->logMessage("gstreamer: pipeline state set to playing", Backend::Info, this);
            m_tickTimer->start();
            changeState(Phonon::PlayingState);
            if ((m_source.type() == MediaSource::Disc) && (m_currentTitle != m_pendingTitle)) {
                setTrack(m_pendingTitle);
            }
            if (m_resumeState && m_oldState == Phonon::PlayingState) {
                seek(m_oldPos);
                m_resumeState = false;
            }
            break;

        case GST_STATE_NULL:
            m_backend->logMessage("gstreamer: pipeline state set to null", Backend::Info, this);
            m_tickTimer->stop();
            break;

        case GST_STATE_PAUSED :
            m_backend->logMessage("gstreamer: pipeline state set to paused", Backend::Info, this);
            m_tickTimer->start();
            if (state() == Phonon::LoadingState) {
                // No_more_pads is not emitted from the decodebin in older versions (0.10.4)
                noMorePadsAvailable();
                loadingComplete();
            } else if (m_resumeState && m_oldState == Phonon::PausedState) {
                changeState(Phonon::PausedState);
                m_resumeState = false;
                break;
            } else {
                // A lot of autotests can break if we allow all paused changes through.
                if (m_pendingState == Phonon::PausedState) {
                    changeState(Phonon::PausedState);
                }
            }
            break;

        case GST_STATE_READY :
            if (!m_loading && m_pendingState == Phonon::StoppedState)
                changeState(Phonon::StoppedState);
            m_backend->logMessage("gstreamer: pipeline state set to ready", Backend::Debug, this);
            m_tickTimer->stop();
            if ((m_source.type() == MediaSource::Disc) && (m_currentTitle != m_pendingTitle)) {
                setTrack(m_pendingTitle);
            }
            break;

        case GST_STATE_VOID_PENDING :
            m_backend->logMessage("gstreamer: pipeline state set to pending (void)", Backend::Debug, this);
            m_tickTimer->stop();
            break;
        }
        break;
    }

    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *err;
        QString logMessage;
        gst_message_parse_error (gstMessage, &err, &debug);
        gchar *errorMessage = gst_error_get_message (err->domain, err->code);
        logMessage.sprintf("Error: %s Message:%s (%s) Code:%d", debug, err->message, errorMessage, err->code);
        m_backend->logMessage(logMessage, Backend::Warning);
        g_free(errorMessage);
        g_free (debug);

        // Map GStreamer error domains/codes onto Phonon error severities
        if (err->domain == GST_RESOURCE_ERROR) {
            if (err->code == GST_RESOURCE_ERROR_NOT_FOUND) {
                setError(tr("Could not locate media source."), Phonon::FatalError);
            } else if (err->code == GST_RESOURCE_ERROR_OPEN_READ) {
                setError(tr("Could not open media source."), Phonon::FatalError);
            } else if (err->code == GST_RESOURCE_ERROR_BUSY) {
                // We need to check if this comes from an audio device by looking at sink caps
                // NOTE(review): gst_pad_get_caps() can in principle return NULL;
                // that case is not guarded here — confirm against GStreamer docs.
                GstPad* sinkPad = gst_element_get_static_pad(GST_ELEMENT(gstMessage->src), "sink");
                if (sinkPad) {
                    GstCaps *caps = gst_pad_get_caps (sinkPad);
                    GstStructure *str = gst_caps_get_structure (caps, 0);
                    if (g_strrstr (gst_structure_get_name (str), "audio"))
                        setError(tr("Could not open audio device. The device is already in use."), Phonon::NormalError);
                    else
                        setError(err->message, Phonon::FatalError);
                    gst_caps_unref (caps);
                    gst_object_unref (sinkPad);
                }
            } else {
                setError(QString(err->message), Phonon::FatalError);
            }
        } else if (err->domain == GST_STREAM_ERROR) {
            switch (err->code) {
            case GST_STREAM_ERROR_WRONG_TYPE:
            case GST_STREAM_ERROR_TYPE_NOT_FOUND:
                setError(tr("Could not decode media source."), Phonon::FatalError);
                break;
            default:
                setError(tr("Could not open media source."), Phonon::FatalError);
                break;
            }
        } else {
            setError(QString(err->message), Phonon::FatalError);
        }
        g_error_free (err);
        break;
    }

    case GST_MESSAGE_WARNING: {
        gchar *debug;
        GError *err;
        gst_message_parse_warning(gstMessage, &err, &debug);
        QString msgString;
        msgString.sprintf("Warning: %s\nMessage:%s", debug, err->message);
        m_backend->logMessage(msgString, Backend::Warning);
        g_free (debug);
        g_error_free (err);
        break;
    }

    case GST_MESSAGE_ELEMENT: {
        // NOTE(review): this local shadows the outer gstMessage; both refer
        // to the same raw message, so behavior is unaffected.
        GstMessage *gstMessage = message.rawMessage();
        const GstStructure *gstStruct = gst_message_get_structure(gstMessage); //do not free this
        // A video sink is asking for a window handle (X overlay)
        if (g_strrstr (gst_structure_get_name (gstStruct), "prepare-xwindow-id")) {
            MediaNodeEvent videoHandleEvent(MediaNodeEvent::VideoHandleRequest);
            notify(&videoHandleEvent);
        }
        break;
    }

    case GST_MESSAGE_DURATION: {
        m_backend->logMessage("GST_MESSAGE_DURATION", Backend::Debug, this);
        updateTotalTime();
        break;
    }

    case GST_MESSAGE_BUFFERING: {
        gint percent = 0;
        gst_structure_get_int (gstMessage->structure, "buffer-percent", &percent); //gst_message_parse_buffering was introduced in 0.10.11

        if (m_bufferPercent != percent) {
            emit bufferStatus(percent);
            m_backend->logMessage(QString("Stream buffering %0").arg(percent), Backend::Debug, this);
            m_bufferPercent = percent;
        }

        // Report a transient BufferingState to listeners without
        // changing m_state itself
        if (m_state != Phonon::BufferingState)
            emit stateChanged(m_state, Phonon::BufferingState);
        else if (percent == 100)
            emit stateChanged(Phonon::BufferingState, m_state);
        break;
    }
        //case GST_MESSAGE_INFO:
        //case GST_MESSAGE_STREAM_STATUS:
        //case GST_MESSAGE_CLOCK_PROVIDE:
        //case GST_MESSAGE_NEW_CLOCK:
        //case GST_MESSAGE_STEP_DONE:
        //case GST_MESSAGE_LATENCY: only from 0.10.12
        //case GST_MESSAGE_ASYNC_DONE: only from 0.10.13
    default:
        break;
    }
}
1555
// Reacts to end-of-stream: advances to the next disc title when autoplay
// applies, otherwise starts the next queued source or emits finished() and
// settles into a paused/stopped state.
void MediaObject::handleEndOfStream()
{
    // If the stream is not seekable ignore
    // otherwise chained radio broadcasts would stop


    if (m_atEndOfStream)
        return;

    if (!m_seekable)
        m_atEndOfStream = true;

    // Autoplay the next CD/DVD title if there is one left
    if (m_source.type() == MediaSource::Disc &&
        m_autoplayTitles &&
        m_availableTitles > 1 &&
        m_currentTitle < m_availableTitles) {
        _iface_setCurrentTitle(m_currentTitle + 1);
        return;
    }

    if (m_nextSource.type() != MediaSource::Invalid
        && m_nextSource.type() != MediaSource::Empty) {  // We only emit finish when the queue is actually empty
        // Start the queued source after the configured transition gap
        QTimer::singleShot (qMax(0, transitionTime()), this, SLOT(beginPlay()));
    } else {
        m_pendingState = Phonon::PausedState;
        emit finished();
        if (!m_seekable) {
            setState(Phonon::StoppedState);
            // Note the behavior for live streams is not properly defined
            // But since we cant seek to 0, we don't have much choice other than stopping
            // the stream
        } else {
            // Only emit paused if the finished signal
            // did not result in a new state
            if (m_pendingState == Phonon::PausedState)
                setState(m_pendingState);
        }
    }
}
1595
1596void MediaObject::invalidateGraph()
1597{
1598 m_resetNeeded = true;
1599 if (m_state == Phonon::PlayingState || m_state == Phonon::PausedState) {
1600 changeState(Phonon::StoppedState);
1601 }
1602}
1603
1604// Notifes the pipeline about state changes in the media object
1605void MediaObject::notifyStateChange(Phonon::State newstate, Phonon::State oldstate)
1606{
1607 Q_UNUSED(oldstate);
1608 MediaNodeEvent event(MediaNodeEvent::StateChanged, &newstate);
1609 notify(&event);
1610}
1611
1612#ifndef QT_NO_PHONON_MEDIACONTROLLER
1613//interface management
1614bool MediaObject::hasInterface(Interface iface) const
1615{
1616 return iface == AddonInterface::TitleInterface;
1617}
1618
1619QVariant MediaObject::interfaceCall(Interface iface, int command, const QList<QVariant> &params)
1620{
1621 if (hasInterface(iface)) {
1622
1623 switch (iface)
1624 {
1625 case TitleInterface:
1626 switch (command)
1627 {
1628 case availableTitles:
1629 return _iface_availableTitles();
1630 case title:
1631 return _iface_currentTitle();
1632 case setTitle:
1633 _iface_setCurrentTitle(params.first().toInt());
1634 break;
1635 case autoplayTitles:
1636 return m_autoplayTitles;
1637 case setAutoplayTitles:
1638 m_autoplayTitles = params.first().toBool();
1639 break;
1640 }
1641 break;
1642 default:
1643 break;
1644 }
1645 }
1646 return QVariant();
1647}
1648#endif
1649
1650int MediaObject::_iface_availableTitles() const
1651{
1652 return m_availableTitles;
1653}
1654
1655int MediaObject::_iface_currentTitle() const
1656{
1657 return m_currentTitle;
1658}
1659
1660void MediaObject::_iface_setCurrentTitle(int title)
1661{
1662 m_backend->logMessage(QString("setCurrentTitle %0").arg(title), Backend::Info, this);
1663 if ((title == m_currentTitle) || (title == m_pendingTitle))
1664 return;
1665
1666 m_pendingTitle = title;
1667
1668 if (m_state == Phonon::PlayingState || m_state == Phonon::StoppedState) {
1669 setTrack(m_pendingTitle);
1670 } else {
1671 setState(Phonon::StoppedState);
1672 }
1673}
1674
1675void MediaObject::setTrack(int title)
1676{
1677 if (((m_state != Phonon::PlayingState) && (m_state != Phonon::StoppedState)) || (title < 1) || (title > m_availableTitles))
1678 return;
1679
1680
1681 //let's seek to the beginning of the song
1682 GstFormat trackFormat = gst_format_get_by_nick("track");
1683 m_backend->logMessage(QString("setTrack %0").arg(title), Backend::Info, this);
1684 if (gst_element_seek_simple(m_pipeline, trackFormat, GST_SEEK_FLAG_FLUSH, title - 1)) {
1685 m_currentTitle = title;
1686 updateTotalTime();
1687 m_atEndOfStream = false;
1688 emit titleChanged(title);
1689 emit totalTimeChanged(totalTime());
1690 }
1691}
1692
1693} // ns Gstreamer
1694} // ns Phonon
1695
1696QT_END_NAMESPACE
1697
1698#include "moc_mediaobject.cpp"
Note: See TracBrowser for help on using the repository browser.