source: trunk/src/3rdparty/phonon/gstreamer/mediaobject.cpp@ 764

Last change on this file since 764 was 561, checked in by Dmitry A. Kuminov, 16 years ago

trunk: Merged in qt 4.6.1 sources.

File size: 47.5 KB
1/* This file is part of the KDE project.
2
3 Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
4
5 This library is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Lesser General Public License as published by
7 the Free Software Foundation, either version 2.1 or 3 of the License.
8
9 This library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Lesser General Public License for more details.
13
14 You should have received a copy of the GNU Lesser General Public License
15 along with this library. If not, see <http://www.gnu.org/licenses/>.
16*/
17#include <cmath>
18#include <gst/interfaces/propertyprobe.h>
19#include "common.h"
20#include "mediaobject.h"
21#include "videowidget.h"
22#include "message.h"
23#include "backend.h"
24#include "streamreader.h"
25#include "phononsrc.h"
26#include <QtCore>
27#include <QtCore/QTimer>
28#include <QtCore/QVector>
29#include <QtCore/QFile>
30#include <QtCore/QByteRef>
31#include <QtCore/QStringList>
32#include <QtCore/QEvent>
33#include <QApplication>
34
35 #define ABOUT_TO_FINISH_TIME 2000
36#define MAX_QUEUE_TIME 20 * GST_SECOND
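// Note on units: the about-to-finish threshold above is in milliseconds (it is
// compared against positions returned by getPipelinePos(), which divides by
// GST_MSECOND), while MAX_QUEUE_TIME is in nanoseconds (GST_SECOND units), as
// expected by the queue elements' "max-size-time" property set below.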
37
38QT_BEGIN_NAMESPACE
39
40namespace Phonon
41{
42namespace Gstreamer
43{
44
45MediaObject::MediaObject(Backend *backend, QObject *parent)
46 : QObject(parent)
47 , MediaNode(backend, AudioSource | VideoSource)
48 , m_resumeState(false)
49 , m_oldState(Phonon::LoadingState)
50 , m_oldPos(0)
51 , m_state(Phonon::LoadingState)
52 , m_pendingState(Phonon::LoadingState)
53 , m_tickTimer(new QTimer(this))
54 , m_prefinishMark(0)
55 , m_transitionTime(0)
56 , m_posAtSeek(-1)
57 , m_prefinishMarkReachedNotEmitted(true)
58 , m_aboutToFinishEmitted(false)
59 , m_loading(false)
60 , m_capsHandler(0)
61 , m_datasource(0)
62 , m_decodebin(0)
63 , m_audioPipe(0)
64 , m_videoPipe(0)
65 , m_totalTime(-1)
66 , m_bufferPercent(0)
67 , m_hasVideo(false)
68 , m_videoStreamFound(false)
69 , m_hasAudio(false)
70 , m_seekable(false)
71 , m_atEndOfStream(false)
72 , m_atStartOfStream(false)
73 , m_error(Phonon::NoError)
74 , m_pipeline(0)
75 , m_audioGraph(0)
76 , m_videoGraph(0)
77 , m_previousTickTime(-1)
78 , m_resetNeeded(false)
79 , m_autoplayTitles(true)
80 , m_availableTitles(0)
81 , m_currentTitle(1)
82{
83 qRegisterMetaType<GstCaps*>("GstCaps*");
84 qRegisterMetaType<State>("State");
85
86 static int count = 0;
87 m_name = "MediaObject" + QString::number(count++);
88
89 if (!m_backend->isValid()) {
90 setError(tr("Cannot start playback. \n\nCheck your GStreamer installation and make sure you "
91 "\nhave libgstreamer-plugins-base installed."), Phonon::FatalError);
92 } else {
93 m_root = this;
94 createPipeline();
95 m_backend->addBusWatcher(this);
96 connect(m_tickTimer, SIGNAL(timeout()), SLOT(emitTick()));
97 }
98 connect(this, SIGNAL(stateChanged(Phonon::State,Phonon::State)),
99 this, SLOT(notifyStateChange(Phonon::State,Phonon::State)));
100
101}
102
103MediaObject::~MediaObject()
104{
105 m_backend->removeBusWatcher(this);
106 if (m_pipeline) {
107 gst_element_set_state(m_pipeline, GST_STATE_NULL);
108 gst_object_unref(m_pipeline);
109 }
110 if (m_audioGraph) {
111 gst_element_set_state(m_audioGraph, GST_STATE_NULL);
112 gst_object_unref(m_audioGraph);
113 }
114 if (m_videoGraph) {
115 gst_element_set_state(m_videoGraph, GST_STATE_NULL);
116 gst_object_unref(m_videoGraph);
117 }
118}
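// A minimal usage sketch, assuming the standard Phonon 4 frontend API (the
// frontend classes below are not defined in this file, and "parent" stands for
// any QObject owner):
//
//   Phonon::MediaObject *media = new Phonon::MediaObject(parent);
//   Phonon::AudioOutput *output =
//       new Phonon::AudioOutput(Phonon::MusicCategory, parent);
//   Phonon::createPath(media, output);
//   media->setCurrentSource(Phonon::MediaSource(QUrl("file:///path/to/track.ogg")));
//   media->play();  // eventually reaches this backend's setSource()/play()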
119
120QString stateString(const Phonon::State &state)
121{
122 switch (state) {
123 case Phonon::LoadingState:
124 return QString("LoadingState");
125 case Phonon::StoppedState:
126 return QString("StoppedState");
127 case Phonon::PlayingState:
128 return QString("PlayingState");
129 case Phonon::BufferingState:
130 return QString("BufferingState");
131 case Phonon::PausedState:
132 return QString("PausedState");
133 case Phonon::ErrorState:
134 return QString("ErrorState");
135 }
136 return QString();
137}
138
139void MediaObject::saveState()
140{
141 // Only the first saved state is respected
142 if (m_resumeState)
143 return;
144
145 if (m_pendingState == Phonon::PlayingState || m_pendingState == Phonon::PausedState) {
146 m_resumeState = true;
147 m_oldState = m_pendingState;
148 m_oldPos = getPipelinePos();
149 }
150}
151
152void MediaObject::resumeState()
153{
154 if (m_resumeState)
155 QMetaObject::invokeMethod(this, "setState", Qt::QueuedConnection, Q_ARG(State, m_oldState));
156}
157
158void MediaObject::newPadAvailable (GstPad *pad)
159{
160 GstCaps *caps;
161 GstStructure *str;
162 caps = gst_pad_get_caps (pad);
163 if (caps) {
164 str = gst_caps_get_structure (caps, 0);
165 QString mediaString(gst_structure_get_name (str));
166
167 if (mediaString.startsWith("video")) {
168 connectVideo(pad);
169 } else if (mediaString.startsWith("audio")) {
170 connectAudio(pad);
171 } else {
172 m_backend->logMessage("Could not connect pad", Backend::Warning);
173 }
174 gst_caps_unref (caps);
175 }
176}
177
178void MediaObject::cb_newpad (GstElement *decodebin,
179 GstPad *pad,
180 gboolean last,
181 gpointer data)
182{
183 Q_UNUSED(decodebin);
184 Q_UNUSED(pad);
185 Q_UNUSED(last);
186 Q_UNUSED(data);
187
188 MediaObject *media = static_cast<MediaObject*>(data);
189 Q_ASSERT(media);
190 media->newPadAvailable(pad);
191}
192
193void MediaObject::noMorePadsAvailable ()
194{
195 if (m_missingCodecs.size() > 0) {
196 bool canPlay = (m_hasAudio || m_videoStreamFound);
197 Phonon::ErrorType error = canPlay ? Phonon::NormalError : Phonon::FatalError;
198 if (error == Phonon::NormalError && m_hasVideo && !m_videoStreamFound) {
199 m_hasVideo = false;
200 emit hasVideoChanged(false);
201 }
202 QString codecs = m_missingCodecs.join(", ");
203 setError(QString(tr("A required codec is missing. You need to install the following codec(s) to play this content: %0")).arg(codecs), error);
204 m_missingCodecs.clear();
205 }
206}
207
208void MediaObject::cb_no_more_pads (GstElement * decodebin, gpointer data)
209{
210 Q_UNUSED(decodebin);
211 MediaObject *media = static_cast<MediaObject*>(data);
212 Q_ASSERT(media);
213 QMetaObject::invokeMethod(media, "noMorePadsAvailable", Qt::QueuedConnection);
214}
215
216typedef void (*Ptr_gst_pb_utils_init)();
217typedef gchar* (*Ptr_gst_pb_utils_get_codec_description)(const GstCaps *);
218
219void MediaObject::cb_unknown_type (GstElement *decodebin, GstPad *pad, GstCaps *caps, gpointer data)
220{
221 Q_UNUSED(decodebin);
222 Q_UNUSED(pad);
223 MediaObject *media = static_cast<MediaObject*>(data);
224 Q_ASSERT(media);
225
226 QString value = "unknown codec";
227
228 // These functions require GStreamer >= 0.10.12
229#ifndef QT_NO_LIBRARY
230 static Ptr_gst_pb_utils_init p_gst_pb_utils_init = 0;
231 static Ptr_gst_pb_utils_get_codec_description p_gst_pb_utils_get_codec_description = 0;
232 if (!p_gst_pb_utils_init) {
233 p_gst_pb_utils_init = (Ptr_gst_pb_utils_init)QLibrary::resolve(QLatin1String("gstpbutils-0.10"), 0, "gst_pb_utils_init");
234 p_gst_pb_utils_get_codec_description = (Ptr_gst_pb_utils_get_codec_description)QLibrary::resolve(QLatin1String("gstpbutils-0.10"), 0, "gst_pb_utils_get_codec_description");
235 if (p_gst_pb_utils_init)
236 p_gst_pb_utils_init();
237 }
238 if (p_gst_pb_utils_get_codec_description) {
239 gchar *codecName = NULL;
240 codecName = p_gst_pb_utils_get_codec_description (caps);
241 value = QString::fromUtf8(codecName);
242 g_free (codecName);
243 } else
244#endif //QT_NO_LIBRARY
245 {
246 // For GStreamer versions < 0.10.12
247 GstStructure *str = gst_caps_get_structure (caps, 0);
248 value = QString::fromUtf8(gst_structure_get_name (str));
249
250 }
251 media->addMissingCodecName(value);
252}
253
254static void notifyVideoCaps(GObject *obj, GParamSpec *, gpointer data)
255{
256 GstPad *pad = GST_PAD(obj);
257 GstCaps *caps = gst_pad_get_caps (pad);
258 Q_ASSERT(caps);
259 MediaObject *media = static_cast<MediaObject*>(data);
260
261 // We do not want any more notifications until the source changes
262 g_signal_handler_disconnect(pad, media->capsHandler());
263
264 // setVideoCaps calls loadingComplete(), meaning we cannot call it from
265 // the streaming thread
266 QMetaObject::invokeMethod(media, "setVideoCaps", Qt::QueuedConnection, Q_ARG(GstCaps *, caps));
267}
268
269void MediaObject::setVideoCaps(GstCaps *caps)
270{
271 GstStructure *str;
272 gint width, height;
273
274 if ((str = gst_caps_get_structure (caps, 0))) {
275 if (gst_structure_get_int (str, "width", &width) && gst_structure_get_int (str, "height", &height)) {
276 gint aspectNum = 0;
277 gint aspectDenum = 0;
278 if (gst_structure_get_fraction(str, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
279 if (aspectDenum > 0)
280 width = width*aspectNum/aspectDenum;
281 }
282 // Let child nodes know about our new video size
283 QSize size(width, height);
284 MediaNodeEvent event(MediaNodeEvent::VideoSizeChanged, &size);
285 notify(&event);
286 }
287 }
288 gst_caps_unref(caps);
289}
290
291// Adds an element to the pipeline if not previously added
292bool MediaObject::addToPipeline(GstElement *elem)
293{
294 bool success = true;
295 if (!GST_ELEMENT_PARENT(elem)) { // If not already in pipeline
296 success = gst_bin_add(GST_BIN(m_pipeline), elem);
297 }
298 return success;
299}
300
301void MediaObject::connectVideo(GstPad *pad)
302{
303 GstState currentState = GST_STATE(m_pipeline);
304 if (addToPipeline(m_videoGraph)) {
305 GstPad *videopad = gst_element_get_pad (m_videoGraph, "sink");
306 if (!GST_PAD_IS_LINKED (videopad) && (gst_pad_link (pad, videopad) == GST_PAD_LINK_OK)) {
307 gst_element_set_state(m_videoGraph, currentState == GST_STATE_PLAYING ? GST_STATE_PLAYING : GST_STATE_PAUSED);
308 m_videoStreamFound = true;
309 m_backend->logMessage("Video track connected", Backend::Info, this);
310 // Note that the notify::caps _must_ be installed after linking to work with Dapper
311 m_capsHandler = g_signal_connect(pad, "notify::caps", G_CALLBACK(notifyVideoCaps), this);
312
313 if (!m_loading && !m_hasVideo) {
314 m_hasVideo = m_videoStreamFound;
315 emit hasVideoChanged(m_hasVideo);
316 }
317 }
318 gst_object_unref (videopad);
319 } else {
320 m_backend->logMessage("The video stream could not be plugged.", Backend::Info, this);
321 }
322}
323
324void MediaObject::connectAudio(GstPad *pad)
325{
326 GstState currentState = GST_STATE(m_pipeline);
327 if (addToPipeline(m_audioGraph)) {
328 GstPad *audiopad = gst_element_get_pad (m_audioGraph, "sink");
329 if (!GST_PAD_IS_LINKED (audiopad) && (gst_pad_link (pad, audiopad)==GST_PAD_LINK_OK)) {
330 gst_element_set_state(m_audioGraph, currentState == GST_STATE_PLAYING ? GST_STATE_PLAYING : GST_STATE_PAUSED);
331 m_hasAudio = true;
332 m_backend->logMessage("Audio track connected", Backend::Info, this);
333 }
334 gst_object_unref (audiopad);
335 } else {
336 m_backend->logMessage("The audio stream could not be plugged.", Backend::Info, this);
337 }
338}
339
340void MediaObject::cb_pad_added(GstElement *decodebin,
341 GstPad *pad,
342 gpointer data)
343{
344 Q_UNUSED(decodebin);
345 GstPad *decodepad = static_cast<GstPad*>(data);
346 gst_pad_link (pad, decodepad);
347 //gst_object_unref (decodepad);
348}
349
350/**
351 * Create a media source from a given URL.
352 *
353 * returns true if successful
354 */
355bool MediaObject::createPipefromURL(const QUrl &url)
356{
357 // Remove any existing data source
358 if (m_datasource) {
359 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
360 // m_pipeline has the only ref to datasource
361 m_datasource = 0;
362 }
363
364 // Verify that the uri can be parsed
365 if (!url.isValid()) {
366 m_backend->logMessage(QString("%1 is not a valid URI").arg(url.toString()));
367 return false;
368 }
369
370 // Create a new datasource based on the input URL
371 QByteArray encoded_cstr_url = url.toEncoded();
372 m_datasource = gst_element_make_from_uri(GST_URI_SRC, encoded_cstr_url.constData(), (const char*)NULL);
373 if (!m_datasource)
374 return false;
375
376 // Set the device for MediaSource::Disc
377 if (m_source.type() == MediaSource::Disc) {
378
379 if (g_object_class_find_property (G_OBJECT_GET_CLASS (m_datasource), "device")) {
380 QByteArray mediaDevice = QFile::encodeName(m_source.deviceName());
381 if (!mediaDevice.isEmpty())
382 g_object_set (G_OBJECT (m_datasource), "device", mediaDevice.constData(), (const char*)NULL);
383 }
384
385 // Also set optical disc speed to 2X for Audio CD
386 if (m_source.discType() == Phonon::Cd
387 && (g_object_class_find_property (G_OBJECT_GET_CLASS (m_datasource), "read-speed"))) {
388 g_object_set (G_OBJECT (m_datasource), "read-speed", 2, (const char*)NULL);
389 m_backend->logMessage(QString("new device speed : 2X"), Backend::Info, this);
390 }
391 }
392
393 // Link data source into pipeline
394 gst_bin_add(GST_BIN(m_pipeline), m_datasource);
395 if (!gst_element_link(m_datasource, m_decodebin)) {
396 // For sources with dynamic pads (such as RtspSrc) we need to connect dynamically
397 GstPad *decodepad = gst_element_get_pad (m_decodebin, "sink");
398 g_signal_connect (m_datasource, "pad-added", G_CALLBACK (&cb_pad_added), decodepad);
399 }
400
401 return true;
402}
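// Typical URIs handled here include local files wrapped via QUrl::fromLocalFile()
// (file:///...), network locations such as http:// or rtsp:// (the latter relying
// on the dynamic "pad-added" path above), and the disc pseudo-URLs built in
// setSource() below (cdda://, dvd://, vcd://).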
403
404/**
405 * Create a media source from a media stream
406 *
407 * returns true if successful
408 */
409bool MediaObject::createPipefromStream(const MediaSource &source)
410{
411#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
412 // Remove any existing data source
413 if (m_datasource) {
414 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
415 // m_pipeline has the only ref to datasource
416 m_datasource = 0;
417 }
418
419 m_datasource = GST_ELEMENT(g_object_new(phonon_src_get_type(), NULL));
420 if (!m_datasource)
421 return false;
422
423 StreamReader *streamReader = new StreamReader(source);
424 g_object_set (G_OBJECT (m_datasource), "iodevice", streamReader, (const char*)NULL);
425
426 // Link data source into pipeline
427 gst_bin_add(GST_BIN(m_pipeline), m_datasource);
428 if (!gst_element_link(m_datasource, m_decodebin)) {
429 gst_bin_remove(GST_BIN(m_pipeline), m_datasource);
430 return false;
431 }
432 return true;
433#else //QT_NO_PHONON_ABSTRACTMEDIASTREAM
434 Q_UNUSED(source);
435 return false;
436#endif
437}
438
439void MediaObject::createPipeline()
440{
441 m_pipeline = gst_pipeline_new (NULL);
442 gst_object_ref (GST_OBJECT (m_pipeline));
443 gst_object_sink (GST_OBJECT (m_pipeline));
444
445 m_decodebin = gst_element_factory_make ("decodebin", NULL);
446 g_signal_connect (m_decodebin, "new-decoded-pad", G_CALLBACK (&cb_newpad), this);
447 g_signal_connect (m_decodebin, "unknown-type", G_CALLBACK (&cb_unknown_type), this);
448 g_signal_connect (m_decodebin, "no-more-pads", G_CALLBACK (&cb_no_more_pads), this);
449
450 gst_bin_add(GST_BIN(m_pipeline), m_decodebin);
451
452 // Create a bin to contain the gst elements for this medianode
453
454 // Set up audio graph
455 m_audioGraph = gst_bin_new(NULL);
456 gst_object_ref (GST_OBJECT (m_audioGraph));
457 gst_object_sink (GST_OBJECT (m_audioGraph));
458
459 // Note that these queues are only required for streaming content
460 // and should ideally be created on demand, as they will disable
461 // pull-mode access. Also note that max-size-time is increased to
462 // reduce buffer overruns, as these are not gracefully handled at the moment.
463 m_audioPipe = gst_element_factory_make("queue", NULL);
464 g_object_set(G_OBJECT(m_audioPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
465 gst_bin_add(GST_BIN(m_audioGraph), m_audioPipe);
466 GstPad *audiopad = gst_element_get_pad (m_audioPipe, "sink");
467 gst_element_add_pad (m_audioGraph, gst_ghost_pad_new ("sink", audiopad));
468 gst_object_unref (audiopad);
469
470 // Set up video graph
471 m_videoGraph = gst_bin_new(NULL);
472 gst_object_ref (GST_OBJECT (m_videoGraph));
473 gst_object_sink (GST_OBJECT (m_videoGraph));
474
475 m_videoPipe = gst_element_factory_make("queue", NULL);
476 g_object_set(G_OBJECT(m_videoPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
477 gst_bin_add(GST_BIN(m_videoGraph), m_videoPipe);
478 GstPad *videopad = gst_element_get_pad (m_videoPipe, "sink");
479 gst_element_add_pad (m_videoGraph, gst_ghost_pad_new ("sink", videopad));
480 gst_object_unref (videopad);
481
482 if (m_pipeline && m_decodebin && m_audioGraph && m_videoGraph && m_audioPipe && m_videoPipe)
483 m_isValid = true;
484 else
485 m_backend->logMessage("Could not create pipeline for media object", Backend::Warning);
486}
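// Rough shape of the graph once a source has been set (see createPipefromURL()
// and createPipefromStream()); the real sinks are attached later by the
// connected MediaNodes:
//
//   m_datasource --> m_decodebin --+--> m_audioGraph: ghost "sink" -> queue -> ...
//                                  +--> m_videoGraph: ghost "sink" -> queue -> ...
//
// The audio and video bins are created and ref'ed here, but they are only added
// to the pipeline once connectAudio()/connectVideo() see a matching decoded pad.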
487
488/**
489 * !reimp
490 */
491State MediaObject::state() const
492{
493 return m_state;
494}
495
496/**
497 * !reimp
498 */
499bool MediaObject::hasVideo() const
500{
501 return m_hasVideo;
502}
503
504/**
505 * !reimp
506 */
507bool MediaObject::isSeekable() const
508{
509 return m_seekable;
510}
511
512/**
513 * !reimp
514 */
515qint64 MediaObject::currentTime() const
516{
517 if (m_resumeState)
518 return m_oldPos;
519
520 switch (state()) {
521 case Phonon::PausedState:
522 case Phonon::BufferingState:
523 case Phonon::PlayingState:
524 return getPipelinePos();
525 case Phonon::StoppedState:
526 case Phonon::LoadingState:
527 return 0;
528 case Phonon::ErrorState:
529 break;
530 }
531 return -1;
532}
533
534/**
535 * !reimp
536 */
537qint32 MediaObject::tickInterval() const
538{
539 return m_tickInterval;
540}
541
542/**
543 * !reimp
544 */
545void MediaObject::setTickInterval(qint32 newTickInterval)
546{
547 m_tickInterval = newTickInterval;
548 if (m_tickInterval <= 0)
549 m_tickTimer->setInterval(50);
550 else
551 m_tickTimer->setInterval(newTickInterval);
552}
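// With a non-positive tick interval the timer still fires every 50 ms so that
// emitTick() can keep the prefinish-mark and about-to-finish bookkeeping going,
// while the tick() signal itself is suppressed there by the m_tickInterval > 0 check.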
553
554/**
555 * !reimp
556 */
557void MediaObject::play()
558{
559 setState(Phonon::PlayingState);
560 m_resumeState = false;
561}
562
563/**
564 * !reimp
565 */
566QString MediaObject::errorString() const
567{
568 return m_errorString;
569}
570
571/**
572 * !reimp
573 */
574Phonon::ErrorType MediaObject::errorType() const
575{
576 return m_error;
577}
578
579/**
580 * Set the current state of the mediaObject.
581 *
582 * !### Note that both Playing and Paused states are set immediately
583 * This should obviously be done in response to actual gstreamer state changes
584 */
585void MediaObject::setState(State newstate)
586{
587 if (!isValid())
588 return;
589
590 if (m_state == newstate)
591 return;
592
593 if (m_loading) {
594 // We are still loading. The state will be requested
595 // when loading has completed.
596 m_pendingState = newstate;
597 return;
598 }
599
600 GstState currentState;
601 gst_element_get_state (m_pipeline, &currentState, NULL, 1000);
602
603 switch (newstate) {
604 case Phonon::BufferingState:
605 m_backend->logMessage("phonon state request: buffering", Backend::Info, this);
606 break;
607
608 case Phonon::PausedState:
609 m_backend->logMessage("phonon state request: paused", Backend::Info, this);
610 if (currentState == GST_STATE_PAUSED) {
611 changeState(Phonon::PausedState);
612 } else if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) != GST_STATE_CHANGE_FAILURE) {
613 m_pendingState = Phonon::PausedState;
614 } else {
615 m_backend->logMessage("phonon state request failed", Backend::Info, this);
616 }
617 break;
618
619 case Phonon::StoppedState:
620 m_backend->logMessage("phonon state request: Stopped", Backend::Info, this);
621 if (currentState == GST_STATE_READY) {
622 changeState(Phonon::StoppedState);
623 } else if (gst_element_set_state(m_pipeline, GST_STATE_READY) != GST_STATE_CHANGE_FAILURE) {
624 m_pendingState = Phonon::StoppedState;
625 } else {
626 m_backend->logMessage("phonon state request failed", Backend::Info, this);
627 }
628 m_atEndOfStream = false;
629 break;
630
631 case Phonon::PlayingState:
632 if (m_resetNeeded) {
633 // ### Note this is a workaround and it should really be gracefully
634 // handled by medianode when we implement live connections.
635 // This generally happens if medianodes have been connected after the MediaSource was set
636 // Note that a side-effect of this is that we resend all meta data.
637 gst_element_set_state(m_pipeline, GST_STATE_NULL);
638 m_resetNeeded = false;
639 // Send a source change so the X11 renderer
640 // will re-set the overlay
641 MediaNodeEvent event(MediaNodeEvent::SourceChanged);
642 notify(&event);
643 }
644 m_backend->logMessage("phonon state request: Playing", Backend::Info, this);
645 if (m_atEndOfStream) {
646 m_backend->logMessage("EOS already reached", Backend::Info, this);
647 } else if (currentState == GST_STATE_PLAYING) {
648 changeState(Phonon::PlayingState);
649 } else if (!m_atEndOfStream && gst_element_set_state(m_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE) {
650 m_pendingState = Phonon::PlayingState;
651 } else {
652 m_backend->logMessage("phonon state request failed", Backend::Info, this);
653 }
654 break;
655
656 case Phonon::ErrorState:
657 m_backend->logMessage("phonon state request : Error", Backend::Warning, this);
658 m_backend->logMessage(QString("Last error : %0").arg(errorString()) , Backend::Warning, this);
659 changeState(Phonon::ErrorState); //immediately set error state
660 break;
661
662 case Phonon::LoadingState:
663 m_backend->logMessage("phonon state request: Loading", Backend::Info, this);
664 changeState(Phonon::LoadingState);
665 break;
666 }
667}
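// Summary of the GStreamer states requested above for each Phonon state:
//   Phonon::PausedState  -> GST_STATE_PAUSED
//   Phonon::StoppedState -> GST_STATE_READY
//   Phonon::PlayingState -> GST_STATE_PLAYING (after an optional reset to NULL)
// Buffering only logs here; Loading and Error change the Phonon state directly
// without touching the GStreamer pipeline.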
668
669/*
670 * Signals that the requested state change has completed
671 * by emitting stateChanged, and updates the internal state.
672 */
673void MediaObject::changeState(State newstate)
674{
675 if (newstate == m_state)
676 return;
677
678 Phonon::State oldState = m_state;
679 m_state = newstate; // m_state must be set before emitting, since
680 // Error state requires that state() will return the new value
681 m_pendingState = newstate;
682 emit stateChanged(newstate, oldState);
683
684 switch (newstate) {
685 case Phonon::PausedState:
686 m_backend->logMessage("phonon state changed: paused", Backend::Info, this);
687 break;
688
689 case Phonon::BufferingState:
690 m_backend->logMessage("phonon state changed: buffering", Backend::Info, this);
691 break;
692
693 case Phonon::PlayingState:
694 m_backend->logMessage("phonon state changed: Playing", Backend::Info, this);
695 break;
696
697 case Phonon::StoppedState:
698 m_backend->logMessage("phonon state changed: Stopped", Backend::Info, this);
699 m_tickTimer->stop();
700 break;
701
702 case Phonon::ErrorState:
703 m_loading = false;
704 m_backend->logMessage("phonon state changed : Error", Backend::Info, this);
705 m_backend->logMessage(errorString(), Backend::Warning, this);
706 break;
707
708 case Phonon::LoadingState:
709 m_backend->logMessage("phonon state changed: Loading", Backend::Info, this);
710 break;
711 }
712}
713
714void MediaObject::setError(const QString &errorString, Phonon::ErrorType error)
715{
716 m_errorString = errorString;
717 m_error = error;
718 m_tickTimer->stop();
719
720 if (error == Phonon::FatalError) {
721 m_hasVideo = false;
722 emit hasVideoChanged(false);
723 gst_element_set_state(m_pipeline, GST_STATE_READY);
724 changeState(Phonon::ErrorState);
725 } else {
726 if (m_loading) //Flag error only after loading has completed
727 m_pendingState = Phonon::ErrorState;
728 else
729 changeState(Phonon::ErrorState);
730 }
731}
732
733qint64 MediaObject::totalTime() const
734{
735 return m_totalTime;
736}
737
738qint32 MediaObject::prefinishMark() const
739{
740 return m_prefinishMark;
741}
742
743qint32 MediaObject::transitionTime() const
744{
745 return m_transitionTime;
746}
747
748void MediaObject::setTransitionTime(qint32 time)
749{
750 m_transitionTime = time;
751}
752
753qint64 MediaObject::remainingTime() const
754{
755 return totalTime() - currentTime();
756}
757
758MediaSource MediaObject::source() const
759{
760 return m_source;
761}
762
763void MediaObject::setNextSource(const MediaSource &source)
764{
765 if (source.type() == MediaSource::Invalid ||
766 source.type() == MediaSource::Empty)
767 return;
768 m_nextSource = source;
769}
770
771/**
772 * Update total time value from the pipeline
773 */
774bool MediaObject::updateTotalTime()
775{
776 GstFormat format = GST_FORMAT_TIME;
777 gint64 duration = 0;
778 if (gst_element_query_duration (GST_ELEMENT(m_pipeline), &format, &duration)) {
779 setTotalTime(duration / GST_MSECOND);
780 return true;
781 }
782 return false;
783}
784
785/**
786 * Checks if the current source is seekable
787 */
788void MediaObject::updateSeekable()
789{
790 if (!isValid())
791 return;
792
793 GstQuery *query;
794 gboolean result;
795 gint64 start, stop;
796 query = gst_query_new_seeking(GST_FORMAT_TIME);
797 result = gst_element_query (m_pipeline, query);
798 if (result) {
799 gboolean seekable;
800 GstFormat format;
801 gst_query_parse_seeking (query, &format, &seekable, &start, &stop);
802
803 if (m_seekable != seekable) {
804 m_seekable = seekable;
805 emit seekableChanged(m_seekable);
806 }
807
808 if (m_seekable)
809 m_backend->logMessage("Stream is seekable", Backend::Info, this);
810 else
811 m_backend->logMessage("Stream is non-seekable", Backend::Info, this);
812 } else {
813 m_backend->logMessage("updateSeekable query failed", Backend::Info, this);
814 }
815 gst_query_unref (query);
816}
817
818qint64 MediaObject::getPipelinePos() const
819{
820 Q_ASSERT(m_pipeline);
821
822 // Note some formats (usually mpeg) do not allow us to accurately seek to the
823 // beginning or end of the file so we 'fake' it here rather than exposing the front end to potential issues.
824 if (m_atEndOfStream)
825 return totalTime();
826 if (m_atStartOfStream)
827 return 0;
828 if (m_posAtSeek >= 0)
829 return m_posAtSeek;
830
831 gint64 pos = 0;
832 GstFormat format = GST_FORMAT_TIME;
833 gst_element_query_position (GST_ELEMENT(m_pipeline), &format, &pos);
834 return (pos / GST_MSECOND);
835}
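// GStreamer reports positions and durations in nanoseconds; dividing by
// GST_MSECOND converts them to the milliseconds used throughout the Phonon API
// (the same conversion is used in updateTotalTime() above).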
836
837/*
838 * Internal method to set a new total time for the media object
839 */
840void MediaObject::setTotalTime(qint64 newTime)
841{
842
843 if (newTime == m_totalTime)
844 return;
845
846 m_totalTime = newTime;
847
848 emit totalTimeChanged(m_totalTime);
849}
850
851/*
852 * !reimp
853 */
854void MediaObject::setSource(const MediaSource &source)
855{
856 if (!isValid())
857 return;
858
859 // We have to reset the state completely here, otherwise
860 // remnants of the old pipeline can result in strangeness,
861 // such as failing duration queries etc.
862 GstState state;
863 gst_element_set_state(m_pipeline, GST_STATE_NULL);
864 gst_element_get_state (m_pipeline, &state, NULL, 2000);
865
866 m_source = source;
867 emit currentSourceChanged(m_source);
868 m_previousTickTime = -1;
869 m_missingCodecs.clear();
870
871 // Go into the loading state
872 changeState(Phonon::LoadingState);
873 m_loading = true;
874 m_resetNeeded = false;
875 m_resumeState = false;
876 m_pendingState = Phonon::StoppedState;
877
878 // Make sure we start out unconnected
879 if (GST_ELEMENT_PARENT(m_audioGraph))
880 gst_bin_remove(GST_BIN(m_pipeline), m_audioGraph);
881 if (GST_ELEMENT_PARENT(m_videoGraph))
882 gst_bin_remove(GST_BIN(m_pipeline), m_videoGraph);
883
884 // Clear any existing errors
885 m_aboutToFinishEmitted = false;
886 m_error = NoError;
887 m_errorString = QString();
888
889 m_bufferPercent = 0;
890 m_prefinishMarkReachedNotEmitted = true;
891 m_aboutToFinishEmitted = false;
892 m_hasAudio = false;
893 m_videoStreamFound = false;
894 setTotalTime(-1);
895 m_atEndOfStream = false;
896
897 // Clear existing meta tags
898 m_metaData.clear();
899
900 switch (source.type()) {
901 case MediaSource::Url: {
902 if (createPipefromURL(source.url()))
903 m_loading = true;
904 else
905 setError(tr("Could not open media source."));
906 }
907 break;
908
909 case MediaSource::LocalFile: {
910 if (createPipefromURL(QUrl::fromLocalFile(source.fileName())))
911 m_loading = true;
912 else
913 setError(tr("Could not open media source."));
914 }
915 break;
916
917 case MediaSource::Invalid:
918 setError(tr("Invalid source type."), Phonon::NormalError);
919 break;
920
921 case MediaSource::Empty:
922 break;
923
924 case MediaSource::Stream:
925 if (createPipefromStream(source))
926 m_loading = true;
927 else
928 setError(tr("Could not open media source."));
929 break;
930
931 case MediaSource::Disc:
932 {
933 QString mediaUrl;
934 switch (source.discType()) {
935 case Phonon::NoDisc:
936 qWarning() << "I should never get to see a MediaSource that is a disc but doesn't specify which one";
937 return;
938 case Phonon::Cd: // CD tracks can be specified by setting the URL in the following way: uri=cdda:4
939 mediaUrl = QLatin1String("cdda://");
940 break;
941 case Phonon::Dvd:
942 mediaUrl = QLatin1String("dvd://");
943 break;
944 case Phonon::Vcd:
945 mediaUrl = QLatin1String("vcd://");
946 break;
947 default:
948 qWarning() << "media " << source.discType() << " not implemented";
949 return;
950 }
951 if (!mediaUrl.isEmpty() && createPipefromURL(QUrl(mediaUrl)))
952 m_loading = true;
953 else
954 setError(tr("Could not open media source."));
955 }
956 break;
957
958 default:
959 m_backend->logMessage("Source type not currently supported", Backend::Warning, this);
960 setError(tr("Could not open media source."), Phonon::NormalError);
961 break;
962 }
963
964 MediaNodeEvent event(MediaNodeEvent::SourceChanged);
965 notify(&event);
966
967 // We need to link this node to ensure that fake sinks are connected
968 // before loading, otherwise the stream will be blocked
969 if (m_loading)
970 link();
971 beginLoad();
972}
973
974void MediaObject::beginLoad()
975{
976 if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) != GST_STATE_CHANGE_FAILURE) {
977 m_backend->logMessage("Begin source load", Backend::Info, this);
978 } else {
979 setError(tr("Could not open media source."));
980 }
981}
982
983// Called when we are ready to leave the loading state
984void MediaObject::loadingComplete()
985{
986 if (m_videoStreamFound) {
987 MediaNodeEvent event(MediaNodeEvent::VideoAvailable);
988 notify(&event);
989 }
990 getStreamInfo();
991 m_loading = false;
992
993 setState(m_pendingState);
994 emit metaDataChanged(m_metaData);
995}
996
997void MediaObject::getStreamInfo()
998{
999 updateSeekable();
1000 updateTotalTime();
1001
1002 if (m_videoStreamFound != m_hasVideo) {
1003 m_hasVideo = m_videoStreamFound;
1004 emit hasVideoChanged(m_hasVideo);
1005 }
1006
1007 m_availableTitles = 1;
1008 gint64 titleCount;
1009 GstFormat format = gst_format_get_by_nick("track");
1010 if (gst_element_query_duration (m_pipeline, &format, &titleCount)) {
1011 // Check if the returned format is still "track";
1012 // GStreamer sometimes returns the total time if track information is not available.
1013 if (qstrcmp(gst_format_get_name(format), "track") == 0) {
1014 int oldAvailableTitles = m_availableTitles;
1015 m_availableTitles = (int)titleCount;
1016 if (m_availableTitles != oldAvailableTitles) {
1017 emit availableTitlesChanged(m_availableTitles);
1018 m_backend->logMessage(QString("Available titles changed: %0").arg(m_availableTitles), Backend::Info, this);
1019 }
1020 }
1021 }
1022
1023}
1024
1025void MediaObject::setPrefinishMark(qint32 newPrefinishMark)
1026{
1027 m_prefinishMark = newPrefinishMark;
1028 if (currentTime() < totalTime() - m_prefinishMark) // not about to finish
1029 m_prefinishMarkReachedNotEmitted = true;
1030}
1031
1032void MediaObject::pause()
1033{
1034 m_backend->logMessage("pause()", Backend::Info, this);
1035 if (state() != Phonon::PausedState)
1036 setState(Phonon::PausedState);
1037 m_resumeState = false;
1038}
1039
1040void MediaObject::stop()
1041{
1042 if (state() != Phonon::StoppedState) {
1043 setState(Phonon::StoppedState);
1044 m_prefinishMarkReachedNotEmitted = true;
1045 }
1046 m_resumeState = false;
1047}
1048
1049void MediaObject::seek(qint64 time)
1050{
1051 if (!isValid())
1052 return;
1053
1054 if (isSeekable()) {
1055 switch (state()) {
1056 case Phonon::PlayingState:
1057 case Phonon::StoppedState:
1058 case Phonon::PausedState:
1059 case Phonon::BufferingState:
1060 m_backend->logMessage(QString("Seek to pos %0").arg(time), Backend::Info, this);
1061
1062 if (time <= 0)
1063 m_atStartOfStream = true;
1064 else
1065 m_atStartOfStream = false;
1066
1067 m_posAtSeek = getPipelinePos();
1068 m_tickTimer->stop();
1069
1070 if (gst_element_seek(m_pipeline, 1.0, GST_FORMAT_TIME,
1071 GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET,
1072 time * GST_MSECOND, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE))
1073 break;
1074 case Phonon::LoadingState:
1075 case Phonon::ErrorState:
1076 return;
1077 }
1078
1079 quint64 current = currentTime();
1080 quint64 total = totalTime();
1081
1082 if (current < total - m_prefinishMark)
1083 m_prefinishMarkReachedNotEmitted = true;
1084 if (current < total - ABOUT_TO_FINISH_TIME)
1085 m_aboutToFinishEmitted = false;
1086 m_atEndOfStream = false;
1087 }
1088}
1089
1090void MediaObject::emitTick()
1091{
1092 if (m_resumeState) {
1093 return;
1094 }
1095
1096 qint64 currentTime = getPipelinePos();
1097 qint64 totalTime = m_totalTime;
1098
1099 if (m_tickInterval > 0 && currentTime != m_previousTickTime) {
1100 emit tick(currentTime);
1101 m_previousTickTime = currentTime;
1102 }
1103 if (m_state == Phonon::PlayingState) {
1104 if (currentTime >= totalTime - m_prefinishMark) {
1105 if (m_prefinishMarkReachedNotEmitted) {
1106 m_prefinishMarkReachedNotEmitted = false;
1107 emit prefinishMarkReached(totalTime - currentTime);
1108 }
1109 }
1110 // Prepare load of next source
1111 if (currentTime >= totalTime - ABOUT_TO_FINISH_TIME) {
1112 if (!m_aboutToFinishEmitted) {
1113 m_aboutToFinishEmitted = true; // track is about to finish
1114 emit aboutToFinish();
1115 }
1116 }
1117 }
1118}
1119
1120
1121/*
1122 * Used to iterate through the gst_tag_list and extract values
1123 */
1124void foreach_tag_function(const GstTagList *list, const gchar *tag, gpointer user_data)
1125{
1126 TagMap *newData = static_cast<TagMap *>(user_data);
1127 QString value;
1128 GType type = gst_tag_get_type(tag);
1129 switch (type) {
1130 case G_TYPE_STRING: {
1131 char *str = 0;
1132 gst_tag_list_get_string(list, tag, &str);
1133 value = QString::fromUtf8(str);
1134 g_free(str);
1135 }
1136 break;
1137
1138 case G_TYPE_BOOLEAN: {
1139 int bval;
1140 gst_tag_list_get_boolean(list, tag, &bval);
1141 value = QString::number(bval);
1142 }
1143 break;
1144
1145 case G_TYPE_INT: {
1146 int ival;
1147 gst_tag_list_get_int(list, tag, &ival);
1148 value = QString::number(ival);
1149 }
1150 break;
1151
1152 case G_TYPE_UINT: {
1153 unsigned int uival;
1154 gst_tag_list_get_uint(list, tag, &uival);
1155 value = QString::number(uival);
1156 }
1157 break;
1158
1159 case G_TYPE_FLOAT: {
1160 float fval;
1161 gst_tag_list_get_float(list, tag, &fval);
1162 value = QString::number(fval);
1163 }
1164 break;
1165
1166 case G_TYPE_DOUBLE: {
1167 double dval;
1168 gst_tag_list_get_double(list, tag, &dval);
1169 value = QString::number(dval);
1170 }
1171 break;
1172
1173 default:
1174 //qDebug("Unsupported tag type: %s", g_type_name(type));
1175 break;
1176 }
1177
1178 QString key = QString(tag).toUpper();
1179 QString currVal = newData->value(key);
1180 if (!value.isEmpty() && !(newData->contains(key) && currVal == value))
1181 newData->insert(key, value);
1182}
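// The resulting map is keyed by the upper-cased GStreamer tag nick, so a typical
// stream yields entries such as "TITLE", "ARTIST" or "BITRATE" (examples only;
// the exact set depends on the tags the demuxer/decoder emits).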
1183
1184/**
1185 * Triggers playback after a song has completed in the current media queue
1186 */
1187void MediaObject::beginPlay()
1188{
1189 setSource(m_nextSource);
1190 m_nextSource = MediaSource();
1191 m_pendingState = Phonon::PlayingState;
1192}
1193
1194/**
1195 * Handle GStreamer bus messages
1196 */
1197void MediaObject::handleBusMessage(const Message &message)
1198{
1199
1200 if (!isValid())
1201 return;
1202
1203 GstMessage *gstMessage = message.rawMessage();
1204 Q_ASSERT(m_pipeline);
1205
1206 if (m_backend->debugLevel() >= Backend::Debug) {
1207 int type = GST_MESSAGE_TYPE(gstMessage);
1208 gchar* name = gst_element_get_name(gstMessage->src);
1209 QString msgString = QString("Bus: %0 (%1)").arg(gst_message_type_get_name ((GstMessageType)type)).arg(name);
1210 g_free(name);
1211 m_backend->logMessage(msgString, Backend::Debug, this);
1212 }
1213
1214 switch (GST_MESSAGE_TYPE (gstMessage)) {
1215
1216 case GST_MESSAGE_EOS:
1217 m_backend->logMessage("EOS received", Backend::Info, this);
1218 handleEndOfStream();
1219 break;
1220
1221 case GST_MESSAGE_TAG: {
1222 GstTagList* tag_list = 0;
1223 gst_message_parse_tag(gstMessage, &tag_list);
1224 if (tag_list) {
1225 TagMap oldMap = m_metaData; // Keep a copy of the old one for reference
1226 // Append any new meta tags to the existing tag list
1227 gst_tag_list_foreach (tag_list, &foreach_tag_function, &m_metaData);
1228 m_backend->logMessage("Meta tags found", Backend::Info, this);
1229 if (oldMap != m_metaData && !m_loading)
1230 emit metaDataChanged(m_metaData);
1231 gst_tag_list_free(tag_list);
1232 }
1233 }
1234 break;
1235
1236 case GST_MESSAGE_STATE_CHANGED : {
1237
1238 if (gstMessage->src != GST_OBJECT(m_pipeline))
1239 return;
1240
1241 GstState oldState;
1242 GstState newState;
1243 GstState pendingState;
1244 gst_message_parse_state_changed (gstMessage, &oldState, &newState, &pendingState);
1245
1246 if (newState == pendingState)
1247 return;
1248
1249 m_posAtSeek = -1;
1250
1251 switch (newState) {
1252
1253 case GST_STATE_PLAYING :
1254 m_atStartOfStream = false;
1255 m_backend->logMessage("gstreamer: pipeline state set to playing", Backend::Info, this);
1256 m_tickTimer->start();
1257 changeState(Phonon::PlayingState);
1258 if (m_resumeState && m_oldState == Phonon::PlayingState) {
1259 seek(m_oldPos);
1260 m_resumeState = false;
1261 }
1262 break;
1263
1264 case GST_STATE_NULL:
1265 m_backend->logMessage("gstreamer: pipeline state set to null", Backend::Info, this);
1266 m_tickTimer->stop();
1267 break;
1268
1269 case GST_STATE_PAUSED :
1270 m_backend->logMessage("gstreamer: pipeline state set to paused", Backend::Info, this);
1271 m_tickTimer->start();
1272 if (state() == Phonon::LoadingState) {
1273 // No_more_pads is not emitted from the decodebin in older versions (0.10.4)
1274 noMorePadsAvailable();
1275 loadingComplete();
1276 } else if (m_resumeState && m_oldState == Phonon::PausedState) {
1277 changeState(Phonon::PausedState);
1278 m_resumeState = false;
1279 break;
1280 } else {
1281 // A lot of autotests can break if we allow all paused changes through.
1282 if (m_pendingState == Phonon::PausedState) {
1283 changeState(Phonon::PausedState);
1284 }
1285 }
1286 break;
1287
1288 case GST_STATE_READY :
1289 if (!m_loading && m_pendingState == Phonon::StoppedState)
1290 changeState(Phonon::StoppedState);
1291 m_backend->logMessage("gstreamer: pipeline state set to ready", Backend::Debug, this);
1292 m_tickTimer->stop();
1293 break;
1294
1295 case GST_STATE_VOID_PENDING :
1296 m_backend->logMessage("gstreamer: pipeline state set to pending (void)", Backend::Debug, this);
1297 m_tickTimer->stop();
1298 break;
1299 }
1300 break;
1301 }
1302
1303 case GST_MESSAGE_ERROR: {
1304 gchar *debug;
1305 GError *err;
1306 QString logMessage;
1307 gst_message_parse_error (gstMessage, &err, &debug);
1308 gchar *errorMessage = gst_error_get_message (err->domain, err->code);
1309 logMessage.sprintf("Error: %s Message:%s (%s) Code:%d", debug, err->message, errorMessage, err->code);
1310 m_backend->logMessage(logMessage, Backend::Warning);
1311 g_free(errorMessage);
1312 g_free (debug);
1313
1314 if (err->domain == GST_RESOURCE_ERROR) {
1315 if (err->code == GST_RESOURCE_ERROR_NOT_FOUND) {
1316 setError(tr("Could not locate media source."), Phonon::FatalError);
1317 } else if (err->code == GST_RESOURCE_ERROR_OPEN_READ) {
1318 setError(tr("Could not open media source."), Phonon::FatalError);
1319 } else if (err->code == GST_RESOURCE_ERROR_BUSY) {
1320 // We need to check if this comes from an audio device by looking at sink caps
1321 GstPad* sinkPad = gst_element_get_static_pad(GST_ELEMENT(gstMessage->src), "sink");
1322 if (sinkPad) {
1323 GstCaps *caps = gst_pad_get_caps (sinkPad);
1324 GstStructure *str = gst_caps_get_structure (caps, 0);
1325 if (g_strrstr (gst_structure_get_name (str), "audio"))
1326 setError(tr("Could not open audio device. The device is already in use."), Phonon::NormalError);
1327 else
1328 setError(err->message, Phonon::FatalError);
1329 gst_caps_unref (caps);
1330 gst_object_unref (sinkPad);
1331 }
1332 } else {
1333 setError(QString(err->message), Phonon::FatalError);
1334 }
1335 } else if (err->domain == GST_STREAM_ERROR) {
1336 switch (err->code) {
1337 case GST_STREAM_ERROR_WRONG_TYPE:
1338 case GST_STREAM_ERROR_TYPE_NOT_FOUND:
1339 setError(tr("Could not decode media source."), Phonon::FatalError);
1340 break;
1341 default:
1342 setError(tr("Could not open media source."), Phonon::FatalError);
1343 break;
1344 }
1345 } else {
1346 setError(QString(err->message), Phonon::FatalError);
1347 }
1348 g_error_free (err);
1349 break;
1350 }
1351
1352 case GST_MESSAGE_WARNING: {
1353 gchar *debug;
1354 GError *err;
1355 gst_message_parse_warning(gstMessage, &err, &debug);
1356 QString msgString;
1357 msgString.sprintf("Warning: %s\nMessage:%s", debug, err->message);
1358 m_backend->logMessage(msgString, Backend::Warning);
1359 g_free (debug);
1360 g_error_free (err);
1361 break;
1362 }
1363
1364 case GST_MESSAGE_ELEMENT: {
1365 GstMessage *gstMessage = message.rawMessage();
1366 const GstStructure *gstStruct = gst_message_get_structure(gstMessage); //do not free this
1367 if (g_strrstr (gst_structure_get_name (gstStruct), "prepare-xwindow-id")) {
1368 MediaNodeEvent videoHandleEvent(MediaNodeEvent::VideoHandleRequest);
1369 notify(&videoHandleEvent);
1370 }
1371 break;
1372 }
1373
1374 case GST_MESSAGE_DURATION: {
1375 m_backend->logMessage("GST_MESSAGE_DURATION", Backend::Debug, this);
1376 updateTotalTime();
1377 break;
1378 }
1379
1380 case GST_MESSAGE_BUFFERING: {
1381 gint percent = 0;
1382 gst_structure_get_int (gstMessage->structure, "buffer-percent", &percent); //gst_message_parse_buffering was introduced in 0.10.11
1383
1384 if (m_bufferPercent != percent) {
1385 emit bufferStatus(percent);
1386 m_backend->logMessage(QString("Stream buffering %0").arg(percent), Backend::Debug, this);
1387 m_bufferPercent = percent;
1388 }
1389
1390 if (m_state != Phonon::BufferingState)
1391 emit stateChanged(m_state, Phonon::BufferingState);
1392 else if (percent == 100)
1393 emit stateChanged(Phonon::BufferingState, m_state);
1394 break;
1395 }
1396 //case GST_MESSAGE_INFO:
1397 //case GST_MESSAGE_STREAM_STATUS:
1398 //case GST_MESSAGE_CLOCK_PROVIDE:
1399 //case GST_MESSAGE_NEW_CLOCK:
1400 //case GST_MESSAGE_STEP_DONE:
1401 //case GST_MESSAGE_LATENCY: only from 0.10.12
1402 //case GST_MESSAGE_ASYNC_DONE: only from 0.10.13
1403 default:
1404 break;
1405 }
1406}
1407
1408void MediaObject::handleEndOfStream()
1409{
1410 // If the stream is not seekable ignore
1411 // otherwise chained radio broadcasts would stop
1412
1413
1414 if (m_atEndOfStream)
1415 return;
1416
1417 if (!m_seekable)
1418 m_atEndOfStream = true;
1419
1420 if (m_autoplayTitles &&
1421 m_availableTitles > 1 &&
1422 m_currentTitle < m_availableTitles) {
1423 _iface_setCurrentTitle(m_currentTitle + 1);
1424 return;
1425 }
1426
1427 if (m_nextSource.type() != MediaSource::Invalid
1428 && m_nextSource.type() != MediaSource::Empty) { // We only emit finish when the queue is actually empty
1429 QTimer::singleShot (qMax(0, transitionTime()), this, SLOT(beginPlay()));
1430 } else {
1431 m_pendingState = Phonon::PausedState;
1432 emit finished();
1433 if (!m_seekable) {
1434 setState(Phonon::StoppedState);
1435 // Note the behavior for live streams is not properly defined,
1436 // but since we can't seek to 0, we don't have much choice other than stopping
1437 // the stream
1438 } else {
1439 // Only emit paused if the finished signal
1440 // did not result in a new state
1441 if (m_pendingState == Phonon::PausedState)
1442 setState(m_pendingState);
1443 }
1444 }
1445}
1446
1447 // Notifies the pipeline about state changes in the media object
1448void MediaObject::notifyStateChange(Phonon::State newstate, Phonon::State oldstate)
1449{
1450 Q_UNUSED(oldstate);
1451 MediaNodeEvent event(MediaNodeEvent::StateChanged, &newstate);
1452 notify(&event);
1453}
1454
1455#ifndef QT_NO_PHONON_MEDIACONTROLLER
1456//interface management
1457bool MediaObject::hasInterface(Interface iface) const
1458{
1459 return iface == AddonInterface::TitleInterface;
1460}
1461
1462QVariant MediaObject::interfaceCall(Interface iface, int command, const QList<QVariant> &params)
1463{
1464 if (hasInterface(iface)) {
1465
1466 switch (iface)
1467 {
1468 case TitleInterface:
1469 switch (command)
1470 {
1471 case availableTitles:
1472 return _iface_availableTitles();
1473 case title:
1474 return _iface_currentTitle();
1475 case setTitle:
1476 _iface_setCurrentTitle(params.first().toInt());
1477 break;
1478 case autoplayTitles:
1479 return m_autoplayTitles;
1480 case setAutoplayTitles:
1481 m_autoplayTitles = params.first().toBool();
1482 break;
1483 }
1484 break;
1485 default:
1486 break;
1487 }
1488 }
1489 return QVariant();
1490}
1491#endif
1492
1493int MediaObject::_iface_availableTitles() const
1494{
1495 return m_availableTitles;
1496}
1497
1498int MediaObject::_iface_currentTitle() const
1499{
1500 return m_currentTitle;
1501}
1502
1503void MediaObject::_iface_setCurrentTitle(int title)
1504{
1505 GstFormat trackFormat = gst_format_get_by_nick("track");
1506 m_backend->logMessage(QString("setCurrentTitle %0").arg(title), Backend::Info, this);
1507 if ((title == m_currentTitle) || (title < 1) || (title > m_availableTitles))
1508 return;
1509
1510 m_currentTitle = title;
1511
1512 //let's seek to the beginning of the song
1513 if (gst_element_seek_simple(m_pipeline, trackFormat, GST_SEEK_FLAG_FLUSH, m_currentTitle - 1)) {
1514 updateTotalTime();
1515 m_atEndOfStream = false;
1516 emit titleChanged(title);
1517 emit totalTimeChanged(totalTime());
1518 }
1519}
1520
1521} // ns Gstreamer
1522} // ns Phonon
1523
1524QT_END_NAMESPACE
1525
1526#include "moc_mediaobject.cpp"