source: trunk/src/3rdparty/phonon/ds9/mediaobject.cpp

Last change on this file was 846, checked in by Dmitry A. Kuminov, 15 years ago

trunk: Merged in qt 4.7.2 sources from branches/vendor/nokia/qt.

File size: 44.6 KB
1/* This file is part of the KDE project.
2
3Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
4
5This library is free software: you can redistribute it and/or modify
6it under the terms of the GNU Lesser General Public License as published by
7the Free Software Foundation, either version 2.1 or 3 of the License.
8
9This library is distributed in the hope that it will be useful,
10but WITHOUT ANY WARRANTY; without even the implied warranty of
11MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12GNU Lesser General Public License for more details.
13
14You should have received a copy of the GNU Lesser General Public License
15along with this library. If not, see <http://www.gnu.org/licenses/>.
16*/
17
18#include <QtCore/QVector>
19#include <QtCore/QTimerEvent>
20#include <QtCore/QTimer>
21#include <QtCore/QTime>
22#include <QtCore/QLibrary>
23
24#ifndef Q_CC_MSVC
25#include <dshow.h>
26#endif
27#include <objbase.h>
28#include <initguid.h>
29#include <qnetwork.h>
30#ifdef Q_CC_MSVC
31# include <comdef.h>
32#endif
33#include <evcode.h>
34
35#include "mediaobject.h"
36#include "videowidget.h"
37#include "audiooutput.h"
38
39
40#include <QtCore/QDebug>
41
42 #define TIMER_INTERVAL 16 //interval in ms of the timer that polls the current state (we use the multimedia timer)
43#define PRELOAD_TIME 2000 // 2 seconds to load a source
44
45QT_BEGIN_NAMESPACE
46
47namespace Phonon
48{
49 namespace DS9
50 {
51 typedef BOOL (WINAPI* LPAMGETERRORTEXT)(HRESULT, WCHAR *, DWORD);
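          //used to resolve AMGetErrorTextW from quartz.dll at run time (see MediaObject::catchComError())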
52
53 //first the definition of the WorkerThread class
54 WorkerThread::WorkerThread()
55 : QThread(), m_finished(false), m_currentWorkId(1)
56 {
57 }
58
59 WorkerThread::~WorkerThread()
60 {
61 }
62
63 void WorkerThread::run()
64 {
65 while (m_finished == false) {
66 HANDLE handles[FILTER_COUNT +1];
67 handles[0] = m_waitCondition;
68 int count = 1;
69 for(int i = 0; i < FILTER_COUNT; ++i) {
70 if (m_graphHandle[i].graph) {
71 handles[count++] = m_graphHandle[i].handle;
72 }
73 }
74 DWORD result = ::WaitForMultipleObjects(count, handles, FALSE, INFINITE);
75 if (result == WAIT_OBJECT_0) {
76 handleTask();
77 } else {
78                //otherwise one of the graphs signaled a DirectShow event: fetch it and forward it
79 const Graph &graph = m_graphHandle[result - WAIT_OBJECT_0 - 1].graph;
80 long eventCode;
81 LONG_PTR param1, param2;
82
83 ComPointer<IMediaEvent> mediaEvent(graph, IID_IMediaEvent);
84 mediaEvent->GetEvent(&eventCode, &param1, &param2, 0);
85 emit eventReady(graph, eventCode, param1);
86 mediaEvent->FreeEventParams(eventCode, param1, param2);
87 }
88 }
89 }
90
91        //queues a state change; stateReady() is emitted as soon as the new state is effective
92 void WorkerThread::addStateChangeRequest(Graph graph, OAFilterState state, QList<Filter> decoders)
93 {
94 QMutexLocker locker(&m_mutex);
95 bool found = false;
96            //if there is already a pending state change request for this graph, remove it
97 for(int i = 0; !found && i < m_queue.size(); ++i) {
98 const Work &w = m_queue.at(i);
99 if (w.graph == graph && w.task == ChangeState) {
100 found = true;
101 m_queue.removeAt(i);
102 }
103 }
104
105 //now let's create the new task
106 Work w;
107 w.task = ChangeState;
108 w.id = m_currentWorkId++;
109 w.graph = graph;
110 w.state = state;
111 w.decoders = decoders;
112 m_queue.enqueue(w);
113 m_waitCondition.set();
114 }
115
116 quint16 WorkerThread::addSeekRequest(Graph graph, qint64 time)
117 {
118 QMutexLocker locker(&m_mutex);
119 bool found = false;
120            //if there is already a pending seek request for this graph, remove it
121 for(int i = 0; !found && i < m_queue.size(); ++i) {
122 const Work &w = m_queue.at(i);
123 if (w.graph == graph && w.task == Seek) {
124 found = true;
125 m_queue.removeAt(i);
126 }
127 }
128
129 Work w;
130 w.task = Seek;
131            //the seek is performed on the graph passed by the caller
132 w.graph = graph;
133 w.id = m_currentWorkId++;
134 w.time = time;
135 m_queue.enqueue(w);
136 m_waitCondition.set();
137 return w.id;
138 }
139
140 quint16 WorkerThread::addUrlToRender(const QString &url)
141 {
142 QMutexLocker locker(&m_mutex);
143 Work w;
144 w.task = Render;
145 //we create a new graph
146 w.graph = Graph(CLSID_FilterGraph, IID_IGraphBuilder);
147 w.url = url;
148 w.url.detach();
149 w.id = m_currentWorkId++;
150 m_queue.enqueue(w);
151 m_waitCondition.set();
152 return w.id;
153 }
154
155 quint16 WorkerThread::addFilterToRender(const Filter &filter)
156 {
157 QMutexLocker locker(&m_mutex);
158 Work w;
159 w.task = Render;
160 //we create a new graph
161 w.graph = Graph(CLSID_FilterGraph, IID_IGraphBuilder);
162 w.filter = filter;
163 w.graph->AddFilter(filter, 0);
164 w.id = m_currentWorkId++;
165 m_queue.enqueue(w);
166 m_waitCondition.set();
167 return w.id;
168 }
169
170 void WorkerThread::replaceGraphForEventManagement(Graph newGraph, Graph oldGraph)
171 {
172 QMutexLocker locker(&m_mutex);
173 Work w;
174 w.task = ReplaceGraph;
175 w.graph = newGraph;
176 w.oldGraph = oldGraph;
177 m_queue.enqueue(w);
178 m_waitCondition.set();
179 }
180
181 void WorkerThread::handleTask()
182 {
183 QMutexLocker locker(Backend::directShowMutex);
184 {
185 QMutexLocker locker(&m_mutex);
186 if (m_finished || m_queue.isEmpty()) {
187 return;
188 }
189
190 m_currentWork = m_queue.dequeue();
191
192            //make sure the wait condition reflects whether the queue still holds work
193 if (m_queue.isEmpty()) {
194 m_waitCondition.reset();
195 } else {
196 m_waitCondition.set();
197 }
198 }
199
200 HRESULT hr = S_OK;
201
202 if (m_currentWork.task == ReplaceGraph) {
203 int index = -1;
204 for(int i = 0; i < FILTER_COUNT; ++i) {
205 if (m_graphHandle[i].graph == m_currentWork.oldGraph) {
206 m_graphHandle[i].graph = Graph();
207 index = i;
208 break;
209 } else if (index == -1 && m_graphHandle[i].graph == 0) {
210 //this is the first available slot
211 index = i;
212 }
213 }
214
215 Q_ASSERT(index != -1);
216
217 //add the new graph
218 HANDLE h;
219 if (SUCCEEDED(ComPointer<IMediaEvent>(m_currentWork.graph, IID_IMediaEvent)
220 ->GetEventHandle(reinterpret_cast<OAEVENT*>(&h)))) {
221 m_graphHandle[index].graph = m_currentWork.graph;
222 m_graphHandle[index].handle = h;
223 }
224 } else if (m_currentWork.task == Render) {
225 if (m_currentWork.filter) {
226 //let's render pins
227 const QList<OutputPin> outputs = BackendNode::pins(m_currentWork.filter, PINDIR_OUTPUT);
228 for (int i = 0; SUCCEEDED(hr) && i < outputs.count(); ++i) {
229 hr = m_currentWork.graph->Render(outputs.at(i));
230 }
231 } else if (!m_currentWork.url.isEmpty()) {
232 //let's render a url (blocking call)
233 hr = m_currentWork.graph->RenderFile(reinterpret_cast<const wchar_t *>(m_currentWork.url.utf16()), 0);
234 }
235 if (hr != E_ABORT) {
236 emit asyncRenderFinished(m_currentWork.id, hr, m_currentWork.graph);
237 }
238 } else if (m_currentWork.task == Seek) {
239                //this is a seek request
240 ComPointer<IMediaSeeking> mediaSeeking(m_currentWork.graph, IID_IMediaSeeking);
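                   //Phonon times are in milliseconds while IMediaSeeking uses 100-nanosecond units, hence the factor of 10000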
241 qint64 newtime = m_currentWork.time * 10000;
242 hr = mediaSeeking->SetPositions(&newtime, AM_SEEKING_AbsolutePositioning,
243 0, AM_SEEKING_NoPositioning);
244 emit asyncSeekingFinished(m_currentWork.id, newtime / 10000);
245 hr = E_ABORT; //to avoid emitting asyncRenderFinished
246 } else if (m_currentWork.task == ChangeState) {
247
248 //remove useless decoders
249 QList<Filter> unused;
250 for (int i = 0; i < m_currentWork.decoders.count(); ++i) {
251 const Filter &filter = m_currentWork.decoders.at(i);
252 bool used = false;
253 const QList<OutputPin> pins = BackendNode::pins(filter, PINDIR_OUTPUT);
254                for (int j = 0; j < pins.count(); ++j) {
255                    InputPin input;
256                    if (pins.at(j)->ConnectedTo(input.pparam()) == S_OK) {
257 used = true;
258 }
259 }
260 if (!used) {
261 unused += filter;
262 }
263 }
264
265            //now remove those unused decoders from the graph
266 for (int i = 0; i < unused.count(); ++i) {
267 //we should remove this filter from the graph
268 m_currentWork.graph->RemoveFilter(unused.at(i));
269 }
270
271
272 //we can get the state
273 ComPointer<IMediaControl> mc(m_currentWork.graph, IID_IMediaControl);
274
275 //we change the state here
276 switch(m_currentWork.state)
277 {
278 case State_Stopped:
279 mc->Stop();
280 break;
281 case State_Paused:
282 mc->Pause();
283 break;
284 case State_Running:
285 mc->Run();
286 break;
287 }
288 OAFilterState s;
289 //blocking call
290 HRESULT hr = mc->GetState(INFINITE, &s);
291
292 if (SUCCEEDED(hr)) {
293 if (s == State_Stopped) {
294 emit stateReady(m_currentWork.graph, Phonon::StoppedState);
295 } else if (s == State_Paused) {
296 emit stateReady(m_currentWork.graph, Phonon::PausedState);
297 } else /*if (s == State_Running)*/ {
298 emit stateReady(m_currentWork.graph, Phonon::PlayingState);
299 }
300 }
301 }
302
303 {
304 QMutexLocker locker(&m_mutex);
305 m_currentWork = Work(); //reinitialize
306 }
307 }
308
309 void WorkerThread::abortCurrentRender(qint16 renderId)
310 {
311 QMutexLocker locker(&m_mutex);
312 if (m_currentWork.id == renderId) {
313 m_currentWork.graph->Abort();
314 }
315 bool found = false;
316 for(int i = 0; !found && i < m_queue.size(); ++i) {
317 const Work &w = m_queue.at(i);
318 if (w.id == renderId) {
319 found = true;
320 m_queue.removeAt(i);
321 if (m_queue.isEmpty()) {
322 m_waitCondition.reset();
323 }
324 }
325 }
326 }
327
328 //tells the thread to stop processing
329 void WorkerThread::signalStop()
330 {
331 QMutexLocker locker(&m_mutex);
332 m_queue.clear();
333 if (m_currentWork.graph) {
334 //in case we're currently rendering something
335 m_currentWork.graph->Abort();
336
337 }
338
339 m_finished = true;
340 m_waitCondition.set();
341 }
342
343
344 MediaObject::MediaObject(QObject *parent) : BackendNode(parent),
345 transactionState(Phonon::StoppedState),
346 m_errorType(Phonon::NoError),
347 m_state(Phonon::LoadingState),
348 m_nextState(Phonon::StoppedState),
349 m_prefinishMark(0),
350 m_tickInterval(0),
351 m_buffering(false),
352 m_oldHasVideo(false),
353 m_prefinishMarkSent(false),
354 m_aboutToFinishSent(false),
355 m_nextSourceReadyToStart(false),
356#ifndef QT_NO_PHONON_MEDIACONTROLLER
357 m_autoplayTitles(true),
358 m_currentTitle(0),
359#endif //QT_NO_PHONON_MEDIACONTROLLER
360 m_targetTick(INFINITE)
361 {
362
363 for(int i = 0; i < FILTER_COUNT; ++i) {
364 m_graphs[i] = new MediaGraph(this, i);
365 }
366
367 connect(&m_thread, SIGNAL(stateReady(Graph,Phonon::State)),
368 SLOT(slotStateReady(Graph,Phonon::State)));
369
370 connect(&m_thread, SIGNAL(eventReady(Graph,long,long)),
371 SLOT(handleEvents(Graph,long,long)));
372
373 connect(&m_thread, SIGNAL(asyncRenderFinished(quint16,HRESULT,Graph)),
374 SLOT(finishLoading(quint16,HRESULT,Graph)));
375
376 connect(&m_thread, SIGNAL(asyncSeekingFinished(quint16,qint64)),
377 SLOT(finishSeeking(quint16,qint64)));
378            //special case: a MediaObject is its own media object
379 m_mediaObject = this;
380 m_thread.start();
381 }
382
383 MediaObject::~MediaObject()
384 {
385 //be sure to finish the timer first
386 m_tickTimer.stop();
387
388 //we finish the worker thread here
389 m_thread.signalStop();
390 m_thread.wait();
391
392 //and then we delete the graphs
393 for (int i = 0; i < FILTER_COUNT; ++i) {
394 delete m_graphs[i];
395 }
396 }
397
398 WorkerThread *MediaObject::workerThread()
399 {
400 return &m_thread;
401 }
402
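           //m_graphs[0] is always the graph of the current source; nextGraph() returns the one used to preload the next source (they are swapped in switchToNextSource())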
403 MediaGraph *MediaObject::currentGraph() const
404 {
405 return m_graphs[0];
406 }
407
408 MediaGraph *MediaObject::nextGraph() const
409 {
410 return m_graphs[FILTER_COUNT - 1];
411 }
412
413        //called periodically while playing to poll the current position, emit ticks and handle transitions
414 void MediaObject::timerEvent(QTimerEvent *e)
415 {
416 if (e->timerId() == m_tickTimer.timerId()) {
417
418 const qint64 current = currentTime();
419 const qint64 total = totalTime();
420
421 if ( m_tickInterval != 0 && current > m_targetTick) {
422 updateTargetTick();
423 emit tick(current);
424 }
425
426            //check whether we need to move on to the next title
427#ifndef QT_NO_PHONON_MEDIACONTROLLER
428 if (m_autoplayTitles && m_currentTitle < _iface_availableTitles() - 1) {
429
430 if (current >= total) {
431 //we go to the next title
432 _iface_setCurrentTitle(m_currentTitle + 1, false);
433 emit tick(current);
434 }
435 return;
436 }
437#endif //QT_NO_PHONON_MEDIACONTROLLER
438
439 if (total) {
440 const qint64 remaining = total - current;
441
442 if (m_transitionTime < 0 && m_nextSourceReadyToStart) {
443 if (remaining < -m_transitionTime + TIMER_INTERVAL/2) {
444 //we need to switch graphs to run the next source in the queue (with cross-fading)
445 switchToNextSource();
446 return;
447 } else if (current < -m_transitionTime) {
448 //we are currently crossfading
449 for (int i = 0; i < m_audioOutputs.count(); ++i) {
450 m_audioOutputs.at(i)->setCrossFadingProgress( currentGraph()->index(), qMin( qreal(1.), qreal(current) / qreal(-m_transitionTime)));
451 }
452 }
453 }
454
455 if (m_prefinishMark > 0 && !m_prefinishMarkSent && remaining < m_prefinishMark + TIMER_INTERVAL/2) {
456#ifdef GRAPH_DEBUG
457 qDebug() << "DS9: emit prefinishMarkReached" << remaining << QTime::currentTime().toString();
458#endif
459 m_prefinishMarkSent = true;
460
461 emit prefinishMarkReached( remaining );
462 }
463
464 if (!m_aboutToFinishSent && remaining < PRELOAD_TIME - m_transitionTime + TIMER_INTERVAL/2) {
465                    //leave about 2 seconds (PRELOAD_TIME) to actually load the next file
466#ifdef GRAPH_DEBUG
467 qDebug() << "DS9: emit aboutToFinish" << remaining << QTime::currentTime().toString();
468#endif
469 m_aboutToFinishSent = true;
470 emit aboutToFinish();
471 }
472 } else {
473 //total is 0: the stream is probably live (endless)
474 }
475
476 if (m_buffering) {
477 ComPointer<IAMNetworkStatus> status(currentGraph()->realSource(), IID_IAMNetworkStatus);
478 if (status) {
479 long l;
480 status->get_BufferingProgress(&l);
481 emit bufferStatus(l);
482#ifdef GRAPH_DEBUG
483 qDebug() << "emit bufferStatus(" << l << ")";
484#endif
485 }
486 }
487 }
488 }
489
490 void MediaObject::switchToNextSource()
491 {
492 m_prefinishMarkSent = false;
493 m_aboutToFinishSent = false;
494 m_nextSourceReadyToStart = false;
495
496 m_oldHasVideo = currentGraph()->hasVideo();
497
498 qSwap(m_graphs[0], m_graphs[1]); //swap the graphs
499
500 if (m_transitionTime >= 0)
501 m_graphs[1]->stop(); //make sure we stop the previous graph
502
503 if (currentGraph()->mediaSource().type() != Phonon::MediaSource::Invalid &&
504 catchComError(currentGraph()->renderResult())) {
505 setState(Phonon::ErrorState);
506 return;
507 }
508
509 //we need to play the next media
510 play();
511
512 //we tell the video widgets to switch now to the new source
513#ifndef QT_NO_PHONON_VIDEO
514 for (int i = 0; i < m_videoWidgets.count(); ++i) {
515 m_videoWidgets.at(i)->setCurrentGraph(currentGraph()->index());
516 }
517#endif //QT_NO_PHONON_VIDEO
518
519 emit currentSourceChanged(currentGraph()->mediaSource());
520 emit metaDataChanged(currentGraph()->metadata());
521
522 if (nextGraph()->hasVideo() != currentGraph()->hasVideo()) {
523 emit hasVideoChanged(currentGraph()->hasVideo());
524 }
525
526 emit tick(0);
527 emit totalTimeChanged(totalTime());
528
529#ifndef QT_NO_PHONON_MEDIACONTROLLER
530 setTitles(currentGraph()->titles());
531#endif //QT_NO_PHONON_MEDIACONTROLLER
532 }
533
534 Phonon::State MediaObject::state() const
535 {
536 if (m_buffering) {
537 return Phonon::BufferingState;
538 } else {
539 return m_state;
540 }
541 }
542
543 bool MediaObject::hasVideo() const
544 {
545 return currentGraph()->hasVideo();
546 }
547
548 bool MediaObject::isSeekable() const
549 {
550 return currentGraph()->isSeekable();
551 }
552
553 qint64 MediaObject::totalTime() const
554 {
555#ifndef QT_NO_PHONON_MEDIACONTROLLER
556            //first, check whether there are more titles after the current one
557 const qint64 ret = (m_currentTitle < _iface_availableTitles() - 1) ?
558 titleAbsolutePosition(m_currentTitle+1) : currentGraph()->absoluteTotalTime();
559
560 //this is the duration of the current title
561 return ret - titleAbsolutePosition(m_currentTitle);
562#else
563 return currentGraph()->absoluteTotalTime();
564#endif //QT_NO_PHONON_MEDIACONTROLLER
565 }
566
567 qint64 MediaObject::currentTime() const
568 {
569            //the current time relative to the start of the current title (this also compensates for inaccuracy when stopping on a title boundary)
570 return currentGraph()->absoluteCurrentTime()
571#ifndef QT_NO_PHONON_MEDIACONTROLLER
572 - titleAbsolutePosition(m_currentTitle)
573#endif //QT_NO_PHONON_MEDIACONTROLLER
574 ;
575 }
576
577 qint32 MediaObject::tickInterval() const
578 {
579 return m_tickInterval;
580 }
581
582 void MediaObject::setTickInterval(qint32 newTickInterval)
583 {
584 m_tickInterval = newTickInterval;
585 updateTargetTick();
586 }
587
588 void MediaObject::pause()
589 {
590 if (currentGraph()->isLoading()) {
591 m_nextState = Phonon::PausedState;
592 } else {
593 currentGraph()->pause();
594 }
595 }
596
597 void MediaObject::stop()
598 {
599 if (currentGraph()->isLoading()) {
600 m_nextState = Phonon::StoppedState;
601 } else {
602 currentGraph()->stop();
603 }
604 }
605
606 void MediaObject::ensureStopped()
607 {
608 currentGraph()->ensureStopped();
609 if (m_state == Phonon::ErrorState) {
610 //we reset the state here
611 m_state = Phonon::StoppedState;
612 }
613 }
614
615 void MediaObject::play()
616 {
617 if (currentGraph()->isLoading()) {
618 m_nextState = Phonon::PlayingState;
619 } else {
620 currentGraph()->play();
621 }
622 }
623
624 QString MediaObject::errorString() const
625 {
626 return m_errorString;
627 }
628
629 Phonon::ErrorType MediaObject::errorType() const
630 {
631 return m_errorType;
632 }
633
634
635 void MediaObject::updateTargetTick()
636 {
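               //m_targetTick is the next position (a multiple of m_tickInterval, in milliseconds) at which tick() must be emitted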
637 if (m_tickInterval) {
638 const qint64 current = currentTime();
639 m_targetTick = current / m_tickInterval * m_tickInterval;
640 if (current == 0 || m_targetTick < current) {
641 m_targetTick += m_tickInterval;
642 }
643 }
644 }
645
646 void MediaObject::setState(Phonon::State newstate)
647 {
648 if (newstate == Phonon::PlayingState) {
649 updateTargetTick();
650 }
651
652 if (newstate == m_state) {
653 return;
654 }
655
656 //manage the timer
657 if (newstate == Phonon::PlayingState) {
658 m_tickTimer.start(TIMER_INTERVAL, this);
659 } else {
660 m_tickTimer.stop();
661 }
662
663 Phonon::State oldstate = state();
664 m_state = newstate;
665 emit stateChanged(newstate, oldstate);
666 }
667
668
669 qint32 MediaObject::prefinishMark() const
670 {
671 return m_prefinishMark;
672 }
673
674 void MediaObject::setPrefinishMark(qint32 newPrefinishMark)
675 {
676 m_prefinishMark = newPrefinishMark;
677 }
678
679 qint32 MediaObject::transitionTime() const
680 {
681 return m_transitionTime;
682 }
683
684 void MediaObject::setTransitionTime(qint32 time)
685 {
686 m_transitionTime = time;
687 }
688
689 qint64 MediaObject::remainingTime() const
690 {
691 return totalTime() - currentTime();
692 }
693
694
695 Phonon::MediaSource MediaObject::source() const
696 {
697 return currentGraph()->mediaSource();
698 }
699
700 void MediaObject::setNextSource(const Phonon::MediaSource &source)
701 {
702 m_nextSourceReadyToStart = true;
703 const bool shouldSwitch = (m_state == Phonon::StoppedState || m_state == Phonon::ErrorState);
704 nextGraph()->loadSource(source); //let's preload the source
705
706 if (shouldSwitch) {
707 switchToNextSource();
708 }
709 }
710
711 void MediaObject::setSource(const Phonon::MediaSource &source)
712 {
713 m_nextSourceReadyToStart = false;
714 m_prefinishMarkSent = false;
715 m_aboutToFinishSent = false;
716
717 m_oldHasVideo = currentGraph()->hasVideo();
718 setState(Phonon::LoadingState);
719 //After loading we go into stopped state
720 m_nextState = Phonon::StoppedState;
721 catchComError(currentGraph()->loadSource(source));
722 emit currentSourceChanged(source);
723 }
724
725 void MediaObject::slotStateReady(Graph graph, Phonon::State newState)
726 {
727 if (graph == currentGraph()->graph() && !currentGraph()->isLoading()) {
728 setState(newState);
729 }
730 }
731
732 void MediaObject::loadingFinished(MediaGraph *mg)
733 {
734 if (mg == currentGraph()) {
735#ifndef QT_NO_PHONON_MEDIACONTROLLER
736 //Title interface
737 m_currentTitle = 0;
738 setTitles(currentGraph()->titles());
739#endif //QT_NO_PHONON_MEDIACONTROLLER
740
741 HRESULT hr = mg->renderResult();
742
743 if (catchComError(hr)) {
744 return;
745 }
746
747 if (m_oldHasVideo != currentGraph()->hasVideo()) {
748 emit hasVideoChanged(currentGraph()->hasVideo());
749 }
750
751#ifndef QT_NO_PHONON_VIDEO
752 if (currentGraph()->hasVideo()) {
753 updateVideoGeometry();
754 }
755#endif //QT_NO_PHONON_VIDEO
756
757 emit metaDataChanged(currentGraph()->metadata());
758 emit totalTimeChanged(totalTime());
759
760            //now apply the state that was requested while loading
761 switch(m_nextState)
762 {
763 case Phonon::PausedState:
764 pause();
765 break;
766 case Phonon::PlayingState:
767 play();
768 break;
769 case Phonon::ErrorState:
770 setState(Phonon::ErrorState);
771 break;
772 case Phonon::StoppedState:
773 default:
774 stop();
775 break;
776 }
777 }
778 }
779
780 void MediaObject::seek(qint64 time)
781 {
782 //we seek into the current title
783 currentGraph()->absoluteSeek(time
784#ifndef QT_NO_PHONON_MEDIACONTROLLER
785 + titleAbsolutePosition(m_currentTitle)
786#endif //QT_NO_PHONON_MEDIACONTROLLER
787 );
788 }
789
790 void MediaObject::seekingFinished(MediaGraph *mg)
791 {
792 if (mg == currentGraph()) {
793
794 updateTargetTick();
795 if (currentTime() < totalTime() - m_prefinishMark) {
796 m_prefinishMarkSent = false;
797 }
798
799 if (currentTime() < totalTime() - PRELOAD_TIME + m_transitionTime) {
800 m_aboutToFinishSent = false;
801 }
802
803                //this helps the application update its UI (the seek slider, for example)
804 if (m_state == PausedState || m_state == PlayingState) {
805 emit tick(currentTime());
806 }
807 }
808 }
809
810
811 bool MediaObject::catchComError(HRESULT hr)
812 {
813
814 m_errorString.clear();
815 m_errorType = Phonon::NoError;
816
817 if (hr != S_OK) {
818#ifdef GRAPH_DEBUG
819 qWarning("an error occurred 0x%x",hr);
820#endif
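               //resolve AMGetErrorTextW dynamically so that we do not need to link against quartz.dll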
821 LPAMGETERRORTEXT getErrorText = (LPAMGETERRORTEXT)QLibrary::resolve(QLatin1String("quartz"), "AMGetErrorTextW");
822
823 WCHAR buffer[MAX_ERROR_TEXT_LEN];
824 if (getErrorText && getErrorText(hr, buffer, MAX_ERROR_TEXT_LEN)) {
825 m_errorString = QString::fromWCharArray(buffer);
826 } else {
827 m_errorString = QString::fromLatin1("Unknown error");
828 }
829 const QString comError = QString::number(uint(hr), 16);
830 if (!m_errorString.toLower().contains(comError.toLower())) {
831 m_errorString += QString::fromLatin1(" (0x%1)").arg(comError);
832 }
833 if (FAILED(hr)) {
834 m_errorType = Phonon::FatalError;
835 setState(Phonon::ErrorState);
836 } else {
837 m_errorType = Phonon::NormalError;
838 m_nextState = Phonon::ErrorState;
839 }
840 } else {
841 m_errorType = Phonon::NoError;
842
843 }
844
845 return m_errorType == Phonon::FatalError;
846 }
847
848
849 void MediaObject::grabNode(BackendNode *node)
850 {
851 for (int i = 0; i < FILTER_COUNT; ++i) {
852 m_graphs[i]->grabNode(node);
853 }
854 node->setMediaObject(this);
855 }
856
857 bool MediaObject::connectNodes(BackendNode *source, BackendNode *sink)
858 {
859 bool ret = true;
860 for (int i = 0; i < FILTER_COUNT; ++i) {
861 ret = ret && m_graphs[i]->connectNodes(source, sink);
862 }
863 if (ret) {
864#ifndef QT_NO_PHONON_VIDEO
865 if (VideoWidget *video = qobject_cast<VideoWidget*>(sink)) {
866 m_videoWidgets += video;
867 } else
868#endif //QT_NO_PHONON_VIDEO
869 if (AudioOutput *audio = qobject_cast<AudioOutput*>(sink)) {
870 m_audioOutputs += audio;
871 }
872 }
873 return ret;
874 }
875
876 bool MediaObject::disconnectNodes(BackendNode *source, BackendNode *sink)
877 {
878 bool ret = true;
879 for (int i = 0; i < FILTER_COUNT; ++i) {
880 ret = ret && m_graphs[i]->disconnectNodes(source, sink);
881 }
882 if (ret) {
883#ifndef QT_NO_PHONON_VIDEO
884 if (VideoWidget *video = qobject_cast<VideoWidget*>(sink)) {
885 m_videoWidgets.removeOne(video);
886 } else
887#endif //QT_NO_PHONON_VIDEO
888 if (AudioOutput *audio = qobject_cast<AudioOutput*>(sink)) {
889 m_audioOutputs.removeOne(audio);
890 }
891 }
892 return ret;
893 }
894
895#ifndef QT_NO_PHONON_VIDEO
896 void MediaObject::updateVideoGeometry()
897 {
898 for (int i = 0; i < m_videoWidgets.count(); ++i) {
899 m_videoWidgets.at(i)->notifyVideoLoaded();
900 }
901 }
902#endif //QT_NO_PHONON_VIDEO
903
904 void MediaObject::handleComplete(IGraphBuilder *graph)
905 {
906 if (graph == currentGraph()->graph()) {
907 if (m_transitionTime >= PRELOAD_TIME || m_aboutToFinishSent == false) {
908                emit aboutToFinish(); //give the frontend a chance to provide the next source
909 m_aboutToFinishSent = true;
910 }
911
912 if (!m_nextSourceReadyToStart) {
913 //this is the last source, we simply finish
914 const qint64 current = currentTime();
915 const OAFilterState currentState = currentGraph()->syncGetRealState();
916
917 emit tick(current); //this ensures that the end of the seek slider is reached
918 emit finished();
919
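                   //the frontend may react to finished() by seeking or restarting; only pause if nothing changed in the meantime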
920 if (currentTime() == current && currentGraph()->syncGetRealState() == currentState) {
921 //no seek operation in-between
922 pause();
923 setState(Phonon::PausedState); //we set it here
924 }
925
926 } else if (m_transitionTime == 0) {
927 //gapless transition
928 switchToNextSource(); //let's call the function immediately
929 } else if (m_transitionTime > 0) {
930                //delayed transition: switch to the next source after m_transitionTime milliseconds
931 QTimer::singleShot(m_transitionTime, this, SLOT(switchToNextSource()));
932 }
933 } else {
934 //it is just the end of the previous source (in case of cross-fading)
935 nextGraph()->cleanup();
936 }
937 for (int i = 0; i < m_audioOutputs.count(); ++i) {
938 m_audioOutputs.at(i)->setCrossFadingProgress( currentGraph()->index(), 1.); //cross-fading is in any case finished
939 }
940 }
941
942 void MediaObject::finishLoading(quint16 workId, HRESULT hr, Graph graph)
943 {
944 for(int i = 0; i < FILTER_COUNT; ++i) {
945 m_graphs[i]->finishLoading(workId, hr, graph);
946 }
947 }
948
949 void MediaObject::finishSeeking(quint16 workId, qint64 time)
950 {
951 for(int i = 0; i < FILTER_COUNT; ++i) {
952 m_graphs[i]->finishSeeking(workId, time);
953 }
954 }
955
956
957 void MediaObject::handleEvents(Graph graph, long eventCode, long param1)
958 {
959 QString eventDescription;
960 switch (eventCode)
961 {
962 case EC_BUFFERING_DATA:
963 if (graph == currentGraph()->graph()) {
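                   //param1 is nonzero while the graph is buffering data and zero once buffering is done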
964 m_buffering = param1;
965 emit stateChanged(state(), m_state);
966 }
967 break;
968 case EC_LENGTH_CHANGED:
969 if (graph == currentGraph()->graph()) {
970 emit totalTimeChanged( totalTime() );
971 }
972 break;
973
974 case EC_COMPLETE:
975 handleComplete(graph);
976 break;
977
978#ifndef QT_NO_PHONON_VIDEO
979 case EC_VIDEO_SIZE_CHANGED:
980 if (graph == currentGraph()->graph()) {
981 updateVideoGeometry();
982 }
983 break;
984#endif //QT_NO_PHONON_VIDEO
985
986#ifdef GRAPH_DEBUG
987 case EC_ACTIVATE: qDebug() << "EC_ACTIVATE: A video window is being " << (param1 ? "ACTIVATED" : "DEACTIVATED"); break;
988            case EC_BUILT: qDebug() << "EC_BUILT: Sent by the Video Control when a graph has been built. Not forwarded to applications."; break;
989 case EC_CLOCK_CHANGED: qDebug() << "EC_CLOCK_CHANGED"; break;
990 case EC_CLOCK_UNSET: qDebug() << "EC_CLOCK_UNSET: The clock provider was disconnected."; break;
991 case EC_CODECAPI_EVENT: qDebug() << "EC_CODECAPI_EVENT: Sent by an encoder to signal an encoding event."; break;
992 case EC_DEVICE_LOST: qDebug() << "EC_DEVICE_LOST: A Plug and Play device was removed or has become available again."; break;
993 case EC_DISPLAY_CHANGED: qDebug() << "EC_DISPLAY_CHANGED: The display mode has changed."; break;
994 case EC_END_OF_SEGMENT: qDebug() << "EC_END_OF_SEGMENT: The end of a segment has been reached."; break;
995 case EC_ERROR_STILLPLAYING: qDebug() << "EC_ERROR_STILLPLAYING: An asynchronous command to run the graph has failed."; break;
996 case EC_ERRORABORT: qDebug() << "EC_ERRORABORT: An operation was aborted because of an error."; break;
997 case EC_EXTDEVICE_MODE_CHANGE: qDebug() << "EC_EXTDEVICE_MODE_CHANGE: Not supported."; break;
998 case EC_FULLSCREEN_LOST: qDebug() << "EC_FULLSCREEN_LOST: The video renderer is switching out of full-screen mode."; break;
999 case EC_GRAPH_CHANGED: qDebug() << "EC_GRAPH_CHANGED: The filter graph has changed."; break;
1000 case EC_NEED_RESTART: qDebug() << "EC_NEED_RESTART: A filter is requesting that the graph be restarted."; break;
1001 case EC_NOTIFY_WINDOW: qDebug() << "EC_NOTIFY_WINDOW: Notifies a filter of the video renderer's window."; break;
1002 case EC_OLE_EVENT: qDebug() << "EC_OLE_EVENT: A filter is passing a text string to the application."; break;
1003 case EC_OPENING_FILE: qDebug() << "EC_OPENING_FILE: The graph is opening a file, or has finished opening a file."; break;
1004 case EC_PALETTE_CHANGED: qDebug() << "EC_PALETTE_CHANGED: The video palette has changed."; break;
1005 case EC_PAUSED: qDebug() << "EC_PAUSED: A pause request has completed."; break;
1006 case EC_PREPROCESS_COMPLETE: qDebug() << "EC_PREPROCESS_COMPLETE: Sent by the WM ASF Writer filter when it completes the pre-processing for multipass encoding."; break;
1007 case EC_QUALITY_CHANGE: qDebug() << "EC_QUALITY_CHANGE: The graph is dropping samples, for quality control."; break;
1008 case EC_REPAINT: qDebug() << "EC_REPAINT: A video renderer requires a repaint."; break;
1009 case EC_SEGMENT_STARTED: qDebug() << "EC_SEGMENT_STARTED: A new segment has started."; break;
1010 case EC_SHUTTING_DOWN: qDebug() << "EC_SHUTTING_DOWN: The filter graph is shutting down, prior to being destroyed."; break;
1011 case EC_SNDDEV_IN_ERROR: qDebug() << "EC_SNDDEV_IN_ERROR: A device error has occurred in an audio capture filter."; break;
1012 case EC_SNDDEV_OUT_ERROR: qDebug() << "EC_SNDDEV_OUT_ERROR: A device error has occurred in an audio renderer filter."; break;
1013 case EC_STARVATION: qDebug() << "EC_STARVATION: A filter is not receiving enough data."; break;
1014 case EC_STATE_CHANGE: qDebug() << "EC_STATE_CHANGE: The filter graph has changed state."; break;
1015 case EC_STEP_COMPLETE: qDebug() << "EC_STEP_COMPLETE: A filter performing frame stepping has stepped the specified number of frames."; break;
1016 case EC_STREAM_CONTROL_STARTED: qDebug() << "EC_STREAM_CONTROL_STARTED: A stream-control start command has taken effect."; break;
1017 case EC_STREAM_CONTROL_STOPPED: qDebug() << "EC_STREAM_CONTROL_STOPPED: A stream-control stop command has taken effect."; break;
1018 case EC_STREAM_ERROR_STILLPLAYING: qDebug() << "EC_STREAM_ERROR_STILLPLAYING: An error has occurred in a stream. The stream is still playing."; break;
1019 case EC_STREAM_ERROR_STOPPED: qDebug() << "EC_STREAM_ERROR_STOPPED: A stream has stopped because of an error."; break;
1020 case EC_TIMECODE_AVAILABLE: qDebug() << "EC_TIMECODE_AVAILABLE: Not supported."; break;
1021            case EC_UNBUILT: qDebug() << "EC_UNBUILT: Sent by the Video Control when a graph has been torn down. Not forwarded to applications."; break;
1022            case EC_USERABORT: qDebug() << "EC_USERABORT: The user has terminated playback with the video window."; break;
1023 case EC_VMR_RECONNECTION_FAILED: qDebug() << "EC_VMR_RECONNECTION_FAILED: Sent by the VMR-7 and the VMR-9 when it was unable to accept a dynamic format change request from the upstream decoder."; break;
1024 case EC_VMR_RENDERDEVICE_SET: qDebug() << "EC_VMR_RENDERDEVICE_SET: Sent when the VMR has selected its rendering mechanism."; break;
1025 case EC_VMR_SURFACE_FLIPPED: qDebug() << "EC_VMR_SURFACE_FLIPPED: Sent when the VMR-7's allocator presenter has called the DirectDraw Flip method on the surface being presented."; break;
1026 case EC_WINDOW_DESTROYED: qDebug() << "EC_WINDOW_DESTROYED: The video renderer was destroyed or removed from the graph"; break;
1027 case EC_WMT_EVENT: qDebug() << "EC_WMT_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Reader filter to play ASF files protected by digital rights management (DRM)."; break;
1028 case EC_WMT_INDEX_EVENT: qDebug() << "EC_WMT_INDEX_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Writer to index Windows Media Video files."; break;
1029
1030 //documented by Microsoft but not supported in the Platform SDK
1031 // case EC_BANDWIDTHCHANGE : qDebug() << "EC_BANDWIDTHCHANGE: not supported"; break;
1032 // case EC_CONTENTPROPERTY_CHANGED: qDebug() << "EC_CONTENTPROPERTY_CHANGED: not supported."; break;
1033 // case EC_EOS_SOON: qDebug() << "EC_EOS_SOON: not supported"; break;
1034 // case EC_ERRORABORTEX: qDebug() << "EC_ERRORABORTEX: An operation was aborted because of an error."; break;
1035 // case EC_FILE_CLOSED: qDebug() << "EC_FILE_CLOSED: The source file was closed because of an unexpected event."; break;
1036 // case EC_LOADSTATUS: qDebug() << "EC_LOADSTATUS: Notifies the application of progress when opening a network file."; break;
1037 // case EC_MARKER_HIT: qDebug() << "EC_MARKER_HIT: not supported."; break;
1038 // case EC_NEW_PIN: qDebug() << "EC_NEW_PIN: not supported."; break;
1039 // case EC_PLEASE_REOPEN: qDebug() << "EC_PLEASE_REOPEN: The source file has changed."; break;
1040 // case EC_PROCESSING_LATENCY: qDebug() << "EC_PROCESSING_LATENCY: Indicates the amount of time that a component is taking to process each sample."; break;
1041 // case EC_RENDER_FINISHED: qDebug() << "EC_RENDER_FINISHED: Not supported."; break;
1042 // case EC_SAMPLE_LATENCY: qDebug() << "EC_SAMPLE_LATENCY: Specifies how far behind schedule a component is for processing samples."; break;
1043 // case EC_SAMPLE_NEEDED: qDebug() << "EC_SAMPLE_NEEDED: Requests a new input sample from the Enhanced Video Renderer (EVR) filter."; break;
1044 // case EC_SCRUB_TIME: qDebug() << "EC_SCRUB_TIME: Specifies the time stamp for the most recent frame step."; break;
1045 // case EC_STATUS: qDebug() << "EC_STATUS: Contains two arbitrary status strings."; break;
1046 // case EC_VIDEOFRAMEREADY: qDebug() << "EC_VIDEOFRAMEREADY: A video frame is ready for display."; break;
1047
1048 default:
1049 qDebug() << "Unknown event" << eventCode << "(" << param1 << ")";
1050 break;
1051#else
1052 default:
1053 break;
1054#endif
1055 }
1056 }
1057
1058
1059#ifndef QT_NO_PHONON_MEDIACONTROLLER
1060 //interface management
1061 bool MediaObject::hasInterface(Interface iface) const
1062 {
1063 return iface == AddonInterface::TitleInterface;
1064 }
1065
1066 QVariant MediaObject::interfaceCall(Interface iface, int command, const QList<QVariant> &params)
1067 {
1068 if (hasInterface(iface)) {
1069
1070 switch (iface)
1071 {
1072 case TitleInterface:
1073 switch (command)
1074 {
1075 case availableTitles:
1076 return _iface_availableTitles();
1077 case title:
1078 return _iface_currentTitle();
1079 case setTitle:
1080 _iface_setCurrentTitle(params.first().toInt());
1081 break;
1082 case autoplayTitles:
1083 return m_autoplayTitles;
1084 case setAutoplayTitles:
1085 m_autoplayTitles = params.first().toBool();
1086 updateStopPosition();
1087 break;
1088 }
1089 break;
1090 default:
1091 break;
1092 }
1093 }
1094 return QVariant();
1095 }
1096
1097
1098 //TitleInterface
1099
1100        //returns the absolute start position (in milliseconds) of the given title
1101 qint64 MediaObject::titleAbsolutePosition(int title) const
1102 {
1103 if (title >= 0 && title < m_titles.count()) {
1104 return m_titles.at(title);
1105 } else {
1106 return 0;
1107 }
1108 }
1109
1110 void MediaObject::setTitles(const QList<qint64> &titles)
1111 {
1112 //this is called when the source is loaded
1113 const bool emitSignal = m_titles.count() != titles.count();
1114 m_titles = titles;
1115 if (emitSignal) {
1116 emit availableTitlesChanged(titles.count());
1117 }
1118 updateStopPosition();
1119 }
1120
1121
1122 int MediaObject::_iface_availableTitles() const
1123 {
1124 return m_titles.count() - 1;
1125 }
1126
1127 int MediaObject::_iface_currentTitle() const
1128 {
1129 return m_currentTitle;
1130 }
1131
1132 void MediaObject::_iface_setCurrentTitle(int title, bool bseek)
1133 {
1134#ifdef GRAPH_DEBUG
1135 qDebug() << "_iface_setCurrentTitle" << title;
1136#endif
1137 const int oldTitle = m_currentTitle;
1138 m_currentTitle = title;
1139 updateStopPosition();
1140 if (bseek) {
1141 //let's seek to the beginning of the song
1142 seek(0);
1143 } else {
1144 updateTargetTick();
1145 }
1146 if (oldTitle != title) {
1147 emit titleChanged(title);
1148 emit totalTimeChanged(totalTime());
1149 }
1150
1151 }
1152
1153 void MediaObject::updateStopPosition()
1154 {
1155 if (!m_autoplayTitles && m_currentTitle < _iface_availableTitles() - 1) {
1156 //stop position is set to the end of the track
1157 currentGraph()->setStopPosition(titleAbsolutePosition(m_currentTitle+1));
1158 } else {
1159 //stop position is set to the end
1160 currentGraph()->setStopPosition(-1);
1161 }
1162 }
1163#endif //QT_NO_PHONON_MEDIACONTROLLER
1164
1165 void MediaObject::switchFilters(int index, Filter oldFilter, Filter newFilter)
1166 {
1167 if (currentGraph()->index() == index) {
1168 currentGraph()->switchFilters(oldFilter, newFilter);
1169 } else {
1170 nextGraph()->switchFilters(oldFilter, newFilter);
1171 }
1172
1173 }
1174
1175
1176 }
1177}
1178
1179QT_END_NAMESPACE
1180
1181#include "moc_mediaobject.cpp"