source: trunk/src/multimedia/audio/qaudioinput_mac_p.cpp@568

Last change on this file since 568 was 561, checked in by Dmitry A. Kuminov, 15 years ago

trunk: Merged in qt 4.6.1 sources.

  • Property svn:eol-style set to native
File size: 26.6 KB
/****************************************************************************
**
** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the QtMultimedia module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial Usage
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Commercial License Agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
** $QT_END_LICENSE$
**
****************************************************************************/

//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists for the convenience
// of other Qt classes. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <QtCore/qendian.h>
#include <QtCore/qtimer.h>
#include <QtCore/qdebug.h>

#include <QtMultimedia/qaudiodeviceinfo.h>
#include <QtMultimedia/qaudioinput.h>

#include "qaudio_mac_p.h"
#include "qaudioinput_mac_p.h"


QT_BEGIN_NAMESPACE


namespace
{

static const int default_buffer_size = 4 * 1024;

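// Lightweight wrapper around a CoreAudio AudioBufferList. Interleaved
// formats use a single buffer; non-interleaved formats get one buffer per
// channel. The 'owner' flag records whether the data buffers were allocated
// here (third constructor) and therefore must be freed in the destructor.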
class QAudioBufferList
{
public:
    QAudioBufferList(AudioStreamBasicDescription const& streamFormat):
        owner(false),
        sf(streamFormat)
    {
        const bool isInterleaved = (sf.mFormatFlags & kAudioFormatFlagIsNonInterleaved) == 0;
        const int numberOfBuffers = isInterleaved ? 1 : sf.mChannelsPerFrame;

        dataSize = 0;

        bfs = reinterpret_cast<AudioBufferList*>(qMalloc(sizeof(AudioBufferList) +
                                                         (sizeof(AudioBuffer) * numberOfBuffers)));

        bfs->mNumberBuffers = numberOfBuffers;
        for (int i = 0; i < numberOfBuffers; ++i) {
            bfs->mBuffers[i].mNumberChannels = isInterleaved ? numberOfBuffers : 1;
            bfs->mBuffers[i].mDataByteSize = 0;
            bfs->mBuffers[i].mData = 0;
        }
    }

    QAudioBufferList(AudioStreamBasicDescription const& streamFormat, char* buffer, int bufferSize):
        owner(false),
        sf(streamFormat),
        bfs(0)
    {
        dataSize = bufferSize;

        bfs = reinterpret_cast<AudioBufferList*>(qMalloc(sizeof(AudioBufferList) + sizeof(AudioBuffer)));

        bfs->mNumberBuffers = 1;
        bfs->mBuffers[0].mNumberChannels = 1;
        bfs->mBuffers[0].mDataByteSize = dataSize;
        bfs->mBuffers[0].mData = buffer;
    }

    QAudioBufferList(AudioStreamBasicDescription const& streamFormat, int framesToBuffer):
        owner(true),
        sf(streamFormat),
        bfs(0)
    {
        const bool isInterleaved = (sf.mFormatFlags & kAudioFormatFlagIsNonInterleaved) == 0;
        const int numberOfBuffers = isInterleaved ? 1 : sf.mChannelsPerFrame;

        dataSize = framesToBuffer * sf.mBytesPerFrame;

        bfs = reinterpret_cast<AudioBufferList*>(qMalloc(sizeof(AudioBufferList) +
                                                         (sizeof(AudioBuffer) * numberOfBuffers)));
        bfs->mNumberBuffers = numberOfBuffers;
        for (int i = 0; i < numberOfBuffers; ++i) {
            bfs->mBuffers[i].mNumberChannels = isInterleaved ? numberOfBuffers : 1;
            bfs->mBuffers[i].mDataByteSize = dataSize;
            bfs->mBuffers[i].mData = qMalloc(dataSize);
        }
    }

    ~QAudioBufferList()
    {
        if (owner) {
            for (UInt32 i = 0; i < bfs->mNumberBuffers; ++i)
                qFree(bfs->mBuffers[i].mData);
        }

        qFree(bfs);
    }

    AudioBufferList* audioBufferList() const
    {
        return bfs;
    }

    char* data(int buffer = 0) const
    {
        return static_cast<char*>(bfs->mBuffers[buffer].mData);
    }

    qint64 bufferSize(int buffer = 0) const
    {
        return bfs->mBuffers[buffer].mDataByteSize;
    }

    int frameCount(int buffer = 0) const
    {
        return bfs->mBuffers[buffer].mDataByteSize / sf.mBytesPerFrame;
    }

    int packetCount(int buffer = 0) const
    {
        return bfs->mBuffers[buffer].mDataByteSize / sf.mBytesPerPacket;
    }

    int packetSize() const
    {
        return sf.mBytesPerPacket;
    }

    void reset()
    {
        for (UInt32 i = 0; i < bfs->mNumberBuffers; ++i)
            bfs->mBuffers[i].mDataByteSize = dataSize;
    }

private:
    bool owner;
    int dataSize;
    AudioStreamBasicDescription sf;
    AudioBufferList* bfs;
};

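// Feeds successive packet-sized slices of a captured QAudioBufferList to
// AudioConverterFillComplexBuffer via converterCallback() below.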
class QAudioPacketFeeder
{
public:
    QAudioPacketFeeder(QAudioBufferList* abl):
        audioBufferList(abl)
    {
        totalPackets = audioBufferList->packetCount();
        position = 0;
    }

    bool feed(AudioBufferList& dst, UInt32& packetCount)
    {
        if (position == totalPackets) {
            dst.mBuffers[0].mDataByteSize = 0;
            packetCount = 0;
            return false;
        }

        if (totalPackets - position < packetCount)
            packetCount = totalPackets - position;

        dst.mBuffers[0].mDataByteSize = packetCount * audioBufferList->packetSize();
        dst.mBuffers[0].mData = audioBufferList->data() + (position * audioBufferList->packetSize());

        position += packetCount;

        return true;
    }

private:
    UInt32 totalPackets;
    UInt32 position;
    QAudioBufferList* audioBufferList;
};

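// Intermediate store between the AUHAL input callback and the client: the
// callback writes captured data into a QAudioRingBuffer (renderFromDevice),
// which is drained either by the client through readBytes() or, in push
// mode, by a periodic flush to the QIODevice set with setFlushDevice(). An
// AudioConverter resamples when the device and requested rates differ.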
class QAudioInputBuffer : public QObject
{
    Q_OBJECT

public:
    QAudioInputBuffer(int bufferSize,
                      int maxPeriodSize,
                      AudioStreamBasicDescription const& inputFormat,
                      AudioStreamBasicDescription const& outputFormat,
                      QObject* parent):
        QObject(parent),
        m_deviceError(false),
        m_device(0),          // no flush device until setFlushDevice() is called
        m_audioConverter(0),  // stays 0 unless sample rate conversion is needed
        m_inputFormat(inputFormat),
        m_outputFormat(outputFormat)
    {
        m_maxPeriodSize = maxPeriodSize;
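        // One period is maxPeriodSize bytes; its duration in ms is
        // frames-per-period * 1000 / sample rate. The ring buffer is rounded
        // up to a whole number of periods.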
        m_periodTime = m_maxPeriodSize / m_outputFormat.mBytesPerFrame * 1000 / m_outputFormat.mSampleRate;
        m_buffer = new QAudioRingBuffer(bufferSize + (bufferSize % maxPeriodSize == 0 ? 0 : maxPeriodSize - (bufferSize % maxPeriodSize)));
        m_inputBufferList = new QAudioBufferList(m_inputFormat);

        m_flushTimer = new QTimer(this);
        connect(m_flushTimer, SIGNAL(timeout()), SLOT(flushBuffer()));

        if (inputFormat.mSampleRate != outputFormat.mSampleRate) {
            if (AudioConverterNew(&m_inputFormat, &m_outputFormat, &m_audioConverter) != noErr) {
                qWarning() << "QAudioInput: Unable to create an Audio Converter";
                m_audioConverter = 0;
            }
        }
    }

    ~QAudioInputBuffer()
    {
        delete m_buffer;
    }

    qint64 renderFromDevice(AudioUnit audioUnit,
                            AudioUnitRenderActionFlags* ioActionFlags,
                            const AudioTimeStamp* inTimeStamp,
                            UInt32 inBusNumber,
                            UInt32 inNumberFrames)
    {
        const bool wasEmpty = m_buffer->used() == 0;

        OSStatus err;
        qint64 framesRendered = 0;

        m_inputBufferList->reset();
        err = AudioUnitRender(audioUnit,
                              ioActionFlags,
                              inTimeStamp,
                              inBusNumber,
                              inNumberFrames,
                              m_inputBufferList->audioBufferList());

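        // Two paths into the ring buffer: when a converter exists, the
        // captured packets are pulled through AudioConverterFillComplexBuffer
        // (fed by QAudioPacketFeeder) straight into acquired write regions;
        // otherwise the raw bytes are copied as-is.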
        if (m_audioConverter != 0) {
            QAudioPacketFeeder feeder(m_inputBufferList);

            bool wecan = true;
            int copied = 0;

            const int available = m_buffer->free();

            while (err == noErr && wecan) {
                QAudioRingBuffer::Region region = m_buffer->acquireWriteRegion(available);

                if (region.second > 0) {
                    AudioBufferList output;
                    output.mNumberBuffers = 1;
                    output.mBuffers[0].mNumberChannels = 1;
                    output.mBuffers[0].mDataByteSize = region.second;
                    output.mBuffers[0].mData = region.first;
                    UInt32 packetSize = region.second / m_outputFormat.mBytesPerPacket;
                    err = AudioConverterFillComplexBuffer(m_audioConverter,
                                                          converterCallback,
                                                          &feeder,
                                                          &packetSize,
                                                          &output,
                                                          0);

                    region.second = output.mBuffers[0].mDataByteSize;
                    copied += region.second;

                    m_buffer->releaseWriteRegion(region);
                }
                else
                    wecan = false;
            }

            framesRendered += copied / m_outputFormat.mBytesPerFrame;
        }
        else {
            const int available = m_inputBufferList->bufferSize();
            bool wecan = true;
            int copied = 0;

            while (wecan && copied < available) {
                QAudioRingBuffer::Region region = m_buffer->acquireWriteRegion(available - copied);

                if (region.second > 0) {
                    memcpy(region.first, m_inputBufferList->data() + copied, region.second);
                    copied += region.second;
                }
                else
                    wecan = false;

                m_buffer->releaseWriteRegion(region);
            }

            framesRendered = copied / m_outputFormat.mBytesPerFrame;
        }

        if (wasEmpty && framesRendered > 0)
            emit readyRead();

        return framesRendered;
    }

    qint64 readBytes(char* data, qint64 len)
    {
        bool wecan = true;
        qint64 bytesCopied = 0;

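        // Only hand out whole periods; any remainder stays in the ring
        // buffer for the next read.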
        len -= len % m_maxPeriodSize;
        while (wecan && bytesCopied < len) {
            QAudioRingBuffer::Region region = m_buffer->acquireReadRegion(len - bytesCopied);

            if (region.second > 0) {
                memcpy(data + bytesCopied, region.first, region.second);
                bytesCopied += region.second;
            }
            else
                wecan = false;

            m_buffer->releaseReadRegion(region);
        }

        return bytesCopied;
    }

    void setFlushDevice(QIODevice* device)
    {
        if (m_device != device)
            m_device = device;
    }

    void startFlushTimer()
    {
        if (m_device != 0) {
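            // Flush often enough that the ring buffer cannot fill up:
            // interval = (periods that fit in the buffer - 2) * period time,
            // leaving roughly two periods of headroom.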
            m_flushTimer->start((m_buffer->size() - (m_maxPeriodSize * 2)) / m_maxPeriodSize * m_periodTime);
        }
    }

    void stopFlushTimer()
    {
        m_flushTimer->stop();
    }

    void flush(bool all = false)
    {
        if (m_device == 0)
            return;

        const int used = m_buffer->used();
        const int readSize = all ? used : used - (used % m_maxPeriodSize);

        if (readSize > 0) {
            bool wecan = true;
            int flushed = 0;

            while (!m_deviceError && wecan && flushed < readSize) {
                QAudioRingBuffer::Region region = m_buffer->acquireReadRegion(readSize - flushed);

                if (region.second > 0) {
                    int bytesWritten = m_device->write(region.first, region.second);
                    if (bytesWritten < 0) {
                        stopFlushTimer();
                        m_deviceError = true;
                    }
                    else {
                        region.second = bytesWritten;
                        flushed += bytesWritten;
                        wecan = bytesWritten != 0;
                    }
                }
                else
                    wecan = false;

                m_buffer->releaseReadRegion(region);
            }
        }
    }

    void reset()
    {
        m_buffer->reset();
        m_deviceError = false;
    }

    int available() const
    {
        return m_buffer->free();
    }

    int used() const
    {
        return m_buffer->used();
    }

signals:
    void readyRead();

private slots:
    void flushBuffer()
    {
        flush();
    }

private:
    bool m_deviceError;
    int m_maxPeriodSize;
    int m_periodTime;
    QIODevice* m_device;
    QTimer* m_flushTimer;
    QAudioRingBuffer* m_buffer;
    QAudioBufferList* m_inputBufferList;
    AudioConverterRef m_audioConverter;
    AudioStreamBasicDescription m_inputFormat;
    AudioStreamBasicDescription m_outputFormat;

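    // Private OSStatus returned by converterCallback() once the captured
    // packets are exhausted; any non-noErr result makes
    // AudioConverterFillComplexBuffer stop, which ends the conversion loop
    // in renderFromDevice().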
    const static OSStatus as_empty = 'qtem';

    // Converter callback
    static OSStatus converterCallback(AudioConverterRef inAudioConverter,
                                      UInt32* ioNumberDataPackets,
                                      AudioBufferList* ioData,
                                      AudioStreamPacketDescription** outDataPacketDescription,
                                      void* inUserData)
    {
        Q_UNUSED(inAudioConverter);
        Q_UNUSED(outDataPacketDescription);

        QAudioPacketFeeder* feeder = static_cast<QAudioPacketFeeder*>(inUserData);

        if (!feeder->feed(*ioData, *ioNumberDataPackets))
            return as_empty;

        return noErr;
    }
};


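// Sequential, read-only QIODevice handed to the client in pull mode;
// readData() drains the ring buffer and readyRead() is forwarded from it.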
class MacInputDevice : public QIODevice
{
    Q_OBJECT

public:
    MacInputDevice(QAudioInputBuffer* audioBuffer, QObject* parent):
        QIODevice(parent),
        m_audioBuffer(audioBuffer)
    {
        open(QIODevice::ReadOnly | QIODevice::Unbuffered);
        connect(m_audioBuffer, SIGNAL(readyRead()), SIGNAL(readyRead()));
    }

    qint64 readData(char* data, qint64 len)
    {
        return m_audioBuffer->readBytes(data, len);
    }

    qint64 writeData(const char* data, qint64 len)
    {
        Q_UNUSED(data);
        Q_UNUSED(len);

        return 0;
    }

    bool isSequential() const
    {
        return true;
    }

private:
    QAudioInputBuffer* m_audioBuffer;
};

}


QAudioInputPrivate::QAudioInputPrivate(const QByteArray& device, QAudioFormat const& format):
    audioFormat(format)
{
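    // The opaque device handle is decoded as a QDataStream-serialized
    // (AudioDeviceID, QAudio::Mode) pair; a handle created for output
    // cannot be used for capture and yields OpenError.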
    QDataStream ds(device);
    quint32 did, mode;

    ds >> did >> mode;

    if (QAudio::Mode(mode) == QAudio::AudioOutput)
        errorCode = QAudio::OpenError;
    else {
        isOpen = false;
        audioDeviceId = AudioDeviceID(did);
        audioUnit = 0;
        startTime = 0;
        totalFrames = 0;
        audioBuffer = 0;
        internalBufferSize = default_buffer_size;
        clockFrequency = AudioGetHostClockFrequency() / 1000;
        errorCode = QAudio::NoError;
        stateCode = QAudio::StoppedState;

        intervalTimer = new QTimer(this);
        intervalTimer->setInterval(1000);
        connect(intervalTimer, SIGNAL(timeout()), SIGNAL(notify()));
    }
}

QAudioInputPrivate::~QAudioInputPrivate()
{
    close();
}

bool QAudioInputPrivate::open()
{
    UInt32 size = 0;

    if (isOpen)
        return true;

    ComponentDescription cd;
    cd.componentType = kAudioUnitType_Output;
    cd.componentSubType = kAudioUnitSubType_HALOutput;
    cd.componentManufacturer = kAudioUnitManufacturer_Apple;
    cd.componentFlags = 0;
    cd.componentFlagsMask = 0;

    // Open
    Component cp = FindNextComponent(NULL, &cd);
    if (cp == 0) {
        qWarning() << "QAudioInput: Failed to find HAL Output component";
        return false;
    }

    if (OpenAComponent(cp, &audioUnit) != noErr) {
        qWarning() << "QAudioInput: Unable to Open Output Component";
        return false;
    }

    // Set mode: switch the unit to input
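    // On an AUHAL unit, element 1 is the input side and element 0 the output
    // side: enabling IO on the input scope of element 1 and disabling it on
    // the output scope of element 0 makes the unit capture-only.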
    UInt32 enable = 1;
    if (AudioUnitSetProperty(audioUnit,
                             kAudioOutputUnitProperty_EnableIO,
                             kAudioUnitScope_Input,
                             1,
                             &enable,
                             sizeof(enable)) != noErr) {
        qWarning() << "QAudioInput: Unable to switch to input mode (Enable Input)";
        return false;
    }

    enable = 0;
    if (AudioUnitSetProperty(audioUnit,
                             kAudioOutputUnitProperty_EnableIO,
                             kAudioUnitScope_Output,
                             0,
                             &enable,
                             sizeof(enable)) != noErr) {
        qWarning() << "QAudioInput: Unable to switch to input mode (Disable output)";
        return false;
    }

    // register callback
    AURenderCallbackStruct cb;
    cb.inputProc = inputCallback;
    cb.inputProcRefCon = this;

    if (AudioUnitSetProperty(audioUnit,
                             kAudioOutputUnitProperty_SetInputCallback,
                             kAudioUnitScope_Global,
                             0,
                             &cb,
                             sizeof(cb)) != noErr) {
        qWarning() << "QAudioInput: Failed to set AudioUnit callback";
        return false;
    }

    // Set Audio Device
    if (AudioUnitSetProperty(audioUnit,
                             kAudioOutputUnitProperty_CurrentDevice,
                             kAudioUnitScope_Global,
                             0,
                             &audioDeviceId,
                             sizeof(audioDeviceId)) != noErr) {
        qWarning() << "QAudioInput: Unable to use configured device";
        return false;
    }

    // Set format
    streamFormat = toAudioStreamBasicDescription(audioFormat);

    size = sizeof(deviceFormat);
    if (AudioUnitGetProperty(audioUnit,
                             kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Input,
                             1,
                             &deviceFormat,
                             &size) != noErr) {
        qWarning() << "QAudioInput: Unable to retrieve device format";
        return false;
    }

    // If the device sample rate differs from the requested rate, capture at
    // the device rate and resample later with the AudioConverter
    if (deviceFormat.mSampleRate != streamFormat.mSampleRate) {
        AudioUnitSetProperty(audioUnit,
                             kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Output,
                             1,
                             &deviceFormat,
                             sizeof(streamFormat));
    }
    else {
        AudioUnitSetProperty(audioUnit,
                             kAudioUnitProperty_StreamFormat,
                             kAudioUnitScope_Output,
                             1,
                             &streamFormat,
                             sizeof(streamFormat));
    }

    // Setup buffers
    UInt32 numberOfFrames;
    size = sizeof(UInt32);
    if (AudioUnitGetProperty(audioUnit,
                             kAudioDevicePropertyBufferFrameSize,
                             kAudioUnitScope_Global,
                             0,
                             &numberOfFrames,
                             &size) != noErr) {
        qWarning() << "QAudioInput: Failed to get audio period size";
        return false;
    }

    // Allocate buffer
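    // numberOfFrames is the device's period at the device rate; scale it to
    // the requested rate to get the period size in bytes of the requested
    // format, then make sure the internal buffer holds at least two periods
    // (otherwise just trim it to a whole number of frames).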
    periodSizeBytes = (numberOfFrames * streamFormat.mSampleRate / deviceFormat.mSampleRate) *
                      streamFormat.mBytesPerFrame;
    if (internalBufferSize < periodSizeBytes * 2)
        internalBufferSize = periodSizeBytes * 2;
    else
        internalBufferSize -= internalBufferSize % streamFormat.mBytesPerFrame;

    audioBuffer = new QAudioInputBuffer(internalBufferSize,
                                        periodSizeBytes,
                                        deviceFormat,
                                        streamFormat,
                                        this);

    audioIO = new MacInputDevice(audioBuffer, this);

    // Init
    if (AudioUnitInitialize(audioUnit) != noErr) {
        qWarning() << "QAudioInput: Failed to initialize AudioUnit";
        return false;
    }

    isOpen = true;

    return isOpen;
}

void QAudioInputPrivate::close()
{
    if (audioUnit != 0) {
        AudioOutputUnitStop(audioUnit);
        AudioUnitUninitialize(audioUnit);
        CloseComponent(audioUnit);
    }

    delete audioBuffer;
}

QAudioFormat QAudioInputPrivate::format() const
{
    return audioFormat;
}

QIODevice* QAudioInputPrivate::start(QIODevice* device)
{
    QIODevice* op = device;

    if (!open()) {
        stateCode = QAudio::StoppedState;
        errorCode = QAudio::OpenError;
        return audioIO;
    }

    reset();
    audioBuffer->reset();
    audioBuffer->setFlushDevice(op);

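    // Push mode when the caller supplies a device (captured data is flushed
    // to it by the timer); pull mode otherwise, in which case the internal
    // MacInputDevice is returned for the caller to read from.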
    if (op == 0)
        op = audioIO;

    // Start
    startTime = AudioGetCurrentHostTime();
    totalFrames = 0;

    audioThreadStart();

    stateCode = QAudio::ActiveState;
    errorCode = QAudio::NoError;
    emit stateChanged(stateCode);

    return op;
}

void QAudioInputPrivate::stop()
{
    QMutexLocker lock(&mutex);
    if (stateCode != QAudio::StoppedState) {
        audioThreadStop();
        audioBuffer->flush(true);

        errorCode = QAudio::NoError;
        stateCode = QAudio::StoppedState;
        QMetaObject::invokeMethod(this, "stateChanged", Qt::QueuedConnection, Q_ARG(QAudio::State, stateCode));
    }
}

void QAudioInputPrivate::reset()
{
    QMutexLocker lock(&mutex);
    if (stateCode != QAudio::StoppedState) {
        audioThreadStop();

        errorCode = QAudio::NoError;
        stateCode = QAudio::StoppedState;
        QMetaObject::invokeMethod(this, "stateChanged", Qt::QueuedConnection, Q_ARG(QAudio::State, stateCode));
    }
}

void QAudioInputPrivate::suspend()
{
    QMutexLocker lock(&mutex);
    if (stateCode == QAudio::ActiveState || stateCode == QAudio::IdleState) {
        audioThreadStop();

        errorCode = QAudio::NoError;
        stateCode = QAudio::SuspendedState;
        QMetaObject::invokeMethod(this, "stateChanged", Qt::QueuedConnection, Q_ARG(QAudio::State, stateCode));
    }
}

void QAudioInputPrivate::resume()
{
    QMutexLocker lock(&mutex);
    if (stateCode == QAudio::SuspendedState) {
        audioThreadStart();

        errorCode = QAudio::NoError;
        stateCode = QAudio::ActiveState;
        QMetaObject::invokeMethod(this, "stateChanged", Qt::QueuedConnection, Q_ARG(QAudio::State, stateCode));
    }
}

int QAudioInputPrivate::bytesReady() const
{
    return audioBuffer->used();
}

int QAudioInputPrivate::periodSize() const
{
    return periodSizeBytes;
}

void QAudioInputPrivate::setBufferSize(int bs)
{
    internalBufferSize = bs;
}

int QAudioInputPrivate::bufferSize() const
{
    return internalBufferSize;
}

void QAudioInputPrivate::setNotifyInterval(int milliSeconds)
{
    intervalTimer->setInterval(milliSeconds);
}

int QAudioInputPrivate::notifyInterval() const
{
    return intervalTimer->interval();
}

qint64 QAudioInputPrivate::processedUSecs() const
{
    return totalFrames * 1000000 / audioFormat.frequency();
}

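// clockFrequency is host-clock ticks per millisecond, so dividing the tick
// delta by clockFrequency / 1000 (ticks per microsecond) yields elapsed
// microseconds.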
qint64 QAudioInputPrivate::elapsedUSecs() const
{
    if (stateCode == QAudio::StoppedState)
        return 0;

    return (AudioGetCurrentHostTime() - startTime) / (clockFrequency / 1000);
}

QAudio::Error QAudioInputPrivate::error() const
{
    return errorCode;
}

QAudio::State QAudioInputPrivate::state() const
{
    return stateCode;
}

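// Handshake with the CoreAudio render thread: flip the state atomically from
// Running to Stopped, then wait (with the caller's mutex) until inputCallback
// sees the change and audioDeviceStop() wakes us after stopping the unit.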
void QAudioInputPrivate::audioThreadStop()
{
    stopTimers();
    if (audioThreadState.testAndSetAcquire(Running, Stopped))
        threadFinished.wait(&mutex);
}

void QAudioInputPrivate::audioThreadStart()
{
    startTimers();
    audioThreadState = Running;
    AudioOutputUnitStart(audioUnit);
}

void QAudioInputPrivate::audioDeviceStop()
{
    AudioOutputUnitStop(audioUnit);
    audioThreadState = Stopped;
    threadFinished.wakeOne();
}

void QAudioInputPrivate::audioDeviceFull()
{
    QMutexLocker lock(&mutex);
    if (stateCode == QAudio::ActiveState) {
        audioDeviceStop();

        errorCode = QAudio::UnderrunError;
        stateCode = QAudio::IdleState;
        QMetaObject::invokeMethod(this, "deviceStopped", Qt::QueuedConnection);
    }
}

void QAudioInputPrivate::audioDeviceError()
{
    QMutexLocker lock(&mutex);
    if (stateCode == QAudio::ActiveState) {
        audioDeviceStop();

        errorCode = QAudio::IOError;
        stateCode = QAudio::StoppedState;
        QMetaObject::invokeMethod(this, "deviceStopped", Qt::QueuedConnection);
    }
}

void QAudioInputPrivate::startTimers()
{
    audioBuffer->startFlushTimer();
    intervalTimer->start();
}

void QAudioInputPrivate::stopTimers()
{
    audioBuffer->stopFlushTimer();
    intervalTimer->stop();
}

void QAudioInputPrivate::deviceStopped()
{
    stopTimers();
    emit stateChanged(stateCode);
}

// Input callback
OSStatus QAudioInputPrivate::inputCallback(void* inRefCon,
                                           AudioUnitRenderActionFlags* ioActionFlags,
                                           const AudioTimeStamp* inTimeStamp,
                                           UInt32 inBusNumber,
                                           UInt32 inNumberFrames,
                                           AudioBufferList* ioData)
{
    Q_UNUSED(ioData);

    QAudioInputPrivate* d = static_cast<QAudioInputPrivate*>(inRefCon);

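    // A pending stop request is honoured first; otherwise capture the frames.
    // renderFromDevice() returns the number of frames stored: 0 means the
    // ring buffer was full, a negative value is treated as a device error.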
    const int threadState = d->audioThreadState.fetchAndAddAcquire(0);
    if (threadState == Stopped)
        d->audioDeviceStop();
    else {
        qint64 framesWritten;

        framesWritten = d->audioBuffer->renderFromDevice(d->audioUnit,
                                                         ioActionFlags,
                                                         inTimeStamp,
                                                         inBusNumber,
                                                         inNumberFrames);

        if (framesWritten > 0)
            d->totalFrames += framesWritten;
        else if (framesWritten == 0)
            d->audioDeviceFull();
        else if (framesWritten < 0)
            d->audioDeviceError();
    }

    return noErr;
}


QT_END_NAMESPACE

#include "qaudioinput_mac_p.moc"
