Path: blob/master/src/java.desktop/macosx/native/libjsound/PLATFORM_API_MacOSX_PCM.cpp
/*
 * Copyright (c) 2002, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

//#define USE_ERROR
//#define USE_TRACE
//#define USE_VERBOSE_TRACE

#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioConverter.h>
#include <pthread.h>
#include <math.h>
/*
#if !defined(__COREAUDIO_USE_FLAT_INCLUDES__)
#include <CoreAudio/CoreAudioTypes.h>
#else
#include <CoreAudioTypes.h>
#endif
*/

#include "PLATFORM_API_MacOSX_Utils.h"

extern "C" {
#include "Utilities.h"
#include "DirectAudio.h"
#if TARGET_OS_IPHONE
void DAUDIO_RequestRecordPermission();
#endif
}

#if USE_DAUDIO == TRUE


#ifdef USE_TRACE
static void PrintStreamDesc(const AudioStreamBasicDescription *inDesc) {
    TRACE4("ID='%c%c%c%c'", (char)(inDesc->mFormatID >> 24), (char)(inDesc->mFormatID >> 16), (char)(inDesc->mFormatID >> 8), (char)(inDesc->mFormatID));
    TRACE2(", %f Hz, flags=0x%lX", (float)inDesc->mSampleRate, (long unsigned)inDesc->mFormatFlags);
    TRACE2(", %ld channels, %ld bits", (long)inDesc->mChannelsPerFrame, (long)inDesc->mBitsPerChannel);
    TRACE1(", %ld bytes per frame\n", (long)inDesc->mBytesPerFrame);
}
#else
static inline void PrintStreamDesc(const AudioStreamBasicDescription *inDesc) { }
#endif


#define MAX(x, y)   ((x) >= (y) ? (x) : (y))
#define MIN(x, y)   ((x) <= (y) ? (x) : (y))

// =======================================
// MixerProvider functions implementation

static DeviceList deviceCache;

INT32 DAUDIO_GetDirectAudioDeviceCount() {
#ifdef TARGET_OS_IPHONE
    DAUDIO_RequestRecordPermission();
#endif
    deviceCache.Refresh();
    int count = deviceCache.GetCount();
    if (count > 0) {
        // add "default" device
        count++;
        TRACE1("DAUDIO_GetDirectAudioDeviceCount: returns %d devices\n", count);
    } else {
        TRACE0("DAUDIO_GetDirectAudioDeviceCount: no devices found\n");
    }
    return count;
}

INT32 DAUDIO_GetDirectAudioDeviceDescription(INT32 mixerIndex, DirectAudioDeviceDescription *desc) {
    bool result = true;
    desc->deviceID = 0;
    if (mixerIndex == 0) {
        // default device
        strncpy(desc->name, "Default Audio Device", DAUDIO_STRING_LENGTH);
        strncpy(desc->description, "Default Audio Device", DAUDIO_STRING_LENGTH);
        desc->maxSimulLines = -1;
    } else {
        AudioDeviceID deviceID;
        result = deviceCache.GetDeviceInfo(mixerIndex-1, &deviceID, DAUDIO_STRING_LENGTH,
                desc->name, desc->vendor, desc->description, desc->version);
        if (result) {
            desc->deviceID = (INT32)deviceID;
            desc->maxSimulLines = -1;
        }
    }
    return result ? TRUE : FALSE;
}


void DAUDIO_GetFormats(INT32 mixerIndex, INT32 deviceID, int isSource, void* creator) {
    TRACE3(">>DAUDIO_GetFormats mixerIndex=%d deviceID=0x%x isSource=%d\n", (int)mixerIndex, (int)deviceID, isSource);

    AudioDeviceID audioDeviceID = deviceID == 0 ? GetDefaultDevice(isSource) : (AudioDeviceID)deviceID;

    if (audioDeviceID == 0) {
        return;
    }

    int totalChannels = GetChannelCount(audioDeviceID, isSource);

    if (totalChannels == 0) {
        TRACE0("<<DAUDIO_GetFormats, no streams!\n");
        return;
    }

    if (isSource && totalChannels < 2) {
        // report 2 channels even if only mono is supported
        totalChannels = 2;
    }

    int channels[] = {1, 2, totalChannels};
    int channelsCount = MIN(totalChannels, 3);

    float hardwareSampleRate = GetSampleRate(audioDeviceID, isSource);
    TRACE2("  DAUDIO_GetFormats: got %d channels, sampleRate == %f\n", totalChannels, hardwareSampleRate);

    // any sample rates are supported
    float sampleRate = -1;

    static int sampleBits[] = {8, 16, 24};
    static int sampleBitsCount = sizeof(sampleBits)/sizeof(sampleBits[0]);

    // the last audio format is the default one (used by DataLine.open() if format is not specified);
    // consider 16-bit PCM stereo (mono if stereo is not supported) with the current sample rate as the default
    int defBits = 16;
    int defChannels = MIN(2, channelsCount);
    float defSampleRate = hardwareSampleRate;
    // don't add the default format if the sample rate is not specified
    bool addDefault = defSampleRate > 0;

    // TODO: CoreAudio can handle signed/unsigned, little-endian/big-endian
    // TODO: register the formats (to prevent DirectAudio software conversion) - need to fix DirectAudioDevice.createDataLineInfo
    // to avoid software conversions if both signed/unsigned or big-/little-endian are supported
    for (int channelIndex = 0; channelIndex < channelsCount; channelIndex++) {
        for (int bitIndex = 0; bitIndex < sampleBitsCount; bitIndex++) {
            int bits = sampleBits[bitIndex];
            if (addDefault && bits == defBits && channels[channelIndex] != defChannels && sampleRate == defSampleRate) {
                // the format is the default one, don't add it now
                continue;
            }
            DAUDIO_AddAudioFormat(creator,
                bits,                       // sample size in bits
                -1,                         // frame size (auto)
                channels[channelIndex],     // channels
                sampleRate,                 // sample rate
                DAUDIO_PCM,                 // only accept PCM
                bits == 8 ? FALSE : TRUE,   // signed
                bits == 8 ? FALSE           // little-endian for 8bit
                    : UTIL_IsBigEndianPlatform());
        }
    }
    // add default format
    if (addDefault) {
        DAUDIO_AddAudioFormat(creator,
            defBits,                        // 16 bits
            -1,                             // automatically calculate frame size
            defChannels,                    // channels
            defSampleRate,                  // sample rate
            DAUDIO_PCM,                     // PCM
            TRUE,                           // signed
            UTIL_IsBigEndianPlatform());    // native endianness
    }

    TRACE0("<<DAUDIO_GetFormats\n");
}
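
// For illustration (hypothetical device, not part of the logic above): on a
// stereo output device whose current hardware rate is 44100 Hz, the loops
// above report roughly this list, where a sample rate of -1 means "any rate":
//
//     8 bit unsigned PCM,  1 and 2 channels, any rate
//     16 bit signed PCM,   1 and 2 channels, any rate
//     24 bit signed PCM,   1 and 2 channels, any rate
//     16 bit signed PCM,   2 channels, 44100 Hz   (the default, added last)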


// =======================================
// Source/Target DataLine functions implementation

// ====
/* 1writer-1reader ring buffer class with flush() support */
class RingBuffer {
public:
    RingBuffer() : pBuffer(NULL), nBufferSize(0) {
        pthread_mutex_init(&lockMutex, NULL);
    }
    ~RingBuffer() {
        Deallocate();
        pthread_mutex_destroy(&lockMutex);
    }

    // extraBytes: number of additionally allocated bytes to prevent data
    // overlapping when almost the whole buffer is filled
    // (required only if Write() can overwrite the buffer)
    bool Allocate(int requestedBufferSize, int extraBytes) {
        int fullBufferSize = requestedBufferSize + extraBytes;
        long powerOfTwo = 1;
        while (powerOfTwo < fullBufferSize) {
            powerOfTwo <<= 1;
        }
        if (powerOfTwo > INT_MAX || fullBufferSize < 0) {
            ERROR0("RingBuffer::Allocate: REQUESTED MEMORY SIZE IS TOO BIG\n");
            return false;
        }
        pBuffer = (Byte*)malloc(powerOfTwo);
        if (pBuffer == NULL) {
            ERROR0("RingBuffer::Allocate: OUT OF MEMORY\n");
            return false;
        }

        nBufferSize = requestedBufferSize;
        nAllocatedBytes = powerOfTwo;
        nPosMask = powerOfTwo - 1;
        nWritePos = 0;
        nReadPos = 0;
        nFlushPos = -1;

        TRACE2("RingBuffer::Allocate: OK, bufferSize=%d, allocated:%d\n", nBufferSize, nAllocatedBytes);
        return true;
    }

    void Deallocate() {
        if (pBuffer) {
            free(pBuffer);
            pBuffer = NULL;
            nBufferSize = 0;
        }
    }

    inline int GetBufferSize() {
        return nBufferSize;
    }

    inline int GetAllocatedSize() {
        return nAllocatedBytes;
    }

    // gets number of bytes available for reading
    int GetValidByteCount() {
        lock();
        INT64 result = nWritePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
        unlock();
        return result > (INT64)nBufferSize ? nBufferSize : (int)result;
    }

    int Write(void *srcBuffer, int len, bool preventOverflow) {
        lock();
        TRACE2("RingBuffer::Write (%d bytes, preventOverflow=%d)\n", len, preventOverflow ? 1 : 0);
        TRACE2("  writePos = %lld (%d)", (long long)nWritePos, Pos2Offset(nWritePos));
        TRACE2("  readPos=%lld (%d)", (long long)nReadPos, Pos2Offset(nReadPos));
        TRACE2("  flushPos=%lld (%d)\n", (long long)nFlushPos, Pos2Offset(nFlushPos));

        INT64 writePos = nWritePos;
        if (preventOverflow) {
            INT64 avail_read = writePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
            if (avail_read >= (INT64)nBufferSize) {
                // no space
                TRACE0("  preventOverflow: OVERFLOW => len = 0;\n");
                len = 0;
            } else {
                int avail_write = nBufferSize - (int)avail_read;
                if (len > avail_write) {
                    TRACE2("  preventOverflow: decrease len: %d => %d\n", len, avail_write);
                    len = avail_write;
                }
            }
        }
        unlock();

        if (len > 0) {

            write((Byte *)srcBuffer, Pos2Offset(writePos), len);

            lock();
            TRACE4("--RingBuffer::Write writePos: %lld (%d) => %lld, (%d)\n",
                (long long)nWritePos, Pos2Offset(nWritePos), (long long)nWritePos + len, Pos2Offset(nWritePos + len));
            nWritePos += len;
            unlock();
        }
        return len;
    }

    int Read(void *dstBuffer, int len) {
        lock();
        TRACE1("RingBuffer::Read (%d bytes)\n", len);
        TRACE2("  writePos = %lld (%d)", (long long)nWritePos, Pos2Offset(nWritePos));
        TRACE2("  readPos=%lld (%d)", (long long)nReadPos, Pos2Offset(nReadPos));
        TRACE2("  flushPos=%lld (%d)\n", (long long)nFlushPos, Pos2Offset(nFlushPos));

        applyFlush();
        INT64 avail_read = nWritePos - nReadPos;
        // check for overflow
        if (avail_read > (INT64)nBufferSize) {
            nReadPos = nWritePos - nBufferSize;
            avail_read = nBufferSize;
            TRACE0("  OVERFLOW\n");
        }
        INT64 readPos = nReadPos;
        unlock();

        if (len > (int)avail_read) {
            TRACE2("  RingBuffer::Read - don't have enough data, len: %d => %d\n", len, (int)avail_read);
            len = (int)avail_read;
        }

        if (len > 0) {

            read((Byte *)dstBuffer, Pos2Offset(readPos), len);

            lock();
            if (applyFlush()) {
                // just got flush(), results became obsolete
                TRACE0("--RingBuffer::Read, got Flush, return 0\n");
                len = 0;
            } else {
                TRACE4("--RingBuffer::Read readPos: %lld (%d) => %lld (%d)\n",
                    (long long)nReadPos, Pos2Offset(nReadPos), (long long)nReadPos + len, Pos2Offset(nReadPos + len));
                nReadPos += len;
            }
            unlock();
        } else {
            // underrun!
        }
        return len;
    }

    // returns the number of flushed bytes
    int Flush() {
        lock();
        INT64 flushedBytes = nWritePos - (nFlushPos >= 0 ? nFlushPos : nReadPos);
        nFlushPos = nWritePos;
        unlock();
        return flushedBytes > (INT64)nBufferSize ? nBufferSize : (int)flushedBytes;
    }

private:
    Byte *pBuffer;
    int nBufferSize;
    int nAllocatedBytes;
    INT64 nPosMask;

    pthread_mutex_t lockMutex;

    volatile INT64 nWritePos;
    volatile INT64 nReadPos;
    // Flush() sets nFlushPos value to nWritePos;
    // next Read() sets nReadPos to nFlushPos and resets nFlushPos to -1
    volatile INT64 nFlushPos;

    inline void lock() {
        pthread_mutex_lock(&lockMutex);
    }
    inline void unlock() {
        pthread_mutex_unlock(&lockMutex);
    }

    inline bool applyFlush() {
        if (nFlushPos >= 0) {
            nReadPos = nFlushPos;
            nFlushPos = -1;
            return true;
        }
        return false;
    }

    inline int Pos2Offset(INT64 pos) {
        return (int)(pos & nPosMask);
    }

    void write(Byte *srcBuffer, int dstOffset, int len) {
        int dstEndOffset = dstOffset + len;

        int lenAfterWrap = dstEndOffset - nAllocatedBytes;
        if (lenAfterWrap > 0) {
            // dest. buffer does wrap
            len = nAllocatedBytes - dstOffset;
            memcpy(pBuffer+dstOffset, srcBuffer, len);
            memcpy(pBuffer, srcBuffer+len, lenAfterWrap);
        } else {
            // dest. buffer does not wrap
            memcpy(pBuffer+dstOffset, srcBuffer, len);
        }
    }

    void read(Byte *dstBuffer, int srcOffset, int len) {
        int srcEndOffset = srcOffset + len;

        int lenAfterWrap = srcEndOffset - nAllocatedBytes;
        if (lenAfterWrap > 0) {
            // need to unwrap data
            len = nAllocatedBytes - srcOffset;
            memcpy(dstBuffer, pBuffer+srcOffset, len);
            memcpy(dstBuffer+len, pBuffer, lenAfterWrap);
        } else {
            // source buffer is not wrapped
            memcpy(dstBuffer, pBuffer+srcOffset, len);
        }
    }
};
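
// A rough usage sketch of the RingBuffer above (names and sizes are
// illustrative only). Read/write positions grow monotonically as 64-bit
// counters; Pos2Offset(pos) == (pos & nPosMask) maps them into the
// power-of-two allocation, so neither side ever has to reset its counter:
//
//     RingBuffer rb;
//     rb.Allocate(6000, 0);            // internally rounded up to 8192 bytes
//     char in[1024], out[1024];
//     rb.Write(in, sizeof(in), true);  // writer side (e.g. DAUDIO_Write)
//     rb.Read(out, sizeof(out));       // reader side (e.g. OutputCallback)
//     rb.Flush();                      // discard everything not yet read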


class Resampler {
private:
    enum {
        kResamplerEndOfInputData = 1 // error to interrupt conversion (end of input data)
    };
public:
    Resampler() : converter(NULL), outBuffer(NULL) { }
    ~Resampler() {
        if (converter != NULL) {
            AudioConverterDispose(converter);
        }
        if (outBuffer != NULL) {
            free(outBuffer);
        }
    }

    // inFormat & outFormat must be interleaved!
    bool Init(const AudioStreamBasicDescription *inFormat, const AudioStreamBasicDescription *outFormat,
            int inputBufferSizeInBytes)
    {
        TRACE0(">>Resampler::Init\n");
        TRACE0("  inFormat: ");
        PrintStreamDesc(inFormat);
        TRACE0("  outFormat: ");
        PrintStreamDesc(outFormat);
        TRACE1("  inputBufferSize: %d bytes\n", inputBufferSizeInBytes);
        OSStatus err;

        if ((outFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved) != 0 && outFormat->mChannelsPerFrame != 1) {
            ERROR0("Resampler::Init ERROR: outFormat is non-interleaved\n");
            return false;
        }
        if ((inFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved) != 0 && inFormat->mChannelsPerFrame != 1) {
            ERROR0("Resampler::Init ERROR: inFormat is non-interleaved\n");
            return false;
        }

        memcpy(&asbdIn, inFormat, sizeof(AudioStreamBasicDescription));
        memcpy(&asbdOut, outFormat, sizeof(AudioStreamBasicDescription));

        err = AudioConverterNew(inFormat, outFormat, &converter);

        if (err || converter == NULL) {
            OS_ERROR1(err, "Resampler::Init (AudioConverterNew), converter=%p", converter);
            return false;
        }

        // allocate buffer for output data
        int maximumInFrames = inputBufferSizeInBytes / inFormat->mBytesPerFrame;
        // take into account trailingFrames
        AudioConverterPrimeInfo primeInfo = {0, 0};
        UInt32 sizePrime = sizeof(primeInfo);
        err = AudioConverterGetProperty(converter, kAudioConverterPrimeInfo, &sizePrime, &primeInfo);
        if (err) {
            OS_ERROR0(err, "Resampler::Init (get kAudioConverterPrimeInfo)");
            // ignore the error
        } else {
            // the default primeMethod is kConverterPrimeMethod_Normal, so we need only trailingFrames
            maximumInFrames += primeInfo.trailingFrames;
        }
        float outBufferSizeInFrames = (outFormat->mSampleRate / inFormat->mSampleRate) * ((float)maximumInFrames);
        // to avoid complex calculations just set outBufferSize to double the calculated value
        outBufferSize = (int)outBufferSizeInFrames * outFormat->mBytesPerFrame * 2;
        // safety check - consider 256 frames as the minimum input buffer
        int minOutSize = 256 * outFormat->mBytesPerFrame;
        if (outBufferSize < minOutSize) {
            outBufferSize = minOutSize;
        }

        outBuffer = malloc(outBufferSize);

        if (outBuffer == NULL) {
            ERROR1("Resampler::Init ERROR: malloc failed (%d bytes)\n", outBufferSize);
            AudioConverterDispose(converter);
            converter = NULL;
            return false;
        }

        TRACE1("  allocated: %d bytes for output buffer\n", outBufferSize);

        TRACE0("<<Resampler::Init: OK\n");
        return true;
    }

    // returns size of the internal output buffer
    int GetOutBufferSize() {
        return outBufferSize;
    }

    // process next part of data (writes resampled data to the ringBuffer without overflow check)
    int Process(void *srcBuffer, int len, RingBuffer *ringBuffer) {
        int bytesWritten = 0;
        TRACE2(">>Resampler::Process: %d bytes, converter = %p\n", len, converter);
        if (converter == NULL) {    // sanity check
            bytesWritten = ringBuffer->Write(srcBuffer, len, false);
        } else {
            InputProcData data;
            data.pThis = this;
            data.data = (Byte *)srcBuffer;
            data.dataSize = len;

            OSStatus err;
            do {
                AudioBufferList abl;    // by default it contains 1 AudioBuffer
                abl.mNumberBuffers = 1;
                abl.mBuffers[0].mNumberChannels = asbdOut.mChannelsPerFrame;
                abl.mBuffers[0].mDataByteSize   = outBufferSize;
                abl.mBuffers[0].mData           = outBuffer;

                UInt32 packets = (UInt32)outBufferSize / asbdOut.mBytesPerPacket;

                TRACE2(">>AudioConverterFillComplexBuffer: request %d packets, provide %d bytes buffer\n",
                    (int)packets, (int)abl.mBuffers[0].mDataByteSize);

                err = AudioConverterFillComplexBuffer(converter, ConverterInputProc, &data, &packets, &abl, NULL);

                TRACE2("<<AudioConverterFillComplexBuffer: got %d packets (%d bytes)\n",
                    (int)packets, (int)abl.mBuffers[0].mDataByteSize);
                if (packets > 0) {
                    int bytesToWrite = (int)(packets * asbdOut.mBytesPerPacket);
                    bytesWritten += ringBuffer->Write(abl.mBuffers[0].mData, bytesToWrite, false);
                }

                // if outputBuffer is too small to store all available frames,
                // we get noErr here.  In that case just continue the conversion.
            } while (err == noErr);

            if (err != kResamplerEndOfInputData) {
                // unexpected error
                OS_ERROR0(err, "Resampler::Process (AudioConverterFillComplexBuffer)");
            }
        }
        TRACE2("<<Resampler::Process: written %d bytes (converted from %d bytes)\n", bytesWritten, len);

        return bytesWritten;
    }

    // resets internal buffers
    void Discontinue() {
        TRACE0(">>Resampler::Discontinue\n");
        if (converter != NULL) {
            AudioConverterReset(converter);
        }
        TRACE0("<<Resampler::Discontinue\n");
    }

private:
    AudioConverterRef converter;

    // buffer for output data
    // note that there is no problem if the buffer is not big enough to store
    // all converted data - it's only a performance issue
    void *outBuffer;
    int outBufferSize;

    AudioStreamBasicDescription asbdIn;
    AudioStreamBasicDescription asbdOut;

    struct InputProcData {
        Resampler *pThis;
        Byte *data;     // data == NULL means we handle Discontinue(false)
        int dataSize;   // == 0 if all data was already provided to the converter or we handle Discontinue(false)
    };

    static OSStatus ConverterInputProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets,
            AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
    {
        InputProcData *data = (InputProcData *)inUserData;

        TRACE3("  >>ConverterInputProc: requested %d packets, data contains %d bytes (%d packets)\n",
            (int)*ioNumberDataPackets, (int)data->dataSize, (int)(data->dataSize / data->pThis->asbdIn.mBytesPerPacket));
        if (data->dataSize == 0) {
            // already called & provided all input data
            // interrupt conversion by returning an error
            *ioNumberDataPackets = 0;
            TRACE0("  <<ConverterInputProc: returns kResamplerEndOfInputData\n");
            return kResamplerEndOfInputData;
        }

        ioData->mNumberBuffers = 1;
        ioData->mBuffers[0].mNumberChannels = data->pThis->asbdIn.mChannelsPerFrame;
        ioData->mBuffers[0].mDataByteSize   = data->dataSize;
        ioData->mBuffers[0].mData           = data->data;

        *ioNumberDataPackets = data->dataSize / data->pThis->asbdIn.mBytesPerPacket;

        // all data has been provided to the converter
        data->dataSize = 0;

        TRACE1("  <<ConverterInputProc: returns %d packets\n", (int)(*ioNumberDataPackets));
        return noErr;
    }

};
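
// A rough sketch of how the Resampler above is driven (names are illustrative;
// the real call site is InputCallback below). Process() repeatedly calls
// AudioConverterFillComplexBuffer(), which pulls input through
// ConverterInputProc() until that callback reports kResamplerEndOfInputData,
// and every converted chunk is appended to the ring buffer:
//
//     Resampler resampler;
//     resampler.Init(&halFormat, &javaFormat, halBufferSizeInBytes); // both interleaved
//     ...
//     // in each capture callback:
//     resampler.Process(halData, halDataSizeInBytes, &ringBuffer);
//     // after a gap in capture timestamps:
//     resampler.Discontinue();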


struct OSX_DirectAudioDevice {
    AudioUnit   audioUnit;
    RingBuffer  ringBuffer;
    AudioStreamBasicDescription asbd;

    // only for target lines
    UInt32      inputBufferSizeInBytes;
    Resampler   *resampler;
    // to detect discontinuity (to reset resampler)
    SInt64      lastWrittenSampleTime;


    OSX_DirectAudioDevice() : audioUnit(NULL), asbd(), resampler(NULL), lastWrittenSampleTime(0) {
    }

    ~OSX_DirectAudioDevice() {
        if (audioUnit) {
            AudioComponentInstanceDispose(audioUnit);
        }
        if (resampler) {
            delete resampler;
        }
    }
};

static AudioUnit CreateOutputUnit(AudioDeviceID deviceID, int isSource)
{
    OSStatus err;
    AudioUnit unit;

    AudioComponentDescription desc;
    desc.componentType         = kAudioUnitType_Output;
#if !TARGET_OS_IPHONE
    desc.componentSubType      = (deviceID == 0 && isSource) ? kAudioUnitSubType_DefaultOutput : kAudioUnitSubType_HALOutput;
#else
    desc.componentSubType      = kAudioUnitSubType_RemoteIO;
#endif
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags        = 0;
    desc.componentFlagsMask    = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    err = AudioComponentInstanceNew(comp, &unit);

    if (err) {
        OS_ERROR0(err, "CreateOutputUnit:OpenAComponent");
        return NULL;
    }

    if (!isSource) {
        int enableIO = 0;
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output,
                                    0, &enableIO, sizeof(enableIO));
        if (err) {
            OS_ERROR0(err, "SetProperty (output EnableIO)");
        }
        enableIO = 1;
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input,
                                    1, &enableIO, sizeof(enableIO));
        if (err) {
            OS_ERROR0(err, "SetProperty (input EnableIO)");
        }

        if (!deviceID) {
            // get real AudioDeviceID for default input device (macosx current input device)
            deviceID = GetDefaultDevice(isSource);
            if (!deviceID) {
                AudioComponentInstanceDispose(unit);
                return NULL;
            }
        }
    }

    if (deviceID) {
        err = AudioUnitSetProperty(unit, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global,
                                    0, &deviceID, sizeof(deviceID));
        if (err) {
            OS_ERROR0(err, "SetProperty (CurrentDevice)");
            AudioComponentInstanceDispose(unit);
            return NULL;
        }
    }

    return unit;
}
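
// Note on the AUHAL bus layout used above: element (bus) 0 is the output side
// of the unit and element 1 is the input side. For playback the unit is used
// as-is; for capture the code disables IO on bus 0, enables it on bus 1 and
// binds the unit to a concrete AudioDeviceID. A minimal sketch (illustrative
// only):
//
//     AudioUnit playbackUnit = CreateOutputUnit(0, 1 /*isSource*/);            // default output
//     AudioUnit captureUnit  = CreateOutputUnit(someDeviceID, 0 /*isSource*/); // capture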

static OSStatus OutputCallback(void                        *inRefCon,
                               AudioUnitRenderActionFlags  *ioActionFlags,
                               const AudioTimeStamp        *inTimeStamp,
                               UInt32                      inBusNumber,
                               UInt32                      inNumberFrames,
                               AudioBufferList             *ioData)
{
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)inRefCon;

    int nchannels = ioData->mNumberBuffers; // should be always == 1 (interleaved channels)
    AudioBuffer *audioBuffer = ioData->mBuffers;

    TRACE3(">>OutputCallback: busNum=%d, requested %d frames (%d bytes)\n",
        (int)inBusNumber, (int)inNumberFrames, (int)(inNumberFrames * device->asbd.mBytesPerFrame));
    TRACE3("  abl: %d buffers, buffer[0].channels=%d, buffer.size=%d\n",
        nchannels, (int)audioBuffer->mNumberChannels, (int)audioBuffer->mDataByteSize);

    int bytesToRead = inNumberFrames * device->asbd.mBytesPerFrame;
    if (bytesToRead > (int)audioBuffer->mDataByteSize) {
        TRACE0("--OutputCallback: !!! audioBuffer IS TOO SMALL!!!\n");
        bytesToRead = audioBuffer->mDataByteSize / device->asbd.mBytesPerFrame * device->asbd.mBytesPerFrame;
    }
    int bytesRead = device->ringBuffer.Read(audioBuffer->mData, bytesToRead);
    if (bytesRead < bytesToRead) {
        // not enough data (underrun)
        TRACE2("--OutputCallback: !!! UNDERRUN (read %d bytes of %d)!!!\n", bytesRead, bytesToRead);
        // silence the rest
        memset((Byte*)audioBuffer->mData + bytesRead, 0, bytesToRead-bytesRead);
        bytesRead = bytesToRead;
    }

    audioBuffer->mDataByteSize = (UInt32)bytesRead;
    // SAFETY: set mDataByteSize for all other AudioBuffers in the AudioBufferList to zero
    while (--nchannels > 0) {
        audioBuffer++;
        audioBuffer->mDataByteSize = 0;
    }
    TRACE1("<<OutputCallback (returns %d)\n", bytesRead);

    return noErr;
}

static OSStatus InputCallback(void                        *inRefCon,
                              AudioUnitRenderActionFlags  *ioActionFlags,
                              const AudioTimeStamp        *inTimeStamp,
                              UInt32                      inBusNumber,
                              UInt32                      inNumberFrames,
                              AudioBufferList             *ioData)
{
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)inRefCon;

    TRACE4(">>InputCallback: busNum=%d, timeStamp=%lld, %d frames (%d bytes)\n",
        (int)inBusNumber, (long long)inTimeStamp->mSampleTime, (int)inNumberFrames, (int)(inNumberFrames * device->asbd.mBytesPerFrame));

    AudioBufferList abl;    // by default it contains 1 AudioBuffer
    abl.mNumberBuffers = 1;
    abl.mBuffers[0].mNumberChannels = device->asbd.mChannelsPerFrame;
    abl.mBuffers[0].mDataByteSize   = device->inputBufferSizeInBytes;   // assume this is == (inNumberFrames * device->asbd.mBytesPerFrame)
    abl.mBuffers[0].mData           = NULL;     // request for the audioUnit's buffer

    OSStatus err = AudioUnitRender(device->audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &abl);
    if (err) {
        OS_ERROR0(err, "<<InputCallback: AudioUnitRender");
    } else {
        if (device->resampler != NULL) {
            // test for discontinuity
            // AUHAL starts timestamps at zero, so test if the current timestamp is less than the last written
            SInt64 sampleTime = inTimeStamp->mSampleTime;
            if (sampleTime < device->lastWrittenSampleTime) {
                // discontinuity, reset the resampler
                TRACE2("  InputCallback (RESAMPLED), DISCONTINUITY (%f -> %f)\n",
                    (float)device->lastWrittenSampleTime, (float)sampleTime);

                device->resampler->Discontinue();
            } else {
                TRACE2("  InputCallback (RESAMPLED), continuous: lastWrittenSampleTime = %f, sampleTime=%f\n",
                    (float)device->lastWrittenSampleTime, (float)sampleTime);
            }
            device->lastWrittenSampleTime = sampleTime + inNumberFrames;

            int bytesWritten = device->resampler->Process(abl.mBuffers[0].mData, (int)abl.mBuffers[0].mDataByteSize, &device->ringBuffer);
            TRACE2("<<InputCallback (RESAMPLED, saved %d bytes of %d)\n", bytesWritten, (int)abl.mBuffers[0].mDataByteSize);
        } else {
            int bytesWritten = device->ringBuffer.Write(abl.mBuffers[0].mData, (int)abl.mBuffers[0].mDataByteSize, false);
            TRACE2("<<InputCallback (saved %d bytes of %d)\n", bytesWritten, (int)abl.mBuffers[0].mDataByteSize);
        }
    }

    return noErr;
}


static void FillASBDForNonInterleavedPCM(AudioStreamBasicDescription& asbd,
        float sampleRate, int channels, int sampleSizeInBits, bool isFloat, int isSigned, bool isBigEndian)
{
    // FillOutASBDForLPCM cannot produce unsigned integer format
    asbd.mSampleRate       = sampleRate;
    asbd.mFormatID         = kAudioFormatLinearPCM;
    asbd.mFormatFlags      = (isFloat ? kAudioFormatFlagIsFloat : (isSigned ? kAudioFormatFlagIsSignedInteger : 0))
                           | (isBigEndian ? (kAudioFormatFlagIsBigEndian) : 0)
                           | kAudioFormatFlagIsPacked;
    asbd.mBytesPerPacket   = channels * ((sampleSizeInBits + 7) / 8);
    asbd.mFramesPerPacket  = 1;
    asbd.mBytesPerFrame    = asbd.mBytesPerPacket;
    asbd.mChannelsPerFrame = channels;
    asbd.mBitsPerChannel   = sampleSizeInBits;
}
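
// Worked example for the helper above (illustrative numbers): requesting
// 16-bit signed little-endian stereo at 44100 Hz yields
//     mSampleRate       = 44100
//     mFormatID         = kAudioFormatLinearPCM
//     mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
//     mBytesPerPacket   = 2 * ((16 + 7) / 8) = 4
//     mFramesPerPacket  = 1
//     mBytesPerFrame    = 4
//     mChannelsPerFrame = 2
//     mBitsPerChannel   = 16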

void* DAUDIO_Open(INT32 mixerIndex, INT32 deviceID, int isSource,
                  int encoding, float sampleRate, int sampleSizeInBits,
                  int frameSize, int channels,
                  int isSigned, int isBigEndian, int bufferSizeInBytes)
{
    TRACE3(">>DAUDIO_Open: mixerIndex=%d deviceID=0x%x isSource=%d\n", (int)mixerIndex, (unsigned int)deviceID, isSource);
    TRACE3("  sampleRate=%d sampleSizeInBits=%d channels=%d\n", (int)sampleRate, sampleSizeInBits, channels);
#ifdef USE_TRACE
    {
        AudioDeviceID audioDeviceID = deviceID;
        if (audioDeviceID == 0) {
            // default device
            audioDeviceID = GetDefaultDevice(isSource);
        }
        char name[256];
        OSStatus err = GetAudioObjectProperty(audioDeviceID, kAudioUnitScope_Global, kAudioDevicePropertyDeviceName, 256, &name, 0);
        if (err != noErr) {
            OS_ERROR1(err, "  audioDeviceID=0x%x, name is N/A:", (int)audioDeviceID);
        } else {
            TRACE2("  audioDeviceID=0x%x, name=%s\n", (int)audioDeviceID, name);
        }
    }
#endif

    if (encoding != DAUDIO_PCM) {
        ERROR1("<<DAUDIO_Open: ERROR: unsupported encoding (%d)\n", encoding);
        return NULL;
    }
    if (channels <= 0) {
        ERROR1("<<DAUDIO_Open: ERROR: Invalid number of channels=%d!\n", channels);
        return NULL;
    }

    OSX_DirectAudioDevice *device = new OSX_DirectAudioDevice();

    AudioUnitScope scope = isSource ? kAudioUnitScope_Input : kAudioUnitScope_Output;
    int element = isSource ? 0 : 1;
    OSStatus err = noErr;
    int extraBufferBytes = 0;

    device->audioUnit = CreateOutputUnit(deviceID, isSource);

    if (!device->audioUnit) {
        delete device;
        return NULL;
    }

    if (!isSource) {
        AudioDeviceID actualDeviceID = deviceID != 0 ? deviceID : GetDefaultDevice(isSource);
        float hardwareSampleRate = GetSampleRate(actualDeviceID, isSource);
        TRACE2("--DAUDIO_Open: sampleRate = %f, hardwareSampleRate=%f\n", sampleRate, hardwareSampleRate);

        if (fabs(sampleRate - hardwareSampleRate) > 1) {
            device->resampler = new Resampler();

            // request HAL for Float32 with native endianness
            FillASBDForNonInterleavedPCM(device->asbd, hardwareSampleRate, channels, 32, true, false, kAudioFormatFlagsNativeEndian != 0);
        } else {
            sampleRate = hardwareSampleRate;    // in case sample rates are not exactly equal
        }
    }

    if (device->resampler == NULL) {
        // no resampling, request HAL for the requested format
        FillASBDForNonInterleavedPCM(device->asbd, sampleRate, channels, sampleSizeInBits, false, isSigned, isBigEndian);
    }

    err = AudioUnitSetProperty(device->audioUnit, kAudioUnitProperty_StreamFormat, scope, element, &device->asbd, sizeof(device->asbd));
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open set StreamFormat");
        delete device;
        return NULL;
    }

    AURenderCallbackStruct output;
    output.inputProc       = isSource ? OutputCallback : InputCallback;
    output.inputProcRefCon = device;

    err = AudioUnitSetProperty(device->audioUnit,
                               isSource
                                   ? (AudioUnitPropertyID)kAudioUnitProperty_SetRenderCallback
                                   : (AudioUnitPropertyID)kAudioOutputUnitProperty_SetInputCallback,
                               kAudioUnitScope_Global, 0, &output, sizeof(output));
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open set RenderCallback");
        delete device;
        return NULL;
    }

    err = AudioUnitInitialize(device->audioUnit);
    if (err) {
        OS_ERROR0(err, "<<DAUDIO_Open UnitInitialize");
        delete device;
        return NULL;
    }

    if (!isSource) {
        // for target lines we need extra bytes in the ringBuffer
        // to prevent collisions when InputCallback overwrites data on overflow
        UInt32 size;
        OSStatus err;

        size = sizeof(device->inputBufferSizeInBytes);
        err = AudioUnitGetProperty(device->audioUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global,
                                   0, &device->inputBufferSizeInBytes, &size);
        if (err) {
            OS_ERROR0(err, "<<DAUDIO_Open (TargetDataLine)GetBufferSize\n");
            delete device;
            return NULL;
        }
        device->inputBufferSizeInBytes *= device->asbd.mBytesPerFrame;  // convert frames to bytes
        extraBufferBytes = (int)device->inputBufferSizeInBytes;
    }

    if (device->resampler != NULL) {
        // resampler output format is the user requested format (== ringBuffer format)
        AudioStreamBasicDescription asbdOut;    // ringBuffer format
        FillASBDForNonInterleavedPCM(asbdOut, sampleRate, channels, sampleSizeInBits, false, isSigned, isBigEndian);

        // set resampler input buffer size to the HAL buffer size
        if (!device->resampler->Init(&device->asbd, &asbdOut, (int)device->inputBufferSizeInBytes)) {
            ERROR0("<<DAUDIO_Open: resampler.Init() FAILED.\n");
            delete device;
            return NULL;
        }
        // extra bytes in the ringBuffer (extraBufferBytes) should be equal to the resampler output buffer size
        extraBufferBytes = device->resampler->GetOutBufferSize();
    }

    if (!device->ringBuffer.Allocate(bufferSizeInBytes, extraBufferBytes)) {
        ERROR0("<<DAUDIO_Open: Ring buffer allocation error\n");
        delete device;
        return NULL;
    }

    TRACE0("<<DAUDIO_Open: OK\n");
    return device;
}
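
// Sizing note for the target-line path above (numbers are illustrative only):
// InputCallback writes with preventOverflow == false, so the ring buffer is
// allocated with extraBufferBytes of headroom to keep those unchecked writes
// from colliding with data that is still being read. Without a resampler the
// headroom is one HAL buffer, e.g. 512 frames * 4 bytes per frame = 2048
// bytes; with a resampler it is the resampler's output buffer size instead.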

int DAUDIO_Start(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Start\n");

    OSStatus err = AudioOutputUnitStart(device->audioUnit);

    if (err != noErr) {
        OS_ERROR0(err, "DAUDIO_Start");
    }

    return err == noErr ? TRUE : FALSE;
}

int DAUDIO_Stop(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Stop\n");

    OSStatus err = AudioOutputUnitStop(device->audioUnit);

    return err == noErr ? TRUE : FALSE;
}

void DAUDIO_Close(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Close\n");

    delete device;
}

int DAUDIO_Write(void* id, char* data, int byteSize) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE1(">>DAUDIO_Write: %d bytes to write\n", byteSize);

    int result = device->ringBuffer.Write(data, byteSize, true);

    TRACE1("<<DAUDIO_Write: %d bytes written\n", result);
    return result;
}

int DAUDIO_Read(void* id, char* data, int byteSize) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE1(">>DAUDIO_Read: %d bytes to read\n", byteSize);

    int result = device->ringBuffer.Read(data, byteSize);

    TRACE1("<<DAUDIO_Read: %d bytes have been read\n", result);
    return result;
}

int DAUDIO_GetBufferSize(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int bufferSizeInBytes = device->ringBuffer.GetBufferSize();

    TRACE1("DAUDIO_GetBufferSize returns %d\n", bufferSizeInBytes);
    return bufferSizeInBytes;
}

int DAUDIO_StillDraining(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int draining = device->ringBuffer.GetValidByteCount() > 0 ? TRUE : FALSE;

    TRACE1("DAUDIO_StillDraining returns %d\n", draining);
    return draining;
}

int DAUDIO_Flush(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    TRACE0("DAUDIO_Flush\n");

    device->ringBuffer.Flush();

    return TRUE;
}

int DAUDIO_GetAvailable(void* id, int isSource) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;

    int bytesInBuffer = device->ringBuffer.GetValidByteCount();
    if (isSource) {
        return device->ringBuffer.GetBufferSize() - bytesInBuffer;
    } else {
        return bytesInBuffer;
    }
}

INT64 DAUDIO_GetBytePosition(void* id, int isSource, INT64 javaBytePos) {
    OSX_DirectAudioDevice *device = (OSX_DirectAudioDevice*)id;
    INT64 position;

    if (isSource) {
        position = javaBytePos - device->ringBuffer.GetValidByteCount();
    } else {
        position = javaBytePos + device->ringBuffer.GetValidByteCount();
    }

    TRACE2("DAUDIO_GetBytePosition returns %lld (javaBytePos = %lld)\n", (long long)position, (long long)javaBytePos);
    return position;
}

void DAUDIO_SetBytePosition(void* id, int isSource, INT64 javaBytePos) {
    // javaBytePos is not needed here (it's available in DAUDIO_GetBytePosition)
}

int DAUDIO_RequiresServicing(void* id, int isSource) {
    return FALSE;
}

void DAUDIO_Service(void* id, int isSource) {
    // unreachable
}

#endif // USE_DAUDIO == TRUE
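
// Typical call sequence from the Java-side DirectAudioDevice code, roughly
// (illustrative only - the real calls arrive through JNI):
//
//     void* line = DAUDIO_Open(mixerIndex, deviceID, isSource,
//                              DAUDIO_PCM, 44100.0f, 16, 4, 2,
//                              TRUE, FALSE, bufferSizeInBytes);
//     DAUDIO_Start(line, isSource);
//     // source line: DAUDIO_Write() in a loop; target line: DAUDIO_Read()
//     DAUDIO_Stop(line, isSource);
//     DAUDIO_Close(line, isSource);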