diff --git a/EZAudio/EZAudioDevice.h b/EZAudio/EZAudioDevice.h index f943b013..c36a3f44 100644 --- a/EZAudio/EZAudioDevice.h +++ b/EZAudio/EZAudioDevice.h @@ -33,6 +33,14 @@ */ + (NSArray *)inputDevices; +//------------------------------------------------------------------------------ + +/** + Enumerates all the available output devices and returns the result in an NSArray of EZAudioDevice instances. + @return An NSArray of output EZAudioDevice instances. + */ ++ (NSArray *)outputDevices; + #if TARGET_OS_IPHONE /** @@ -42,6 +50,13 @@ */ + (EZAudioDevice *)currentInputDevice; +/** + Provides the current EZAudioDevice that is being used to output audio. + - iOS only + @return An EZAudioDevice instance representing the currently selected output device. + */ ++ (EZAudioDevice *)currentOutputDevice; + //------------------------------------------------------------------------------ /** @@ -52,23 +67,24 @@ + (void)enumerateInputDevicesUsingBlock:(void(^)(EZAudioDevice *device, BOOL *stop))block; -#elif TARGET_OS_MAC +//------------------------------------------------------------------------------ /** - Enumerates all the available devices and returns the result in an NSArray of EZAudioDevice instances. - - OSX only - @return An NSArray of input and output EZAudioDevice instances. + Enumerates all the available output devices. + - iOS only + @param block When enumerating this block executes repeatedly for each EZAudioDevice found. It contains two arguments - first, the EZAudioDevice found, then a pointer to a stop BOOL to allow breaking out of the enumeration) */ -+ (NSArray *)devices; ++ (void)enumerateOutputDevicesUsingBlock:(void (^)(EZAudioDevice *device, + BOOL *stop))block; -//------------------------------------------------------------------------------ +#elif TARGET_OS_MAC /** - Enumerates all the available output devices and returns the result in an NSArray of EZAudioDevice instances. 
+ Enumerates all the available devices and returns the result in an NSArray of EZAudioDevice instances. - OSX only - @return An NSArray of output EZAudioDevice instances. + @return An NSArray of input and output EZAudioDevice instances. */ -+ (NSArray *)outputDevices; ++ (NSArray *)devices; //------------------------------------------------------------------------------ diff --git a/EZAudio/EZAudioDevice.m b/EZAudio/EZAudioDevice.m index 4772c114..d6cdde60 100644 --- a/EZAudio/EZAudioDevice.m +++ b/EZAudio/EZAudioDevice.m @@ -49,6 +49,19 @@ + (EZAudioDevice *)currentInputDevice //------------------------------------------------------------------------------ ++ (EZAudioDevice *)currentOutputDevice +{ + AVAudioSession *session = [AVAudioSession sharedInstance]; + AVAudioSessionPortDescription *port = [[[session currentRoute] outputs] firstObject]; + AVAudioSessionDataSourceDescription *dataSource = [session outputDataSource]; + EZAudioDevice *device = [[EZAudioDevice alloc] init]; + device.port = port; + device.dataSource = dataSource; + return device; +} + +//------------------------------------------------------------------------------ + + (NSArray *)inputDevices { __block NSMutableArray *devices = [NSMutableArray array]; @@ -61,6 +74,18 @@ + (NSArray *)inputDevices //------------------------------------------------------------------------------ ++ (NSArray *)outputDevices +{ + __block NSMutableArray *devices = [NSMutableArray array]; + [self enumerateOutputDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop) + { + [devices addObject:device]; + }]; + return devices; +} + +//------------------------------------------------------------------------------ + + (void)enumerateInputDevicesUsingBlock:(void (^)(EZAudioDevice *, BOOL *))block { if (!block) @@ -101,6 +126,42 @@ + (void)enumerateInputDevicesUsingBlock:(void (^)(EZAudioDevice *, BOOL *))block //------------------------------------------------------------------------------ ++ 
(void)enumerateOutputDevicesUsingBlock:(void (^)(EZAudioDevice *, BOOL *))block +{ + if (!block) + { + return; + } + + AVAudioSessionRouteDescription *currentRoute = [[AVAudioSession sharedInstance] currentRoute]; + NSArray *portDescriptions = [currentRoute outputs]; + + BOOL stop; + for (AVAudioSessionPortDescription *outputDevicePortDescription in portDescriptions) + { + // add any additional sub-devices + NSArray *dataSources = [outputDevicePortDescription dataSources]; + if (dataSources.count) + { + for (AVAudioSessionDataSourceDescription *outputDeviceDataSourceDescription in dataSources) + { + EZAudioDevice *device = [[EZAudioDevice alloc] init]; + device.port = outputDevicePortDescription; + device.dataSource = outputDeviceDataSourceDescription; + block(device, &stop); + } + } + else + { + EZAudioDevice *device = [[EZAudioDevice alloc] init]; + device.port = outputDevicePortDescription; + block(device, &stop); + } + } +} + +//------------------------------------------------------------------------------ + - (NSString *)name { NSMutableString *name = [NSMutableString string]; diff --git a/EZAudio/EZAudioPlayer.m b/EZAudio/EZAudioPlayer.m index 038290d5..69e60cc1 100644 --- a/EZAudio/EZAudioPlayer.m +++ b/EZAudio/EZAudioPlayer.m @@ -194,12 +194,12 @@ -(void)setAudioFile:(EZAudioFile *)audioFile { _audioFile = [EZAudioFile audioFileWithURL:audioFile.url]; _audioFile.delegate = self; NSAssert(_output,@"No output was found, this should by default be the EZOutput shared instance"); - [_output setAudioStreamBasicDescription:self.audioFile.clientFormat]; + [_output setInputFormat:self.audioFile.clientFormat]; } -(void)setOutput:(EZOutput*)output { _output = output; - _output.outputDataSource = self; + _output.dataSource = self; } #pragma mark - Methods @@ -278,9 +278,10 @@ -(void)audioFile:(EZAudioFile *)audioFile updatedPosition:(SInt64)framePosition } #pragma mark - EZOutputDataSource --(void) output:(EZOutput *)output +-(OSStatus) output:(EZOutput *)output 
shouldFillAudioBufferList:(AudioBufferList *)audioBufferList withNumberOfFrames:(UInt32)frames + timestamp:(const AudioTimeStamp *)timestamp { if (self.audioFile) { @@ -294,6 +295,7 @@ -(void) output:(EZOutput *)output [self seekToFrame:0]; } } + return noErr; } @end diff --git a/EZAudio/EZAudioUtilities.h b/EZAudio/EZAudioUtilities.h index 227726e9..d945d1dd 100644 --- a/EZAudio/EZAudioUtilities.h +++ b/EZAudio/EZAudioUtilities.h @@ -46,6 +46,17 @@ typedef struct TPCircularBuffer circularBuffer; } EZPlotHistoryInfo; +//------------------------------------------------------------------------------ + +/** + A data structure that holds information about a node in the context of an AUGraph. + */ +typedef struct +{ + AudioUnit audioUnit; + AUNode node; +} EZAudioNodeInfo; + //------------------------------------------------------------------------------ #pragma mark - Types //------------------------------------------------------------------------------ diff --git a/EZAudio/EZMicrophone.h b/EZAudio/EZMicrophone.h index 882cff90..38b21398 100644 --- a/EZAudio/EZMicrophone.h +++ b/EZAudio/EZMicrophone.h @@ -36,7 +36,7 @@ //------------------------------------------------------------------------------ /** - The delegate for the EZMicrophone provides a receiver for the incoming audio data events. When the microphone has been successfully internally configured it will try to send its delegate an AudioStreamBasicDescription describing the format of the incoming audio data. + The EZMicrophoneDelegate for the EZMicrophone provides a receiver for the incoming audio data events. When the microphone has been successfully internally configured it will try to send its delegate an AudioStreamBasicDescription describing the format of the incoming audio data. The audio data itself is sent back to the delegate in various forms: @@ -55,10 +55,9 @@ ///----------------------------------------------------------- /** - Called anytime the input device changes on an `EZMicrophone` instance. 
Mac only. + Called anytime the input device changes on an `EZMicrophone` instance. @param microphone The instance of the EZMicrophone that triggered the event. @param device The instance of the new EZAudioDevice the microphone is using to pull input. - @param notification Incase the device changed because of a notification (like from AVAudioSession) then we provide that notification to give the full context of the change. */ - (void)microphone:(EZMicrophone *)microphone changedDevice:(EZAudioDevice *)device; @@ -77,7 +76,7 @@ ///----------------------------------------------------------- /** - Returns back a float array of the audio received. This occurs on the background thread so any drawing code must explicity perform its functions on the main thread. + This method provides an array of float arrays of the audio received, each float array representing a channel of audio data This occurs on the background thread so any drawing code must explicity perform its functions on the main thread. @param microphone The instance of the EZMicrophone that triggered the event. @param buffer The audio data as an array of float arrays. In a stereo signal buffer[0] represents the left channel while buffer[1] would represent the right channel. @param bufferSize The size of each of the buffers (the length of each float array). 
@@ -304,7 +303,9 @@ */ - (AudioUnit *)audioUnit; +//------------------------------------------------------------------------------ #pragma mark - Setters +//------------------------------------------------------------------------------ ///----------------------------------------------------------- /// @name Customizing The Microphone Stream Format diff --git a/EZAudio/EZMicrophone.m b/EZAudio/EZMicrophone.m index 825b2eae..3125fb6b 100644 --- a/EZAudio/EZMicrophone.m +++ b/EZAudio/EZMicrophone.m @@ -535,21 +535,23 @@ - (void)setDevice:(EZAudioDevice *)device - (void)setOutput:(EZOutput *)output { _output = output; - [_output setAudioStreamBasicDescription:self.audioStreamBasicDescription]; - _output.outputDataSource = self; + _output.inputFormat = self.audioStreamBasicDescription; + _output.dataSource = self; } //------------------------------------------------------------------------------ #pragma mark - EZOutputDataSource //------------------------------------------------------------------------------ -- (void) output:(EZOutput *)output +- (OSStatus) output:(EZOutput *)output shouldFillAudioBufferList:(AudioBufferList *)audioBufferList withNumberOfFrames:(UInt32)frames + timestamp:(const AudioTimeStamp *)timestamp { memcpy(audioBufferList, self.info->audioBufferList, sizeof(AudioBufferList) + (self.info->audioBufferList->mNumberBuffers - 1)*sizeof(AudioBuffer)); + return noErr; } //------------------------------------------------------------------------------ diff --git a/EZAudio/EZOutput.h b/EZAudio/EZOutput.h index 129864b6..701894ec 100644 --- a/EZAudio/EZOutput.h +++ b/EZAudio/EZOutput.h @@ -30,72 +30,102 @@ #import #endif -#import "TPCircularBuffer.h" - +@class EZAudioDevice; @class EZOutput; +//------------------------------------------------------------------------------ +#pragma mark - Constants +//------------------------------------------------------------------------------ + +FOUNDATION_EXPORT UInt32 const EZOutputMaximumFramesPerSlice; 
+FOUNDATION_EXPORT Float64 const EZOutputDefaultSampleRate; + +//------------------------------------------------------------------------------ +#pragma mark - EZOutputDataSource +//------------------------------------------------------------------------------ + /** - The EZOutputDataSource (required for the EZOutput) specifies a receiver to provide audio data when the EZOutput is started. Only ONE datasource method is expected to be implemented and priority is given as such: - 1.) `output:callbackWithActionFlags:inTimeStamp:inBusNumber:inNumberFrames:ioData:` - 2.) `outputShouldUseCircularBuffer:` - 3.) `output:needsBufferListWithFrames:withBufferSize:` + The EZOutputDataSource specifies a receiver to provide audio data when the EZOutput is started. Since the 0.4.0 release this has been simplified to only one data source method. */ @protocol EZOutputDataSource @optional ///----------------------------------------------------------- -/// @name Pulling The Audio Data +/// @name Providing Audio Data ///----------------------------------------------------------- +@required + /** - Provides complete override of the output callback function. The delegate is expected to - @param output The instance of the EZOutput that asked for the data - @param ioActionFlags AudioUnitRenderActionFlags provided by the output callback - @param inTimeStamp AudioTimeStamp reference provided by the output callback - @param inBusNumber UInt32 representing the bus number provided by the output callback - @param inNumberFrames UInt32 representing the number of frames provided by the output callback - @param ioData AudioBufferList pointer representing the audio data that will be used for output provided by the output callback (fill this!) + Provides a way to provide output with data anytime the EZOutput needs audio data to play. This function provides an already allocated AudioBufferList to use for providing audio data into the output buffer. 
The expected format of the audio data provided here is specified by the EZOutput `inputFormat` property. This audio data will be converted into the client format specified by the EZOutput `clientFormat` property. + @param output The instance of the EZOutput that asked for the data. + @param audioBufferList The AudioBufferList structure pointer that needs to be filled with audio data + @param frames The amount of frames as a UInt32 that output will need to properly fill its output buffer. + @param timestamp A AudioTimeStamp pointer to use if you need the current host time. + @return An OSStatus code. If there was no error then use the noErr status code. */ --(void)output:(EZOutput*)output -callbackWithActionFlags:(AudioUnitRenderActionFlags*)ioActionFlags - inTimeStamp:(const AudioTimeStamp*)inTimeStamp - inBusNumber:(UInt32)inBusNumber -inNumberFrames:(UInt32)inNumberFrames - ioData:(AudioBufferList*)ioData; +- (OSStatus) output:(EZOutput *)output + shouldFillAudioBufferList:(AudioBufferList *)audioBufferList + withNumberOfFrames:(UInt32)frames + timestamp:(const AudioTimeStamp *)timestamp; + +@end + +//------------------------------------------------------------------------------ +#pragma mark - EZOutputDelegate +//------------------------------------------------------------------------------ /** - Provides output using a circular - @param output The instance of the EZOutput that asked for the data - @return The EZOutputDataSource's TPCircularBuffer structure holding the audio data in a circular buffer + The EZOutputDelegate for the EZOutput component provides a receiver to handle play state, device, and audio data change events. This is very similar to the EZMicrophoneDelegate for the EZMicrophone and the EZAudioFileDelegate for the EZAudioFile. */ --(TPCircularBuffer*)outputShouldUseCircularBuffer:(EZOutput *)output; +@protocol EZOutputDelegate +@optional /** - Provides a way to provide output with data anytime the EZOutput needs audio data to play. 
This function provides an already allocated AudioBufferList to use for providing audio data into the output buffer. - @param output The instance of the EZOutput that asked for the data. - @param audioBufferList The AudioBufferList structure pointer that needs to be filled with audio data - @param frames The amount of frames as a UInt32 that output will need to properly fill its output buffer. - @return A pointer to the AudioBufferList structure holding the audio data. If nil or NULL, will output silence. + Called anytime the EZOutput starts or stops. + @param output The instance of the EZOutput that triggered the event. + @param isPlaying A BOOL indicating whether the EZOutput instance is playing or not. */ --(void) output:(EZOutput *)output - shouldFillAudioBufferList:(AudioBufferList*)audioBufferList - withNumberOfFrames:(UInt32)frames; +- (void)output:(EZOutput *)output changedPlayingState:(BOOL)isPlaying; -@end +//------------------------------------------------------------------------------ /** - The EZOutput component provides a generic output to glue all the other EZAudio components together and push whatever sound you've created to the default output device (think opposite of the microphone). The EZOutputDataSource provides the required AudioBufferList needed to populate the output buffer. + Called anytime the `device` changes on an EZOutput instance. + @param output The instance of the EZOutput that triggered the event. + @param device The instance of the new EZAudioDevice the output is using to play audio data. */ -@interface EZOutput : NSObject +- (void)output:(EZOutput *)output changedDevice:(EZAudioDevice *)device; + +//------------------------------------------------------------------------------ + +/** + Like the EZMicrophoneDelegate, for the EZOutput this method provides an array of float arrays of the audio received, each float array representing a channel of audio data. 
This occurs on the background thread so any drawing code must explicity perform its functions on the main thread. + @param output The instance of the EZOutput that triggered the event. + @param buffer The audio data as an array of float arrays. In a stereo signal buffer[0] represents the left channel while buffer[1] would represent the right channel. + @param bufferSize A UInt32 representing the size of each of the buffers (the length of each float array). + @param numberOfChannels A UInt32 representing the number of channels (you can use this to know how many float arrays are in the `buffer` parameter. + @warning This function executes on a background thread to avoid blocking any audio operations. If operations should be performed on any other thread (like the main thread) it should be performed within a dispatch block like so: dispatch_async(dispatch_get_main_queue(), ^{ ...Your Code... }) + */ +- (void) output:(EZOutput *)output + playedAudio:(float **)buffer + withBufferSize:(UInt32)bufferSize + withNumberOfChannels:(UInt32)numberOfChannels; + +//------------------------------------------------------------------------------ + +@end -#pragma mark - Properties /** - The EZOutputDataSource that provides the required AudioBufferList to the output callback function + The EZOutput component provides a generic output to glue all the other EZAudio components together and push whatever sound you've created to the default output device (think opposite of the microphone). The EZOutputDataSource provides the required AudioBufferList needed to populate the output buffer while the EZOutputDelegate provides the same kind of mechanism as the EZMicrophoneDelegate or EZAudioFileDelegate in that you will receive a callback that provides non-interleaved, float data for visualizing the output (done using an internal float converter). 
As of 0.4.0 the EZOutput has been simplified to a single EZOutputDataSource method and now uses an AUGraph to provide format conversion from the `inputFormat` to the playback graph's `clientFormat` linear PCM formats, mixer controls for setting volume and pan settings, hooks to add in any number of effect audio units (see the `connectOutputOfSourceNode:sourceNodeOutputBus:toDestinationNode:destinationNodeInputBus:inGraph:` subclass method), and hardware device toggling (via EZAudioDevice). */ -@property (nonatomic,assign) idoutputDataSource; +@interface EZOutput : NSObject +//------------------------------------------------------------------------------ #pragma mark - Initializers +//------------------------------------------------------------------------------ + ///----------------------------------------------------------- /// @name Initializers ///----------------------------------------------------------- @@ -105,29 +135,38 @@ inNumberFrames:(UInt32)inNumberFrames @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback. @return A newly created instance of the EZOutput class. */ --(id)initWithDataSource:(id)dataSource; +- (instancetype)initWithDataSource:(id)dataSource; /** Creates a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource. @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback. - @param audioStreamBasicDescription The AudioStreamBasicDescription of the EZOutput. - @warning AudioStreamBasicDescriptions that are invalid will cause the EZOutput to fail to initialize + @param inputFormat The AudioStreamBasicDescription of the EZOutput. + @warning AudioStreamBasicDescription input formats must be linear PCM! @return A newly created instance of the EZOutput class. 
*/ --(id) initWithDataSource:(id)dataSource - withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription; +- (instancetype)initWithDataSource:(id)dataSource + inputFormat:(AudioStreamBasicDescription)inputFormat; +//------------------------------------------------------------------------------ #pragma mark - Class Initializers +//------------------------------------------------------------------------------ + ///----------------------------------------------------------- /// @name Class Initializers ///----------------------------------------------------------- +/** + Class method to create a new instance of the EZOutput + @return A newly created instance of the EZOutput class. + */ ++ (instancetype)output; + /** Class method to create a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource. @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback. @return A newly created instance of the EZOutput class. */ -+(EZOutput*)outputWithDataSource:(id)dataSource; ++ (instancetype)outputWithDataSource:(id)dataSource; /** Class method to create a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource. @@ -136,10 +175,13 @@ inNumberFrames:(UInt32)inNumberFrames @warning AudioStreamBasicDescriptions that are invalid will cause the EZOutput to fail to initialize @return A newly created instance of the EZOutput class. 
*/ -+(EZOutput*)outputWithDataSource:(id)dataSource - withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription; ++ (instancetype)outputWithDataSource:(id)dataSource + inputFormat:(AudioStreamBasicDescription)inputFormat; +//------------------------------------------------------------------------------ #pragma mark - Singleton +//------------------------------------------------------------------------------ + ///----------------------------------------------------------- /// @name Shared Instance ///----------------------------------------------------------- @@ -148,54 +190,187 @@ inNumberFrames:(UInt32)inNumberFrames Creates a shared instance of the EZOutput (one app will usually only need one output and share the role of the EZOutputDataSource). @return The shared instance of the EZOutput class. */ -+(EZOutput*)sharedOutput; ++ (instancetype)sharedOutput; + +//------------------------------------------------------------------------------ +#pragma mark - Properties +//------------------------------------------------------------------------------ -#pragma mark - Events ///----------------------------------------------------------- -/// @name Starting/Stopping The Output +/// @name Setting/Getting The Stream Formats ///----------------------------------------------------------- /** - Starts pulling audio data from the EZOutputDataSource to the default device output. + Provides the AudioStreamBasicDescription structure used at the beginning of the playback graph which is then converted into the `clientFormat` using the AUConverter audio unit. + @warning The AudioStreamBasicDescription set here must be linear PCM. Compressed formats are not supported...the EZAudioFile's clientFormat performs the audio conversion on the fly from compressed to linear PCM so there is no additional work to be done there. 
+ @return An AudioStreamBasicDescription structure describing the input format of the playback graph. */ --(void)startPlayback; +@property (nonatomic, readwrite) AudioStreamBasicDescription inputFormat; + +//------------------------------------------------------------------------------ /** - Stops pulling audio data from the EZOutputDataSource to the default device output. + Provides the AudioStreamBasicDescription structure that serves as the common format used throughout the playback graph (similar to how the EZAudioFile has a clientFormat that is linear PCM to be shared amongst other components). The `inputFormat` is converted into this format at the beginning of the playback graph using an AUConverter audio unit. Defaults to whatever the `defaultClientFormat` method returns if a custom one isn't explicitly set. + @warning The AudioStreamBasicDescription set here must be linear PCM. Compressed formats are not supported by Audio Units. + @return An AudioStreamBasicDescription structure describing the common client format for the playback graph. */ --(void)stopPlayback; +@property (nonatomic, readwrite) AudioStreamBasicDescription clientFormat; + +//------------------------------------------------------------------------------ -#pragma mark - Getters ///----------------------------------------------------------- -/// @name Getting The Output Audio Format +/// @name Setting/Getting The Data Source and Delegate ///----------------------------------------------------------- /** - Provides the AudioStreamBasicDescription structure containing the format of the microphone's audio. - @return An AudioStreamBasicDescription structure describing the format of the microphone's audio. + The EZOutputDataSource that provides the audio data in the `inputFormat` for the EZOutput to play. If an EZOutputDataSource is not specified then the EZOutput will just output silence. 
+ */ +@property (nonatomic, weak) id dataSource; + +//------------------------------------------------------------------------------ + +/** + The EZOutputDelegate for which to handle the output callbacks + */ +@property (nonatomic, weak) id delegate; + +//------------------------------------------------------------------------------ + +/** + Provides a flag indicating whether the EZOutput is pulling audio data from the EZOutputDataSource for playback. + @return YES if the EZOutput is running, NO if it is stopped + */ +@property (readonly) BOOL isPlaying; + +//------------------------------------------------------------------------------ + +/** + Provides the current pan from the audio player's mixer audio unit in the playback graph. Setting the pan adjusts the direction of the audio signal from left (0) to right (1). Default is 0.5 (middle). */ --(AudioStreamBasicDescription)audioStreamBasicDescription; +@property (nonatomic, assign) float pan; + +//------------------------------------------------------------------------------ + +/** + Provides the current volume from the audio player's mixer audio unit in the playback graph. Setting the volume adjusts the gain of the output between 0 and 1. Default is 1. + */ +@property (nonatomic, assign) float volume; + +//------------------------------------------------------------------------------ +#pragma mark - Core Audio Properties +//------------------------------------------------------------------------------ ///----------------------------------------------------------- -/// @name Getting The State Of The Output +/// @name Core Audio Properties ///----------------------------------------------------------- /** - Provides a flag indicating whether the EZOutput is pulling audio data from the EZOutputDataSource for playback. - @return YES if the EZOutput is pulling audio data to the output device, NO if it is stopped + The AUGraph used to chain together the converter, mixer, and output audio units. 
+ */ +@property (readonly) AUGraph graph; + +//------------------------------------------------------------------------------ + +/** + The AudioUnit that is being used to convert the audio data coming into the output's playback graph. + */ +@property (readonly) AudioUnit converterAudioUnit; + +//------------------------------------------------------------------------------ + +/** + The AudioUnit that is being used as the mixer to adjust the volume on the output's playback graph. */ --(BOOL)isPlaying; +@property (readonly) AudioUnit mixerAudioUnit; +//------------------------------------------------------------------------------ + +/** + The AudioUnit that is being used as the hardware output for the output's playback graph. + */ +@property (readonly) AudioUnit outputAudioUnit; + +//------------------------------------------------------------------------------ #pragma mark - Setters +//------------------------------------------------------------------------------ + ///----------------------------------------------------------- -/// @name Customizing The Output Format +/// @name Getting/Setting The Output's Hardware Device ///----------------------------------------------------------- /** - Sets the AudioStreamBasicDescription on the output. - @warning Do not set this during playback. - @param asbd The new AudioStreamBasicDescription to use in place of the current audio format description. + An EZAudioDevice instance that is used to route the audio data out to the speaker. To find a list of available output devices see the EZAudioDevice `outputDevices` method. 
*/ --(void)setAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd; +@property (nonatomic, strong, readwrite) EZAudioDevice *device; -@end +//------------------------------------------------------------------------------ +#pragma mark - Actions +//------------------------------------------------------------------------------ + +///----------------------------------------------------------- +/// @name Starting/Stopping The Output +///----------------------------------------------------------- + +/** + Starts pulling audio data from the EZOutputDataSource to the default device output. + */ +- (void)startPlayback; + +///----------------------------------------------------------- + +/** + Stops pulling audio data from the EZOutputDataSource to the default device output. + */ +- (void)stopPlayback; + +//------------------------------------------------------------------------------ +#pragma mark - Subclass +//------------------------------------------------------------------------------ + +///----------------------------------------------------------- +/// @name Subclass +///----------------------------------------------------------- + +/** + This method handles connecting the converter node to the mixer node within the AUGraph that is being used as the playback graph. Subclasses can override this method and insert their custom nodes to perform effects processing on the audio data being rendered. + + This was inspired by Daniel Kennett's blog post on how to add a custom equalizer to a CocoaLibSpotify SPCoreAudioController's AUGraph. For more information see Daniel's post and example code here: http://ikennd.ac/blog/2012/04/augraph-basics-in-cocoalibspotify/. + @param sourceNode An AUNode representing the node the audio data is coming from. + @param sourceNodeOutputBus A UInt32 representing the output bus from the source node that should be connected into the next node's input bus. 
+ @param destinationNode An AUNode representing the node the audio data should be connected to. + @param destinationNodeInputBus A UInt32 representing the input bus the source node's output bus should be connecting to. + @param graph The AUGraph that is being used to hold the playback graph. Same as from the `graph` property. + @return An OSStatus code. For no error return back `noErr`. + */ +- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode + sourceNodeOutputBus:(UInt32)sourceNodeOutputBus + toDestinationNode:(AUNode)destinationNode + destinationNodeInputBus:(UInt32)destinationNodeInputBus + inGraph:(AUGraph)graph; + +//------------------------------------------------------------------------------ + +/** + The default AudioStreamBasicDescription set as the client format of the output if no custom `clientFormat` is set. Defaults to a 44.1 kHz stereo, non-interleaved, float format. + @return An AudioStreamBasicDescription that will be used as the default stream format. + */ +- (AudioStreamBasicDescription)defaultClientFormat; + +//------------------------------------------------------------------------------ + +/** + The default AudioStreamBasicDescription set as the `inputFormat` of the output if no custom `inputFormat` is set. Defaults to a 44.1 kHz stereo, non-interleaved, float format. + @return An AudioStreamBasicDescription that will be used as the default stream format. + */ +- (AudioStreamBasicDescription)defaultInputFormat; + +//------------------------------------------------------------------------------ + +/** + The default value used as the AudioUnit subtype when creating the hardware output component. By default this is kAudioUnitSubType_RemoteIO for iOS and kAudioUnitSubType_HALOutput for OSX. + @warning If you change this to anything other than kAudioUnitSubType_HALOutput for OSX you will get a failed assertion because devices can only be set when using the HAL audio unit. 
+ @return An OSType that represents the AudioUnit subtype for the hardware output component. + */ +- (OSType)outputAudioUnitSubType; + +@end \ No newline at end of file diff --git a/EZAudio/EZOutput.m b/EZAudio/EZOutput.m index c9d65fd0..9b97361a 100644 --- a/EZAudio/EZOutput.m +++ b/EZAudio/EZOutput.m @@ -24,350 +24,733 @@ // THE SOFTWARE. #import "EZOutput.h" +#import "EZAudioDevice.h" +#import "EZAudioFloatConverter.h" #import "EZAudioUtilities.h" -@interface EZOutput (){ - BOOL _customASBD; - BOOL _isPlaying; - AudioStreamBasicDescription _outputASBD; - AudioUnit _outputUnit; -} +//------------------------------------------------------------------------------ +#pragma mark - Constants +//------------------------------------------------------------------------------ + +UInt32 const EZOutputMaximumFramesPerSlice = 4096; +Float64 const EZOutputDefaultSampleRate = 44100.0f; + +//------------------------------------------------------------------------------ +#pragma mark - Data Structures +//------------------------------------------------------------------------------ + +typedef struct +{ + // stream format params + AudioStreamBasicDescription inputFormat; + AudioStreamBasicDescription clientFormat; + + // float converted data + float **floatData; + + // nodes + EZAudioNodeInfo converterNodeInfo; + EZAudioNodeInfo mixerNodeInfo; + EZAudioNodeInfo outputNodeInfo; + + // audio graph + AUGraph graph; +} EZOutputInfo; + +//------------------------------------------------------------------------------ +#pragma mark - Callbacks (Declaration) +//------------------------------------------------------------------------------ + +OSStatus EZOutputConverterInputCallback(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData); + +//------------------------------------------------------------------------------ + +OSStatus EZOutputGraphRenderCallback(void *inRefCon, 
+ AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData); + +//------------------------------------------------------------------------------ +#pragma mark - EZOutput (Interface Extension) +//------------------------------------------------------------------------------ + +@interface EZOutput () +@property (nonatomic, strong) EZAudioFloatConverter *floatConverter; +@property (nonatomic, assign) EZOutputInfo *info; @end +//------------------------------------------------------------------------------ +#pragma mark - EZOutput (Implementation) +//------------------------------------------------------------------------------ + @implementation EZOutput -@synthesize outputDataSource = _outputDataSource; -static OSStatus OutputRenderCallback(void *inRefCon, - AudioUnitRenderActionFlags *ioActionFlags, - const AudioTimeStamp *inTimeStamp, - UInt32 inBusNumber, - UInt32 inNumberFrames, - AudioBufferList *ioData){ - -// NSLog(@"output something"); - - EZOutput *output = (__bridge EZOutput*)inRefCon; - // Manual override - if ([output.outputDataSource respondsToSelector:@selector(output:callbackWithActionFlags:inTimeStamp:inBusNumber:inNumberFrames:ioData:)]){ - [output.outputDataSource output:output - callbackWithActionFlags:ioActionFlags - inTimeStamp:inTimeStamp - inBusNumber:inBusNumber - inNumberFrames:inNumberFrames - ioData:ioData]; - } - else if ([output.outputDataSource respondsToSelector:@selector(outputShouldUseCircularBuffer:)]){ +//------------------------------------------------------------------------------ +#pragma mark - Dealloc +//------------------------------------------------------------------------------ + +- (void)dealloc +{ + if (self.floatConverter) + { + self.floatConverter = nil; + [EZAudioUtilities freeFloatBuffers:self.info->floatData + numberOfChannels:self.info->clientFormat.mChannelsPerFrame]; + } + [EZAudioUtilities 
checkResult:AUGraphStop(self.info->graph) + operation:"Failed to stop graph"]; + [EZAudioUtilities checkResult:AUGraphClose(self.info->graph) + operation:"Failed to close graph"]; + [EZAudioUtilities checkResult:AUGraphUninitialize(self.info->graph) + operation:"Failed to uninitialize graph"]; + free(self.info); +} + +//------------------------------------------------------------------------------ +#pragma mark - Initialization +//------------------------------------------------------------------------------ + +- (instancetype) init +{ + self = [super init]; + if (self) + { + [self setup]; + } + return self; +} + +//------------------------------------------------------------------------------ + +- (instancetype)initWithDataSource:(id)dataSource +{ + self = [self init]; + if (self) + { + self.dataSource = dataSource; + } + return self; +} + +//------------------------------------------------------------------------------ + +- (instancetype)initWithDataSource:(id)dataSource + inputFormat:(AudioStreamBasicDescription)inputFormat +{ + self = [self initWithDataSource:dataSource]; + if (self) + { + self.inputFormat = inputFormat; + } + return self; +} + +//------------------------------------------------------------------------------ +#pragma mark - Class Initializers +//------------------------------------------------------------------------------ + ++ (instancetype)output +{ + return [[self alloc] init]; +} + +//------------------------------------------------------------------------------ + ++ (instancetype)outputWithDataSource:(id)dataSource +{ + return [[self alloc] initWithDataSource:dataSource]; +} + +//------------------------------------------------------------------------------ + ++ (instancetype)outputWithDataSource:(id)dataSource + inputFormat:(AudioStreamBasicDescription)inputFormat +{ + return [[self alloc] initWithDataSource:dataSource + inputFormat:inputFormat]; +} + +//------------------------------------------------------------------------------ 
+#pragma mark - Singleton +//------------------------------------------------------------------------------ + ++ (instancetype)sharedOutput +{ + static EZOutput *output; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^ + { + output = [[self alloc] init]; + }); + return output; +} + +//------------------------------------------------------------------------------ +#pragma mark - Setup +//------------------------------------------------------------------------------ + +- (void)setup +{ + // + // Create structure to hold state data + // + self.info = (EZOutputInfo *)malloc(sizeof(EZOutputInfo)); + memset(self.info, 0, sizeof(EZOutputInfo)); - TPCircularBuffer *circularBuffer = [output.outputDataSource outputShouldUseCircularBuffer:output]; - if (!circularBuffer){ - float *left = (float*)ioData->mBuffers[0].mData; - float *right = (float*)ioData->mBuffers[1].mData; - for(int i = 0; i < inNumberFrames; i++){ - left[ i ] = 0.0f; - right[ i ] = 0.0f; - } - return noErr; - }; + // + // Setup the audio graph + // + [EZAudioUtilities checkResult:NewAUGraph(&self.info->graph) + operation:"Failed to create graph"]; - /** - Thank you Michael Tyson (A Tasty Pixel) for writing the TPCircularBuffer, you are amazing! 
- */ + // + // Add converter node + // + AudioComponentDescription converterDescription; + converterDescription.componentType = kAudioUnitType_FormatConverter; + converterDescription.componentSubType = kAudioUnitSubType_AUConverter; + converterDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph, + &converterDescription, + &self.info->converterNodeInfo.node) + operation:"Failed to add converter node to audio graph"]; + + // + // Add mixer node + // + AudioComponentDescription mixerDescription; + mixerDescription.componentType = kAudioUnitType_Mixer; +#if TARGET_OS_IPHONE + mixerDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer; +#elif TARGET_OS_MAC + mixerDescription.componentSubType = kAudioUnitSubType_StereoMixer; +#endif + mixerDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph, + &mixerDescription, + &self.info->mixerNodeInfo.node) + operation:"Failed to add mixer node to audio graph"]; - // Get the desired amount of bytes to copy - int32_t bytesToCopy = ioData->mBuffers[0].mDataByteSize; - float *left = (float*)ioData->mBuffers[0].mData; - float *right = (float*)ioData->mBuffers[1].mData; + // + // Add output node + // + AudioComponentDescription outputDescription; + outputDescription.componentType = kAudioUnitType_Output; + outputDescription.componentSubType = [self outputAudioUnitSubType]; + outputDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph, + &outputDescription, + &self.info->outputNodeInfo.node) + operation:"Failed to add output node to audio graph"]; - // Get the available bytes in the circular buffer - int32_t availableBytes; - float *buffer = TPCircularBufferTail(circularBuffer,&availableBytes); + // + // Open the graph + // + [EZAudioUtilities checkResult:AUGraphOpen(self.info->graph) + 
operation:"Failed to open graph"]; - // Ideally we'd have all the bytes to be copied, but compare it against the available bytes (get min) - int32_t amount = MIN(bytesToCopy,availableBytes); - memcpy( left, buffer, amount); - memcpy( right, buffer, amount); + // + // Make node connections + // + OSStatus status = [self connectOutputOfSourceNode:self.info->converterNodeInfo.node + sourceNodeOutputBus:0 + toDestinationNode:self.info->mixerNodeInfo.node + destinationNodeInputBus:0 + inGraph:self.info->graph]; + [EZAudioUtilities checkResult:status + operation:"Failed to connect output of source node to destination node in graph"]; - // Consume those bytes ( this will internally push the head of the circular buffer) - TPCircularBufferConsume(circularBuffer,amount); + // + // Connect mixer to output + // + [EZAudioUtilities checkResult:AUGraphConnectNodeInput(self.info->graph, + self.info->mixerNodeInfo.node, + 0, + self.info->outputNodeInfo.node, + 0) + operation:"Failed to connect mixer node to output node"]; - } - // Provided an AudioBufferList (defaults to silence) - else if ([output.outputDataSource respondsToSelector:@selector(output:shouldFillAudioBufferList:withNumberOfFrames:)]) { - [output.outputDataSource output:output - shouldFillAudioBufferList:ioData - withNumberOfFrames:inNumberFrames]; - } - - return noErr; + // + // Get the audio units + // + [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph, + self.info->converterNodeInfo.node, + &converterDescription, + &self.info->converterNodeInfo.audioUnit) + operation:"Failed to get converter audio unit"]; + [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph, + self.info->mixerNodeInfo.node, + &mixerDescription, + &self.info->mixerNodeInfo.audioUnit) + operation:"Failed to get mixer audio unit"]; + [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph, + self.info->outputNodeInfo.node, + &outputDescription, + &self.info->outputNodeInfo.audioUnit) + operation:"Failed to get output 
audio unit"]; + + // + // Add a node input callback for the converter node + // + AURenderCallbackStruct converterCallback; + converterCallback.inputProc = EZOutputConverterInputCallback; + converterCallback.inputProcRefCon = (__bridge void *)(self); + [EZAudioUtilities checkResult:AUGraphSetNodeInputCallback(self.info->graph, + self.info->converterNodeInfo.node, + 0, + &converterCallback) + operation:"Failed to set render callback on converter node"]; + + // + // Set stream formats + // + [self setClientFormat:[self defaultClientFormat]]; + [self setInputFormat:[self defaultInputFormat]]; + +#if TARGET_OS_IPHONE + EZAudioDevice *currentOutputDevice = [EZAudioDevice currentOutputDevice]; + [self setDevice:currentOutputDevice]; +#elif TARGET_OS_MAC + NSArray *outputDevices = [EZAudioDevice outputDevices]; + EZAudioDevice *defaultOutput = [outputDevices firstObject]; + [self setDevice:defaultOutput]; +#endif + + // + // Set maximum frames per slice to 4096 to allow playback during + // lock screen (iOS only?) 
+ // + UInt32 maximumFramesPerSlice = EZOutputMaximumFramesPerSlice; + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit, + kAudioUnitProperty_MaximumFramesPerSlice, + kAudioUnitScope_Global, + 0, + &maximumFramesPerSlice, + sizeof(maximumFramesPerSlice)) + operation:"Failed to set maximum frames per slice on mixer node"]; + + // + // Initialize all the audio units in the graph + // + [EZAudioUtilities checkResult:AUGraphInitialize(self.info->graph) + operation:"Failed to initialize graph"]; + + // + // Add render callback + // + [EZAudioUtilities checkResult:AudioUnitAddRenderNotify(self.info->mixerNodeInfo.audioUnit, + EZOutputGraphRenderCallback, + (__bridge void *)(self)) + operation:"Failed to add render callback"]; } -#pragma mark - Initialization --(id)init { - self = [super init]; - if(self){ - [self _configureOutput]; - } - return self; -} +//------------------------------------------------------------------------------ +#pragma mark - Actions +//------------------------------------------------------------------------------ --(id)initWithDataSource:(id)dataSource { - self = [super init]; - if(self){ - self.outputDataSource = dataSource; - [self _configureOutput]; - } - return self; +- (void)startPlayback +{ + // + // Start the AUGraph + // + [EZAudioUtilities checkResult:AUGraphStart(self.info->graph) + operation:"Failed to start graph"]; + + // + // Notify delegate + // + if ([self.delegate respondsToSelector:@selector(output:changedPlayingState:)]) + { + [self.delegate output:self changedPlayingState:[self isPlaying]]; + } } --(id) initWithDataSource:(id)dataSource - withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription { - self = [super init]; - if(self){ - _customASBD = YES; - _outputASBD = audioStreamBasicDescription; - self.outputDataSource = dataSource; - [self _configureOutput]; - } - return self; +//------------------------------------------------------------------------------ 
+ +- (void)stopPlayback +{ + // + // Stop the AUGraph + // + [EZAudioUtilities checkResult:AUGraphStop(self.info->graph) + operation:"Failed to stop graph"]; + + // + // Notify delegate + // + if ([self.delegate respondsToSelector:@selector(output:changedPlayingState:)]) + { + [self.delegate output:self changedPlayingState:[self isPlaying]]; + } } -#pragma mark - Class Initializers -+(EZOutput*)outputWithDataSource:(id)dataSource { - return [[EZOutput alloc] initWithDataSource:dataSource]; +//------------------------------------------------------------------------------ +#pragma mark - Getters +//------------------------------------------------------------------------------ + +- (AudioStreamBasicDescription)clientFormat +{ + return self.info->clientFormat; } -+(EZOutput *)outputWithDataSource:(id)dataSource - withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription { - return [[EZOutput alloc] initWithDataSource:dataSource withAudioStreamBasicDescription:audioStreamBasicDescription]; +//------------------------------------------------------------------------------ + +- (AudioStreamBasicDescription)inputFormat +{ + return self.info->inputFormat; } -#pragma mark - Singleton -+(EZOutput*)sharedOutput { - static EZOutput *_sharedOutput = nil; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - _sharedOutput = [[EZOutput alloc] init]; - }); - return _sharedOutput; +//------------------------------------------------------------------------------ + +- (BOOL)isPlaying +{ + Boolean isPlaying; + [EZAudioUtilities checkResult:AUGraphIsRunning(self.info->graph, + &isPlaying) + operation:"Failed to check if graph is running"]; + return isPlaying; } -#pragma mark - Audio Component Initialization --(AudioComponentDescription)_getOutputAudioComponentDescription { - // Create an output component description for default output device - AudioComponentDescription outputComponentDescription; - outputComponentDescription.componentFlags = 
0; - outputComponentDescription.componentFlagsMask = 0; - outputComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple; - #if TARGET_OS_IPHONE - outputComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO; - #elif TARGET_OS_MAC - outputComponentDescription.componentSubType = kAudioUnitSubType_DefaultOutput; - #endif - outputComponentDescription.componentType = kAudioUnitType_Output; - return outputComponentDescription; +//------------------------------------------------------------------------------ + +- (float)pan +{ + AudioUnitParameterID param; +#if TARGET_OS_IPHONE + param = kMultiChannelMixerParam_Pan; +#elif TARGET_OS_MAC + param = kStereoMixerParam_Pan; +#endif + AudioUnitParameterValue pan; + [EZAudioUtilities checkResult:AudioUnitGetParameter(self.info->mixerNodeInfo.audioUnit, + param, + kAudioUnitScope_Input, + 0, + &pan) operation:"Failed to get pan from mixer unit"]; + return pan; } --(AudioComponent)_getOutputComponentWithAudioComponentDescription:(AudioComponentDescription)outputComponentDescription { - // Try and find the component - AudioComponent outputComponent = AudioComponentFindNext( NULL , &outputComponentDescription); - NSAssert(outputComponent,@"Couldn't get input component unit!"); - return outputComponent; +//------------------------------------------------------------------------------ + +- (float)volume +{ + AudioUnitParameterID param; +#if TARGET_OS_IPHONE + param = kMultiChannelMixerParam_Volume; +#elif TARGET_OS_MAC + param = kStereoMixerParam_Volume; +#endif + AudioUnitParameterValue volume; + [EZAudioUtilities checkResult:AudioUnitGetParameter(self.info->mixerNodeInfo.audioUnit, + param, + kAudioUnitScope_Input, + 0, + &volume) + operation:"Failed to get volume from mixer unit"]; + return volume; } --(void)_createNewInstanceForOutputComponent:(AudioComponent)outputComponent { - // - [EZAudioUtilities checkResult:AudioComponentInstanceNew( outputComponent, &_outputUnit) - operation:"Failed to open 
component for output unit"]; +//------------------------------------------------------------------------------ +#pragma mark - Setters +//------------------------------------------------------------------------------ + +- (void)setClientFormat:(AudioStreamBasicDescription)clientFormat +{ + if (self.floatConverter) + { + self.floatConverter = nil; + [EZAudioUtilities freeFloatBuffers:self.info->floatData + numberOfChannels:self.clientFormat.mChannelsPerFrame]; + } + + self.info->clientFormat = clientFormat; + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->converterNodeInfo.audioUnit, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, + 0, + &self.info->clientFormat, + sizeof(self.info->clientFormat)) + operation:"Failed to set output client format on converter audio unit"]; + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, + 0, + &self.info->clientFormat, + sizeof(self.info->clientFormat)) + operation:"Failed to set input client format on mixer audio unit"]; + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, + 0, + &self.info->clientFormat, + sizeof(self.info->clientFormat)) + operation:"Failed to set output client format on mixer audio unit"]; + + self.floatConverter = [[EZAudioFloatConverter alloc] initWithInputFormat:clientFormat]; + self.info->floatData = [EZAudioUtilities floatBuffersWithNumberOfFrames:EZOutputMaximumFramesPerSlice + numberOfChannels:clientFormat.mChannelsPerFrame]; } -#pragma mark - Configure The Output Unit - -//-(void)_configureOutput { -// -// // Get component description for output -// AudioComponentDescription outputComponentDescription = [self _getOutputAudioComponentDescription]; -// -// // Get the output component -// AudioComponent outputComponent = [self 
_getOutputComponentWithAudioComponentDescription:outputComponentDescription]; -// -// // Create a new instance of the component and store it for internal use -// [self _createNewInstanceForOutputComponent:outputComponent]; -// -//} +//------------------------------------------------------------------------------ +- (void)setDevice:(EZAudioDevice *)device +{ #if TARGET_OS_IPHONE --(void)_configureOutput { - - // - AudioComponentDescription outputcd; - outputcd.componentFlags = 0; - outputcd.componentFlagsMask = 0; - outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; - outputcd.componentSubType = kAudioUnitSubType_RemoteIO; - outputcd.componentType = kAudioUnitType_Output; - - // - AudioComponent comp = AudioComponentFindNext(NULL,&outputcd); - [EZAudioUtilities checkResult:AudioComponentInstanceNew(comp,&_outputUnit) - operation:"Failed to get output unit"]; - - // Setup the output unit for playback - UInt32 oneFlag = 1; - AudioUnitElement bus0 = 0; - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Output, - bus0, - &oneFlag, - sizeof(oneFlag)) - operation:"Failed to enable output unit"]; - - // Get the hardware sample rate - Float64 hardwareSampleRate = 44100; -#if !(TARGET_IPHONE_SIMULATOR) - hardwareSampleRate = [[AVAudioSession sharedInstance] sampleRate]; + + // if the devices are equal then ignore + if ([device isEqual:self.device]) + { + return; + } + + NSError *error; + [[AVAudioSession sharedInstance] setOutputDataSource:device.dataSource error:&error]; + if (error) + { + NSLog(@"Error setting output device data source (%@), reason: %@", + device.dataSource, + error.localizedDescription); + } + +#elif TARGET_OS_MAC + UInt32 outputEnabled = device.outputChannelCount > 0; + NSAssert(outputEnabled, @"Selected EZAudioDevice does not have any output channels"); + NSAssert([self outputAudioUnitSubType] == kAudioUnitSubType_HALOutput, + @"Audio device selection on OSX is only 
available when using the kAudioUnitSubType_HALOutput output unit subtype"); + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->outputNodeInfo.audioUnit, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, + 0, + &outputEnabled, + sizeof(outputEnabled)) + operation:"Failed to set flag on device output"]; + + AudioDeviceID deviceId = device.deviceID; + [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->outputNodeInfo.audioUnit, + kAudioOutputUnitProperty_CurrentDevice, + kAudioUnitScope_Global, + 0, + &deviceId, + sizeof(AudioDeviceID)) + operation:"Couldn't set default device on I/O unit"]; #endif - - // Setup an ASBD in canonical format by default - if (!_customASBD){ - _outputASBD = [EZAudioUtilities stereoCanonicalNonInterleavedFormatWithSampleRate:hardwareSampleRate]; - } - - // Set the format for output - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Input, - bus0, - &_outputASBD, - sizeof(_outputASBD)) - operation:"Couldn't set the ASBD for input scope/bos 0"]; - - // - AURenderCallbackStruct input; - input.inputProc = OutputRenderCallback; - input.inputProcRefCon = (__bridge void *)self; - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioUnitProperty_SetRenderCallback, - kAudioUnitScope_Input, - bus0, - &input, - sizeof(input)) - operation:"Failed to set the render callback on the output unit"]; - - // - [EZAudioUtilities checkResult:AudioUnitInitialize(_outputUnit) - operation:"Couldn't initialize output unit"]; - - + + // store device + _device = device; + + // notify delegate + if ([self.delegate respondsToSelector:@selector(output:changedDevice:)]) + { + [self.delegate output:self changedDevice:device]; + } } + +//------------------------------------------------------------------------------ + +- (void)setInputFormat:(AudioStreamBasicDescription)inputFormat +{ + self.info->inputFormat = inputFormat; + [EZAudioUtilities 
checkResult:AudioUnitSetProperty(self.info->converterNodeInfo.audioUnit, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, + 0, + &inputFormat, + sizeof(inputFormat)) + operation:"Failed to set input format on converter audio unit"]; +} + +//------------------------------------------------------------------------------ + +- (void)setPan:(float)pan +{ + AudioUnitParameterID param; +#if TARGET_OS_IPHONE + param = kMultiChannelMixerParam_Pan; #elif TARGET_OS_MAC --(void)_configureOutput { - - // - AudioComponentDescription outputcd; - outputcd.componentType = kAudioUnitType_Output; - outputcd.componentSubType = kAudioUnitSubType_DefaultOutput; - outputcd.componentManufacturer = kAudioUnitManufacturer_Apple; - - // - AudioComponent comp = AudioComponentFindNext(NULL, &outputcd); - if (comp == NULL){ - NSLog(@"Failed to get output unit"); - exit(-1); - } - [EZAudioUtilities checkResult:AudioComponentInstanceNew(comp, &_outputUnit) - operation:"Failed to open component for output unit"]; - - - // Setup an ASBD in canonical format by default - if (!_customASBD){ - _outputASBD = [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:44100]; - } - - // Set the format for output - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Input, - 0, - &_outputASBD, - sizeof(_outputASBD)) - operation:"Couldn't set the ASBD for input scope/bos 0"]; - - // - AURenderCallbackStruct input; - input.inputProc = OutputRenderCallback; - input.inputProcRefCon = (__bridge void *)(self); - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioUnitProperty_SetRenderCallback, - kAudioUnitScope_Input, - 0, - &input, - sizeof(input)) - operation:"Failed to set the render callback on the output unit"]; - - // - [EZAudioUtilities checkResult:AudioUnitInitialize(_outputUnit) - operation:"Couldn't initialize output unit"]; - + param = kStereoMixerParam_Pan; +#endif + [EZAudioUtilities 
checkResult:AudioUnitSetParameter(self.info->mixerNodeInfo.audioUnit, + param, + kAudioUnitScope_Input, + 0, + pan, + 0) + operation:"Failed to set volume on mixer unit"]; } + +//------------------------------------------------------------------------------ + +- (void)setVolume:(float)volume +{ + AudioUnitParameterID param; +#if TARGET_OS_IPHONE + param = kMultiChannelMixerParam_Volume; +#elif TARGET_OS_MAC + param = kStereoMixerParam_Volume; #endif + [EZAudioUtilities checkResult:AudioUnitSetParameter(self.info->mixerNodeInfo.audioUnit, + param, + kAudioUnitScope_Input, + 0, + volume, + 0) + operation:"Failed to set volume on mixer unit"]; +} + +//------------------------------------------------------------------------------ +#pragma mark - Core Audio Properties +//------------------------------------------------------------------------------ -#pragma mark - Events --(void)startPlayback { - if (!_isPlaying){ - [EZAudioUtilities checkResult:AudioOutputUnitStart(_outputUnit) - operation:"Failed to start output unit"]; - _isPlaying = YES; - } +- (AUGraph)graph +{ + return self.info->graph; } --(void)stopPlayback { - if (_isPlaying){ - [EZAudioUtilities checkResult:AudioOutputUnitStop(_outputUnit) - operation:"Failed to stop output unit"]; - _isPlaying = NO; - } +//------------------------------------------------------------------------------ + +- (AudioUnit)converterAudioUnit +{ + return self.info->converterNodeInfo.audioUnit; } -#pragma mark - Getters --(AudioStreamBasicDescription)audioStreamBasicDescription { - return _outputASBD; +//------------------------------------------------------------------------------ + +- (AudioUnit)mixerAudioUnit +{ + return self.info->mixerNodeInfo.audioUnit; } --(BOOL)isPlaying { - return _isPlaying; +//------------------------------------------------------------------------------ + +- (AudioUnit)outputAudioUnit +{ + return self.info->outputNodeInfo.audioUnit; } -#pragma mark - Setters 
--(void)setAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd { - BOOL wasPlaying = NO; - if (self.isPlaying){ - [self stopPlayback]; - wasPlaying = YES; - } - _customASBD = YES; - _outputASBD = asbd; - // Set the format for output - [EZAudioUtilities checkResult:AudioUnitSetProperty(_outputUnit, - kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Input, - 0, - &_outputASBD, - sizeof(_outputASBD)) - operation:"Couldn't set the ASBD for input scope/bos 0"]; - if (wasPlaying) - { - [self startPlayback]; - } +//------------------------------------------------------------------------------ +#pragma mark - Subclass +//------------------------------------------------------------------------------ + +- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode + sourceNodeOutputBus:(UInt32)sourceNodeOutputBus + toDestinationNode:(AUNode)destinationNode + destinationNodeInputBus:(UInt32)destinationNodeInputBus + inGraph:(AUGraph)graph +{ + // + // Default implementation is to just connect the source to destination + // + [EZAudioUtilities checkResult:AUGraphConnectNodeInput(graph, + sourceNode, + sourceNodeOutputBus, + destinationNode, + destinationNodeInputBus) + operation:"Failed to connect converter node to mixer node"]; + return noErr; } --(void)dealloc { - [EZAudioUtilities checkResult:AudioOutputUnitStop(_outputUnit) - operation:"Failed to uninitialize output unit"]; - [EZAudioUtilities checkResult:AudioUnitUninitialize(_outputUnit) - operation:"Failed to uninitialize output unit"]; - [EZAudioUtilities checkResult:AudioComponentInstanceDispose(_outputUnit) - operation:"Failed to uninitialize output unit"]; +//------------------------------------------------------------------------------ + +- (AudioStreamBasicDescription)defaultClientFormat +{ + return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:EZOutputDefaultSampleRate]; +} + +//------------------------------------------------------------------------------ + +- 
(AudioStreamBasicDescription)defaultInputFormat +{ + return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:EZOutputDefaultSampleRate]; } +//------------------------------------------------------------------------------ + +- (OSType)outputAudioUnitSubType +{ +#if TARGET_OS_IPHONE + return kAudioUnitSubType_RemoteIO; +#elif TARGET_OS_MAC + return kAudioUnitSubType_HALOutput; +#endif +} + +//------------------------------------------------------------------------------ + @end + +//------------------------------------------------------------------------------ +#pragma mark - Callbacks (Implementation) +//------------------------------------------------------------------------------ + +OSStatus EZOutputConverterInputCallback(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData) +{ + EZOutput *output = (__bridge EZOutput *)inRefCon; + + // + // Try to ask the data source for audio data to fill out the output's + // buffer list + // + if ([output.dataSource respondsToSelector:@selector(output:shouldFillAudioBufferList:withNumberOfFrames:timestamp:)]) + { + return [output.dataSource output:output + shouldFillAudioBufferList:ioData + withNumberOfFrames:inNumberFrames + timestamp:inTimeStamp]; + } + else + { + // + // Silence if there is nothing to output + // + for (int i = 0; i < ioData->mNumberBuffers; i++) + { + memset(ioData->mBuffers[i].mData, + 0, + ioData->mBuffers[i].mDataByteSize); + } + } + return noErr; +} + +//------------------------------------------------------------------------------ + +OSStatus EZOutputGraphRenderCallback(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData) +{ + EZOutput *output = (__bridge EZOutput *)inRefCon; + + // + // provide the audio received delegate callback + // + if 
(*ioActionFlags & kAudioUnitRenderAction_PostRender) + { + if ([output.delegate respondsToSelector:@selector(output:playedAudio:withBufferSize:withNumberOfChannels:)]) + { + UInt32 frames = ioData->mBuffers[0].mDataByteSize / output.info->clientFormat.mBytesPerFrame; + [output.floatConverter convertDataFromAudioBufferList:ioData + withNumberOfFrames:frames + toFloatBuffers:output.info->floatData]; + [output.delegate output:output + playedAudio:output.info->floatData + withBufferSize:inNumberFrames + withNumberOfChannels:output.info->clientFormat.mChannelsPerFrame]; + } + } + return noErr; +} \ No newline at end of file diff --git a/EZAudioExamples/OSX/EZAudioPassThroughExample/EZAudioPassThroughExample/PassThroughViewController.h b/EZAudioExamples/OSX/EZAudioPassThroughExample/EZAudioPassThroughExample/PassThroughViewController.h index 48a0b316..4b86c3bb 100644 --- a/EZAudioExamples/OSX/EZAudioPassThroughExample/EZAudioPassThroughExample/PassThroughViewController.h +++ b/EZAudioExamples/OSX/EZAudioPassThroughExample/EZAudioPassThroughExample/PassThroughViewController.h @@ -30,7 +30,7 @@ */ #import "EZAudio.h" -@interface PassThroughViewController : NSViewController +@interface PassThroughViewController : NSViewController //------------------------------------------------------------------------------ #pragma mark - Components diff --git a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.h b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.h index a3fa623c..d87a87c7 100644 --- a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.h +++ b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.h @@ -36,13 +36,21 @@ /** Using the EZOutputDataSource to provide output data to the EZOutput component. 
*/ -@interface PlayFileViewController : NSViewController +@interface PlayFileViewController : NSViewController #pragma mark - Components /** The EZAudioFile representing of the currently selected audio file */ -@property (nonatomic,strong) EZAudioFile *audioFile; +@property (nonatomic, strong) EZAudioFile *audioFile; + +/** + The EZOutput component used to output the audio file's audio data. + */ +@property (nonatomic, strong) EZOutput *output; /** The CoreGraphics based audio plot @@ -56,7 +64,7 @@ @property (nonatomic, weak) IBOutlet NSTextField *filePathLabel; /** - <#Description#> + A label to display the audio file's current position. */ @property (nonatomic, weak) IBOutlet NSTextField *positionLabel; @@ -66,36 +74,36 @@ @property (nonatomic, weak) IBOutlet NSSlider *positionSlider; /** - <#Description#> + A label to display the value of the rolling history length of the audio plot. */ @property (nonatomic, weak) IBOutlet NSTextField *rollingHistoryLengthLabel; /** - <#Description#> + A slider to adjust the rolling history length of the audio plot. */ @property (nonatomic, weak) IBOutlet NSSlider *rollingHistoryLengthSlider; /** - A slider to adjust the sample rate. + A slider to adjust the volume. */ -@property (nonatomic, weak) IBOutlet NSSlider *sampleRateSlider; +@property (nonatomic, weak) IBOutlet NSSlider *volumeSlider; /** - A slider to adjust the sample rate. + A label to display the volume of the audio plot. 
 */ -@property (nonatomic, weak) IBOutlet NSTextField *sampleRateLabel; +@property (nonatomic, weak) IBOutlet NSTextField *volumeLabel; /** A BOOL indicating whether or not we've reached the end of the file */ @property (nonatomic,assign) BOOL eof; -#pragma mark - Actions /** - Changes the sampling frequency on the output unit + The output device pop up button (contains the menu for choosing an output device) */ --(IBAction)changeOutputSamplingFrequency:(id)sender; +@property (nonatomic, weak) IBOutlet NSPopUpButton *outputDevicePopUpButton; +#pragma mark - Actions /** Switches the plot drawing type between a buffer plot (visualizes the current stream of audio data from the update function) or a rolling plot (visualizes the audio data over time, this is the classic waveform look) */ @@ -106,6 +114,11 @@ */ - (IBAction)changeRollingHistoryLength:(id)sender; +/** + Changes the volume of the audio coming out of the EZOutput. + */ +- (IBAction)changeVolume:(id)sender; + /** Prompts the file manager and loads in a new audio file into the EZAudioFile representation.
*/ diff --git a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.m b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.m index 5b2a977d..2ade878d 100644 --- a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.m +++ b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.m @@ -52,11 +52,24 @@ - (void)awakeFromNib // Mirror self.audioPlot.shouldMirror = YES; + // + // Create EZOutput to play audio data + // + self.output = [EZOutput outputWithDataSource:self]; + self.output.delegate = self; + + // + // Reload the menu for the output device selector popup button + // + [self reloadOutputDevicePopUpButtonMenu]; + // // Configure UI components // - self.rollingHistoryLengthSlider.intValue = self.audioPlot.rollingHistoryLength; - self.rollingHistoryLengthLabel.intValue = self.audioPlot.rollingHistoryLength; + self.volumeSlider.floatValue = [self.output volume]; + self.volumeLabel.floatValue = [self.output volume]; + self.rollingHistoryLengthSlider.intValue = [self.audioPlot rollingHistoryLength]; + self.rollingHistoryLengthLabel.intValue = [self.audioPlot rollingHistoryLength]; // // Try opening the sample file @@ -68,6 +81,14 @@ - (void)awakeFromNib #pragma mark - Actions //------------------------------------------------------------------------------ +- (void)changedOutput:(NSMenuItem *)item +{ + EZAudioDevice *device = [item representedObject]; + [self.output setDevice:device]; +} + +//------------------------------------------------------------------------------ + - (void)changePlotType:(id)sender { NSInteger selectedSegment = [sender selectedSegment]; @@ -86,13 +107,11 @@ - (void)changePlotType:(id)sender //------------------------------------------------------------------------------ -- (void)changeOutputSamplingFrequency:(id)sender +- (void)changeVolume:(id)sender { - float sampleRate = ((NSSlider 
*)sender).floatValue; - AudioStreamBasicDescription asbd = [[EZOutput sharedOutput] audioStreamBasicDescription]; - asbd.mSampleRate = sampleRate; - [[EZOutput sharedOutput] setAudioStreamBasicDescription:asbd]; - self.sampleRateLabel.floatValue = sampleRate; + float value = [(NSSlider *)sender floatValue]; + [self.output setVolume:value]; + self.volumeLabel.floatValue = value; } //------------------------------------------------------------------------------ @@ -123,7 +142,7 @@ - (void)openFile:(id)sender -(void)play:(id)sender { - if (![[EZOutput sharedOutput] isPlaying]) + if (![self.output isPlaying]) { if (self.eof) { @@ -133,13 +152,11 @@ -(void)play:(id)sender { self.audioPlot.plotType = EZPlotTypeRolling; } - [EZOutput sharedOutput].outputDataSource = self; - [[EZOutput sharedOutput] startPlayback]; + [self.output startPlayback]; } else { - [EZOutput sharedOutput].outputDataSource = nil; - [[EZOutput sharedOutput] stopPlayback]; + [self.output stopPlayback]; } } @@ -191,12 +208,12 @@ -(void)openFileWithFilePathURL:(NSURL*)filePathURL // // Stop playback // - [[EZOutput sharedOutput] stopPlayback]; + [self.output stopPlayback]; // // Clear the audio plot // -// [self.audioPlot clear]; + [self.audioPlot clear]; // // Load the audio file and customize the UI @@ -212,10 +229,7 @@ -(void)openFileWithFilePathURL:(NSURL*)filePathURL // // Set the client format from the EZAudioFile on the output // - Float64 sampleRate = self.audioFile.clientFormat.mSampleRate; - self.sampleRateSlider.floatValue = sampleRate; - self.sampleRateLabel.floatValue = sampleRate; - [[EZOutput sharedOutput] setAudioStreamBasicDescription:self.audioFile.clientFormat]; + [self.output setInputFormat:self.audioFile.clientFormat]; // // Change back to a buffer plot, but mirror and fill the waveform @@ -238,29 +252,48 @@ -(void)openFileWithFilePathURL:(NSURL*)filePathURL }]; } -//------------------------------------------------------------------------------ -#pragma mark - EZAudioFileDelegate 
//------------------------------------------------------------------------------ --(void) audioFile:(EZAudioFile *)audioFile - readAudio:(float **)buffer - withBufferSize:(UInt32)bufferSize - withNumberOfChannels:(UInt32)numberOfChannels +- (void)reloadOutputDevicePopUpButtonMenu { - if ([[EZOutput sharedOutput] isPlaying]) + NSArray *outputDevices = [EZAudioDevice outputDevices]; + NSMenu *menu = [[NSMenu alloc] init]; + NSMenuItem *defaultOutputDeviceItem; + for (EZAudioDevice *device in outputDevices) { - __weak typeof (self) weakSelf = self; - dispatch_async(dispatch_get_main_queue(), ^{ - [weakSelf.audioPlot updateBuffer:buffer[0] - withBufferSize:bufferSize]; - }); + NSMenuItem *item = [[NSMenuItem alloc] initWithTitle:device.name + action:@selector(changedOutput:) + keyEquivalent:@""]; + item.representedObject = device; + item.target = self; + [menu addItem:item]; + + // If this device is the same one the output is currently using then + // we will use this menu item as the currently selected item + // in the output device popup button's list of items. For instance, + // if you are connected to an external display by default the external + // display's speakers might be used instead of the mac's built in + // speakers.
+ if ([device isEqual:[self.output device]]) + { + defaultOutputDeviceItem = item; + } } + self.outputDevicePopUpButton.menu = menu; + + // + // Set the selected device to the current selection on the + // output device popup button + // + [self.outputDevicePopUpButton selectItem:defaultOutputDeviceItem]; } +//------------------------------------------------------------------------------ +#pragma mark - EZAudioFileDelegate //------------------------------------------------------------------------------ --(void)audioFile:(EZAudioFile *)audioFile - updatedPosition:(SInt64)framePosition { +-(void)audioFile:(EZAudioFile *)audioFile updatedPosition:(SInt64)framePosition +{ __weak typeof (self) weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ if (![weakSelf.positionSlider.cell isHighlighted]) @@ -275,9 +308,10 @@ -(void)audioFile:(EZAudioFile *)audioFile #pragma mark - EZOutputDataSource //------------------------------------------------------------------------------ --(void) output:(EZOutput *)output +-(OSStatus) output:(EZOutput *)output shouldFillAudioBufferList:(AudioBufferList *)audioBufferList withNumberOfFrames:(UInt32)frames + timestamp:(const AudioTimeStamp *)timestamp { if (self.audioFile) { @@ -291,6 +325,23 @@ -(void) output:(EZOutput *)output [self seekToFrame:0]; } } + return noErr; +} + +//------------------------------------------------------------------------------ +#pragma mark - EZOutputDelegate +//------------------------------------------------------------------------------ + +- (void) output:(EZOutput *)output + playedAudio:(float **)buffer + withBufferSize:(UInt32)bufferSize + withNumberOfChannels:(UInt32)numberOfChannels +{ + __weak typeof (self) weakSelf = self; + dispatch_async(dispatch_get_main_queue(), ^{ + [weakSelf.audioPlot updateBuffer:buffer[0] + withBufferSize:bufferSize]; + }); } //------------------------------------------------------------------------------ diff --git 
a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.xib b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.xib index b35102a8..f9435050 100644 --- a/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.xib +++ b/EZAudioExamples/OSX/EZAudioPlayFileExample/EZAudioPlayFileExample/PlayFileViewController.xib @@ -9,15 +9,16 @@ + - - + + @@ -27,7 +28,7 @@ - +