SDL_Mixer.m

/*
Copyright (C) 2011 Id Software, Inc.

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/

/*
===============================
iOS implementation of our SDL_Mixer shim for playing MIDI files.
===============================
*/
#include <stddef.h>

#include "SDL_Mixer.h"

// Use the Embedded Audio Synthesis library as the backend MIDI renderer.
#include "embeddedaudiosynthesis/EASGlue.h"

// Use Core Audio Units for sound output on the device.
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

/*
===============================
"Constants"
===============================
*/
#define ID_GRAPH_SAMPLE_RATE 22050

/*
===============================
Internal Structures
===============================
*/
// Data structure for mono or stereo sound, to pass to the render callback function,
// which is invoked by the I/O unit's input bus when it needs more audio to play.
typedef struct {
    BOOL    isStereo;       // true if the sound data is stereo (two channels)
    UInt32  frameCount;     // the total number of frames in the audio data
    UInt32  sampleNumber;   // the next audio sample to play
} soundStruct, *soundStructPtr;

typedef struct MIDIPlayerGraph_tag {
    AUGraph                     processingGraph;
    AudioUnit                   ioUnit;
    BOOL                        playing;
    AudioStreamBasicDescription streamFormat;
    soundStruct                 soundStructInst;
} MIDIPlayerGraph;

static MIDIPlayerGraph midiPlayer;
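
// Note: this shim drives a single, statically allocated MIDI player. soundStructInst is
// handed to the render callback as its refCon, but the callback below does not currently
// read it; the audio it plays comes straight from the EAS glue layer.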

/*
===============================
Internal prototypes
===============================
*/
AudioStreamBasicDescription getStreamFormat( void );
static void printASBD( AudioStreamBasicDescription asbd );
static void printErrorMessage( NSString * errorString, OSStatus result );
static void configureAndInitializeAudioProcessingGraph( MIDIPlayerGraph * player );
static void startMIDIPlayer( MIDIPlayerGraph * player );

// AU graph callback.
static OSStatus inputRenderCallback (
    void                        *inRefCon,      // A pointer to a struct containing the complete audio data
                                                //  to play, as well as state information such as the
                                                //  first sample to play on this invocation of the callback.
    AudioUnitRenderActionFlags  *ioActionFlags, // Unused here. When generating audio, use ioActionFlags to indicate silence
                                                //  between sounds; for silence, also memset the ioData buffers to 0.
    const AudioTimeStamp        *inTimeStamp,   // Unused here.
    UInt32                      inBusNumber,    // The I/O unit input bus that is requesting some new
                                                //  frames of audio data to play.
    UInt32                      inNumberFrames, // The number of frames of audio to provide to the buffer(s)
                                                //  pointed to by the ioData parameter.
    AudioBufferList             *ioData         // On output, the audio data to play. The callback's primary
                                                //  responsibility is to fill the buffer(s) in the
                                                //  AudioBufferList.
);

/* Open the mixer with a certain audio format */
int Mix_OpenAudio(int frequency, uint16_t format, int channels,
                  int chunksize) {
    EASGlueInit();

    midiPlayer.streamFormat = getStreamFormat();
    midiPlayer.playing = NO;

    configureAndInitializeAudioProcessingGraph( &midiPlayer );

    return 0;
}

/* Close the mixer, halting all playing audio */
void Mix_CloseAudio(void) {
    AUGraphStop( midiPlayer.processingGraph );
    EASGlueShutdown();
}

/* Set a function that is called after all mixing is performed.
   This can be used to provide real-time visual display of the audio stream
   or add a custom mixer filter for the stream data.
*/
void Mix_SetPostMix(void (*mix_func)
                    (void *udata, uint8_t *stream, int len), void *arg) {
}

/* Fade in music or a channel over "ms" milliseconds, same semantics as the "Play" functions */
int Mix_FadeInMusic(Mix_Music *music, int loops, int ms) {
    startMIDIPlayer( &midiPlayer );
    return 0;
}

/* Pause/Resume the music stream */
void Mix_PauseMusic(void) {
    EASGluePause();
}

void Mix_ResumeMusic(void) {
    EASGlueResume();
}

/* Halt a channel, fading it out progressively till it's silent
   The ms parameter indicates the number of milliseconds the fading
   will take.
*/
int Mix_FadeOutMusic(int ms) {
    EASGlueCloseFile();
    AUGraphStop( midiPlayer.processingGraph );
    return 1;
}

/* Free an audio chunk previously loaded */
void Mix_FreeMusic(Mix_Music *music) {
    free(music);
}

/* Load a wave file or a music (.mod .s3m .it .xm) file */
Mix_Music * Mix_LoadMUS(const char *file) {
    EASGlueOpenFile( file );
    Mix_Music * musicStruct = malloc( sizeof(Mix_Music) );
    return musicStruct;
}

const char * Mix_GetError(void) {
    return "";
}

/* Set the volume in the range of 0-128 of a specific channel or chunk.
   If the specified channel is -1, set volume for all channels.
   Returns the original volume.
   If the specified volume is -1, just return the current volume.
*/
int Mix_VolumeMusic(int volume) {
    return 0;
}
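
/*
Usage sketch (not part of the original file): roughly how calling code might drive this
shim, assuming a hypothetical "intro.mid" path. The frequency/format/channels/chunksize
arguments are accepted for SDL_mixer compatibility but ignored by this implementation,
and the fade durations are likewise ignored.

    if ( Mix_OpenAudio( 22050, 0, 2, 1024 ) == 0 ) {
        Mix_Music * music = Mix_LoadMUS( "intro.mid" );     // opens the file through the EAS glue
        Mix_FadeInMusic( music, -1, 0 );                    // starts the AU graph
        // ... game runs ...
        Mix_FadeOutMusic( 0 );                              // closes the file and stops the graph
        Mix_FreeMusic( music );
        Mix_CloseAudio();
    }
*/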

/*
=================================
Audio Unit helper functions
=================================
*/
AudioStreamBasicDescription getStreamFormat( void ) {
    AudioStreamBasicDescription streamFormat = { 0, 0, 0, 0, 0, 0, 0, 0, 0 };

    // The AudioUnitSampleType data type is the recommended type for sample data in audio
    // units. This obtains the byte size of the type for use in filling in the ASBD.
    size_t bytesPerSample = sizeof (AudioUnitSampleType);

    // Fill the application audio format struct's fields to define a linear PCM,
    // stereo, noninterleaved stream at the graph sample rate defined above.
    streamFormat.mFormatID          = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags       = kAudioFormatFlagsAudioUnitCanonical;
    streamFormat.mBytesPerPacket    = bytesPerSample;
    streamFormat.mFramesPerPacket   = 1;
    streamFormat.mBytesPerFrame     = bytesPerSample;
    streamFormat.mChannelsPerFrame  = 2;    // 2 indicates stereo
    streamFormat.mBitsPerChannel    = 8 * bytesPerSample;
    streamFormat.mSampleRate        = ID_GRAPH_SAMPLE_RATE;

    NSLog (@"The stereo stream format for the I/O unit:");
    printASBD( streamFormat );

    return streamFormat;
}
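
/*
For reference, assuming AudioUnitSampleType is the canonical 8.24 fixed-point SInt32 used
on iOS (so bytesPerSample == 4): the ASBD built above describes 22050 Hz, two-channel,
noninterleaved linear PCM. Because kAudioFormatFlagsAudioUnitCanonical includes
kAudioFormatFlagIsNonInterleaved, mBytesPerFrame and mBytesPerPacket describe one channel's
buffer (4 bytes per frame), not both channels combined.
*/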

static void startMIDIPlayer( MIDIPlayerGraph * player ) {
    if ( player == 0 ) {
        NSLog( @"NULL player object, can't start!" );
        return;
    }

    NSLog (@"Starting audio processing graph");
    OSStatus result = AUGraphStart (player->processingGraph);
    if (noErr != result) { printErrorMessage( @"AUGraphStart", result ); return; }

    player->playing = YES;
}

// You can use this method during development and debugging to look at the
// fields of an AudioStreamBasicDescription struct.
static void printASBD( AudioStreamBasicDescription asbd ) {
    char formatIDString[5];
    UInt32 formatID = CFSwapInt32HostToBig (asbd.mFormatID);
    bcopy (&formatID, formatIDString, 4);
    formatIDString[4] = '\0';

    NSLog (@"  Sample Rate:         %10.0f", asbd.mSampleRate);
    NSLog (@"  Format ID:           %10s",   formatIDString);
    NSLog (@"  Format Flags:        %10lX",  asbd.mFormatFlags);
    NSLog (@"  Bytes per Packet:    %10lu",  asbd.mBytesPerPacket);
    NSLog (@"  Frames per Packet:   %10lu",  asbd.mFramesPerPacket);
    NSLog (@"  Bytes per Frame:     %10lu",  asbd.mBytesPerFrame);
    NSLog (@"  Channels per Frame:  %10lu",  asbd.mChannelsPerFrame);
    NSLog (@"  Bits per Channel:    %10lu",  asbd.mBitsPerChannel);
}

static void printErrorMessage( NSString * errorString, OSStatus result ) {
    char resultString[5];
    UInt32 swappedResult = CFSwapInt32HostToBig (result);
    bcopy (&swappedResult, resultString, 4);
    resultString[4] = '\0';

    NSLog (
        @"*** %@ error: %s\n",
        errorString,
        (char*) &resultString
    );
}

// This function performs all the work needed to set up the audio processing graph:
// 1. Instantiate and open an audio processing graph
// 2. Add the Remote I/O unit node and obtain the audio unit instance from it
// 3. Configure the Remote I/O unit
//     * register the input render callback on its input bus
//     * set the stereo stream format on that bus
// 4. Initialize the audio processing graph
static void configureAndInitializeAudioProcessingGraph( MIDIPlayerGraph * player ) {
    if ( player == 0 ) {
        NSLog( @"NULL player graph object, can't initialize it!" );
        return;
    }

    NSLog (@"Configuring and then initializing audio processing graph");
    OSStatus result = noErr;

    //............................................................................
    // Create a new audio processing graph.
    result = NewAUGraph (&player->processingGraph);
    if (noErr != result) { printErrorMessage( @"NewAUGraph", result ); return; }

    //............................................................................
    // Specify the audio unit component descriptions for the audio units to be
    // added to the graph.

    // I/O unit
    AudioComponentDescription iOUnitDescription;
    iOUnitDescription.componentType         = kAudioUnitType_Output;
    iOUnitDescription.componentSubType      = kAudioUnitSubType_RemoteIO;
    iOUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
    iOUnitDescription.componentFlags        = 0;
    iOUnitDescription.componentFlagsMask    = 0;

    //............................................................................
    // Add nodes to the audio processing graph.
    NSLog (@"Adding nodes to audio processing graph");

    AUNode iONode;  // node for I/O unit

    // Add the nodes to the audio processing graph
    result = AUGraphAddNode (
                 player->processingGraph,
                 &iOUnitDescription,
                 &iONode);
    if (noErr != result) { printErrorMessage( @"AUGraphAddNode failed for I/O unit", result ); return; }

    //............................................................................
    // Open the audio processing graph
    // Following this call, the audio units are instantiated but not initialized
    // (no resource allocation occurs and the audio units are not in a state to
    // process audio).
    result = AUGraphOpen (player->processingGraph);
    if (noErr != result) { printErrorMessage( @"AUGraphOpen", result ); return; }

    //............................................................................
    // Obtain the I/O unit instance from its corresponding node.
    result = AUGraphNodeInfo (
                 player->processingGraph,
                 iONode,
                 NULL,
                 &player->ioUnit
             );
    if (noErr != result) { printErrorMessage( @"AUGraphNodeInfo", result ); return; }

    //............................................................................
    // I/O unit setup
    // Set up the structure that contains the input render callback
    AURenderCallbackStruct inputCallbackStruct;
    inputCallbackStruct.inputProc       = &inputRenderCallback;
    inputCallbackStruct.inputProcRefCon = &player->soundStructInst;

    NSLog (@"Registering the render callback with the I/O unit" );

    // Set a callback for the specified node's specified input
    result = AUGraphSetNodeInputCallback (
                 player->processingGraph,
                 iONode,
                 0,
                 &inputCallbackStruct
             );
    if (noErr != result) { printErrorMessage( @"AUGraphSetNodeInputCallback", result ); return; }

    NSLog (@"Setting stereo stream format for I/O unit input bus");
    result = AudioUnitSetProperty (
                 player->ioUnit,
                 kAudioUnitProperty_StreamFormat,
                 kAudioUnitScope_Input,
                 0,
                 &player->streamFormat,
                 sizeof (player->streamFormat)
             );
    if (noErr != result) { printErrorMessage( @"AudioUnitSetProperty (set input bus stream format)", result ); return; }

    //............................................................................
    // Initialize audio processing graph

    // Diagnostic code
    // Call CAShow if you want to look at the state of the audio processing
    // graph.
    NSLog (@"Audio processing graph state immediately before initializing it:");
    CAShow (player->processingGraph);

    NSLog (@"Initializing the audio processing graph");

    // Initialize the audio processing graph, configure audio data stream formats for
    // each input and output, and validate the connections between audio units.
    result = AUGraphInitialize (player->processingGraph);
    if (noErr != result) { printErrorMessage( @"AUGraphInitialize", result ); return; }
}

#define RAW_EAS_BUFFER_FRAMES 128

static OSStatus inputRenderCallback (
    void                        *inRefCon,      // A pointer to a struct containing the complete audio data
                                                //  to play, as well as state information such as the
                                                //  first sample to play on this invocation of the callback.
    AudioUnitRenderActionFlags  *ioActionFlags, // Unused here. When generating audio, use ioActionFlags to indicate silence
                                                //  between sounds; for silence, also memset the ioData buffers to 0.
    const AudioTimeStamp        *inTimeStamp,   // Unused here.
    UInt32                      inBusNumber,    // The I/O unit input bus that is requesting some new
                                                //  frames of audio data to play.
    UInt32                      inNumberFrames, // The number of frames of audio to provide to the buffer(s)
                                                //  pointed to by the ioData parameter.
    AudioBufferList             *ioData         // On output, the audio data to play. The callback's primary
                                                //  responsibility is to fill the buffer(s) in the
                                                //  AudioBufferList.
) {
    //printf( "Need %lu samples in %lu buffers!\n", inNumberFrames, ioData->mNumberBuffers );
    EAS_I32 generatedThisRender = 0;
    EAS_I32 totalGenerated = 0;

    // It looks like EAS interleaves stereo samples, so we have to separate them into the two
    // different buffers that the audio unit provides.
    //const UInt32 totalInterleavedSamplesNeeded = inNumberFrames * 2;
    AudioBuffer * audioBufferLeft  = &ioData->mBuffers[0];
    AudioBuffer * audioBufferRight = &ioData->mBuffers[1];

    /*
    printf( "Need %lu samples in %lu buffers!\n"
            "audioBuffer byte size: %lu channels: %lu\n",
            inNumberFrames, ioData->mNumberBuffers,
            audioBuffer->mDataByteSize, audioBuffer->mNumberChannels );
    */

    AudioUnitSampleType * hardwareBufferLeft  = (AudioUnitSampleType *) audioBufferLeft->mData;
    AudioUnitSampleType * hardwareBufferRight = (AudioUnitSampleType *) audioBufferRight->mData;

    // EAS_Render always produces BUFFER_SIZE_IN_MONO_SAMPLES frames per call. Currently, this
    // is defined to 128. Let's fill up a 128-frame buffer, then do a conversion from EAS_PCM
    // (which is signed 16-bit integer) to AudioUnitSampleType (which is 8.24 fixed-point with
    // a range of -1 to +1).
    //
    // Note that EAS renders interleaved stereo, so we actually need a buffer of
    // 2 * BUFFER_SIZE_IN_MONO_SAMPLES samples.
    EAS_PCM rawEASSamples[RAW_EAS_BUFFER_FRAMES * 2];

    // EAS generates interleaved stereo samples, but the AudioUnit wants noninterleaved.
    while ( totalGenerated < inNumberFrames ) {
        //EASGlueRender( hardwareBuffer + totalGenerated*2, &generatedThisRender );
        EASGlueRender( rawEASSamples, &generatedThisRender );

        // Convert from EAS's signed 16-bit format to the AudioUnit's 8.24 fixed-point format.
        // Couldn't find this in the Apple docs, but the 8.24 format should be in the range of
        // -1.0 to 1.0, wasting 6 bits of precision.
        // All we have to do here is left-shift by 9 bits. This will not overflow, because the
        // destination is a 32-bit value.
        // Also take this opportunity to de-interleave the EAS-rendered samples.
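        // Worked example of the shift (a sanity check on the scaling, assuming 8.24 means
        // sample / 2^24): a full-scale input of 32767 becomes 32767 << 9 = 16,776,704,
        // which is 16,776,704 / 16,777,216 = 0.99997, just under +1.0; the minimum input
        // of -32768 becomes -16,777,216, which is exactly -1.0.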
        for ( int i = 0; i < RAW_EAS_BUFFER_FRAMES; ++i ) {
            hardwareBufferLeft[totalGenerated + i]  = rawEASSamples[i * 2 + 0] << 9;
            hardwareBufferRight[totalGenerated + i] = rawEASSamples[i * 2 + 1] << 9;
        }

        totalGenerated += generatedThisRender;
    }

    return noErr;
}