Render basic waveform using Core Audio on macOS
// Compile this with:
// $ gcc synth.c -o synth -framework AudioToolbox
// Based on the CH07_AUGraphSineWave example from the book
// "Learning Core Audio: A Hands-On Guide to Audio Programming
// for Mac and iOS" by Chris Adamson and Kevin Avila

#include <AudioToolbox/AudioToolbox.h>
#include <math.h>    // sin, M_PI (may also be pulled in transitively by AudioToolbox)
#include <stdio.h>   // fprintf
#include <stdlib.h>  // exit
#include <unistd.h>  // sleep
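
// State shared with the render callback: the output audio unit,
// the hardware sample rate, and the sine wave's current phase
// (measured in samples).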
typedef struct
{
    AudioUnit outputUnit;
    double sampleRate;
    double phase;
} Player;
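
// Render callback. Core Audio calls this on its real-time audio thread
// whenever the output unit needs more samples. The default output unit
// delivers non-interleaved 32-bit float buffers, so mBuffers[0] and
// mBuffers[1] are the left and right channels.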
OSStatus renderAudio(
    void *inRefCon,
    AudioUnitRenderActionFlags *ioActionFlags,
    const AudioTimeStamp *inTimeStamp,
    UInt32 inBusNumber,
    UInt32 inNumberFrames,
    AudioBufferList *ioData)
{
    Player *player = (Player *)inRefCon;

    double sineFrequency = 880.0;
    double cycleLength = player->sampleRate / sineFrequency;  // samples per period
    double phase = player->phase;

    float *dataL = (float *)ioData->mBuffers[0].mData;
    float *dataR = (float *)ioData->mBuffers[1].mData;

    for (UInt32 frame = 0; frame < inNumberFrames; ++frame)
    {
        // Write the same sine sample to both channels.
        dataL[frame] = sin(2 * M_PI * phase / cycleLength);
        dataR[frame] = sin(2 * M_PI * phase / cycleLength);

        // Advance the phase by one sample and wrap it at the end of each
        // cycle so it never grows without bound.
        phase += 1.0;
        if (phase > cycleLength) {
            phase -= cycleLength;
        }
    }

    // Remember the phase for the next callback so the wave stays continuous.
    player->phase = phase;
    return noErr;
}
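
// Set up the default output audio unit, attach the render callback,
// and play the tone for five seconds.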
int main(int argc, const char *argv[])
{
    Player player = { 0 };
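
    // Describe the default output audio unit: the unit that feeds the
    // system's currently selected output device.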
    AudioComponentDescription desc = { 0 };
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_DefaultOutput;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    if (comp == NULL) {
        fprintf(stderr, "Couldn't get output AudioUnit\n");
        exit(1);
    }

    if (noErr != AudioComponentInstanceNew(comp, &player.outputUnit)) {
        fprintf(stderr, "Couldn't instantiate component for output AudioUnit\n");
        exit(1);
    }
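
    // Register renderAudio as the render callback for bus 0 on the input
    // scope of the output unit; inputProcRefCon is handed back to the
    // callback as inRefCon.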
    AURenderCallbackStruct input;
    input.inputProc = renderAudio;
    input.inputProcRefCon = &player;
    if (noErr != AudioUnitSetProperty(player.outputUnit,
                                      kAudioUnitProperty_SetRenderCallback,
                                      kAudioUnitScope_Input,
                                      0,
                                      &input,
                                      sizeof(input))) {
        fprintf(stderr, "Couldn't set render callback on output AudioUnit\n");
        exit(1);
    }

    if (noErr != AudioUnitInitialize(player.outputUnit)) {
        fprintf(stderr, "Couldn't initialize output AudioUnit\n");
        exit(1);
    }
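
    // Ask the output unit for its sample rate so the callback can compute
    // how many samples make up one cycle of the sine wave.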
    UInt32 size = sizeof(double);
    if (noErr != AudioUnitGetProperty(player.outputUnit,
                                      kAudioUnitProperty_SampleRate,
                                      kAudioUnitScope_Output,
                                      0,
                                      &player.sampleRate,
                                      &size)) {
        fprintf(stderr, "Couldn't get sample rate\n");
        exit(1);
    }
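
    // Start the output unit: from here on, Core Audio pulls audio through
    // renderAudio on its own thread while the main thread just sleeps.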
    if (noErr != AudioOutputUnitStart(player.outputUnit)) {
        fprintf(stderr, "Couldn't start output AudioUnit\n");
        exit(1);
    }

    sleep(5);  // play for 5 seconds

    // Tear down in the reverse order of setup.
    AudioOutputUnitStop(player.outputUnit);
    AudioUnitUninitialize(player.outputUnit);
    AudioComponentInstanceDispose(player.outputUnit);
    return 0;
}