問(wèn)題描述
這是我的代碼:我使用此代碼通過(guò) Audio Unit 錄制 iPhone 的輸出音頻,然后將輸出保存到 output.caf,但 output.caf 文件是空的。有誰(shuí)知道我該怎么做?輸出的音頻文件為空。
這是初始化音頻單元
// Sets up a RemoteIO audio unit for simultaneous playback and capture, then
// creates /Documents/output.caf to receive the captured samples.
// (Reconstructed: the machine translation had garbled identifiers such as
// 狀態(tài)/國(guó)旗/大小 for status/flag/sizeof, leaving the snippet uncompilable.)
-(void) initializaeOutputUnit
{
    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio unit
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));

    // Enable IO for playback
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  kOutputBus,
                                  &flag,
                                  sizeof(flag));

    // Describe format: 44.1 kHz, mono, 16-bit signed integer PCM
    AudioStreamBasicDescription audioFormat = {0};
    audioFormat.mSampleRate = 44100.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;

    // Apply format
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));

    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));

    // Set output callback
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Global,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));

    // Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));

    AudioUnitInitialize(audioUnit);
    AudioOutputUnitStart(audioUnit);

    // Initialize the audio file
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *destinationFilePath = [[[NSString alloc] initWithFormat:@"%@/output.caf", documentsDirectory] autorelease];
    NSLog(@">>> %@", destinationFilePath);
    CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

    OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);
    CFRelease(destinationURL);
    NSAssert(setupErr == noErr, @"Couldn't create file for writing");
    setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);
    NSAssert(setupErr == noErr, @"Couldn't create file for format");
    setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
    NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file");
}
錄音回調(diào)
靜態(tài) OSStatus recordingCallback (void * inRefCon,AudioUnitRenderActionFlags * ioActionFlags,const AudioTimeStamp * inTimeStamp,UInt32 inBusNumber,UInt32 inNumberFrames,音頻緩沖區(qū)列表 * ioData) {NSLog(@"回調(diào)");if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0){AudioBufferList *bufferList;//<- 用緩沖區(qū)填充它(你會(huì)想要 malloc 它,因?yàn)樗且粋€(gè)動(dòng)態(tài)長(zhǎng)度列表)EffectState *effectState = (EffectState *)inRefCon;AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit];OSStatus 狀態(tài);NSLog(@"de5eal el 回調(diào)");//下面我得到了錯(cuò)誤狀態(tài) = AudioUnitRender(rioUnit,ioActionFlags,在時(shí)間戳,inBusNumber,inNumberFrames,緩沖區(qū)列表);if (noErr != status) { NSLog(@"AudioUnitRender error");返回?zé)o錯(cuò)誤;}//現(xiàn)在,我們剛剛讀取的樣本位于 bufferList 的緩沖區(qū)中ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, bufferList);}返回?zé)o錯(cuò)誤;}//然后停止錄制-(無(wú)效)停止記錄{音頻輸出單元停止(音頻單元);AudioUnitUninitialize(audioUnit);}
在 initializaeOutputUnit 你只創(chuàng)建了你的音頻文件:
OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);
通過(guò)傳遞 0(幀)和 NULL(音頻緩沖區(qū))僅用于初始化內(nèi)部緩沖區(qū):
setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
這就是 recordingCallback 的問(wèn)題所在:
1) ioActionFlags 始終為 0,inBusNumber 始終為 1,因?yàn)檫@就是您設(shè)置回調(diào)的方式 (kInputBus = 1):
if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)
所以只需刪除 if 語(yǔ)句.
2) 從 AudioUnitRender 您將收到 -50 錯(cuò)誤,它在 CoreAudioTypes.h 中定義為 kAudio_ParamError 錯(cuò)誤.發(fā)生這種情況的原因是 bufferList 未定義且為 NULL!
> OSStatus status;
> status = AudioUnitRender(THIS->mAudioUnit, ioActionFlags, inTimeStamp, kInputBus, inNumberFrames, &bufferList);
> if (noErr != status) { printf("AudioUnitRender error: %ld", status); return noErr; }
您只需要定義一個(gè)有效的 AudioBuffer 并將其傳遞給 AudioUnitRender,這是我的工作 RenderCallback:
// The answer's working input callback (reconstructed from the garbled
// translation). Fixes folded in: the doubled assignment typo
// `ViewController* THIS = THIS = ...` and printf format casts for the
// 32-bit UInt32/OSStatus arguments passed to %lu/%ld.
static OSStatus recordingCallback (void *inRefCon,
                                   AudioUnitRenderActionFlags *ioActionFlags,
                                   const AudioTimeStamp *inTimeStamp,
                                   UInt32 inBusNumber,
                                   UInt32 inNumberFrames,
                                   AudioBufferList *ioData) {
    double timeInSeconds = inTimeStamp->mSampleTime / kSampleRate;
    printf(" %fs inBusNumber: %lu inNumberFrames: %lu ",
           timeInSeconds, (unsigned long)inBusNumber, (unsigned long)inNumberFrames);
    //printAudioUnitRenderActionFlags(ioActionFlags);

    // AudioUnitRender needs a caller-owned buffer list; build one on the stack.
    AudioBufferList bufferList;
    SInt16 samples[inNumberFrames]; // A large enough size to not have to worry about buffer overrun
    memset(&samples, 0, sizeof(samples));
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mData = samples;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);

    ViewController *THIS = (__bridge ViewController *)inRefCon;

    OSStatus status = AudioUnitRender(THIS->mAudioUnit,
                                      ioActionFlags,
                                      inTimeStamp,
                                      kInputBus,
                                      inNumberFrames,
                                      &bufferList);
    if (noErr != status) {
        printf("AudioUnitRender error: %ld", (long)status);
        return noErr;
    }

    // Now, we have the samples we just read sitting in buffers in bufferList
    ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList);
    return noErr;
}
在 stopRecord 中,您應(yīng)該使用 ExtAudioFileDispose 關(guān)閉音頻文件:
// The answer's stop method (reconstructed from the garbled translation).
// ExtAudioFileDispose flushes pending async writes and finalizes the file.
// Fix folded in: cast the 32-bit OSStatus for the %ld conversion.
- (void)stopRecording:(NSTimer *)theTimer {
    printf(" stopRecording ");
    AudioOutputUnitStop(mAudioUnit);
    AudioUnitUninitialize(mAudioUnit);
    OSStatus status = ExtAudioFileDispose(mAudioFileRef);
    printf("OSStatus(ExtAudioFileDispose): %ld ", (long)status);
}
完整源代碼:http://pastebin.com/92Fyjaye
this is my code : i use this code to record the iPhone output audio by using Audio Unit then saving the output in output.caf but the output.caf file is empty any body have idea about what shall i do ? the output audio file is empty
this is intializing the audio unit
// Sets up a RemoteIO audio unit for simultaneous capture (bus 1, kInputBus)
// and playback (bus 0, kOutputBus), starts it, and creates
// /Documents/output.caf for writing the captured samples.
// NOTE(review): none of the OSStatus values from the AudioUnitSetProperty
// calls below are ever checked -- any of them may have failed silently.
-(void) initializaeOutputUnit
{
OSStatus status;
// Describe audio component
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
// Get component
AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
// Get audio units
status = AudioComponentInstanceNew(inputComponent, &audioUnit);
// Enable IO for recording
UInt32 flag = 1;
status = AudioUnitSetProperty(audioUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
kInputBus,
&flag,
sizeof(flag));
// Enable IO for playback
status = AudioUnitSetProperty(audioUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
kOutputBus,
&flag,
sizeof(flag));
// Describe format: 44.1 kHz, mono, 16-bit signed integer packed PCM
// (2 bytes per frame / per packet, 1 frame per packet).
AudioStreamBasicDescription audioFormat={0};
audioFormat.mSampleRate = 44100.00;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerPacket = 2;
audioFormat.mBytesPerFrame = 2;
// Apply format on the app-facing side of each bus: output scope of the
// input bus (what capture delivers to us) and input scope of the output
// bus (what we feed to the speaker).
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
kInputBus,
&audioFormat,
sizeof(audioFormat));
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
kOutputBus,
&audioFormat,
sizeof(audioFormat));
// Set input callback -- recordingCallback fires when captured frames are
// ready on kInputBus; inputProcRefCon (self) is handed back as inRefCon.
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = self;
status = AudioUnitSetProperty(audioUnit,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global,
kInputBus,
&callbackStruct,
sizeof(callbackStruct));
// Set output callback (playbackCallback supplies frames to play)
callbackStruct.inputProc = playbackCallback;
callbackStruct.inputProcRefCon = self;
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Global,
kOutputBus,
&callbackStruct,
sizeof(callbackStruct));
// Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
// With allocation disabled, recordingCallback MUST hand AudioUnitRender a
// valid caller-owned AudioBufferList.
flag = 0;
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output,
kInputBus,
&flag,
sizeof(flag));
AudioUnitInitialize(audioUnit);
AudioOutputUnitStart(audioUnit);
// Initialize the audio file.
// NOTE(review): `autorelease` implies manual reference counting (pre-ARC).
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] autorelease];
NSLog(@">>> %@", destinationFilePath);
CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);
// NOTE(review): the file is named output.caf but created as
// kAudioFileWAVEType; use kAudioFileCAFType (or name it .wav) -- confirm intent.
OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);
CFRelease(destinationURL);
NSAssert(setupErr == noErr, @"Couldn't create file for writing");
setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);
NSAssert(setupErr == noErr, @"Couldn't create file for format");
// Priming call: 0 frames / NULL buffer only initializes ExtAudioFile's
// internal async-write machinery (see the answer below).
setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file");
}
the recording call back
// Input callback for the RemoteIO unit: pulls the freshly captured frames
// with AudioUnitRender and appends them to the output file.
//
// Fixes applied (matching the accepted answer below):
// 1) Removed the guard `if (*ioActionFlags == kAudioUnitRenderAction_PostRender
//    && inBusNumber == 0)` -- it was always false (the input callback is
//    invoked with flags == 0 on bus kInputBus = 1), so nothing was ever written.
// 2) AudioUnitRender was handed an UNINITIALIZED AudioBufferList*, which made
//    it fail with -50 (kAudio_ParamError). It needs a valid, caller-owned
//    buffer list, which is mandatory here anyway because
//    kAudioUnitProperty_ShouldAllocateBuffer was set to 0 during setup.
static OSStatus recordingCallback (void *inRefCon,
                                   AudioUnitRenderActionFlags *ioActionFlags,
                                   const AudioTimeStamp *inTimeStamp,
                                   UInt32 inBusNumber,
                                   UInt32 inNumberFrames,
                                   AudioBufferList *ioData) {
    NSLog(@"callback");

    // Stack-allocated buffer list: 16-bit mono, matching the ASBD applied in
    // initializaeOutputUnit (mBitsPerChannel = 16, mChannelsPerFrame = 1).
    AudioBufferList bufferList;
    SInt16 samples[inNumberFrames];
    memset(samples, 0, sizeof(samples));
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);
    bufferList.mBuffers[0].mData = samples;

    // NOTE(review): inRefCon is cast both to EffectState* and MixerHostAudio*
    // here, as in the original -- it cannot be both. inputProcRefCon was set
    // to `self`, so the MixerHostAudio cast is presumably the valid one;
    // confirm how audioFileRef should actually be reached from it.
    EffectState *effectState = (EffectState *)inRefCon;
    AudioUnit rioUnit = [(MixerHostAudio *)inRefCon getAudioUnit];

    OSStatus status = AudioUnitRender(rioUnit,
                                      ioActionFlags,
                                      inTimeStamp,
                                      inBusNumber,
                                      inNumberFrames,
                                      &bufferList);
    if (noErr != status) { NSLog(@"AudioUnitRender error"); return noErr; }

    // Now, we have the samples we just read sitting in buffers in bufferList;
    // queue them for writing to the output file.
    ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, &bufferList);
    return noErr;
}
// then stop Recording
// Stops capture and tears down the audio unit.
// Fix: close the output file with ExtAudioFileDispose. Without it, pending
// async writes are never flushed and the file header is never finalized --
// one reason output.caf ended up empty.
- (void) stopRecord
{
    AudioOutputUnitStop(audioUnit);
    AudioUnitUninitialize(audioUnit);
    OSStatus status = ExtAudioFileDispose(effectState.audioFileRef);
    if (noErr != status) {
        NSLog(@"ExtAudioFileDispose error: %ld", (long)status);
    }
}
In initializaeOutputUnit you only created your audio file:
OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);
by passing 0 (frames) and NULL (audiobuffer) is just for init internal buffers:
setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
That's what's going wrong in recordingCallback:
1) ioActionFlags are always 0 and inBusNumber are always 1, because thats how you setup your callback (kInputBus = 1):
if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)
so just remove the if statement.
2) From AudioUnitRender you will receive -50 error, which is defined in CoreAudioTypes.h as an kAudio_ParamError error. This happens by bufferList is not defined and NULL!
OSStatus status; status = AudioUnitRender(THIS->mAudioUnit, ioActionFlags, inTimeStamp, kInputBus, inNumberFrames, &bufferList); if (noErr != status) { printf("AudioUnitRender error: %ld", status); return noErr; }
You just need to define an valid AudioBuffer and pass it to AudioUnitRender, this is my working RenderCallback:
static OSStatus recordingCallback (void * inRefCon, AudioUnitRenderActionFlags * ioActionFlags, const AudioTimeStamp * inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList * ioData) { double timeInSeconds = inTimeStamp->mSampleTime / kSampleRate; printf(" %fs inBusNumber: %lu inNumberFrames: %lu ", timeInSeconds, inBusNumber, inNumberFrames); //printAudioUnitRenderActionFlags(ioActionFlags); AudioBufferList bufferList; SInt16 samples[inNumberFrames]; // A large enough size to not have to worry about buffer overrun memset (&samples, 0, sizeof (samples)); bufferList.mNumberBuffers = 1; bufferList.mBuffers[0].mData = samples; bufferList.mBuffers[0].mNumberChannels = 1; bufferList.mBuffers[0].mDataByteSize = inNumberFrames*sizeof(SInt16); ViewController* THIS = THIS = (__bridge ViewController *)inRefCon; OSStatus status; status = AudioUnitRender(THIS->mAudioUnit, ioActionFlags, inTimeStamp, kInputBus, inNumberFrames, &bufferList); if (noErr != status) { printf("AudioUnitRender error: %ld", status); return noErr; } // Now, we have the samples we just read sitting in buffers in bufferList ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList); return noErr; }
In stopRecord you should close the audio file with ExtAudioFileDispose:
- (void)stopRecording:(NSTimer*)theTimer { printf(" stopRecording "); AudioOutputUnitStop(mAudioUnit); AudioUnitUninitialize(mAudioUnit); OSStatus status = ExtAudioFileDispose(mAudioFileRef); printf("OSStatus(ExtAudioFileDispose): %ld ", status); }
Full source code: http://pastebin.com/92Fyjaye
這篇關(guān)于誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音的文章就介紹到這了,希望我們推薦的答案對(duì)大家有所幫助,也希望大家多多支持html5模板網(wǎng)!