久久久久久久av_日韩在线中文_看一级毛片视频_日本精品二区_成人深夜福利视频_武道仙尊动漫在线观看

誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音

Can anybody help me in recording iPhone output sound through Audio Unit(誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音)
本文介紹了誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音的處理方法,對(duì)大家解決問(wèn)題具有一定的參考價(jià)值,需要的朋友們下面隨著小編來(lái)一起學(xué)習(xí)吧!

問(wèn)題描述

限時(shí)送ChatGPT賬號(hào)..

這是我的代碼:我使用此代碼通過(guò) Audio Unit 錄制 iPhone 輸出音頻然后將輸出保存在 output.caf 但 output.caf 文件為空任何人都知道我該怎么做?輸出音頻文件為空

這是初始化音頻單元

-(void) 初始化OutputUnit{OSStatus 狀態(tài);//描述音頻組件AudioComponentDescription 描述;desc.componentType = kAudioUnitType_Output;desc.componentSubType = kAudioUnitSubType_RemoteIO;desc.componentFlags = 0;desc.componentFlagsMask = 0;desc.componentManufacturer = kAudioUnitManufacturer_Apple;//獲取組件AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);//獲取音頻單元狀態(tài) = AudioComponentInstanceNew(inputComponent, &audioUnit);//啟用 IO 進(jìn)行錄制UInt32 標(biāo)志 = 1;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_EnableIO,kAudioUnitScope_Input,kInputBus,&國(guó)旗,大小(標(biāo)志));//啟用 IO 進(jìn)行播放狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_EnableIO,kAudioUnitScope_Output,k輸出總線,&國(guó)旗,大小(標(biāo)志));//描述格式AudioStreamBasicDescription audioFormat={0};音頻格式.mSampleRate = 44100.00;audioFormat.mFormatID = kAudioFormatLinearPCM;audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger |kAudioFormatFlagIsPacked;audioFormat.mFramesPerPacket = 1;audioFormat.mChannelsPerFrame = 1;audioFormat.mBitsPerChannel = 16;audioFormat.mBytesPerPacket = 2;audioFormat.mBytesPerFrame = 2;//應(yīng)用格式狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_StreamFormat,kAudioUnitScope_Output,kInputBus,&音頻格式,大小(音頻格式));狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_StreamFormat,kAudioUnitScope_Input,k輸出總線,&音頻格式,大小(音頻格式));//設(shè)置輸入回調(diào)AURenderCallbackStruct 回調(diào)結(jié)構(gòu);callbackStruct.inputProc = 錄音回調(diào);callbackStruct.inputProcRefCon = self;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_SetInputCallback,kAudioUnitScope_Global,kInputBus,&回調(diào)結(jié)構(gòu),sizeof(callbackStruct));//設(shè)置輸出回調(diào)callbackStruct.inputProc = 播放回調(diào);callbackStruct.inputProcRefCon = self;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_SetRenderCallback,kAudioUnitScope_Global,k輸出總線,&回調(diào)結(jié)構(gòu),sizeof(callbackStruct));//禁用記錄器的緩沖區(qū)分配(可選 - 如果我們想傳入我們自己的,請(qǐng)執(zhí)行此操作)標(biāo)志 = 0;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_ShouldAllocateBuffer,kAudioUnitScope_Output,kInputBus,&國(guó)旗,大小(標(biāo)志));音頻單元初始化(音頻單元);音頻輸出單元開(kāi)始(音頻單元);//初始化 le fichier 音頻NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);NSString *documentsDirectory = [paths objectAtIndex:0];NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] ??autorelease];NSLog(@">>>%@",destinationFilePath);CFURLRef 目的地URL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);CFRelease(destinationURL);NSAssert(setupErr == noErr, @"無(wú)法創(chuàng)建寫(xiě)入文件");setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);NSAssert(setupErr == noErr, @"無(wú)法為格式創(chuàng)建文件");setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);NSAssert(setupErr == noErr, @"無(wú)法初始化音頻文件的寫(xiě)入緩沖區(qū)");}

錄音回調(diào)

靜態(tài) OSStatus recordingCallback (void * inRefCon,AudioUnitRenderActionFlags * ioActionFlags,const AudioTimeStamp * inTimeStamp,UInt32 inBusNumber,UInt32 inNumberFrames,音頻緩沖區(qū)列表 * ioData) {NSLog(@"回調(diào)");if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0){AudioBufferList *bufferList;//<- 用緩沖區(qū)填充它(你會(huì)想要 malloc 它,因?yàn)樗且粋€(gè)動(dòng)態(tài)長(zhǎng)度列表)EffectState *effectState = (EffectState *)inRefCon;AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit];OSStatus 狀態(tài);NSLog(@"de5eal el 回調(diào)");//下面我得到了錯(cuò)誤狀態(tài) = AudioUnitRender(rioUnit,ioActionFlags,在時(shí)間戳,inBusNumber,inNumberFrames,緩沖區(qū)列表);if (noErr != status) { NSLog(@"AudioUnitRender error");返回?zé)o錯(cuò)誤;}//現(xiàn)在,我們剛剛讀取的樣本位于 bufferList 的緩沖區(qū)中ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, bufferList);}返回?zé)o錯(cuò)誤;}//然后停止錄制-(無(wú)效)停止記錄{音頻輸出單元停止(音頻單元);AudioUnitUninitialize(audioUnit);}

解決方案

initializaeOutputUnit 你只創(chuàng)建了你的音頻文件:

OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);

通過(guò)傳遞 0(幀)和 NULL(音頻緩沖區(qū))僅用于初始化內(nèi)部緩沖區(qū):

setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);

這就是 recordingCallback 的問(wèn)題所在:

1) ioActionFlags 始終為 0,inBusNumber 始終為 1,因?yàn)檫@就是您設(shè)置回調(diào)的方式 (kInputBus = 1):

if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)

所以只需刪除 if 語(yǔ)句.

2) 從 AudioUnitRender 您將收到 -50 錯(cuò)誤,它在 CoreAudioTypes.h 中定義為 kAudio_ParamError 錯(cuò)誤.發(fā)生這種情況的原因是 bufferList 未定義且為 NULL!

<塊引用>

 OSStatus 狀態(tài);狀態(tài) = AudioUnitRender(這個(gè)->mAudioUnit,ioActionFlags,在時(shí)間戳,kInputBus,inNumberFrames,&緩沖區(qū)列表);if (noErr != status) {printf("AudioUnitRender 錯(cuò)誤:%ld", status);返回?zé)o錯(cuò)誤;}

您只需要定義一個(gè)有效的 AudioBuffer 并將其傳遞給 AudioUnitRender,這是我的工作 RenderCallback:

<塊引用>

 靜態(tài) OSStatus recordingCallback (void * inRefCon,AudioUnitRenderActionFlags * ioActionFlags,const AudioTimeStamp * inTimeStamp,UInt32 inBusNumber,UInt32 inNumberFrames,音頻緩沖區(qū)列表 * ioData) {雙倍 timeInSeconds = inTimeStamp->mSampleTime/kSampleRate;printf("
%fs inBusNumber: %lu inNumberFrames: %lu ", timeInSeconds, inBusNumber, inNumberFrames);//printAudioUnitRenderActionFlags(ioActionFlags);AudioBufferList 緩沖區(qū)列表;SInt16 個(gè)樣本[inNumberFrames];//足夠大的大小,不必?fù)?dān)心緩沖區(qū)溢出memset (&samples, 0, sizeof (samples));bufferList.mNumberBuffers = 1;bufferList.mBuffers[0].mData = 樣本;bufferList.mBuffers[0].mNumberChannels = 1;bufferList.mBuffers[0].mDataByteSize = inNumberFrames*sizeof(SInt16);ViewController* THIS = THIS = (__bridge ViewController *)inRefCon;OSStatus 狀態(tài);狀態(tài) = AudioUnitRender(這個(gè)->mAudioUnit,ioActionFlags,在時(shí)間戳,kInputBus,inNumberFrames,&緩沖區(qū)列表);if (noErr != status) {printf("AudioUnitRender 錯(cuò)誤:%ld", status);返回?zé)o錯(cuò)誤;}//現(xiàn)在,我們剛剛讀取的樣本位于 bufferList 的緩沖區(qū)中ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList);返回?zé)o錯(cuò)誤;}

stopRecord 中,您應(yīng)該使用 ExtAudioFileDispose 關(guān)閉音頻文件:

<塊引用>

 - (void)stopRecording:(NSTimer*)theTimer {printf("
停止錄制
");AudioOutputUnitStop(mAudioUnit);AudioUnitUninitialize(mAudioUnit);OSStatus 狀態(tài) = ExtAudioFileDispose(mAudioFileRef);printf("OSStatus(ExtAudioFileDispose): %ld
", status);}

完整源代碼:http://pastebin.com/92Fyjaye

this is my code : i use this code to record the iPhone output audio by using Audio Unit then saving the output in output.caf but the output.caf file is empty any body have idea about what shall i do ? the output audio file is empty

this is intializing the audio unit

-(void) initializaeOutputUnit
{
    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Input, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));

    // Enable IO for playback
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Output, 
                                  kOutputBus,
                                  &flag, 
                                  sizeof(flag));

    // Describe format
    AudioStreamBasicDescription audioFormat={0};
    audioFormat.mSampleRate         = 44100.00;
    audioFormat.mFormatID           = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket    = 1;
    audioFormat.mChannelsPerFrame   = 1;
    audioFormat.mBitsPerChannel     = 16;
    audioFormat.mBytesPerPacket     = 2;
    audioFormat.mBytesPerFrame      = 2;

    // Apply format
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Output, 
                                  kInputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Input, 
                                  kOutputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));


    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_SetInputCallback, 
                                  kAudioUnitScope_Global, 
                                  kInputBus, 
                                  &callbackStruct, 
                                  sizeof(callbackStruct));

    // Set output callback
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_SetRenderCallback, 
                                  kAudioUnitScope_Global, 
                                  kOutputBus,
                                  &callbackStruct, 
                                  sizeof(callbackStruct));

    // Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));


    AudioUnitInitialize(audioUnit);
    AudioOutputUnitStart(audioUnit);


    // On initialise le fichier audio
    NSArray  *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] autorelease];
    NSLog(@">>> %@", destinationFilePath);
    CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

    OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);  
    CFRelease(destinationURL);
    NSAssert(setupErr == noErr, @"Couldn't create file for writing");

    setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);
    NSAssert(setupErr == noErr, @"Couldn't create file for format");

    setupErr =  ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
    NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file");

   }

the recording call back

static OSStatus recordingCallback       (void *                         inRefCon,
                                         AudioUnitRenderActionFlags *      ioActionFlags,
                                         const AudioTimeStamp *            inTimeStamp,
                                         UInt32                            inBusNumber,
                                         UInt32                            inNumberFrames,
                                         AudioBufferList *                 ioData) {
    NSLog(@"callback");
   if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0) 
   {
        AudioBufferList *bufferList; // <- Fill this up with buffers (you will want to malloc it, as it's a dynamic-length list)

        EffectState *effectState = (EffectState *)inRefCon;
       AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit];

        OSStatus status;
        NSLog(@"de5eal el call back ");
        // BELOW I GET THE ERROR
        status = AudioUnitRender( rioUnit,     
                                 ioActionFlags, 
                                 inTimeStamp, 
                                 inBusNumber, 
                                 inNumberFrames, 
                                 bufferList);

        if (noErr != status) { NSLog(@"AudioUnitRender error"); return noErr;}

        // Now, we have the samples we just read sitting in buffers in bufferList
        ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, bufferList);

    }
    return noErr;     
}




// then stop Recording 
- (void) stopRecord
{

    AudioOutputUnitStop(audioUnit);
    AudioUnitUninitialize(audioUnit);
}

解決方案

In initializaeOutputUnit you only created your audio file:

OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);

by passing 0 (frames) and NULL (audiobuffer) is just for init internal buffers:

setupErr =  ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);

That's what's going wrong in recordingCallback:

1) ioActionFlags are always 0 and inBusNumber are always 1, because thats how you setup your callback (kInputBus = 1):

if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)

so just remove the if statement.

2) From AudioUnitRender you will receive -50 error, which is defined in CoreAudioTypes.h as an kAudio_ParamError error. This happens by bufferList is not defined and NULL!

 OSStatus status; 
 status = AudioUnitRender(THIS->mAudioUnit,     
                          ioActionFlags, 
                          inTimeStamp, 
                          kInputBus, 
                          inNumberFrames, 
                          &bufferList);

 if (noErr != status) {
      printf("AudioUnitRender error: %ld", status);   
      return noErr; 
 }

You just need to define an valid AudioBuffer and pass it to AudioUnitRender, this is my working RenderCallback:

  static OSStatus recordingCallback       (void *                      inRefCon,
                                           AudioUnitRenderActionFlags *      ioActionFlags,
                                           const AudioTimeStamp *            inTimeStamp,
                                           UInt32                            inBusNumber,
                                           UInt32                            inNumberFrames,
                                           AudioBufferList *                 ioData)  {
      double timeInSeconds = inTimeStamp->mSampleTime / kSampleRate;
      printf("
%fs inBusNumber: %lu inNumberFrames: %lu ", timeInSeconds, inBusNumber, inNumberFrames);
      //printAudioUnitRenderActionFlags(ioActionFlags);

      AudioBufferList bufferList;

      SInt16 samples[inNumberFrames]; // A large enough size to not have to worry about buffer overrun
      memset (&samples, 0, sizeof (samples));

      bufferList.mNumberBuffers = 1;
      bufferList.mBuffers[0].mData = samples;
      bufferList.mBuffers[0].mNumberChannels = 1;
      bufferList.mBuffers[0].mDataByteSize = inNumberFrames*sizeof(SInt16);

      ViewController* THIS = THIS = (__bridge ViewController *)inRefCon;

      OSStatus status;
      status = AudioUnitRender(THIS->mAudioUnit,     
                               ioActionFlags, 
                               inTimeStamp, 
                               kInputBus, 
                               inNumberFrames, 
                               &bufferList);

      if (noErr != status) {

          printf("AudioUnitRender error: %ld", status); 
          return noErr;
      }

      // Now, we have the samples we just read sitting in buffers in bufferList
      ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList);

      return noErr;      
 }

In stopRecord you should close the audio file with ExtAudioFileDispose:

  - (void)stopRecording:(NSTimer*)theTimer {
      printf("
stopRecording
");
      AudioOutputUnitStop(mAudioUnit);
      AudioUnitUninitialize(mAudioUnit);

      OSStatus status = ExtAudioFileDispose(mAudioFileRef);
      printf("OSStatus(ExtAudioFileDispose): %ld
", status); 
 }

Full source code: http://pastebin.com/92Fyjaye

這篇關(guān)于誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音的文章就介紹到這了,希望我們推薦的答案對(duì)大家有所幫助,也希望大家多多支持html5模板網(wǎng)!

【網(wǎng)站聲明】本站部分內(nèi)容來(lái)源于互聯(lián)網(wǎng),旨在幫助大家更快的解決問(wèn)題,如果有圖片或者內(nèi)容侵犯了您的權(quán)益,請(qǐng)聯(lián)系我們刪除處理,感謝您的支持!

相關(guān)文檔推薦

Can#39;t change target membership visibility in Xcode 4.5(無(wú)法更改 Xcode 4.5 中的目標(biāo)成員身份可見(jiàn)性)
UITableView: Handle cell selection in a mixed cell table view static and dynamic cells(UITableView:在混合單元格表視圖靜態(tài)和動(dòng)態(tài)單元格中處理單元格選擇)
How to remove Address Bar in Safari in iOS?(如何在 iOS 中刪除 Safari 中的地址欄?)
iOS 5 SDK is gone after upgrade to Xcode 4.5(升級(jí)到 Xcode 4.5 后,iOS 5 SDK 消失了)
Having trouble creating UIImage from CIImage in iOS5(在 iOS5 中從 CIImage 創(chuàng)建 UIImage 時(shí)遇到問(wèn)題)
Open target=quot;_blankquot; links outside of UIWebView in Safari(打開(kāi)目標(biāo)=“_blank;Safari 中 UIWebView 之外的鏈接)
主站蜘蛛池模板: 狠狠躁天天躁夜夜躁婷婷老牛影视 | 亚洲精品一区二区三区蜜桃久 | 欧美久久一级特黄毛片 | 亚洲精选一区二区 | 日韩播放 | 亚洲免费在线 | 国产一级片精品 | а天堂中文最新一区二区三区 | 欧美乱人伦视频 | 久久国产精品一区二区三区 | 精品久久久久久久人人人人传媒 | 成人精品一区二区三区中文字幕 | 91久久北条麻妃一区二区三区 | 亚洲精品永久免费 | 伊人狠狠干 | 亚洲视频在线一区 | 日韩av一区二区在线观看 | 在线观看成年视频 | 亚洲麻豆| 免费在线观看一区二区三区 | 日本一区二区在线视频 | 在线观看av网站永久 | 99热视| 国产超碰人人爽人人做人人爱 | 亚洲精品aⅴ| 久久出精品 | 中文在线播放 | 99精品网站 | 国产精品一区在线 | 国产做a爱免费视频 | 欧美中文在线 | 综合第一页 | 成人综合视频在线观看 | 精品真实国产乱文在线 | 秋霞在线一区二区 | 自拍在线 | 成人免费视频网站在线观看 | 成人免费淫片aa视频免费 | 欧美一区久久 | 亚洲成人一区二区 | 日韩精品一区二区三区视频播放 |