iOS: real-time audio recording with AVAudioSession and AudioQueue (PCM → local .wav file)


//
//  AudioRecordManager.h
//  Demo
//
//  Created by tao on 2020/7/8.
//  Copyright © 2020 idst. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>

@protocol AudioRecorderManagerDelegate <NSObject>

/**
 * @discuss Called when the recorder has started and audio capture is in progress.
 */
-(void)recorderDidStart;

/**
 * @discuss Called when the recorder has stopped.
 */
-(void)recorderDidStop;

/**
 * @discuss Called each time a frame of recorded audio is ready, e.g. as input for VAD or speech recognition. Note: this is invoked on the AudioQueue callback thread, so do not do time-consuming work here!!!
 */
-(void)voiceRecorded:(NSData *_Nonnull) frame;

/**
 * @discuss Called with the current input volume of the recorder.
 */
-(void)voiceVolume:(NSInteger)volume;


@end

NS_ASSUME_NONNULL_BEGIN

@interface AudioRecordManager : NSObject

@property (nonatomic, weak) id<AudioRecorderManagerDelegate> delegate;

@property (nonatomic, assign) BOOL isRecording;

+ (instancetype)sharedManager;

- (void)start;
- (void)stop;


@end

NS_ASSUME_NONNULL_END
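
For reference, here is a minimal usage sketch (not part of the original post) of how a caller might adopt the delegate and drive the recorder; the view controller name is hypothetical:

#import <UIKit/UIKit.h>
#import "AudioRecordManager.h"

@interface RecorderViewController : UIViewController <AudioRecorderManagerDelegate>
@end

@implementation RecorderViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [AudioRecordManager sharedManager].delegate = self;
}

- (IBAction)toggleRecording:(id)sender {
    AudioRecordManager *manager = [AudioRecordManager sharedManager];
    manager.isRecording ? [manager stop] : [manager start];
}

#pragma mark - AudioRecorderManagerDelegate

- (void)recorderDidStart {
    NSLog(@"recording started");
}

- (void)recorderDidStop {
    NSLog(@"recording stopped");
}

- (void)voiceRecorded:(NSData *)frame {
    // a 640-byte, 20 ms PCM frame; hand it off to VAD/ASR on another queue
    // rather than blocking this callback
}

- (void)voiceVolume:(NSInteger)volume {
    // update a level meter (dispatch to the main thread before touching UI)
}

@end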

 
 
//
//  AudioRecordManager.m
//  Demo
//
//  Created by tao on 2020/7/8.
//  Copyright © 2020 idst. All rights reserved.
//

#import "AudioRecordManager.h"

#define BufferSeconds 0.02  // 20ms



@interface AudioRecordManager () {
    AudioQueueRef _audioQRef;                        // audio queue used for recording
    AudioStreamBasicDescription _audioRecordFormat;  // recording format (linear PCM)
    AudioQueueBufferRef _audioQBufferRefs[3];        // audio queue buffers
}

@property (nonatomic, assign) AudioFileID recordFileID; // output audio file handle
@property (nonatomic, assign) SInt64 recordPacket;     // index of the next packet to write

@property (nonatomic, copy) NSString *originAudioSessionCategory; // session category before recording
@property (nonatomic, strong) NSMutableData *bufferedAudioData; // buffers PCM until a full frame accumulates

@end

@implementation AudioRecordManager


/*!
 @discussion
 AudioQueue input callback, invoked each time a buffer has been filled with recorded audio.
 @param      inAQ
             The audio queue that recorded the data.
 @param      inBuffer
             The buffer containing the newly recorded audio; consume or copy its contents before the buffer is re-enqueued.
 @param      inStartTime
             The timestamp of the first sample in the buffer.
 @param      inNumberPacketDescriptions
             The number of packet descriptions. Packet descriptions only apply to VBR (variable bitrate) formats, where they must be passed on to AudioFileWritePackets. CBR (constant bitrate) formats such as linear PCM do not use them, and inPacketDescs is passed as NULL.
 */
static void inputAudioQueueBufferHandler(void * __nullable               inUserData,
                                         AudioQueueRef                   inAQ,
                                         AudioQueueBufferRef             inBuffer,
                                         const AudioTimeStamp *          inStartTime,
                                         UInt32                          inNumberPacketDescriptions,
                                         const AudioStreamPacketDescription * __nullable inPacketDescs) {
//    AudioRecordManager *manager = (__bridge AudioRecordManager*) inUserData;
    AudioRecordManager *manager = [AudioRecordManager sharedManager];
    
    
    if (manager.isRecording) {
        UInt32 packets = inNumberPacketDescriptions;
        // For CBR formats such as linear PCM the packet count may arrive as 0;
        // derive it from the buffer size (as in Apple's SpeakHere sample).
        if (packets == 0 && manager->_audioRecordFormat.mBytesPerPacket != 0) {
            packets = inBuffer->mAudioDataByteSize / manager->_audioRecordFormat.mBytesPerPacket;
        }
        
        // write the recorded packets to the file
        if (packets > 0) {
            AudioFileWritePackets(manager.recordFileID, FALSE, inBuffer->mAudioDataByteSize, inPacketDescs, manager.recordPacket, &packets, inBuffer->mAudioData);
            manager.recordPacket += packets;
        }
        
        NSData *data = [manager bufferPCMData:inBuffer];
        if (data) {
            [manager handleAudioData:data];
        }
        
        // re-enqueue the buffer so the queue can reuse it
        AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
    } else {
        // recording has stopped; do not re-enqueue
    }
}
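
// Note: this callback runs on an internal AudioQueue thread, not the main thread,
// so bufferPCMData: and handleAudioData: (and the delegate calls they trigger)
// execute on that thread as well.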



+ (instancetype)sharedManager {
    static AudioRecordManager *manager = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        manager = [[AudioRecordManager alloc] init];
    });
    return manager;
}

- (instancetype)init {
    if (self = [super init]) {
        // register for app resign/active notifications for recorder state
        [self registerForBackgroundNotifications];
        
        [self initAudioFormat];
    }
    return self;
}


- (void)initAudioFormat {
    _audioRecordFormat.mFormatID = kAudioFormatLinearPCM; // linear PCM
    _audioRecordFormat.mSampleRate = 16000;               // sample rate
    _audioRecordFormat.mChannelsPerFrame = 1;             // mono
    _audioRecordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    _audioRecordFormat.mBitsPerChannel = 16;              // bits per sample
    _audioRecordFormat.mBytesPerPacket =  (_audioRecordFormat.mBitsPerChannel >> 3) * _audioRecordFormat.mChannelsPerFrame; // bytes per packet
    _audioRecordFormat.mBytesPerFrame = _audioRecordFormat.mBytesPerPacket; // bytes per frame
    _audioRecordFormat.mFramesPerPacket = 1;              // frames per packet (always 1 for PCM)
}
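
// Worked example for the format above (16 kHz, 16-bit, mono PCM):
// one frame = 16/8 * 1 = 2 bytes, so a 20 ms buffer holds
// 16000 * 0.02 = 320 frames = 320 * 2 = 640 bytes.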


- (void)initAudioQueue {
    OSStatus status = AudioQueueNewInput(&_audioRecordFormat, inputAudioQueueBufferHandler, (__bridge void * _Nullable)(self), NULL, NULL, 0, &_audioQRef);
    if (status != noErr) {
        NSLog(@"⚠️=== failed to create the audio queue");
        return;
    }
        
    // allocate the recording buffers and enqueue them
    // (computeRecordBufferSize:seconds: already returns a byte count)
    int bufferByteSize = [self computeRecordBufferSize:&_audioRecordFormat seconds:BufferSeconds];
    for (int i = 0; i < 3; i++) {
        AudioQueueAllocateBuffer(_audioQRef, bufferByteSize, &_audioQBufferRefs[i]);
        AudioQueueEnqueueBuffer(_audioQRef, _audioQBufferRefs[i], 0, NULL);
    }
    
    AudioQueueSetParameter(_audioQRef, kAudioQueueParam_Volume, 1.0f);
}

- (void)initFilePath {
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"test.wav"];
    CFURLRef url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (__bridge CFStringRef)path, kCFURLPOSIXPathStyle, false);
    AudioFileCreateWithURL(url, kAudioFileWAVEType, &_audioRecordFormat, kAudioFileFlags_EraseFile, &_recordFileID);
    CFRelease(url);
}
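
// Note: files in NSTemporaryDirectory() may be purged by the system; if the
// recording must persist, move it (e.g. to the Documents directory) after stopping.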


- (void)start {
    // requestRecordPermission: has been available since iOS 7, so it can be called directly
    [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
        if (granted) {
            [self startRecord];
        } else {
            NSLog(@"====> Microphone permission denied!!!");
        }
    }];
}

- (void)stop {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            
        [self stopRecord];
        
        [[AVAudioSession sharedInstance] setCategory:self.originAudioSessionCategory error:nil];
        [[AVAudioSession sharedInstance] setActive:NO error:nil];
        
        self.bufferedAudioData = nil;
        
        if(self.delegate && [self.delegate respondsToSelector:@selector(recorderDidStop)]){
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.delegate recorderDidStop];
            });
        }
    });
}
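
// Note: -stop tears the queue down asynchronously on a global queue and delivers
// recorderDidStop on the main queue, so it is safe to call from UI code.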


- (void)startRecord {

    _recordPacket = 0;
    
    [self initFilePath];
    [self initAudioQueue];
    
    
    // save the current session category (restored when recording stops), then activate the session for recording
    self.originAudioSessionCategory = [[AVAudioSession sharedInstance] category];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];
     
    OSStatus status = AudioQueueStart(_audioQRef, NULL);
    if (status != noErr) {
        NSLog(@"⚠️===     ");
        return;
    }
    
    self.isRecording = YES;
    self.bufferedAudioData = [NSMutableData data];
    
    if(self.delegate && [self.delegate respondsToSelector:@selector(recorderDidStart)]){
        [self.delegate recorderDidStart];
    }
    
    // enable level metering so the input volume can be queried
    UInt32 val = 1;
    AudioQueueSetProperty(_audioQRef, kAudioQueueProperty_EnableLevelMetering, &val, sizeof(UInt32));
}


- (void)stopRecord {
    if (self.isRecording) {
        self.isRecording = NO;

        AudioQueueStop(_audioQRef, true);
        AudioQueueDispose(_audioQRef, true);
        _audioQRef = NULL; // avoid disposing the queue again in dealloc
        AudioFileClose(self.recordFileID);
        self.recordFileID = NULL;
    }
}


// current input level (peak power)
- (float)getCurrentAudioPower {
    float channelAvg = 0;

    UInt32 dataSize = sizeof(AudioQueueLevelMeterState) * _audioRecordFormat.mChannelsPerFrame;
    AudioQueueLevelMeterState *levelMeter = (AudioQueueLevelMeterState *)malloc(dataSize);
    //kAudioQueueProperty_EnableLevelMetering getter
    OSStatus status = AudioQueueGetProperty(_audioQRef, kAudioQueueProperty_CurrentLevelMeter, levelMeter, &dataSize);
    if (status == noErr) {
        for (int i = 0; i < _audioRecordFormat.mChannelsPerFrame; i++) {
            channelAvg += levelMeter[i].mPeakPower;  // peak power of this channel
        }
        channelAvg /= _audioRecordFormat.mChannelsPerFrame; // average across channels (mono here)
    }
    free(levelMeter);
    return channelAvg;
}
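
// Note: mPeakPower from kAudioQueueProperty_CurrentLevelMeter is a linear value
// in [0, 1]; for a decibel reading, query kAudioQueueProperty_CurrentLevelMeterDB
// instead, which reports the level in dBFS.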


/*
 * The AudioQueue callback does not deliver fixed-size frames, so the data is
 * buffered here and emitted in fixed 20 ms frames.
 * 640 bytes = 320 frames of 16-bit mono = 20 ms at 16 kHz.
 */

- (NSData *)bufferPCMData:(AudioQueueBufferRef)audioQBufferRef {
    
    int bfSize = [self computeRecordBufferSize:&_audioRecordFormat seconds:BufferSeconds];
    
    NSInteger nBufferSpaceLeft = bfSize - self.bufferedAudioData.length;
    
    NSInteger nBytesReceived = audioQBufferRef->mAudioDataByteSize;
    NSInteger nBytesToCopy = nBufferSpaceLeft >= nBytesReceived ? nBytesReceived : nBufferSpaceLeft;
    NSInteger nBytesLeft = nBytesReceived - nBytesToCopy;
    
    [self.bufferedAudioData appendBytes:audioQBufferRef->mAudioData length:nBytesToCopy];
    if (self.bufferedAudioData.length == bfSize){
        // buffer is full
        NSData *frame = [NSData dataWithData:self.bufferedAudioData];
        // reset the buffer
        self.bufferedAudioData.length = 0;
        // save the left partial data
        if(nBytesLeft > 0){
            [self.bufferedAudioData appendBytes:((const char *)audioQBufferRef->mAudioData + nBytesToCopy) length:nBytesLeft];
        }
        return frame;
    }else{
        // still buffering; not enough data for a full frame yet
    }
    
    return nil;
}

/*
 * FIXME - these delegate callbacks fire on the AudioQueue callback thread;
 * should they be dispatched to the main thread instead?
 */
- (void)handleAudioData:(NSData *)audioData {
    float audioPower = [self getCurrentAudioPower];
    
    if (self.delegate && [self.delegate respondsToSelector:@selector(voiceVolume:)]) {
        [self.delegate voiceVolume:audioPower * 1000];
    }
    
    if (self.delegate && [self.delegate respondsToSelector:@selector(voiceRecorded:)]) {
        [self.delegate voiceRecorded:audioData];
    }
}


- (int)computeRecordBufferSize:(const AudioStreamBasicDescription*)format seconds:(float)seconds {
    
    int packets, frames, bytes = 0;
    
    frames = (int)ceil(seconds * format->mSampleRate);
    
    if (format->mBytesPerFrame > 0) {
        bytes = frames * format->mBytesPerFrame;
    } else {
        UInt32 maxPacketSize = 0;
        if (format->mBytesPerPacket > 0) {
            maxPacketSize = format->mBytesPerPacket;    // constant packet size
        }
        
        if (format->mFramesPerPacket > 0) {
            packets = frames / format->mFramesPerPacket;
        } else {
            packets = frames;    // worst-case scenario: 1 frame in a packet
        }
        
        if (packets == 0) {      // sanity check
            packets = 1;
        }
        
        bytes = packets * maxPacketSize;
    }
    return bytes;
}

- (void)dealloc {
    [self unregisterForBackgroundNotifications];
    
    if (_audioQRef != NULL) {
        AudioQueueStop(_audioQRef, true);
        AudioQueueDispose(_audioQRef, true);
        AudioFileClose(self.recordFileID);
    }
}



#pragma mark - Background Notifications
- (void)registerForBackgroundNotifications {
    // NSExtensionHost* notifications only fire in app extensions; in an app
    // target, the UIApplication equivalents are the ones that are posted.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(appResignActive)
                                                 name:UIApplicationWillResignActiveNotification
                                               object:nil];
    
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(appEnterForeground)
                                                 name:UIApplicationWillEnterForegroundNotification
                                               object:nil];
}

- (void)unregisterForBackgroundNotifications {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}


- (void)appResignActive {
    // placeholder: pause or stop recording here if needed
}

- (void)appEnterForeground {
    // placeholder: resume recording here if needed
}

@end
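
After stopping, the recorded test.wav in the temporary directory can be played back to verify the result. A quick sketch using AVAudioPlayer (error handling kept minimal):

#import <AVFoundation/AVFoundation.h>

static AVAudioPlayer *player; // kept alive for the duration of playback

static void PlayRecordedFile(void) {
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"test.wav"];
    NSError *error = nil;
    player = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:path]
                                                    error:&error];
    if (player != nil && error == nil) {
        [player play];
    }
}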

 