zoukankan      html  css  js  c++  java
  • 第九天 iOS音频技术

    1. 

    AQRecorder

        // NOTE(review): fragment of AQRecorder's format-setup routine — the
        // enclosing function signature is not visible in this snippet.
        mRecordFormat.mFormatID = inFormatID;
        if (inFormatID == kAudioFormatLinearPCM)
        {
            // if we want pcm, default to signed 16-bit little-endian
            mRecordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
            mRecordFormat.mBitsPerChannel = 16;
            // Packed interleaved PCM: one frame per packet, so
            // bytesPerPacket == bytesPerFrame == (bits/8) * channel count.
            mRecordFormat.mBytesPerPacket = mRecordFormat.mBytesPerFrame = (mRecordFormat.mBitsPerChannel / 8) * mRecordFormat.mChannelsPerFrame;
            mRecordFormat.mFramesPerPacket = 1;
        }
        
    // Disabled alternative: AAC-LC compressed recording. Kept for reference;
    // compressed formats use 0 for the per-frame/per-packet byte fields.
    //    else  {
    //        
    //        mRecordFormat.mSampleRate = 44100;//8000.0;//44100.0;
    //        
    //        mRecordFormat.mFormatID = kAudioFormatMPEG4AAC; // kAudioFormatMPEG4AAC_HE does not work. Can't find `AudioClassDescription`. `mFormatFlags` is set to 0.
    //        mRecordFormat.mFormatFlags = kMPEG4Object_AAC_LC; // Format-specific flags to specify details of the format. Set to 0 to indicate no format flags. See “Audio Data Format Identifiers” for the flags that apply to each format.
    //        mRecordFormat.mBytesPerPacket = 0; // The number of bytes in a packet of audio data. To indicate variable packet size, set this field to 0. For a format that uses variable packet size, specify the size of each packet using an AudioStreamPacketDescription structure.
    //        mRecordFormat.mFramesPerPacket = 0; // The number of frames in a packet of audio data. For uncompressed audio, the value is 1. For variable bit-rate formats, the value is a larger fixed number, such as 1024 for AAC. For formats with a variable number of frames per packet, such as Ogg Vorbis, set this field to 0.
    //        mRecordFormat.mBytesPerFrame = 0; // The number of bytes from the start of one frame to the start of the next frame in an audio buffer. Set this field to 0 for compressed formats. ...
    //        mRecordFormat.mChannelsPerFrame = 1; // The number of channels in each frame of audio data. This value must be nonzero.
    //        mRecordFormat.mBitsPerChannel = 0; // ... Set this field to 0 for compressed formats.
    //        mRecordFormat.mReserved = 0; // Pads the structure out to force an even 8-byte alignment. Must be set to 0.
    //    }
            // Build a CFURL for the destination path. NOTE(review): the OSStatus
            // returned by AudioFileCreateWithURL below is never checked — verify
            // upstream that `status` is inspected.
            url = CFURLCreateWithString(kCFAllocatorDefault, (CFStringRef)inRecordFile, NULL);
            
            // create the audio file
            OSStatus status = AudioFileCreateWithURL(url, kAudioFileCAFType, &mRecordFormat, kAudioFileFlags_EraseFile, &mRecordFile);
            CFRelease(url);
    // ____________________________________________________________________________________
    // AudioQueue callback function, called when an input buffers has been filled.
    void AQRecorder::MyInputBufferHandler(    void *                                inUserData,
                                          AudioQueueRef                        inAQ,
                                          AudioQueueBufferRef                    inBuffer,
                                          const AudioTimeStamp *                inStartTime,
                                          UInt32                                inNumPackets,
                                          const AudioStreamPacketDescription*    inPacketDesc)
    {
        AQRecorder *aqr = (AQRecorder *)inUserData;
        try {
            if (inNumPackets > 0) {
                // write packets to file
                XThrowIfError(AudioFileWritePackets(aqr->mRecordFile, FALSE, inBuffer->mAudioDataByteSize,
                                                    inPacketDesc, aqr->mRecordPacket, &inNumPackets, inBuffer->mAudioData),
                              "AudioFileWritePackets failed");
                aqr->mRecordPacket += inNumPackets;
            }
            
            // if we're not stopping, re-enqueue the buffe so that it gets filled again
            if (aqr->IsRunning())
                XThrowIfError(AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL), "AudioQueueEnqueueBuffer failed");
        } catch (CAXException e) {
            char buf[256];
            fprintf(stderr, "Error: %s (%s)
    ", e.mOperation, e.FormatError(buf));
        }
    }

    AQPlayer

    // Creates the playback AudioQueue, binding AQBufferCallback to the current
    // run loop in common modes.
    // NOTE(review): this snippet is truncated ("...") — the remainder of the
    // method body is not shown here.
    void AQPlayer::SetupNewQueue() 
    {
        XThrowIfError(AudioQueueNewOutput(&mDataFormat, AQPlayer::AQBufferCallback, this, 
                                            CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &mQueue), "AudioQueueNew failed");
    
    ...
    
    // AudioQueue output callback: refills a just-drained buffer with the next
    // packets from the source file and re-enqueues it.
    // NOTE(review): this snippet is truncated — the function's closing brace
    // (and any end-of-file handling when nPackets == 0) is not shown here.
    void AQPlayer::AQBufferCallback(void *                    inUserData,
                                    AudioQueueRef            inAQ,
                                    AudioQueueBufferRef        inCompleteAQBuffer) 
    {
        AQPlayer *THIS = (AQPlayer *)inUserData;
    
        // Once playback has finished, stop feeding the queue.
        if (THIS->mIsDone) return;
    
        UInt32 numBytes;
        UInt32 nPackets = THIS->GetNumPacketsToRead();
        // Read the next chunk starting at the current packet cursor; on return
        // nPackets holds the number of packets actually read.
        OSStatus result = AudioFileReadPackets(THIS->GetAudioFileID(), false, &numBytes, inCompleteAQBuffer->mPacketDescriptions, THIS->GetCurrentPacket(), &nPackets, 
                                               inCompleteAQBuffer->mAudioData);
        if (result)
            printf("AudioFileReadPackets failed: %d", (int)result);
        if (nPackets > 0) {
            // Hand the refilled buffer back to the queue and advance the cursor.
            inCompleteAQBuffer->mAudioDataByteSize = numBytes;        
            inCompleteAQBuffer->mPacketDescriptionCount = nPackets;        
            AudioQueueEnqueueBuffer(inAQ, inCompleteAQBuffer, 0, NULL);
            THIS->mCurrentPacket = (THIS->GetCurrentPacket() + nPackets);
        } 

     

    2.levelmeter

    // Timer callback that formerly polled kAudioQueueProperty_CurrentLevelMeter
    // and forwarded the peak power to the delegate. The body is intentionally
    // commented out — kept for reference only; the method is currently a no-op.
    - (void)updateLevelMeter:(id)sender {
        /*
        if (self.delegate) {
            UInt32 dataSize = sizeof(AudioQueueLevelMeterState);
            AudioQueueGetProperty([self.decapsulator Queue], kAudioQueueProperty_CurrentLevelMeter, levelMeterStates, &dataSize);
            if ([self.delegate respondsToSelector:@selector(levelMeterChanged:)]) {
                [self.delegate levelMeterChanged:levelMeterStates[0].mPeakPower];
            }
            
        }*/
    }
        // NOTE(review): fragment — the enclosing method is not visible in this
        // snippet. Replaces the old NSTimer polling approach (below, disabled)
        // with MLAudioMeterObserver callbacks.
        //self.timerLevelMeter = [NSTimer scheduledTimerWithTimeInterval:0.2 target:self selector:@selector(updateLevelMeter:) userInfo:nil repeats:YES];
        // Weak reference avoids a retain cycle: self retains meterObserver,
        // whose block would otherwise capture self strongly.
        __weak __typeof(self)weakSelf = self;
        MLAudioMeterObserver *meterObserver = [[MLAudioMeterObserver alloc]init];
        meterObserver.actionBlock = ^(NSArray *levelMeterStates,MLAudioMeterObserver *meterObserver){
            NSLog(@"volume:%f",[MLAudioMeterObserver volumeForLevelMeterStates:levelMeterStates]);
            
            // Forward the computed volume to the delegate, if it implements the
            // optional levelMeterChanged: method.
            if ([weakSelf.delegate respondsToSelector:@selector(levelMeterChanged:)]) {
                [weakSelf.delegate levelMeterChanged:[MLAudioMeterObserver volumeForLevelMeterStates:levelMeterStates]];
            }
        };
        meterObserver.errorBlock = ^(NSError *error,MLAudioMeterObserver *meterObserver){
            // Error alert intentionally disabled.
            //[[[UIAlertView alloc]initWithTitle:@"错误" message:error.userInfo[NSLocalizedDescriptionKey] delegate:nil cancelButtonTitle:nil otherButtonTitles:@"知道了", nil]show];
        };
        self.meterObserver = meterObserver;
        // Attach the observer to the player's queue so it can read level meters.
        self.meterObserver.audioQueue = player->Queue();

    3.

    linesview

    // Delegate callback: pushes the latest meter sample into both line views.
    // Hops to the main queue because the callback may arrive on an audio thread.
    - (void)levelMeterChanged:(float)levelMeter {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"%.2f", levelMeter);
            for (id view in @[self.levelMeterLineView1, self.levelMeterLineView2]) {
                [view addMeter:levelMeter];
            }
        });
    }
    // Appends one scaled meter sample and redraws. Alternating between two
    // baselines (0.4 / 0.35) makes consecutive bars visibly different; only the
    // 10 most recent samples are retained.
    -(void)addMeter:(float)meter
    {
        meter = meter * 0.6 + (high ? 0.4 : 0.35);
        high = !high;
        
        [_meters addObject:@(meter)];
        while (_meters.count > 10) {
            [_meters removeObjectAtIndex:0];
        }
        [self setNeedsDisplay];
    }

    DXRecordView

    // Delegate callback: maps the raw meter into the 0.35–0.95 band (so the
    // indicator never fully collapses) and forwards it to the record view.
    // Runs on the main queue since it drives UI.
    - (void)levelMeterChanged:(float)levelMeter {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"%.2f", levelMeter);
            [_recordView setVoiceImageWithLowPassResults:(levelMeter * 0.6 + 0.35)];
        });
    }
    // Resizes the meter image so it grows upward from a fixed baseline:
    // height is proportional to the level, and y is shifted so the bottom edge
    // stays anchored (22 + 5.5 + 39 == baseline).
    // NOTE(review): this snippet is truncated — the method's closing brace is
    // not shown here.
    -(void)setVoiceImageWithLowPassResults:(double)lowPassResults
    {
        CGRect frame = _meterImageView.frame;
        frame.size.height = 39*lowPassResults;
        frame.origin.y  = 22+5.5+39*(1-lowPassResults);
        _meterImageView.frame = frame;
    4.recordButton 按住说话

        // Record ("hold to talk") button.
        // NOTE(review): fragment — the enclosing method is not visible in this
        // snippet.
        self.recordButton = [[UIButton alloc] initWithFrame:CGRectMake(24, kVerticalPadding, CGRectGetWidth(self.bounds)-(24 * 2), kInputTextViewMinHeight)];
        self.recordButton.titleLabel.font = [UIFont systemFontOfSize:15.0];
        [self.recordButton setTitleColor:[UIColor darkGrayColor] forState:UIControlStateNormal];
        [self.recordButton setBackgroundImage:[UIImage imageNamed:@"btn_long_round"] forState:UIControlStateNormal];
        [self.recordButton setBackgroundImage:[UIImage imageNamed:@"btn_long_round_hl"] forState:UIControlStateHighlighted];
        // Titles: "hold to talk" (normal) / "release to finish" (highlighted).
        [self.recordButton setTitle:LOCALIZATION(@"按住说话") forState:UIControlStateNormal];
        [self.recordButton setTitle:LOCALIZATION(@"松开结束") forState:UIControlStateHighlighted];
        [self.recordButton setTitleColor:[UIColor whiteColor] forState:UIControlStateHighlighted];
        //self.recordButton.hidden = YES;
        // Touch-down starts recording; up-inside finishes; up-outside cancels;
        // drag exit/enter toggle the "slide to cancel" state.
        [self.recordButton addTarget:self action:@selector(recordButtonTouchDown) forControlEvents:UIControlEventTouchDown];
        [self.recordButton addTarget:self action:@selector(recordButtonTouchUpOutside) forControlEvents:UIControlEventTouchUpOutside];
        [self.recordButton addTarget:self action:@selector(recordButtonTouchUpInside) forControlEvents:UIControlEventTouchUpInside];
        [self.recordButton addTarget:self action:@selector(recordDragOutside) forControlEvents:UIControlEventTouchDragExit];
        [self.recordButton addTarget:self action:@selector(recordDragInside) forControlEvents:UIControlEventTouchDragEnter];

    5.EMChatAudioBubbleView

    // Configures the audio bubble for a message: duration label, "unread" dot,
    // animation frame set (sender vs. receiver), and play/stop animation state.
    - (void)setModel:(MessageModel *)model
    {
        [super setModel:model];
        
        _timeLabel.text = [NSString stringWithFormat:@"%d'",self.model.time];
        
        if (self.model.isSender) {
            // Outgoing bubble: never shows the unread dot.
            [_isReadView setHidden:YES];
            _animationImageView.image = [UIImage imageNamed:SENDER_ANIMATION_IMAGEVIEW_IMAGE_DEFAULT];
            _animationImageView.animationImages = _senderAnimationImages;
        } else {
            // Incoming bubble: the dot is visible only while the clip is unplayed.
            [_isReadView setHidden:model.isPlayed];
            _animationImageView.image = [UIImage imageNamed:RECEIVER_ANIMATION_IMAGEVIEW_IMAGE_DEFAULT];
            _animationImageView.animationImages = _recevierAnimationImages;
        }
        
        if (!self.model.isPlaying) {
            [self stopAudioAnimation];
        } else {
            [self startAudioAnimation];
        }
    }
    // Table cell for a mini-course entry: content text, author/time labels,
    // a nested comment table, and a voice-playback button.
    @interface MiniCourseViewTableViewCell : UITableViewCell<UITableViewDataSource,UITableViewDelegate,CommentVoiceDelegate>
    
    // BUGFIX: `copy`, not `strong` — an NSMutableString passed by the caller
    // could otherwise mutate behind the cell's back.
    @property(nonatomic, copy) NSString * MiniContent;
    @property(nonatomic, strong) UILabel *contentLabel,*numberLabel,* nameLabel,* timeLabel;
    @property(nonatomic, strong) NSMutableArray * commentModelArray;
    @property(nonatomic, strong) UIImageView * headImageView;
    @property(nonatomic, strong) UIButton * laudButton;
    
    @property (nonatomic, strong) UITableView * tableView;
    @property (nonatomic, strong) PlayVoiceButton *playVoiceButton;
    // BUGFIX: delegates must be `weak` — a strong back-reference from the cell
    // to its owner creates a retain cycle.
    @property (nonatomic, weak)id <ReplyVoiceDelegate> delegate;
    
    
    @property(nonatomic, strong) CourseReplay * replyModel;
    
    // Returns the rendered height of `string` at the given font size within a
    // content view of the given width.
    + (float)getHeightWithString:(NSString *)string fontSize:(int)size contenViewWidth:(CGFloat)width;
    
    @end
     
    // Table cell for a "follow talk" comment: user avatar, name/time labels,
    // like button, answer/reply text, voice playback, and a score.
    @interface FollowTalkThingCommentTableViewCell : UITableViewCell
    
    @property (nonatomic, strong) UIImageView *userImageView;
    @property (nonatomic, strong) UILabel *nameLabel;
    @property (nonatomic, strong) UILabel *timeLabel;
    @property (nonatomic, strong) UIButton *zanButton;
    
    @property (strong, nonatomic)  UILabel *answerLabel,*replyLabel;
    @property (nonatomic, strong) PlayVoiceButton *voiceButton;
    @property (strong, nonatomic)  UIView *answerView;
    
    @property (strong, nonatomic) UILabel *scoreLabel;
    
    // BUGFIX: delegates must be `weak` — a strong back-reference from the cell
    // to its owner creates a retain cycle.
    @property (nonatomic, weak)id <playVoiceButton> delegate;
    
    // Returns the bubble size needed to render `content`.
    +(CGSize)cellBubbleSizeWithContent:(NSString *)content;
    
    @property (strong, nonatomic)  id model;
    
    @end
  • 相关阅读:
    蒲公英 · JELLY技术周刊 Vol.29: 前端智能化在阿里的那些事
    蒲公英 · JELLY技术周刊 Vol.28: Next.js 10 发布
    蒲公英 · JELLY技术周刊 Vol.27: 平平无奇 React 17
    蒲公英 · JELLY技术周刊 Vol.26: 请问您这个月要来点肝么?
    《痞子衡嵌入式半月刊》 索引
    痞子衡嵌入式:恩智浦MCU集成开发环境与开发工具教程
    《痞子衡嵌入式半月刊》 第 21 期
    痞子衡嵌入式:了解i.MXRT1060系列ROM中串行NOR Flash启动初始化流程优化点
    痞子衡嵌入式:深入i.MXRT1050系列ROM中串行NOR Flash启动初始化流程
    痞子衡嵌入式:一个奇怪的Keil MDK下变量链接强制对齐报错问题(--legacyalign)
  • 原文地址:https://www.cnblogs.com/javastart/p/5940787.html
Copyright © 2011-2022 走看看