1. AVFoundation Basics

AVFoundation is a framework for working with and creating time-based audiovisual media. It provides a detailed, low-level Objective-C interface for working with time-based audiovisual data. For example, you can use it to examine, create, edit, or re-encode media files. You can also obtain input streams from devices and manipulate video during real-time capture and playback.

AVAsset

The primary class AVFoundation uses to represent media is AVAsset. An AVAsset instance is an aggregated representation of a collection of one or more pieces of media data (audio and video tracks). It provides information about the collection as a whole, such as its title, duration, and natural presentation size. AVAsset is not tied to any particular data format.
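
A minimal sketch of loading an asset and reading a couple of its properties (the file path is a placeholder):

#import <AVFoundation/AVFoundation.h>

// Placeholder URL; substitute your own media file
NSURL *url = [NSURL fileURLWithPath:@"/path/to/movie.mp4"];
AVAsset *asset = [AVAsset assetWithURL:url];

// Load values asynchronously so property access does not block on I/O
[asset loadValuesAsynchronouslyForKeys:@[@"duration", @"tracks"] completionHandler:^{
    NSError *error = nil;
    if ([asset statusOfValueForKey:@"duration" error:&error] == AVKeyValueStatusLoaded) {
        NSLog(@"duration = %.2f s", CMTimeGetSeconds(asset.duration));
        NSLog(@"video tracks = %lu", (unsigned long)[asset tracksWithMediaType:AVMediaTypeVideo].count);
    }
}];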

CMTime
typedef struct {
     CMTimeValue    value;      // numerator: count of timescale units
     CMTimeScale    timescale;  // denominator: number of units per second
     CMTimeFlags    flags;      // bitmask marking the state of the time (valid, indefinite, ...)
     CMTimeEpoch    epoch;      // distinguishes otherwise equal times, e.g. across loops
} CMTime;

CMTime is defined as a C struct. It represents time as a rational number: value is the numerator, timescale is the denominator, and flags is a bitmask describing the state of the time value. AVFoundation uses CMTime as the unit of time for its operations, which makes the granularity of those operations very precise.
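
For example, a quick sketch of how rational CMTime values behave:

// 3/2 of a second = 1.5 s
CMTime t1 = CMTimeMake(3, 2);
NSLog(@"%f", CMTimeGetSeconds(t1)); // 1.500000

// 900/600 is also 1.5 s; 600 is a common video timescale
CMTime t2 = CMTimeMake(900, 600);
NSLog(@"%d", (int)CMTimeCompare(t1, t2)); // 0, i.e. equal

// Arithmetic stays exact because no float conversion happens
CMTime sum = CMTimeAdd(t1, t2); // 3.0 s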

AVFoundation makes several tasks straightforward:

  • Media playback: using a player item and player item tracks, you can set which visible portion of the item the player presents, set the audio mix parameters and video composition settings to apply during playback, or disable components during playback.

  • Reading, writing, and re-encoding media: AVFoundation lets you create a new representation of an asset in several ways. You can simply re-encode an existing asset, or edit it and then save it as a new asset.

  • Generating video thumbnails: initialize an AVAssetImageGenerator instance with the asset you want thumbnails of. AVAssetImageGenerator uses the default enabled video tracks to generate images.

  • Editing video: AVFoundation uses compositions to create new assets from existing pieces of media (typically one or more video and audio tracks). You can use a mutable composition to add and remove tracks and adjust their temporal ordering. You can also set the relative volumes and volume ramps of audio tracks, and set the opacity and opacity ramps of video tracks.

  • Camera and microphone capture: input from an iOS device's camera and microphone is managed by a capture session, as sketched right after this list.
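
A minimal sketch of setting up a capture session (assumes camera permission has already been granted; error handling omitted for brevity):

AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetHigh;

// Camera input
NSError *error = nil;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input && [session canAddInput:input]) {
    [session addInput:input];
}

// Outputs (photo, video data, etc.) are attached the same way via canAddOutput:/addOutput:

[session startRunning];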

2. Playback with AVFoundation

AVPlayer is the player used to play media. It uses AVPlayerItem as its unit of playback management: by editing the item's tracks we can implement effects such as speed changes or muted playback, and we can observe the current playback progress and status via KVO. During playback, the video frames are rendered onto an AVPlayerLayer.

// Set the periodic time-observation interval
- (void)setupCurrentCMTimeObserver {
    self.observeCMTime = YES;
    __weak typeof(self) weakSelf = self;
    if (CMTIME_IS_INVALID(self.currentTimeObserverInterval)) {
        _currentTimeObserverInterval = CMTimeMake(1, 10);
    }
    self.currentCMTimeObserver = [self.player addPeriodicTimeObserverForInterval:self.currentTimeObserverInterval queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
        if (weakSelf.observeCMTime) {
            weakSelf.currentTime = CMTimeGetSeconds(time) * weakSelf.speed;
        }
    }];
}
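
Note that a periodic time observer must be removed before the player is deallocated, otherwise AVPlayer raises an exception. A matching teardown might look like this (teardownCurrentCMTimeObserver is a hypothetical counterpart to the setup method above):

- (void)teardownCurrentCMTimeObserver {
    if (self.currentCMTimeObserver) {
        [self.player removeTimeObserver:self.currentCMTimeObserver];
        self.currentCMTimeObserver = nil;
    }
}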

- (void)startObservePlayerItemProperties {
    if (self.playerItem) {
        [self.playerItem addObserver:self forKeyPath:@"duration" options:NSKeyValueObservingOptionNew context:NULL];
        [self.playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:NULL];
        [self.playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:NULL];
        [self.playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:NULL];
        self.observingPlayerItem = YES;
    }
}

- (void)stopObservePlayerItemProperties {
    if (self.playerItem && self.observingPlayerItem) {
        self.observingPlayerItem = NO;
        [self.playerItem removeObserver:self forKeyPath:@"duration"];
        [self.playerItem removeObserver:self forKeyPath:@"status"];
        [self.playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"];
        [self.playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"];
    }
}

- (void)reobserveNotifications {
    if (self.observerArray.count) {
        for (id observer in self.observerArray) {
            [[NSNotificationCenter defaultCenter] removeObserver:observer];
        }
    }

    self.observerArray = [NSMutableArray array];
    __weak typeof(self) weakSelf = self;
    {
        id observer = [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification object:self.playerItem queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification * _Nonnull note) {
            weakSelf.didStart = NO;
            weakSelf.isFinish = YES;
            if (weakSelf.delegate && [weakSelf.delegate respondsToSelector:@selector(videoPlayerDidFinish:)]) {
                [weakSelf.delegate videoPlayerDidFinish:weakSelf];
            }
            [weakSelf stopUpdateStatisticTimer];
        }];
        [self.observerArray addObject:observer];
    }
    {
        id observer = [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemFailedToPlayToEndTimeNotification object:self.playerItem queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification * _Nonnull note) {
            NSError *error = [note.userInfo objectForKey:AVPlayerItemFailedToPlayToEndTimeErrorKey];
            [weakSelf handlePlayFailWithError:error];
        }];
        [self.observerArray addObject:observer];
    }
    {
        id observer = [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemNewErrorLogEntryNotification object:self.playerItem queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification * _Nonnull note) {
            AVPlayerItem *item = note.object;
            AVPlayerItemErrorLog *errorLog = item.errorLog;
            if (errorLog.events.count) {
                AVPlayerItemErrorLogEvent *event = errorLog.events.lastObject;
                NSError *error = [NSError errorWithDomain:event.errorDomain code:event.errorStatusCode userInfo:@{@"errorLogEvent": event}];
                [weakSelf handlePlayFailWithError:error];
            }
        }];
        [self.observerArray addObject:observer];
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context {
    if (object == self.playerItem) {
        if ([keyPath isEqualToString:@"duration"]) {
            if (self.delegate && [self.delegate respondsToSelector:@selector(videoPlayer:didUpdateDuration:)]) {
                [self.delegate videoPlayer:self didUpdateDuration:CMTimeGetSeconds(self.playerItem.duration)];
            }
        } else if ([keyPath isEqualToString:@"playbackBufferEmpty"]) {

        } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) {
            NSLog(@"observeValueForKeyPath: playbackLikelyToKeepUp = %d", self.playerItem.playbackLikelyToKeepUp);
            if (self.delegate && [self.delegate respondsToSelector:@selector(videoPlayer:bufferToBeKeepUp:)]) {
                [self.delegate videoPlayer:self bufferToBeKeepUp:self.playerItem.playbackLikelyToKeepUp];
            }

        } else if ([keyPath isEqualToString:@"status"]) {
            if (self.playerItem.status == AVPlayerItemStatusFailed) {
                [self handlePlayFailWithError:self.playerItem.error];
            } else if (self.playerItem.status == AVPlayerItemStatusReadyToPlay) {

            }
        }
    } else if (object == self.playerLayer) {
        if (self.playerLayer.readyForDisplay) {
            self.didGetFirstFrame = YES;
        }
    } else if (object == self.player) {
        if (self.player.status == AVPlayerStatusFailed) {
            [self handlePlayFailWithError:self.player.error];
        }
    }
}

- (AVPlayerItem *)playerItemWithAsset:(AVAsset *)asset
{
    AVPlayerItem *playerItem = nil;
    if (self.speed == 1) {
        playerItem = [AVPlayerItem playerItemWithAsset:asset];
    } else {
        CMTime duration = asset.duration;
        AVMutableComposition *composition = [AVMutableComposition composition];
        [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration) ofAsset:asset atTime:kCMTimeZero error:nil];
        if (self.speed != 0) {
            [composition scaleTimeRange:CMTimeRangeMake(kCMTimeZero, duration) toDuration:CMTimeMultiplyByFloat64(duration, 1 / self.speed)];
        }
        playerItem = [AVPlayerItem playerItemWithAsset:composition];

        if ([composition tracksWithMediaType:AVMediaTypeVideo].count && [asset tracksWithMediaType:AVMediaTypeVideo].count) {
            AVMutableCompositionTrack *track = [composition tracksWithMediaType:AVMediaTypeVideo][0];
            track.preferredTransform = [asset tracksWithMediaType:AVMediaTypeVideo][0].preferredTransform;
        }
    }

    return playerItem;
}
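
Baking the speed into a scaled composition as above fixes the rate per item. For simple cases, setting AVPlayer's rate property after playback starts achieves a similar effect without rebuilding the item:

[self.player play];
self.player.rate = 2.0; // 2x; which rates work depends on the media and the item's capabilities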

// Initialize the player and start playback
- (void)initPlayerWithPlayItem:(AVPlayerItem *)playItem
{
    self.currentTimeObserverInterval = CMTimeMake(1, 10);

    self.playerItem = playItem;
    [self startObservePlayerItemProperties];

    self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
    [self.player addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:NULL];
    self.player.muted = self.shouldMute;

    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.playerLayer addObserver:self forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:NULL];

    [self reobserveNotifications];
    [self setupCurrentCMTimeObserver];

    [self.player play];
}
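
The AVPlayerLayer created here is not yet visible: it still has to be given a frame and attached to a view's layer tree. A typical follow-up, where containerView is a placeholder for whatever view hosts the video:

self.playerLayer.frame = containerView.bounds;
[containerView.layer addSublayer:self.playerLayer];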

3. Re-encoding with AVFoundation

AVAssetReader: reads media samples from an AVAsset instance.
  • Each AVAssetReader object can be associated with only a single asset at a time, though that asset may contain multiple tracks. Before reading starts, you must therefore attach a concrete subclass of AVAssetReaderOutput to the asset reader to configure how the media data is read.

  • Audio samples and video frames are accessed through the copyNextSampleBuffer method.

AVAssetWriter: writes media data from one or more sources into a single file of a specified container format.
  • An asset writer does not need to be associated with any particular asset, but you must use a separate asset writer for each output file you create.

  • Because an asset writer can accept media data from multiple sources, you must create an AVAssetWriterInput for each track to be written to the file. Each AVAssetWriterInput expects to receive data as CMSampleBufferRef objects; to append CVPixelBufferRef objects to an asset writer input, use the AVAssetWriterInputPixelBufferAdaptor class.

- (void)initReader
{
    self.reader = [[AVAssetReader alloc]initWithAsset:self.inputAsset error:nil];

    NSArray *videoTracks = [self.inputAsset tracksWithMediaType:AVMediaTypeVideo];
    NSDictionary *decompressionVideoSettings = @{
                                                 (id)kCVPixelBufferPixelFormatTypeKey     : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
                                                 (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
                                                 };

    self.videoCompositionOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks
                                                                                                          videoSettings:decompressionVideoSettings];

    self.videoComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:self.inputAsset];
    self.videoCompositionOutput.videoComposition = self.videoComposition;

    if ([self.reader canAddOutput:self.videoCompositionOutput]){
        [self.reader addOutput:self.videoCompositionOutput];
    }

    [self.reader startReading];
}

- (void)initWriter
{
    NSURL *outputFileURL = [FRVideoEditExportCache storyClockOutputUrl];

    self.writer = [AVAssetWriter assetWriterWithURL:outputFileURL
                                           fileType:AVFileTypeMPEG4
                                              error:nil];

    NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInteger:self.videoComposition.renderSize.width], AVVideoWidthKey,
                                              [NSNumber numberWithInteger:self.videoComposition.renderSize.height], AVVideoHeightKey,
                                              nil];

    self.writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
    // Offline transcode: NO lets the writer buffer input instead of dropping late frames
    self.writerVideoInput.expectsMediaDataInRealTime = NO;

    NSDictionary *sourceBufferAttribute = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
                                           [NSNumber numberWithUnsignedInteger:self.videoComposition.renderSize.width], (id)kCVPixelBufferWidthKey,
                                           [NSNumber numberWithUnsignedInteger:self.videoComposition.renderSize.height], (id)kCVPixelBufferHeightKey,
                                           (id)kCFBooleanTrue, (id)kCVPixelBufferOpenGLESCompatibilityKey,
                                           nil];

    self.writerInputPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.writerVideoInput
                                                                                                          sourcePixelBufferAttributes:sourceBufferAttribute];

    if([self.writer canAddInput:self.writerVideoInput]){
        [self.writer addInput:self.writerVideoInput];
    }

    [self.writer startWriting];
}

- (void)readWrite:(AVAsset *)asset
{
    self.inputAsset = asset;

    __weak typeof(self)weakSelf = self;
    dispatch_async(self.queue, ^{
        [weakSelf initReader];
        [weakSelf initWriter];

        // Copy the source video samples out of the reader
        CMSampleBufferRef buffer = [weakSelf.videoCompositionOutput copyNextSampleBuffer];
        while (buffer != NULL) {
            CMItemCount count = CMSampleBufferGetNumSamples(buffer);
            if (!count) {
                CFRelease(buffer); // avoid leaking the final buffer
                break;
            }

            // Grab the pixel buffer and its presentation timestamp
            CMTime presentationTime = CMSampleBufferGetOutputPresentationTimeStamp(buffer);
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);

            // Feed the sample back into the writer
            if (!weakSelf.videoWritingStarted){
                weakSelf.videoWritingStarted = YES;
                [weakSelf.writer startSessionAtSourceTime:presentationTime];
            }
            // Back-pressure: wait until the input can take more data instead of silently dropping frames
            while (!weakSelf.writerVideoInput.readyForMoreMediaData) {
                [NSThread sleepForTimeInterval:0.01];
            }
            [weakSelf.writerInputPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];

            CFRelease(buffer);
            buffer = NULL;
            buffer = [weakSelf.videoCompositionOutput copyNextSampleBuffer];
        }

        // Signal end of input before finishing, otherwise the writer may never complete
        [weakSelf.writerVideoInput markAsFinished];
        [weakSelf.writer finishWritingWithCompletionHandler:^(void){
            weakSelf.videoWritingStarted = NO;

            switch (weakSelf.writer.status) {
                case AVAssetWriterStatusFailed:{
                    NSLog(@"Write fail");
                    break;
                }
                case AVAssetWriterStatusCompleted:{
                    NSLog(@"Write compelte");
                    break;
                }
                default:
                    break;
            }
        }];
    });
}
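
Note that this sketch transcodes only the video track. To preserve audio, you would add a matching AVAssetReaderTrackOutput for the audio track and a second AVAssetWriterInput, and pump both in the same loop.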

4. Generating Thumbnails with AVFoundation

AVAssetImageGenerator: generates thumbnails from a video. It can asynchronously produce thumbnails for a set of sample points (CMTime values); the exact sample points available correspond to the presentation times assigned when the video was encoded.

- (void)setUrl:(NSURL *)url
{
    _url   = url;
    _asset = nil;
    _imageGenerator = nil;
}

- (AVAsset *)asset
{
    if(_asset == nil && _url){
        _asset = [AVAsset assetWithURL:_url];
    }
    return _asset;
}

- (AVAssetImageGenerator *)imageGenerator
{
    if(!_url){
        _imageGenerator = nil;
    }

    if(_imageGenerator == nil && self.asset){
        _imageGenerator = [[AVAssetImageGenerator alloc]initWithAsset:_asset];
        _imageGenerator.maximumSize = CGSizeMake(self.imageSize.width * [UIScreen mainScreen].scale, self.imageSize.height * [UIScreen mainScreen].scale);
        _imageGenerator.appliesPreferredTrackTransform = YES;
        _imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
        _imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
    }
    return _imageGenerator;
}

- (void)generateUnscaledCGImagesAsynchronouslyForTimes:(NSArray<NSValue *> *)requestedTimes completionHandler:(AVAssetImageGeneratorCompletionHandler)handler {
    if (!self.imageGenerator) {
        return;
    }

    self.imageGenerator.maximumSize = CGSizeZero;
    [self.imageGenerator generateCGImagesAsynchronouslyForTimes:requestedTimes completionHandler:handler];
    self.imageGenerator.maximumSize = CGSizeMake(self.imageSize.width * [UIScreen mainScreen].scale, self.imageSize.height * [UIScreen mainScreen].scale);
}
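
A minimal usage sketch for the generator above; the one-second sampling interval is illustrative:

NSMutableArray<NSValue *> *times = [NSMutableArray array];
for (int i = 0; i < 10; i++) {
    [times addObject:[NSValue valueWithCMTime:CMTimeMake(i, 1)]]; // one thumbnail per second
}

[self.imageGenerator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
    if (result == AVAssetImageGeneratorSucceeded && image) {
        UIImage *thumbnail = [UIImage imageWithCGImage:image];
        // hand the thumbnail back to the main queue as needed
    }
}];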

5. Video Editing with AVFoundation

We use a media composition to edit video. The documentation groups the relevant classes into several areas:

  • Media Composition: classes that represent a collection of media tracks and operate at the collection level, including AVComposition, AVMutableComposition, AVCompositionTrack, AVMutableCompositionTrack, and AVCompositionTrackSegment.
  • Video Composition: classes for processing video tracks.
  • Movie Editing: classes for working with QuickTime movies.
  • Audio Mixing: classes for mixing and processing audio.

For a complete example, see Apple's official sample project AVSimpleEditoriOS.

Adding a Watermark / Applying a Mask

- (CALayer *)parentLayer
{
    if(!_parentLayer){
        _parentLayer = [CALayer layer];
        _parentLayer.backgroundColor = [UIColor whiteColor].CGColor;
    }
    return _parentLayer;
}

- (CALayer *)videoLayer
{
    if(!_videoLayer){
        _videoLayer = [CALayer layer];
    }
    return _videoLayer;
}

- (AVMutableVideoComposition *)maskWithInputAsset:(AVAsset *)asset
{
    // Set up the composition
    if(!self.mutableComposition){
        self.mutableComposition = [AVMutableComposition composition];
        AVAssetTrack *assetVideoTrack = nil;
        AVAssetTrack *assetAudioTrack = nil;

        if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
            assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
        }
        if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
            assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
        }
        CMTime duration = [asset duration];
        if(assetVideoTrack != nil) {
            AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration) ofTrack:assetVideoTrack atTime:kCMTimeZero error:nil];
            compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform;
        }
        if(assetAudioTrack != nil) {
            AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration) ofTrack:assetAudioTrack atTime:kCMTimeZero error:nil];
        }
    }

    // Set up the video composition
    if (!self.mutableVideoComposition) {
        // Initialize the render size and frame interval
        AVAssetTrack *videoTrack     = [self.mutableComposition tracksWithMediaType:AVMediaTypeVideo][0];
        self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        self.mutableVideoComposition.frameDuration = CMTimeMake(1, videoTrack.nominalFrameRate + 10); // too low a rendered frame rate directly degrades quality
        self.mutableVideoComposition.renderSize    = videoTrack.naturalSize;
        self.mutableVideoComposition.renderScale   = 1.0;

        // Specify and attach the layer instructions
        self.instruction           = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        self.instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
        self.layerInstruction      = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        self.instruction.layerInstructions        = @[self.layerInstruction];
        self.mutableVideoComposition.instructions = @[self.instruction];
    }

    // Both a watermark and a mask can be implemented by configuring videoLayer
    CGSize  renderSize  = self.mutableVideoComposition.renderSize;
    CGFloat maskLength  = renderSize.width > renderSize.height ? renderSize.height : renderSize.width;
    CGFloat maskOffset  = ABS(renderSize.width - renderSize.height);
    CGSize  maskSize    = CGSizeMake(maskLength, maskLength);
    CGRect  maskFrame   = CGRectMake(0, 0, maskSize.width, maskSize.height);
    CAShapeLayer *maskLayer = [CAShapeLayer layer];
    CGFloat radius = maskSize.width /2.0;
    UIBezierPath *circlePath = [UIBezierPath bezierPathWithArcCenter:CGPointMake(maskSize.width/2, maskSize.height/2)
                                                              radius:radius
                                                          startAngle:0
                                                            endAngle:2 * M_PI
                                                           clockwise:NO];
    maskLayer.path = circlePath.CGPath;

    // When the origin shifts, the composition has to be translated to match
    CGAffineTransform transform;
    if(renderSize.width > renderSize.height){
        transform = CGAffineTransformMakeTranslation(- maskOffset / 2.0, 0);
    }else{
         transform = CGAffineTransformMakeTranslation(0, - maskOffset / 2.0);
    }
    [self.layerInstruction setTransform:transform atTime:kCMTimeZero];
    self.mutableVideoComposition.renderSize = maskSize;

    // Attach the mask layer to the video layer
    self.videoLayer.frame  = maskFrame;
    self.parentLayer.frame = maskFrame;
    self.videoLayer.mask   = maskLayer;

    [self.parentLayer insertSublayer:self.videoLayer atIndex:0];
    self.mutableVideoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:self.videoLayer inLayer:self.parentLayer];

    return self.mutableVideoComposition;
}
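
The returned video composition only takes effect when handed to an export session; note that the Core Animation tool used above works only for export, not for AVPlayer preview. A minimal export sketch, where asset and outputURL are placeholders:

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.mutableComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition = [self maskWithInputAsset:asset];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = outputURL; // placeholder destination
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Mask export completed");
    }
}];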

Adding Music

// Replace the music track
+ (AVMutableComposition *)compositionWithInputAsset:(AVMutableComposition *)inputComposition audioAsset:(AVAsset *)audioAsset
{
    // Remove the old audio
    AVAssetTrack *videoTrack = [inputComposition tracksWithMediaType:AVMediaTypeVideo][0];
    [self removeAudioTrack:inputComposition];

    if (!audioAsset) {
        return inputComposition;
    }

    // Add the new audio
    AVMutableCompositionTrack *compositionAudioTrack = [inputComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *err = nil;

    CMTime audioDuration = [audioAsset duration];
    CMTime videoDuration = [videoTrack.asset duration];

    err = [self resizeCompositionTrack:(AVMutableCompositionTrack *)[audioAsset tracksWithMediaType:AVMediaTypeAudio][0]
                          fromDuration:audioDuration
                    toCompositionTrack:compositionAudioTrack
                            toDuration:videoDuration];

    if (err) {
        NSLog(@"AVMutableCompositionTrack inserAudio error = %@",err);
    }
    return inputComposition;
}

// Add background audio
+ (AVMutableAudioMix *)audioMixWithInputAsset:(AVMutableComposition *)inputComposition audioAsset:(AVAsset *)audioAsset
{
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];

    AVAssetTrack *videoTrack = [inputComposition tracksWithMediaType:AVMediaTypeVideo][0];

    // Add the new audio
    AVMutableCompositionTrack *compositionAudioTrack = [inputComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *err = nil;

    CMTime audioDuration = [audioAsset duration];
    CMTime videoDuration = [videoTrack.asset duration];

    err = [self resizeCompositionTrack:(AVMutableCompositionTrack *)[audioAsset tracksWithMediaType:AVMediaTypeAudio][0]
                          fromDuration:audioDuration
                    toCompositionTrack:compositionAudioTrack
                            toDuration:videoDuration];


    AVMutableAudioMixInputParameters *inputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
    audioMix.inputParameters = @[inputParameters];

    return audioMix;
}
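
An AVMutableAudioMix built this way is applied through the audioMix property of either an AVPlayerItem (for playback) or an AVAssetExportSession (for export), e.g.:

playerItem.audioMix = audioMix;    // playback
exportSession.audioMix = audioMix; // export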
