iOS 在视频的某个时间段添加特效,并进行视频剪切、合并

参考:
1. 美颜:大佬的简书 —— 美颜实现
2. 大佬封装的 demo(https://github.com/Dongdong1991/GPURenderKitDemo/blob/master)

// 开始进行滤镜渲染

  -(void)saveEffectVideoActionWithPlayURL:(NSURL  *)url outPutFilter:     (GPUImageOutput<GPUImageInput> *)outPutFilter leftTime:(float)leftTime rightTime:  (float)rightTime callback:(void(^)(NSURL *outUrlPath))callBack  {
    
self.effectProgressEnd = callBack;
    self.leftTime = leftTime;
    self.rightTime = rightTime;
    // 要转换的视频
    //
    _movieFile = [[GPUImageMovie alloc] initWithURL:url];
    _movieFile.runBenchmark = YES;
    _movieFile.playAtActualSpeed = NO;
    
    // 创建滤镜
    [_movieFile addTarget:outPutFilter];
    
    // 设置输出路径
    NSString * pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
    // - 如果文件已存在,AVAssetWriter不允许直接写进新的帧,所以会删掉老的视频文件
    unlink([pathToMovie UTF8String]);
    self.movieURL = [NSURL fileURLWithPath:pathToMovie];
    
    // 输出 后面的size可改 ~ 现在来说480*640有点太差劲了
    AVAsset *asset = [AVAsset assetWithURL:url];
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
//    CGAffineTransform t = videoTrack.preferredTransform;//这里的矩阵有旋转角度,转换一下即可
    
        NSLog(@"=====hello  width:%f===height:%f",videoTrack.naturalSize.width,videoTrack.naturalSize.height);//宽高
    
    _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:self.movieURL size:videoTrack.naturalSize];
    
    [outPutFilter addTarget:_movieWriter];
    
    _movieWriter.shouldPassthroughAudio = YES;
    _movieFile.audioEncodingTarget = _movieWriter;
    [_movieFile enableSynchronizedEncodingUsingMovieWriter:_movieWriter];
    
    // 完成之后的回调 - 为啥100%了之后还会待一会才调用这个completeBlock
    @weakify(self);
    [self.movieWriter setCompletionBlock:^{
        @strongify(self);
        [outPutFilter removeTarget:self.movieWriter];
        [self.movieWriter finishRecording];
        NSLog(@"_movieWriter_成功");
//        if (callBack) {
//            callBack(strongSelf.movieURL);
//        }
        //        // 异步写入相册
//                dispatch_queue_t concurrentQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
//                dispatch_async(concurrentQueue, ^{
//                    [strongSelf saveVideo:strongSelf.movieURL];
//                });
        [self cutVideo];
    }];
    [self.movieWriter setFailureBlock:^(NSError *error) {
        @strongify(self);
        NSLog(@"_movieWriter_失败");
        if (self.effectProgressEnd) {
            self.effectProgressEnd([NSURL fileURLWithPath:@""]);
        }
    }];
    [self.movieWriter startRecording];
    [_movieFile startProcessing];
    
}

//剪切视频

/// Splits the original movie into up to three segments around the effect window
/// [leftTime, rightTime]: a head (before the effect), the effect section itself
/// (re-cut from the filtered render at self.movieURL), and a tail (after the
/// effect). Segments are stored in self.compositionDic and merged by -combVideos.
- (void)cutVideo {
    // Total duration (seconds) of the original movie.
    // NOTE(review): self.videoPath is passed where an NSURL is expected — confirm
    // the property is actually URL-typed despite its name.
    float videoTimeLang = [JWVideoEditManage getMediaDurationWithMediaUrl:self.videoPath];
    
    // Segment 1: everything before the effect window. Only worth cutting when the
    // window starts more than 1s in; otherwise snap leftTime to 0 and skip the head.
    if (_leftTime >1) {
        
        AVMutableComposition *composition = [self getAVMutableCompositionWithLeftTime:0 rightTime:_leftTime videoURL:self.videoPath];
        
        [self.compositionDic setObject:composition forKey:@"firstComposition"];
    } else  {
        _leftTime = 0;
    }
    
    // Segment 2: the effect window. If rightTime is within 1s of the end, extend
    // it to the full duration so no sliver is left for the tail segment.
    _rightTime =  _rightTime + 1 > videoTimeLang ? videoTimeLang : _rightTime;
    
    AVMutableComposition *composition = [self getAVMutableCompositionWithLeftTime:_leftTime rightTime:_rightTime videoURL:self.videoPath];
    
    [self.compositionDic setObject:composition forKey:@"secondComposition"];
    
    // Segment 3: everything after the effect window (only if at least 1s remains).
    if (videoTimeLang > _rightTime + 1)  {
        
        AVMutableComposition *composition = [self getAVMutableCompositionWithLeftTime:_rightTime rightTime:videoTimeLang videoURL:self.videoPath];
        
        [self.compositionDic setObject:composition forKey:@"thirdComposition"];
    }
    
    // Replace the middle segment with the same time range cut from the FILTERED
    // render (self.movieURL), so only the effect window carries the filter.
    if (self.compositionDic[@"secondComposition"]) {
        AVMutableComposition *composition = [self getAVMutableCompositionWithLeftTime:self.leftTime rightTime:self.rightTime videoURL:self.movieURL];
        
        self.compositionDic[@"secondComposition"] = composition;

    }
    
    // Merge on the main queue (GPUImage invokes the completion block off-main).
    dispatch_async(dispatch_get_main_queue(), ^{
        [self combVideos];
    });
}

///根据URL 将视频资源转化为composition

/// Cuts the [leftTime, rightTime] section of the movie at `videoURL` into a new
/// AVMutableComposition containing one video track and (when the source has one)
/// one audio track.
///
/// @param leftTime  Clip start in seconds.
/// @param rightTime Clip end in seconds.
/// @param videoURL  Source movie URL.
/// @return A composition holding the requested clip; tracks the source lacks are
///         simply omitted instead of crashing.
- (AVMutableComposition *)getAVMutableCompositionWithLeftTime:(float)leftTime rightTime:(float)rightTime videoURL:(NSURL *)videoURL {
    
    NSDictionary *optDict = @{AVURLAssetPreferPreciseDurationAndTimingKey : @NO};
    AVAsset *sourceAsset = [[AVURLAsset alloc] initWithURL:videoURL options:optDict];
    
    // Express the clip in the asset's own timescale to avoid rounding drift.
    CMTimeScale timescale = sourceAsset.duration.timescale;
    CMTime startTime = CMTimeMakeWithSeconds(leftTime, timescale);
    CMTime clipDuration = CMTimeMakeWithSeconds(rightTime - leftTime, timescale);
    CMTimeRange clipRange = CMTimeRangeMake(startTime, clipDuration);
    
    AVMutableComposition *composition = [AVMutableComposition composition];
    
    // Video track. Guard against sources with no video track rather than
    // indexing [0] into an empty array.
    AVAssetTrack *videoAssetTrack = [sourceAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (videoAssetTrack) {
        AVMutableCompositionTrack *videoTrack =
            [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *videoError = nil;
        if (![videoTrack insertTimeRange:clipRange ofTrack:videoAssetTrack atTime:kCMTimeZero error:&videoError]) {
            NSLog(@"insert video range failed: %@", videoError);
        }
    }
    
    // A video-only composition exports without sound, so carry the matching audio
    // range too — but only when the source actually has an audio track (silent
    // videos previously crashed here on an out-of-bounds index).
    AVAssetTrack *audioAssetTrack = [sourceAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (audioAssetTrack) {
        AVMutableCompositionTrack *audioTrack =
            [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *audioError = nil;
        if (![audioTrack insertTimeRange:clipRange ofTrack:audioAssetTrack atTime:kCMTimeZero error:&audioError]) {
            NSLog(@"insert audio range failed: %@", audioError);
        }
    }
    return composition;
}

///合并视频资源

/// Stitches the segments prepared by -cutVideo (head, filtered middle, tail) back
/// into a single movie, re-attaches the original soundtrack, and exports it to
/// Documents/. Invokes `effectProgressEnd` with the exported file URL on success,
/// or an empty-path URL on failure/cancellation.
- (void)combVideos {
    
    AVMutableComposition *firstComposition = self.compositionDic[@"firstComposition"];
    AVMutableComposition *secondComposition = self.compositionDic[@"secondComposition"];
    AVMutableComposition *thirdComposition = self.compositionDic[@"thirdComposition"];
    
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Single video track that receives every segment.
    AVMutableCompositionTrack *compositionTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    
    // Every segment is inserted at kCMTimeZero, which pushes earlier insertions
    // later in the timeline — so inserting third, then second, then first yields
    // playback order first → second → third.
    NSMutableArray<AVMutableComposition *> *segmentsInReverse = [NSMutableArray array];
    if (thirdComposition) { [segmentsInReverse addObject:thirdComposition]; }
    if (secondComposition) { [segmentsInReverse addObject:secondComposition]; }
    if (firstComposition) { [segmentsInReverse addObject:firstComposition]; }
    
    for (AVMutableComposition *segment in segmentsInReverse) {
        // Skip segments that carry no video track instead of crashing on an
        // out-of-bounds index.
        AVAssetTrack *segmentVideoTrack = [segment tracksWithMediaType:AVMediaTypeVideo].firstObject;
        if (!segmentVideoTrack) { continue; }
        NSError *insertError = nil;
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, segment.duration);
        if (![compositionTrack insertTimeRange:timeRange ofTrack:segmentVideoTrack atTime:kCMTimeZero error:&insertError]) {
            NSLog(@"insert video segment failed: %@", insertError);
        }
    }
    
    // Audio is taken in one piece from the original movie so the soundtrack stays
    // continuous across the three video segments.
    [self addAudioCompositionTrack:composition videoURL:self.videoPath];
    
    // Timestamped name so a pre-existing file never makes the export fail.
    NSString *filePath = [NSHomeDirectory() stringByAppendingPathComponent:
                          [NSString stringWithFormat:@"Documents/output_comb_%@.mp4", [self getNowTimeTimestamp2]]];
    
    AVAssetExportSession *exporterSession =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporterSession.outputFileType = AVFileTypeMPEG4;
    exporterSession.outputURL = [NSURL fileURLWithPath:filePath]; // export fails if the file already exists
    exporterSession.shouldOptimizeForNetworkUse = YES; // streamable over the network
    [exporterSession exportAsynchronouslyWithCompletionHandler:^{
        // The completion handler is NOT guaranteed to run on the main thread;
        // hop to main before invoking the caller's callback.
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (exporterSession.status) {
                case AVAssetExportSessionStatusUnknown:
                    NSLog(@"exporter Unknow");
                    if (self.effectProgressEnd) {
                        self.effectProgressEnd([NSURL fileURLWithPath:@""]);
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"exporter Canceled");
                    if (self.effectProgressEnd) {
                        self.effectProgressEnd([NSURL fileURLWithPath:@""]);
                    }
                    break;
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"exporter Failed");
                    if (self.effectProgressEnd) {
                        self.effectProgressEnd([NSURL fileURLWithPath:@""]);
                    }
                    break;
                case AVAssetExportSessionStatusWaiting:
                    NSLog(@"exporter Waiting");
                    break;
                case AVAssetExportSessionStatusExporting:
                    NSLog(@"exporter Exporting");
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"exporter Completed");
                    [self saveVideo:[NSURL fileURLWithPath:filePath]];
                    if (self.effectProgressEnd) {
                        self.effectProgressEnd([NSURL fileURLWithPath:filePath]);
                    }
                    break;
            }
        });
    }];
}

//: 添加声音轨道,只保留原来视频的声音
/**
添加声音轨道

@param mixComposition 合成器(音轨会被插入到这里)
@param videoURL 提供声音的源视频地址
*/
/// Inserts the full audio track of the movie at `videoURL` into `mixComposition`
/// starting at time zero, so the merged video keeps the original soundtrack.
/// Sources without an audio track are skipped silently.
///
/// @param mixComposition Composition receiving the audio track.
/// @param videoURL       Source movie providing the audio.
- (void)addAudioCompositionTrack:(AVMutableComposition *)mixComposition videoURL:(NSURL *)videoURL{
    
    NSDictionary *optDict = @{AVURLAssetPreferPreciseDurationAndTimingKey : @NO};
    AVAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:optDict];
    
    // The work here is entirely synchronous, so the previous
    // dispatch_async + semaphore-wait dance added nothing except blocking the
    // calling thread (this is invoked from the main queue by -combVideos).
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!audioAssetTrack) {
        // Silent source: nothing to add, and inserting a nil track would fail.
        return;
    }
    
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // Insert the whole soundtrack at the start of the composition.
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    NSError *error = nil;
    if (![audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:kCMTimeZero error:&error]) {
        NSLog(@"error---%@",error);
    }
}

/// Current Unix timestamp (seconds since 1970), rounded to a whole number and
/// returned as a string — used to build unique export file names.
- (NSString *)getNowTimeTimestamp2 {
    NSTimeInterval secondsSinceEpoch = [[NSDate date] timeIntervalSince1970];
    return [NSString stringWithFormat:@"%0.f", secondsSinceEpoch];
}