短视频从无到有 (九)图片合成视频及图片压缩

图片合成视频

其实,视频的画面就是一帧帧的图片,当帧率大于16也就是一秒钟有16张以上的图片时,人类的视觉就觉得它是连续的。所以,视频画面可以分解成图片,图片也可以合成视频画面。 AVFoundation库可以很方便的操作多媒体设备,AVAssetWriter这个类可以方便的将图像和音频写成一个完整的视频文件。代码如下:

// Source images; each one is held on screen for 10 video frames (see below).
NSArray *imageArray =@[[UIImage imageNamed:@"avatar"],[UIImage imageNamed:@"beauty_off"],[UIImage imageNamed:@"demo"],[UIImage imageNamed:@"recordBefore"],[UIImage imageNamed:@"recording"],[UIImage imageNamed:@"watermark"]];
    
    // Output video dimensions in pixels.
    CGSize size =CGSizeMake(720, 1280);
    
    // Build the .mov output path inside the app's Documents directory.
    NSArray *paths =NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES);
    NSString *moviePath =[[paths objectAtIndex:0]stringByAppendingPathComponent:[NSString stringWithFormat:@"test.mov"]];
    
    // Delete any previous output file; AVAssetWriter fails if the file already exists.
    unlink([moviePath UTF8String]);
    NSError *error;
    // AVAssetWriter writes media samples into a container file (QuickTime .mov here).
    AVAssetWriter *videoWriter =[[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
    
    // Video track settings: H.264 codec plus the output width/height.
    NSDictionary *videoSettings =[NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264,AVVideoCodecKey,[NSNumber numberWithInt:size.width],AVVideoWidthKey,[NSNumber numberWithInt:size.height],AVVideoHeightKey,nil];
    
    AVAssetWriterInput *writerInput =[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    
    // Pixel format the adaptor expects for appended buffers (32-bit ARGB, matching
    // the format produced by pixelBufferFromCGImage:size:).
    NSDictionary*sourcePixelBufferAttributesDictionary =[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB],kCVPixelBufferPixelFormatTypeKey,nil];
    
    // The adaptor exposes a CVPixelBufferPool; allocating frame buffers from that
    // pool is usually more efficient than creating each buffer separately.
    AVAssetWriterInputPixelBufferAdaptor *adaptor =[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    
    
    // Attach the input and start the writing session at time zero.
    // NOTE(review): if canAddInput: returns NO, the writer never starts, yet the
    // block below still appends frames — consider handling that failure path.
    if ([videoWriter canAddInput:writerInput]) {
        
        [videoWriter addInput:writerInput];
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        
    }
    
    // Frame counter; at a timescale of 30, each image lasts 10 frames (~1/3 s).
    __block int i=0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatch_queue_create("mediaInputQueue", NULL)
                                       usingBlock:^{
         
                                           // Keep feeding frames while the input can accept more data.
                                           while ([writerInput isReadyForMoreMediaData]) {
                                               
                                               // All frames written: finish the input and close the file.
                                               // NOTE(review): ++i pre-increments, so the first appended frame
                                               // has PTS 1/30 instead of 0, and image 0 receives 9 frames while
                                               // the rest receive 10 — looks like an off-by-one; confirm intent.
                                               if (++i>=imageArray.count*10) {
                                                   
                                                   [writerInput markAsFinished];
                                                   [videoWriter finishWritingWithCompletionHandler:^{
                                                       
                                                       NSLog(@"合并视频成功");
                                                       // Save the finished movie to the photo album here if desired.
                                                       
                                                   }];
                                                   
                                                   break;
                                                   
                                               }
                                               
                                               CVPixelBufferRef buffer =NULL;
                                               // Index of the source image for this frame (10 frames per image).
                                               int idx =i/10;
                                               
                                               if (idx <imageArray.count) {

                                                   buffer =(CVPixelBufferRef )[self pixelBufferFromCGImage:[imageArray[idx] CGImage] size:size];
                                               }
                                               
                                               
                                               if (buffer) {
                                                   
                                                   // Append the frame at presentation time i/30 s.
                                                   if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(i, 30)]) {
                                                       
                                                       NSLog(@"合成fail");
                                                   }else{
                                                       
                                                       NSLog(@"合成成功");
                                                   }
                                                   
                                                   
                                                   
                                                   // The helper returns a +1-retained CF object; release it.
                                                   CFRelease(buffer);
                                                   
                                               }
                                               
                                               
                                           }
                                           
                                           
                                           
    }];

其中绘制buffer的方法如下:

/// Creates a 32ARGB CVPixelBuffer of the given size and draws `image` into it.
/// The returned buffer has a +1 retain count; the caller must CFRelease it.
/// Returns NULL on allocation failure instead of crashing.
/// NOTE: the image is drawn at its own pixel dimensions anchored at the
/// lower-left corner — it is NOT scaled to `size`; pre-scale if needed.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options =[NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:YES],kCVPixelBufferCGImageCompatibilityKey,
                            [NSNumber numberWithBool:YES],kCVPixelBufferCGBitmapContextCompatibilityKey,nil];
    CVPixelBufferRef pxbuffer =NULL;
    CVReturn status =CVPixelBufferCreate(kCFAllocatorDefault,size.width,size.height,kCVPixelFormatType_32ARGB,(__bridge CFDictionaryRef) options,&pxbuffer);
    
    // Check explicitly: NSParameterAssert is compiled out in release builds,
    // which would let a NULL buffer fall through and crash below.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    
    CVPixelBufferLockBaseAddress(pxbuffer,0);
    
    void *pxdata =CVPixelBufferGetBaseAddress(pxbuffer);
    if (pxdata == NULL) {
        CVPixelBufferUnlockBaseAddress(pxbuffer,0);
        CFRelease(pxbuffer);
        return NULL;
    }
    
    CGColorSpaceRef rgbColorSpace=CGColorSpaceCreateDeviceRGB();
    // Build a bitmap context over the pixel buffer's backing memory: anything
    // drawn into the context lands directly in the buffer. Use the buffer's
    // actual bytes-per-row rather than 4*width — CoreVideo may pad rows, and a
    // mismatched stride produces a skewed image.
    CGContextRef context =CGBitmapContextCreate(pxdata,size.width,size.height,8,CVPixelBufferGetBytesPerRow(pxbuffer),rgbColorSpace,kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer,0);
        CFRelease(pxbuffer);
        return NULL;
    }
    
    // CGContextDrawImage uses the CG coordinate system (origin bottom-left).
    // A CGImage taken from a UIImage will therefore render upside-down relative
    // to UIKit; flip the context's CTM first if the video frames come out inverted.
    CGContextDrawImage(context,CGRectMake(0,0,CGImageGetWidth(image),CGImageGetHeight(image)), image);
    // Release the color space.
    CGColorSpaceRelease(rgbColorSpace);
    // Release the context.
    CGContextRelease(context);
    // Unlock the pixel buffer.
    CVPixelBufferUnlockBaseAddress(pxbuffer,0);
    
    return pxbuffer;
}

图片压缩

两种压缩图片的方法:压缩图片质量(Quality),压缩图片尺寸(Size)。

1.压缩图片质量

// Re-encode as JPEG at the given quality; compression in [0.0, 1.0], lower = smaller file.
NSData *data = UIImageJPEGRepresentation(image, compression);
UIImage *resultImage = [UIImage imageWithData:data];

通过 UIImage 和 NSData 的相互转化,减小 JPEG 图片的质量来压缩图片。UIImageJPEGRepresentation 的第二个参数 compression 取值 0.0~1.0,值越小表示图片质量越低,图片文件自然越小。

2.压缩图片尺寸

// Redraw the image into a context of the target size to shrink its pixel dimensions.
UIGraphicsBeginImageContext(size); 
[image drawInRect:CGRectMake(0, 0, size.width, size.height)]; 
resultImage=UIGraphicsGetImageFromCurrentImageContext(); 
UIGraphicsEndImageContext();

给定所需的图片尺寸 size,resultImage 即为原图 image 绘制为 size 大小后的图片。压缩图片质量的优点在于,尽可能保留图片清晰度,图片不会明显模糊;缺点在于,不能保证图片压缩后小于指定大小。压缩图片尺寸可以使图片小于指定大小,但会使图片明显模糊(比压缩图片质量模糊)。所以在实际运用中,可以两者结合起来使用,给UIImage添加一个分类,上代码:

/// Compresses the receiver to JPEG data no larger than `maxLength` bytes.
/// Step 1 binary-searches the JPEG quality (6 iterations); if the result still
/// exceeds the budget, step 2 repeatedly shrinks the pixel dimensions until the
/// data fits or shrinking stops helping.
- (NSData *)compressWithMaxLength:(NSUInteger)maxLength {
    // --- Step 1: compress by JPEG quality ---
    CGFloat compression = 1;
    NSData *data = UIImageJPEGRepresentation(self, compression);
    if (data.length < maxLength) return data;

    // Binary-search the quality value; 6 iterations narrow the range to 1/64.
    CGFloat max = 1;
    CGFloat min = 0;
    for (int i = 0; i < 6; ++i) {
        compression = (max + min) / 2;
        data = UIImageJPEGRepresentation(self, compression);
        if (data.length < maxLength * 0.9) {
            // Well under budget — raise quality to stay close to maxLength.
            min = compression;
        } else if (data.length > maxLength) {
            max = compression;
        } else {
            // Within [0.9 * maxLength, maxLength]: good enough, stop early.
            break;
        }
    }
    if (data.length < maxLength) return data;

    // --- Step 2: compress by pixel size ---
    UIImage *resultImage = [UIImage imageWithData:data];
    NSUInteger lastDataLength = 0;
    // Stop once the data fits, or when shrinking no longer reduces the size
    // (the lastDataLength guard prevents an infinite loop).
    while (data.length > maxLength && data.length != lastDataLength) {
        lastDataLength = data.length;
        CGFloat ratio = (CGFloat)maxLength / data.length;
        // Truncate to whole pixels (NSUInteger) to prevent a white blank edge
        // when drawing into a fractional-sized context.
        CGSize size = CGSizeMake((NSUInteger)(resultImage.size.width * sqrtf(ratio)),
                                 (NSUInteger)(resultImage.size.height * sqrtf(ratio)));
        // FIX: this call had been accidentally swallowed into a trailing comment,
        // leaving drawInRect: with no current context (nil resultImage).
        UIGraphicsBeginImageContext(size);
        [resultImage drawInRect:CGRectMake(0, 0, size.width, size.height)];
        resultImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        data = UIImageJPEGRepresentation(resultImage, compression);
    }
    return data;
}

有什么问题,欢迎留言讨论。

推荐阅读更多精彩内容