
AVFoundation - Generating a Video from an Image

2018-05-18  比沉默寡言话多

First, here is code you can take and use directly; after that, I'll explain it.

+ (void)compressImage:(NSURL *)imageUrl
             duration:(CGFloat)duration
                 size:(CGSize)imageSize
            outputUrl:(NSURL *)exportUrl
           completion:(void(^)(void))block {
    
    //get source
    CGImageSourceRef src = CGImageSourceCreateWithURL((__bridge CFURLRef)imageUrl, NULL); // 1
    if (!src) {
        return;
    }
    CGImageRef image = CGImageSourceCreateImageAtIndex(src, 0, NULL);
    CFRelease(src);
    if (!image) {
        return;
    }
    
    //config: fit the source image into the requested output size, keeping its aspect ratio
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    CGRect dstRect = AVMakeRectWithAspectRatioInsideRect(CGSizeMake(width, height), CGRectMake(0, 0, imageSize.width, imageSize.height)); // 2
    NSImage *scaleImage = [[NSImage alloc] initWithCGImage:image size:dstRect.size];
    CGImageRelease(image);
    
    CVPixelBufferRef buffer = [ExportMovieUtil getBufferFromNSImage:scaleImage]; // 3
    if (!buffer) {
        return;
    }
    
    //config output settings
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:exportUrl
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:nil];
    NSDictionary *videoSettings = @{AVVideoCodecKey : AVVideoCodecTypeH264,
                                    AVVideoWidthKey : @((int)dstRect.size.width),
                                    AVVideoHeightKey : @((int)dstRect.size.height)}; // 4
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    
    // the adaptor's source format must match the buffers produced by getBufferFromNSImage:
    NSDictionary *sourcePixelBufferAttributesDictionary = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)};
    
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    if ([videoWriter canAddInput:writerInput]) {
        [videoWriter addInput:writerInput];
    }
    
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    //start to write; requestMediaDataWhenReadyOnQueue: expects a serial queue
    dispatch_queue_t dispatchQueue = dispatch_queue_create("com.exportmovieutil.write", DISPATCH_QUEUE_SERIAL);
    int __block frame = 0;
    
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            if (frame >= duration * 30) {
                [writerInput markAsFinished];
                CVPixelBufferRelease(buffer);
                if (videoWriter.status == AVAssetWriterStatusWriting) {
                    [videoWriter finishWritingWithCompletionHandler:^{
                        if (block) block();
                    }];
                }
                break;
            }
            
            // append the same buffer once per frame at 30 fps; the first frame
            // lands at time zero to match startSessionAtSourceTime:kCMTimeZero
            if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 30)]) {
                NSLog(@"fail");
            } else {
                NSLog(@"success:%d", frame);
            }
            frame++;
        }
    }];
}
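
For reference, here is a minimal sketch of calling this method. The file paths are placeholders, and I'm assuming the method lives on the same ExportMovieUtil class as the helper shown further below:

NSURL *imageUrl = [NSURL fileURLWithPath:@"/tmp/source.png"];   // hypothetical input path
NSURL *exportUrl = [NSURL fileURLWithPath:@"/tmp/output.mov"];  // hypothetical output path

[ExportMovieUtil compressImage:imageUrl
                      duration:3.0  // 3 s at 30 fps = 90 frames
                          size:CGSizeMake(1280, 720)
                     outputUrl:exportUrl
                    completion:^{
                        NSLog(@"export finished: %@", exportUrl.path);
                    }];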

Besides the plain comments, you should also notice the small numbered markers 1, 2, 3, and 4 in the code above.

**Here is the implementation of the other function:**

+ (CVPixelBufferRef)getBufferFromNSImage:(NSImage *)image {
    CVPixelBufferRef buffer = NULL;
    // config
    size_t width = (size_t)[image size].width;
    size_t height = (size_t)[image size].height;
    size_t bitsPerComponent = 8; // 8 bits per component
    CGColorSpaceRef colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
    NSDictionary *options = @{(__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    
    // create pixel buffer; the format must match the adaptor's source attributes
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &buffer);
    if (status != kCVReturnSuccess) {
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    CVPixelBufferLockBaseAddress(buffer, 0);
    void *rasterData = CVPixelBufferGetBaseAddress(buffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    
    // context to draw in, backed directly by the pixel buffer's memory
    CGContextRef ctxt = CGBitmapContextCreate(rasterData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (ctxt == NULL) {
        NSLog(@"could not create context");
        CVPixelBufferUnlockBaseAddress(buffer, 0);
        CVPixelBufferRelease(buffer);
        return NULL;
    }
    // draw
    NSGraphicsContext *nsctxt = [NSGraphicsContext graphicsContextWithCGContext:ctxt flipped:NO];
    [NSGraphicsContext saveGraphicsState];
    [NSGraphicsContext setCurrentContext:nsctxt];
    [image drawInRect:NSMakeRect(0, 0, width, height)];
    [NSGraphicsContext restoreGraphicsState];
    
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    CGContextRelease(ctxt);
    
    return buffer;
}
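
One thing worth calling out: the buffer returned here still carries the +1 retain from CVPixelBufferCreate, which is why the writer block above calls CVPixelBufferRelease once the last frame has been appended. If you reuse this helper elsewhere, the caller's side looks roughly like this (the image path is a placeholder):

NSImage *image = [[NSImage alloc] initWithContentsOfFile:@"/tmp/source.png"]; // hypothetical path
CVPixelBufferRef buffer = [ExportMovieUtil getBufferFromNSImage:image];
if (buffer) {
    // ... hand the buffer to an adaptor / encoder here ...
    CVPixelBufferRelease(buffer); // balance the +1 from CVPixelBufferCreate
}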