How to reduce the size of a .mov video file created with AVCaptureSession?

I can record video using AVCaptureSession

. But I want to reduce its size. How can I do this? I get the final URL

in the delegate's captureOutput method.

    // Create the default video capture device and attach it to the session.
    // Note: the original paste had a dangling `else` (an else with no matching
    // if) because the `if (VideoDevice)` guard was lost; restored here.
    VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (VideoDevice)
    {
        VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
        if (!error)
        {
            if ([CaptureSession canAddInput:VideoInputDevice])
            {
                [CaptureSession addInput:VideoInputDevice];
            }
            else
            {
                NSLog(@"Couldn't add video input");
            }
        }
        else
        {
            NSLog(@"Couldn't create video input");
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }

    // Audio is captured with its own device, added to the session the same way.
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

      

+3


source to share


2 answers


You want to use AVAssetExportSession. The code will look something like this:

// Re-export the recorded asset; choosing a smaller preset (e.g.
// AVAssetExportPreset640x480) is what actually reduces the file size.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:myAsset presetName:exportPreset];

// Optional: only needed if you have a composition to apply — omit otherwise.
exporter.videoComposition = _videoComposition;
// Keep the QuickTime (.mov) container of the original recording.
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = NO;
// Derive the proper file extension (.mov) from the output file type, which is
// a UTI string; CFBridgingRelease transfers ownership of the copied tag to ARC.
url = [url URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(exporter.outputFileType), kUTTagClassFilenameExtension))];
exporter.outputURL = url;

// The export runs asynchronously; inspect exporter.status / exporter.error
// in the handler before using the output file.
[exporter exportAsynchronouslyWithCompletionHandler:^{
    // handle the completion in a way meaningful to your app
}];

      



Export preset must be one of the following:

AVF_EXPORT NSString *const AVAssetExportPreset640x480           NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset960x540           NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1280x720          NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1920x1080         NS_AVAILABLE(10_7, 5_0);
AVF_EXPORT NSString *const AVAssetExportPreset3840x2160         NS_AVAILABLE(10_10, NA);

      

0


source


Hope this method is helpful for you.



/// Re-encodes the movie at inputURL into a QuickTime file at outputURL,
/// compressing the video track to ~1.25 Mbps H.264 at the given resolution
/// and passing the audio track through untouched. Video is written first,
/// then audio; -finishWritingAction is invoked once everything is appended.
///
/// @param inputURL   File URL of the source movie. Must contain a video track.
/// @param outputURL  Destination file URL; any existing file there is removed.
/// @param resolution Target pixel dimensions for the re-encoded video.
- (void)compressingVideoWithSoundWithInputURL:(NSURL *)inputURL outputURL:(NSURL *)outputURL andResolution:(CGSize)resolution
{
    self.myVideoWriter = nil;

    // AVAssetWriter refuses to write over an existing file.
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

    AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];

    // -firstObject is nil-safe; objectAtIndex:0 would crash on an asset
    // without a video track.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack) {
        FLog(@"VideoEditor. compressingVideoWithSoundWithInputURL: asset has no video track");
        return;
    }

    //setup video writer
    NSDictionary *videoWriterCompressionSettings = @{AVVideoAverageBitRateKey : @1250000};

    NSDictionary *videoWriterSettings = @{AVVideoCodecKey : AVVideoCodecH264,
                                          AVVideoCompressionPropertiesKey : videoWriterCompressionSettings,
                                          AVVideoWidthKey : @(resolution.width),
                                          AVVideoHeightKey : @(resolution.height)};

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoWriterSettings];

    // NO for offline transcoding: YES is intended for live capture sources and
    // allows the writer to drop samples when the source outruns the encoder.
    videoWriterInput.expectsMediaDataInRealTime = NO;

    // Carry the recording orientation over to the output.
    videoWriterInput.transform = videoTrack.preferredTransform;

    NSError *writerError = nil;
    self.myVideoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];

    // Check the returned object, not the error pointer — the error out-param
    // is only meaningful when initialization actually failed.
    if (!self.myVideoWriter) {
        NSLog(@"Writer error: %@", writerError);
        NSString *message = [NSString stringWithFormat:@"VideoEditor. compressingVideoWithInputURL: outputURL: andResolution:. Writer error: %@", writerError];
        FLog(message);
        return;
    }

    [self.myVideoWriter addInput:videoWriterInput];

    //setup video reader — decode to a pixel format the H.264 encoder accepts
    NSDictionary *videoReaderSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};

    AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];

    NSError *videoReaderError = nil;
    AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&videoReaderError];
    if (!videoReader) {
        FLog([NSString stringWithFormat:@"VideoEditor. Video reader error: %@", videoReaderError]);
        return;
    }

    [videoReader addOutput:videoReaderOutput];

    //setup audio writer/reader — only when the asset actually has audio;
    // nil outputSettings means pass the audio samples through unmodified.
    AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

    AVAssetWriterInput *audioWriterInput = nil;
    AVAssetReaderOutput *audioReaderOutput = nil;
    AVAssetReader *audioReader = nil;

    if (audioTrack) {
        audioWriterInput = [AVAssetWriterInput
                            assetWriterInputWithMediaType:AVMediaTypeAudio
                            outputSettings:nil];
        audioWriterInput.expectsMediaDataInRealTime = NO;
        [self.myVideoWriter addInput:audioWriterInput];

        audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

        NSError *audioReaderError = nil;
        audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:&audioReaderError];
        if (!audioReader) {
            FLog([NSString stringWithFormat:@"VideoEditor. Audio reader error: %@", audioReaderError]);
            return;
        }
        [audioReader addOutput:audioReaderOutput];
    }

    if (![self.myVideoWriter startWriting]) {
        FLog([NSString stringWithFormat:@"VideoEditor. startWriting failed: %@", self.myVideoWriter.error]);
        return;
    }

    //start writing from video reader
    [videoReader startReading];

    // The session may be started exactly once; starting it a second time
    // (as the original did before the audio pass) raises an exception.
    [self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", DISPATCH_QUEUE_SERIAL);

    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
     ^{
         while ([videoWriterInput isReadyForMoreMediaData]) {

             CMSampleBufferRef sampleBuffer = NULL;

             if ([videoReader status] == AVAssetReaderStatusReading &&
                 (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {

                 [videoWriterInput appendSampleBuffer:sampleBuffer];
                 // copyNextSampleBuffer follows the Create/Copy rule.
                 CFRelease(sampleBuffer);
             }
             else {

                 [videoWriterInput markAsFinished];

                 if ([videoReader status] == AVAssetReaderStatusCompleted) {

                     if (!audioReader) {
                         // No audio track: the video pass was the whole job.
                         [self finishWritingAction];
                     }
                     else if ([audioReader status] == AVAssetReaderStatusUnknown) {
                         //start writing from audio reader (only once —
                         // Unknown is the not-yet-started state)
                         [audioReader startReading];

                         dispatch_queue_t audioQueue = dispatch_queue_create("processingQueue2", DISPATCH_QUEUE_SERIAL);

                         [audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{

                             while (audioWriterInput.readyForMoreMediaData) {

                                 CMSampleBufferRef audioSampleBuffer = NULL;

                                 if ([audioReader status] == AVAssetReaderStatusReading &&
                                     (audioSampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {

                                     [audioWriterInput appendSampleBuffer:audioSampleBuffer];
                                     CFRelease(audioSampleBuffer);
                                 }
                                 else {

                                     [audioWriterInput markAsFinished];

                                     if ([audioReader status] == AVAssetReaderStatusCompleted)
                                     {
                                         [self finishWritingAction];
                                     }
                                     // Leave the drain loop whether the reader
                                     // completed or failed — avoids spinning.
                                     break;
                                 }
                             }
                         }];
                     }
                 }

                 // The video input is finished (or its reader failed); exit so
                 // the loop cannot busy-spin on markAsFinished.
                 break;
             }
         }
     }];
}

      

+2


source







All Articles