@ourui
Created March 5, 2018 08:31

ReversingAsset.m

#import "AVUtilities.h"
#import <AVFoundation/AVFoundation.h>

@implementation AVUtilities

+ (AVAsset *)assetByReversingAsset:(AVAsset *)asset outputURL:(NSURL *)outputURL {
    NSError *error;

    // Initialize the reader, decoding the video track to raw pixel buffers
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] lastObject];

    NSDictionary *readerOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderTrackOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                        outputSettings:readerOutputSettings];
    [reader addOutput:readerOutput];
    [reader startReading];

    // Read all the samples into memory; the array retains each buffer, so this
    // holds every decoded frame at once and only suits reasonably short clips
    NSMutableArray *samples = [[NSMutableArray alloc] init];

    CMSampleBufferRef sample;
    while ((sample = [readerOutput copyNextSampleBuffer])) {
        [samples addObject:(__bridge id)sample];
        CFRelease(sample);
    }

    // Bail out if the reader produced no samples; samples[0] below would crash
    if (samples.count == 0) {
        return nil;
    }

    // Initialize the writer (AVAssetWriter fails if a file already exists at outputURL)
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    NSDictionary *videoCompressionProps = @{AVVideoAverageBitRateKey : @(videoTrack.estimatedDataRate)};
    NSDictionary *writerOutputSettings = @{AVVideoCodecKey : AVVideoCodecH264,
                                           AVVideoWidthKey : @((int)videoTrack.naturalSize.width),
                                           AVVideoHeightKey : @((int)videoTrack.naturalSize.height),
                                           AVVideoCompressionPropertiesKey : videoCompressionProps};
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:writerOutputSettings
                                                                   sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
    [writerInput setExpectsMediaDataInRealTime:NO];

    // Initialize an input adaptor so that we can append pixel buffers
    AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];

    [writer addInput:writerInput];

    [writer startWriting];
    [writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];

    // Append the frames to the output.
    // Notice we append the frames from the tail end, using the timing of the frames from the front.
    for (NSInteger i = 0; i < samples.count; i++) {
        // Get the presentation time of the i-th frame counted from the front
        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);

        // Take the image/pixel buffer from the tail end of the array
        CVPixelBufferRef imageBufferRef = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);

        // Poll until the writer input can accept more data
        while (!writerInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }

        [pixelBufferAdaptor appendPixelBuffer:imageBufferRef withPresentationTime:presentationTime];
    }

    [writerInput markAsFinished];
    // finishWriting blocks until the file is written (deprecated in favor of
    // finishWritingWithCompletionHandler:, but it keeps this method synchronous)
    [writer finishWriting];

    return [AVAsset assetWithURL:outputURL];
}

@end
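
For context, a minimal call-site sketch (not part of the gist; the clip name, output path, and queue choice are illustrative assumptions). AVAssetWriter refuses to write over an existing file, and the method blocks while it reads and writes, so remove any stale output first and call it off the main thread:

// Hypothetical caller: reverse a bundled clip into a temporary file.
NSURL *sourceURL = [[NSBundle mainBundle] URLForResource:@"clip" withExtension:@"mp4"]; // assumed resource
AVAsset *sourceAsset = [AVAsset assetWithURL:sourceURL];

NSURL *outputURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"reversed.mp4"]];
// Clear any previous run's output so the writer can create the file.
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

dispatch_async(dispatch_get_global_queue(QOS_CLASS_UTILITY, 0), ^{
    AVAsset *reversed = [AVUtilities assetByReversingAsset:sourceAsset outputURL:outputURL];
    NSLog(@"reversed duration: %.2fs", CMTimeGetSeconds(reversed.duration));
});

A production version would typically replace the sleep-based polling loop with -[AVAssetWriterInput requestMediaDataWhenReadyOnQueue:usingBlock:] and check reader.status and writer.status for failures, but the simple loop keeps the gist easy to follow.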