
@tienit150198
Forked from acj/ATTENTION.md
Created February 27, 2023 16:03

Revisions

  1. @acj acj revised this gist Mar 12, 2017. 1 changed file with 7 additions and 0 deletions.
    7 changes: 7 additions & 0 deletions ATTENTION.md
    @@ -0,0 +1,7 @@
    # This code has moved

    Please refer to the [TimeLapseBuilder-Swift](https://github.com/acj/TimeLapseBuilder-Swift) repository on GitHub from now on.

    I will leave the original code here as a reference, but new comments may be removed. Please open an issue on GitHub if you have questions or would like to contribute.

    Thanks!
  2. @acj acj renamed this gist Nov 21, 2016. 1 changed file with 0 additions and 0 deletions.
    File renamed without changes.
  3. @acj acj revised this gist Nov 18, 2016. 3 changed files with 184 additions and 4 deletions.
    3 changes: 1 addition & 2 deletions TimeLapseBuilder.swift
    @@ -2,9 +2,8 @@
    // TimeLapseBuilder.swift
    //
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This is the original Swift 1.2 implementation. For an updated version
    // written in Swift 2.0, see https://gist.github.com/acj/6ae90aa1ebb8cad6b47b

    import AVFoundation
    3 changes: 1 addition & 2 deletions TimeLapseBuilderSwift20.swift
    @@ -3,9 +3,8 @@
    // Vapor
    //
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This implementation is written in Swift 2.0.

    import AVFoundation
    import UIKit
    182 changes: 182 additions & 0 deletions TimeLapseBuilderSwift30.swift
    @@ -0,0 +1,182 @@
    //
    // TimeLapseBuilder30.swift
    //
    // Created by Adam Jensen on 11/18/16.
    //
    // NOTE: This implementation is written in Swift 3.0.

    import AVFoundation
    import UIKit

    let kErrorDomain = "TimeLapseBuilder"
    let kFailedToStartAssetWriterError = 0
    let kFailedToAppendPixelBufferError = 1

    class TimeLapseBuilder: NSObject {
      let photoURLs: [String]
      var videoWriter: AVAssetWriter?

      init(photoURLs: [String]) {
        self.photoURLs = photoURLs
      }

      func build(_ progress: @escaping ((Progress) -> Void), success: @escaping ((URL) -> Void), failure: ((NSError) -> Void)) {
        let inputSize = CGSize(width: 4000, height: 3000)
        let outputSize = CGSize(width: 1280, height: 720)
        var error: NSError?

        let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
        let videoOutputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("AssembledVideo.mov"))

        do {
          try FileManager.default.removeItem(at: videoOutputURL)
        } catch {}

        do {
          try videoWriter = AVAssetWriter(outputURL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie)
        } catch let writerError as NSError {
          error = writerError
          videoWriter = nil
        }

        if let videoWriter = videoWriter {
          let videoSettings: [String : AnyObject] = [
            AVVideoCodecKey : AVVideoCodecH264 as AnyObject,
            AVVideoWidthKey : outputSize.width as AnyObject,
            AVVideoHeightKey : outputSize.height as AnyObject,
            // AVVideoCompressionPropertiesKey : [
            // AVVideoAverageBitRateKey : NSInteger(1000000),
            // AVVideoMaxKeyFrameIntervalKey : NSInteger(16),
            // AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
            // ]
          ]

          let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)

          let sourceBufferAttributes = [
            (kCVPixelBufferPixelFormatTypeKey as String): Int(kCVPixelFormatType_32ARGB),
            (kCVPixelBufferWidthKey as String): Float(inputSize.width),
            (kCVPixelBufferHeightKey as String): Float(inputSize.height)] as [String : Any]

          let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: videoWriterInput,
            sourcePixelBufferAttributes: sourceBufferAttributes
          )

          assert(videoWriter.canAdd(videoWriterInput))
          videoWriter.add(videoWriterInput)

          if videoWriter.startWriting() {
            videoWriter.startSession(atSourceTime: kCMTimeZero)
            assert(pixelBufferAdaptor.pixelBufferPool != nil)

            let media_queue = DispatchQueue(label: "mediaInputQueue")

            videoWriterInput.requestMediaDataWhenReady(on: media_queue) {
              let fps: Int32 = 30
              let frameDuration = CMTimeMake(1, fps)
              let currentProgress = Progress(totalUnitCount: Int64(self.photoURLs.count))

              var frameCount: Int64 = 0
              var remainingPhotoURLs = [String](self.photoURLs)

              while videoWriterInput.isReadyForMoreMediaData && !remainingPhotoURLs.isEmpty {
                let nextPhotoURL = remainingPhotoURLs.remove(at: 0)
                let lastFrameTime = CMTimeMake(frameCount, fps)
                let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

                if !self.appendPixelBufferForImageAtURL(nextPhotoURL, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                  error = NSError(
                    domain: kErrorDomain,
                    code: kFailedToAppendPixelBufferError,
                    userInfo: ["description": "AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer"]
                  )

                  break
                }

                frameCount += 1

                currentProgress.completedUnitCount = frameCount
                progress(currentProgress)
              }

              videoWriterInput.markAsFinished()
              videoWriter.finishWriting {
                if error == nil {
                  success(videoOutputURL)
                }

                self.videoWriter = nil
              }
            }
          } else {
            error = NSError(
              domain: kErrorDomain,
              code: kFailedToStartAssetWriterError,
              userInfo: ["description": "AVAssetWriter failed to start writing"]
            )
          }
        }

        if let error = error {
          failure(error)
        }
      }

      func appendPixelBufferForImageAtURL(_ url: String, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
        var appendSucceeded = false

        autoreleasepool {
          if let url = URL(string: url),
            let imageData = try? Data(contentsOf: url),
            let image = UIImage(data: imageData),
            let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
            let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity: 1)
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
              kCFAllocatorDefault,
              pixelBufferPool,
              pixelBufferPointer
            )

            if let pixelBuffer = pixelBufferPointer.pointee, status == 0 {
              fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)

              appendSucceeded = pixelBufferAdaptor.append(
                pixelBuffer,
                withPresentationTime: presentationTime
              )

              pixelBufferPointer.deinitialize()
            } else {
              NSLog("error: Failed to allocate pixel buffer from pool")
            }

            pixelBufferPointer.deallocate(capacity: 1)
          }
        }

        return appendSucceeded
      }

      func fillPixelBufferFromImage(_ image: UIImage, pixelBuffer: CVPixelBuffer) {
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))

        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(
          data: pixelData,
          width: Int(image.size.width),
          height: Int(image.size.height),
          bitsPerComponent: 8,
          bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
          space: rgbColorSpace,
          bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
        )

        context?.draw(image.cgImage!, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))

        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
      }
    }
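
    A minimal usage sketch for the Swift 3.0 builder above, assuming it lives in the same module and that the frame images are reachable via the URL strings passed in; the paths, frame count, and logging are purely illustrative:

    // Hypothetical list of frame URLs; substitute your own paths and count.
    let photoURLs = (1...120).map { "file:///tmp/frames/frame\($0).jpg" }
    let timeLapseBuilder = TimeLapseBuilder(photoURLs: photoURLs)

    timeLapseBuilder.build(
      { progress in
        // Progress callbacks arrive on the writer's media queue, so hop to the
        // main queue before updating any UI.
        DispatchQueue.main.async {
          NSLog("Progress: \(progress.completedUnitCount) / \(progress.totalUnitCount)")
        }
      },
      success: { url in
        NSLog("Time lapse written to \(url)")
      },
      failure: { error in
        NSLog("Failed to build time lapse: \(error)")
      }
    )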
  4. @acj acj revised this gist Nov 1, 2015. 1 changed file with 8 additions and 4 deletions.
    12 changes: 8 additions & 4 deletions TimeLapseBuilderSwift20.swift
    @@ -132,30 +132,34 @@ class TimeLapseBuilder: NSObject {
    }

    func appendPixelBufferForImageAtURL(url: String, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = true
    var appendSucceeded = false

    autoreleasepool {
    if let url = NSURL(string: url),
    let imageData = NSData(contentsOfURL: url),
    let image = UIImage(data: imageData),
    let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
    let pixelBuffer = UnsafeMutablePointer<CVPixelBuffer?>.alloc(1)
    let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.alloc(1)
    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
    kCFAllocatorDefault,
    pixelBufferPool,
    pixelBuffer
    pixelBufferPointer
    )

    if let pixelBuffer = pixelBuffer.memory where status == 0 {
    if let pixelBuffer = pixelBufferPointer.memory where status == 0 {
    fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)

    appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(
    pixelBuffer,
    withPresentationTime: presentationTime
    )

    pixelBufferPointer.destroy()
    } else {
    NSLog("error: Failed to allocate pixel buffer from pool")
    }

    pixelBufferPointer.dealloc(1)
    }
    }

  5. @acj acj revised this gist Nov 1, 2015. 1 changed file with 4 additions and 1 deletion.
    5 changes: 4 additions & 1 deletion TimeLapseBuilderSwift20.swift
    @@ -32,7 +32,10 @@ class TimeLapseBuilder: NSObject {

    do {
    try NSFileManager.defaultManager().removeItemAtURL(videoOutputURL)
    videoWriter = try AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie)
    } catch {}

    do {
    try videoWriter = AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie)
    } catch let writerError as NSError {
    error = writerError
    videoWriter = nil
  6. @acj acj revised this gist Oct 17, 2015. 1 changed file with 1 addition and 0 deletions.
    1 change: 1 addition & 0 deletions TimeLapseBuilderSwift20.swift
    @@ -5,6 +5,7 @@
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This implementation is written in Swift 2.0.

    import AVFoundation
    import UIKit
  7. @acj acj revised this gist Oct 17, 2015. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions TimeLapseBuilderSwift20.swift
    @@ -5,7 +5,6 @@
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This implementation is written in Swift 2.0.

    import AVFoundation
    import UIKit
    @@ -164,12 +163,13 @@ class TimeLapseBuilder: NSObject {

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    let context = CGBitmapContextCreate(
    pixelData,
    Int(image.size.width),
    Int(image.size.height),
    8,
    Int(4 * image.size.width),
    CVPixelBufferGetBytesPerRow(pixelBuffer),
    rgbColorSpace,
    CGImageAlphaInfo.PremultipliedFirst.rawValue
    )
  8. @acj acj revised this gist Oct 17, 2015. 2 changed files with 183 additions and 0 deletions.
    2 changes: 2 additions & 0 deletions TimeLapseBuilder.swift
    @@ -4,6 +4,8 @@
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This is the original Swift 1.2 implementation. For an updated version
    // written in Swift 2.0, see https://gist.github.com/acj/6ae90aa1ebb8cad6b47b

    import AVFoundation
    import UIKit
    181 changes: 181 additions & 0 deletions TimeLapseBuilderSwift20.swift
    @@ -0,0 +1,181 @@
    //
    // TimeLapseBuilder.swift
    // Vapor
    //
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //
    // NOTE: This implementation is written in Swift 2.0.

    import AVFoundation
    import UIKit

    let kErrorDomain = "TimeLapseBuilder"
    let kFailedToStartAssetWriterError = 0
    let kFailedToAppendPixelBufferError = 1

    class TimeLapseBuilder: NSObject {
    let photoURLs: [String]
    var videoWriter: AVAssetWriter?

    init(photoURLs: [String]) {
    self.photoURLs = photoURLs
    }

    func build(progress: (NSProgress -> Void), success: (NSURL -> Void), failure: (NSError -> Void)) {
    let inputSize = CGSize(width: 4000, height: 3000)
    let outputSize = CGSize(width: 1280, height: 720)
    var error: NSError?

    let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
    let videoOutputURL = NSURL(fileURLWithPath: documentsPath.stringByAppendingPathComponent("AssembledVideo.mov"))

    do {
    try NSFileManager.defaultManager().removeItemAtURL(videoOutputURL)
    videoWriter = try AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie)
    } catch let writerError as NSError {
    error = writerError
    videoWriter = nil
    }

    if let videoWriter = videoWriter {
    let videoSettings: [String : AnyObject] = [
    AVVideoCodecKey : AVVideoCodecH264,
    AVVideoWidthKey : outputSize.width,
    AVVideoHeightKey : outputSize.height,
    // AVVideoCompressionPropertiesKey : [
    // AVVideoAverageBitRateKey : NSInteger(1000000),
    // AVVideoMaxKeyFrameIntervalKey : NSInteger(16),
    // AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
    // ]
    ]

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)

    let sourceBufferAttributes = [String : AnyObject](dictionaryLiteral:
    (kCVPixelBufferPixelFormatTypeKey as String, Int(kCVPixelFormatType_32ARGB)),
    (kCVPixelBufferWidthKey as String, Float(inputSize.width)),
    (kCVPixelBufferHeightKey as String, Float(inputSize.height))
    )

    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: videoWriterInput,
    sourcePixelBufferAttributes: sourceBufferAttributes
    )

    assert(videoWriter.canAddInput(videoWriterInput))
    videoWriter.addInput(videoWriterInput)

    if videoWriter.startWriting() {
    videoWriter.startSessionAtSourceTime(kCMTimeZero)
    assert(pixelBufferAdaptor.pixelBufferPool != nil)

    let media_queue = dispatch_queue_create("mediaInputQueue", nil)

    videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
    let fps: Int32 = 30
    let frameDuration = CMTimeMake(1, fps)
    let currentProgress = NSProgress(totalUnitCount: Int64(self.photoURLs.count))

    var frameCount: Int64 = 0
    var remainingPhotoURLs = [String](self.photoURLs)

    while (videoWriterInput.readyForMoreMediaData && !remainingPhotoURLs.isEmpty) {
    let nextPhotoURL = remainingPhotoURLs.removeAtIndex(0)
    let lastFrameTime = CMTimeMake(frameCount, fps)
    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)


    if !self.appendPixelBufferForImageAtURL(nextPhotoURL, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
    error = NSError(
    domain: kErrorDomain,
    code: kFailedToAppendPixelBufferError,
    userInfo: [
    "description": "AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer",
    "rawError": videoWriter.error ?? "(none)"
    ]
    )

    break
    }

    frameCount++

    currentProgress.completedUnitCount = frameCount
    progress(currentProgress)
    }

    videoWriterInput.markAsFinished()
    videoWriter.finishWritingWithCompletionHandler { () -> Void in
    if error == nil {
    success(videoOutputURL)
    }

    self.videoWriter = nil
    }
    })
    } else {
    error = NSError(
    domain: kErrorDomain,
    code: kFailedToStartAssetWriterError,
    userInfo: ["description": "AVAssetWriter failed to start writing"]
    )
    }
    }

    if let error = error {
    failure(error)
    }
    }

    func appendPixelBufferForImageAtURL(url: String, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = true

    autoreleasepool {
    if let url = NSURL(string: url),
    let imageData = NSData(contentsOfURL: url),
    let image = UIImage(data: imageData),
    let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
    let pixelBuffer = UnsafeMutablePointer<CVPixelBuffer?>.alloc(1)
    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
    kCFAllocatorDefault,
    pixelBufferPool,
    pixelBuffer
    )

    if let pixelBuffer = pixelBuffer.memory where status == 0 {
    fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)

    appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(
    pixelBuffer,
    withPresentationTime: presentationTime
    )
    } else {
    NSLog("error: Failed to allocate pixel buffer from pool")
    }
    }
    }

    return appendSucceeded
    }

    func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBufferRef) {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0)

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGBitmapContextCreate(
    pixelData,
    Int(image.size.width),
    Int(image.size.height),
    8,
    Int(4 * image.size.width),
    rgbColorSpace,
    CGImageAlphaInfo.PremultipliedFirst.rawValue
    )

    CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage)

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)
    }
    }
  9. @acj acj revised this gist Aug 19, 2015. 1 changed file with 71 additions and 0 deletions.
    71 changes: 71 additions & 0 deletions BuildTimeLapseViewController.swift
    @@ -0,0 +1,71 @@
    //
    // BuildTimelapseViewController.swift
    //
    // Created by Adam Jensen on 5/9/15.
    //

    import JGProgressHUD
    import JoePro
    import UIKit

    class BuildTimelapseViewController: UIViewController {
      @IBOutlet weak var resolutionSegmentedControl: UISegmentedControl!
      @IBOutlet weak var speedSlider: UISlider!
      @IBOutlet weak var removeFisheyeSlider: UISwitch!

      var album: String?
      var camera: JoeProCamera?
      var timeLapseBuilder: TimeLapseBuilder?

      init(camera: JoeProCamera, album: String) {
        self.camera = camera
        self.album = album

        super.init(nibName: "BuildTimelapseViewController", bundle: nil)
      }

      required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
      }

      override func viewDidLoad() {
        super.viewDidLoad()
      }

      @IBAction func buildButtonTapped(sender: AnyObject) {
        if let camera = camera,
          let album = album {

          let progressHUD = JGProgressHUD(style: .Light)
          progressHUD.textLabel.text = "Building your timelapse..."
          progressHUD.indicatorView = JGProgressHUDRingIndicatorView(HUDStyle: .Light)
          progressHUD.setProgress(0, animated: true)
          progressHUD.showInView(view)

          camera.listOfVideos(album) { (videos) -> Void in
            self.timeLapseBuilder = TimeLapseBuilder(photoURLs: videos)
            self.timeLapseBuilder!.build(
              { (progress: NSProgress) in
                NSLog("Progress: \(progress.completedUnitCount) / \(progress.totalUnitCount)")
                dispatch_async(dispatch_get_main_queue(), {
                  let progressPercentage = Float(progress.completedUnitCount) / Float(progress.totalUnitCount)
                  progressHUD.setProgress(progressPercentage, animated: true)
                })
              },
              success: { url in
                NSLog("Output written to \(url)")
                dispatch_async(dispatch_get_main_queue(), {
                  progressHUD.dismiss()
                })
              },
              failure: { error in
                NSLog("failure: \(error)")
                dispatch_async(dispatch_get_main_queue(), {
                  progressHUD.dismiss()
                })
              }
            )
          }
        }
      }
    }
  10. @acj acj created this gist May 15, 2015.
    175 changes: 175 additions & 0 deletions TimeLapseBuilder.swift
    @@ -0,0 +1,175 @@
    //
    // TimeLapseBuilder.swift
    //
    // Created by Adam Jensen on 5/10/15.
    // Copyright (c) 2015 Adam Jensen. All rights reserved.
    //

    import AVFoundation
    import UIKit

    let kErrorDomain = "TimeLapseBuilder"
    let kFailedToStartAssetWriterError = 0
    let kFailedToAppendPixelBufferError = 1

    class TimeLapseBuilder: NSObject {
    let photoURLs: [String]
    var videoWriter: AVAssetWriter?

    init(photoURLs: [String]) {
    self.photoURLs = photoURLs
    }

    func build(progress: (NSProgress -> Void), success: (NSURL -> Void), failure: (NSError -> Void)) {
    let inputSize = CGSize(width: 4000, height: 3000)
    let outputSize = CGSize(width: 1280, height: 720)
    var error: NSError?

    let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! NSString
    let videoOutputURL = NSURL(fileURLWithPath: documentsPath.stringByAppendingPathComponent("AssembledVideo.mov"))!

    NSFileManager.defaultManager().removeItemAtURL(videoOutputURL, error: nil)

    videoWriter = AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeQuickTimeMovie, error: &error)

    if let videoWriter = videoWriter {
    let videoSettings: [NSObject : AnyObject] = [
    AVVideoCodecKey : AVVideoCodecH264,
    AVVideoWidthKey : outputSize.width,
    AVVideoHeightKey : outputSize.height,
    // AVVideoCompressionPropertiesKey : [
    // AVVideoAverageBitRateKey : NSInteger(1000000),
    // AVVideoMaxKeyFrameIntervalKey : NSInteger(16),
    // AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
    // ]
    ]

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)

    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: videoWriterInput,
    sourcePixelBufferAttributes: [
    kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32ARGB,
    kCVPixelBufferWidthKey : inputSize.width,
    kCVPixelBufferHeightKey : inputSize.height,
    ]
    )

    assert(videoWriter.canAddInput(videoWriterInput))
    videoWriter.addInput(videoWriterInput)

    if videoWriter.startWriting() {
    videoWriter.startSessionAtSourceTime(kCMTimeZero)
    assert(pixelBufferAdaptor.pixelBufferPool != nil)

    let media_queue = dispatch_queue_create("mediaInputQueue", nil)

    videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
    let fps: Int32 = 30
    let frameDuration = CMTimeMake(1, fps)
    let currentProgress = NSProgress(totalUnitCount: Int64(self.photoURLs.count))

    var frameCount: Int64 = 0
    var remainingPhotoURLs = [String](self.photoURLs)

    while (videoWriterInput.readyForMoreMediaData && !remainingPhotoURLs.isEmpty) {
    let nextPhotoURL = remainingPhotoURLs.removeAtIndex(0)
    let lastFrameTime = CMTimeMake(frameCount, fps)
    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)


    if !self.appendPixelBufferForImageAtURL(nextPhotoURL, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
    error = NSError(
    domain: kErrorDomain,
    code: kFailedToAppendPixelBufferError,
    userInfo: [
    "description": "AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer",
    "rawError": videoWriter.error ?? "(none)"
    ]
    )

    break
    }

    frameCount++

    currentProgress.completedUnitCount = frameCount
    progress(currentProgress)
    }

    videoWriterInput.markAsFinished()
    videoWriter.finishWritingWithCompletionHandler { () -> Void in
    if error == nil {
    success(videoOutputURL)
    }
    }
    })
    } else {
    error = NSError(
    domain: kErrorDomain,
    code: kFailedToStartAssetWriterError,
    userInfo: ["description": "AVAssetWriter failed to start writing"]
    )
    }
    }

    if let error = error {
    failure(error)
    }
    }

    func appendPixelBufferForImageAtURL(url: String, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = true

    autoreleasepool {
    if let url = NSURL(string: url),
    let imageData = NSData(contentsOfURL: url),
    let image = UIImage(data: imageData) {
    var pixelBuffer: Unmanaged<CVPixelBuffer>?
    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
    kCFAllocatorDefault,
    pixelBufferAdaptor.pixelBufferPool,
    &pixelBuffer
    )

    if let pixelBuffer = pixelBuffer where status == 0 {
    let managedPixelBuffer = pixelBuffer.takeRetainedValue()

    fillPixelBufferFromImage(image, pixelBuffer: managedPixelBuffer)

    appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(
    managedPixelBuffer,
    withPresentationTime: presentationTime
    )
    } else {
    NSLog("error: Failed to allocate pixel buffer from pool")
    }
    }
    }

    return appendSucceeded
    }

    func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBufferRef) {
    let imageData = CGDataProviderCopyData(CGImageGetDataProvider(image.CGImage))
    let lockStatus = CVPixelBufferLockBaseAddress(pixelBuffer, 0)

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    let context = CGBitmapContextCreate(
    pixelData,
    Int(image.size.width),
    Int(image.size.height),
    8,
    Int(4 * image.size.width),
    rgbColorSpace,
    bitmapInfo
    )

    CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage)

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)
    }
    }