I have an array of UIImage that I loop through to create individual AVURLAsset videos. To create them, I followed this post.
My issue is that sometimes all of the images convert to video successfully, and other times the images fail to convert.
Adding breakpoints, the reason is that AVAssetWriterInputPixelBufferAdaptor.pixelBufferPool is nil, despite the AVAssetWriter having been started and the outputURL being available to write to.
Any thoughts would be appreciated; full code appended below.
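The failing check boils down to the following (a trimmed sketch of the start() method shown in full further down, with an extra status/error dump added purely for debugging):

// Trimmed from start() below; the status/error print is extra, just for debugging.
guard videoWriter.startWriting() else {
    print("startWriting() failed, status: \(videoWriter.status.rawValue), error: \(String(describing: videoWriter.error))")
    return
}
videoWriter.startSession(atSourceTime: CMTime.zero)
// Per the AVAssetWriterInputPixelBufferAdaptor docs the pool should exist once
// startWriting() has succeeded, yet it is sometimes still nil here.
precondition(pixelBufferAdaptor.pixelBufferPool != nil, "PixelBufferPool is nil")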
func convert() {
    var completionCounter = 0
    DispatchQueue.global(qos: .userInitiated).async { [weak self] in
        guard let self = self else { return }
        // Create a serial queue so all video processing happens in sequence.
        let videoProcessingQueue = DispatchQueue(label: "videoProcessingQueue", qos: .userInitiated)
        videoProcessingQueue.async {
            for image in self.images {
                let settings = RenderSettings()
                let imageAnimator = ImageAnimator(renderSettings: settings)
                imageAnimator.image = image
                imageAnimator.render { asset in
                    completionCounter += 1
                    if completionCounter == self.images.count {
                        print("All images converted to video")
                    }
                }
            }
        }
    }
}
class ImageAnimator {

    static let kTimescale: Int32 = 600

    let settings: RenderSettings
    let videoWriter: VideoWriter
    var image: UIImage!

    class func removeFileAtURL(fileURL: URL) {
        do {
            try FileManager.default.removeItem(atPath: fileURL.path)
        } catch _ as NSError {
            // Assume the file doesn't exist.
        }
    }

    init(renderSettings: RenderSettings) {
        settings = renderSettings
        videoWriter = VideoWriter(renderSettings: settings)
    }

    func render(completion: ((AVURLAsset) -> Void)?) {
        // The VideoWriter will fail if a file exists at the URL, so clear it out first.
        ImageAnimator.removeFileAtURL(fileURL: settings.outputURL)
        videoWriter.start {
            self.videoWriter.render(appendPixelBuffers: self.appendPixelBuffers) { asset in
                completion?(asset)
            }
        }
    }

    func appendPixelBuffers(writer: VideoWriter) -> Bool {
        let frameDuration = CMTimeMake(value: Int64(CGFloat(ImageAnimator.kTimescale) / settings.fps), timescale: ImageAnimator.kTimescale)
        let totalFrames = Int(settings.fps * settings.duration)
        for frameNum in 0..<totalFrames {
            let presentationTime = CMTimeMultiply(frameDuration, multiplier: Int32(frameNum))
            _ = writer.addImage(image: image, withPresentationTime: presentationTime)
        }
        return true
    }
}

class VideoWriter {

    let renderSettings: RenderSettings

    var videoWriter: AVAssetWriter!
    var videoWriterInput: AVAssetWriterInput!
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

    class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize) -> CVPixelBuffer? {
        var pixelBufferOut: CVPixelBuffer?
        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
        if status != kCVReturnSuccess {
            fatalError("CVPixelBufferPoolCreatePixelBuffer() failed with status \(status)")
        }
        guard let pixelBuffer = pixelBufferOut else {
            print("pixel buffer out is nil")
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        guard let data = CVPixelBufferGetBaseAddress(pixelBuffer) else {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            return nil
        }
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        guard let context = CGContext(data: data,
                                      width: Int(size.width),
                                      height: Int(size.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                      space: rgbColorSpace,
                                      bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue) else {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            print("pixel buffer out is nil")
            return nil
        }
        context.clear(CGRect(x: 0, y: 0, width: size.width, height: size.height))
        let horizontalRatio = size.width / image.size.width
        let verticalRatio = size.height / image.size.height
        let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
        let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)
        let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
        let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0
        context.draw(image.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        return pixelBuffer
    }
    init(renderSettings: RenderSettings) {
        self.renderSettings = renderSettings
    }

    func start(completion: @escaping () -> Void) {
        print("Start method entered")
        let avOutputSettings: [String: Any] = [
            AVVideoCodecKey: renderSettings.avCodecKey,
            AVVideoWidthKey: NSNumber(value: Float(renderSettings.size.width)),
            AVVideoHeightKey: NSNumber(value: Float(renderSettings.size.height))
        ]
        print("AVOutputSettings created")

        func createPixelBufferAdaptor() {
            let sourcePixelBufferAttributesDictionary = [
                kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB),
                kCVPixelBufferWidthKey as String: NSNumber(value: Float(renderSettings.size.width)),
                kCVPixelBufferHeightKey as String: NSNumber(value: Float(renderSettings.size.height)),
                kCVPixelFormatOpenGLESCompatibility as String: true // Add compatibility option
            ]
            pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                      sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
            print("PixelBufferAdaptor created with pool: \(String(describing: pixelBufferAdaptor.pixelBufferPool))")
        }
        func createAssetWriter(outputURL: URL) -> AVAssetWriter {
            guard let assetWriter = try? AVAssetWriter(outputURL: outputURL, fileType: .mp4) else {
                fatalError("AVAssetWriter() failed")
            }
            print("AssetWriter created")
            guard assetWriter.canApply(outputSettings: avOutputSettings, forMediaType: .video) else {
                fatalError("canApplyOutputSettings() failed")
            }
            print("canApplyOutputSettings passed")
            return assetWriter
        }
        videoWriter = createAssetWriter(outputURL: renderSettings.outputURL)
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: avOutputSettings)

        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
            print("VideoWriterInput added")
        } else {
            fatalError("canAddInput() returned false")
        }

        createPixelBufferAdaptor()
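        // Note: per the AVAssetWriterInputPixelBufferAdaptor documentation, pixelBufferPool
        // stays nil until startWriting() is called on the associated AVAssetWriter, and it
        // remains nil if writing could not be started.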
        guard videoWriter.startWriting() else {
            print("Failed to start writing")
            return
        }
        print("VideoWriter started writing")
        videoWriter.startSession(atSourceTime: CMTime.zero)
        print("Session started")
        precondition(pixelBufferAdaptor.pixelBufferPool != nil, "PixelBufferPool is nil")
        completion()
    }
    func render(appendPixelBuffers: ((VideoWriter) -> Bool)?, completion: ((AVURLAsset) -> Void)?) {
        precondition(videoWriter != nil, "Call start() to initialize the writer")
        let queue = DispatchQueue(label: "mediaInputQueue", attributes: .concurrent)
        videoWriterInput.requestMediaDataWhenReady(on: queue) {
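            // requestMediaDataWhenReady(on:using:) invokes this block repeatedly
            // whenever videoWriterInput can accept more media data.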
            let isFinished = appendPixelBuffers?(self) ?? false
            if isFinished {
                self.videoWriterInput.markAsFinished()
                self.videoWriter.finishWriting {
                    DispatchQueue.main.async {
                        let asset = AVURLAsset(url: self.videoWriter.outputURL)
                        completion?(asset)
                    }
                }
            } else {
                // The closure will be called again when the writer is ready for more data.
            }
        }
    }
    func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {
        guard let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool else {
            print("Pixel buffer pool is nil")
            return false
        }
        if let pixelBuffer = VideoWriter.pixelBufferFromImage(image: image, pixelBufferPool: pixelBufferPool, size: renderSettings.size),
           pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime) {
            return true
        } else {
            print("Failed to append pixel buffer")
            return false
        }
    }
}