From 53e53d76f49262466da3278afe40760b6d11b7c8 Mon Sep 17 00:00:00 2001
From: Rene Hexel
Date: Sun, 13 Oct 2019 12:46:13 +1000
Subject: [PATCH 1/2] Upgrade to Swift 4.2

---
 Package.swift                              |  2 +-
 framework/Source/Apple/MovieInput.swift    |  4 ++--
 framework/Source/Apple/MovieOutput.swift   | 10 +++++-----
 framework/Source/Apple/PictureOutput.swift |  4 ++--
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/Package.swift b/Package.swift
index 76e89014..6898d21f 100755
--- a/Package.swift
+++ b/Package.swift
@@ -56,5 +56,5 @@ let package = Package(
     products: platformProducts,
     dependencies: platformDependencies,
     targets: platformTargets,
-    swiftLanguageVersions: [.v4]
+    swiftLanguageVersions: [.v4_2]
 )
diff --git a/framework/Source/Apple/MovieInput.swift b/framework/Source/Apple/MovieInput.swift
index 33a14f82..b10d7bc9 100644
--- a/framework/Source/Apple/MovieInput.swift
+++ b/framework/Source/Apple/MovieInput.swift
@@ -10,7 +10,7 @@ public class MovieInput: ImageSource {
     let playAtActualSpeed:Bool
     let loop:Bool
     var videoEncodingIsFinished = false
-    var previousFrameTime = kCMTimeZero
+    var previousFrameTime = CMTime.zero
     var previousActualFrameTime = CFAbsoluteTimeGetCurrent()
 
     var numberOfFramesCaptured = 0
@@ -55,7 +55,7 @@ public class MovieInput: ImageSource {
         var readerVideoTrackOutput:AVAssetReaderOutput? = nil;
 
         for output in self.assetReader.outputs {
-            if(output.mediaType == AVMediaType.video.rawValue) {
+            if(output.mediaType == .video) {
                 readerVideoTrackOutput = output;
             }
         }
diff --git a/framework/Source/Apple/MovieOutput.swift b/framework/Source/Apple/MovieOutput.swift
index 4f9462da..b88db471 100644
--- a/framework/Source/Apple/MovieOutput.swift
+++ b/framework/Source/Apple/MovieOutput.swift
@@ -20,8 +20,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
     private var videoEncodingIsFinished = false
     private var audioEncodingIsFinished = false
     private var startTime:CMTime?
-    private var previousFrameTime = kCMTimeNegativeInfinity
-    private var previousAudioTime = kCMTimeNegativeInfinity
+    private var previousFrameTime = CMTime.negativeInfinity
+    private var previousAudioTime = CMTime.negativeInfinity
     private var encodingLiveVideo:Bool
     var pixelBuffer:CVPixelBuffer? = nil
     var renderFramebuffer:Framebuffer!
@@ -45,7 +45,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
         self.size = size
         assetWriter = try AVAssetWriter(url:URL, fileType:fileType)
         // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
-        assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000)
+        assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, preferredTimescale: 1000)
 
         var localSettings:[String:AnyObject]
         if let settings = settings {
@@ -56,7 +56,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
 
         localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width)
         localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height)
-        localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString
+        localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecType.h264 as NSString
 
         assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaType.video, outputSettings:localSettings)
         assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo
@@ -266,7 +266,7 @@ public extension Timestamp {
 
     var asCMTime:CMTime {
         get {
-            return CMTimeMakeWithEpoch(value, timescale, epoch)
+            return CMTimeMakeWithEpoch(value: value, timescale: timescale, epoch: epoch)
         }
     }
 }
diff --git a/framework/Source/Apple/PictureOutput.swift b/framework/Source/Apple/PictureOutput.swift
index d3a7aba3..92b7d569 100644
--- a/framework/Source/Apple/PictureOutput.swift
+++ b/framework/Source/Apple/PictureOutput.swift
@@ -97,8 +97,8 @@ public class PictureOutput: ImageConsumer {
         #if canImport(UIKit)
         let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up)
         switch encodedImageFormat {
-        case .png: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here
-        case .jpeg: imageData = UIImageJPEGRepresentation(image, 0.8)! // TODO: Be able to set image quality
+        case .png: imageData = image.pngData()! // TODO: Better error handling here
+        case .jpeg: imageData = image.jpegData(compressionQuality: 0.8)! // TODO: Be able to set image quality
         }
         #else
         let bitmapRepresentation = NSBitmapImageRep(cgImage:cgImageFromBytes)

From f25601c8914d676992fdcb9d9618d222aac9d2f2 Mon Sep 17 00:00:00 2001
From: Rene Hexel
Date: Sun, 13 Oct 2019 12:48:14 +1000
Subject: [PATCH 2/2] Note Swift 4.2

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index a862cfa2..fd4513eb 100755
--- a/README.md
+++ b/README.md
@@ -24,8 +24,8 @@ Currently, GPUImage uses Lode Vandevenne's
 
 ## Technical requirements ##
 
-- Swift 3
-- Xcode 8.0 on Mac or iOS
+- Swift 4.2 or higher
+- Xcode 10.1 or higher on Mac or iOS
 - iOS: 8.0 or higher (Swift is supported on 7.0, but not Mac-style frameworks)
 - OSX: 10.9 or higher
 - Linux: Wherever Swift code can be compiled. Currently, that's Ubuntu 14.04 or higher, along with the many other places it has been ported to. I've gotten this running on the latest Raspbian, for example. For camera input, Video4Linux needs to be installed.
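
Note (after the patch series, not part of it): the renames in PATCH 1/2 are the Swift 4.2 / Xcode 10 spellings of the same Core Media and UIKit APIs. A minimal sketch of the new spellings these hunks rely on, assuming an iOS target; the helper name encodePNG is illustrative only:

import AVFoundation // re-exports CoreMedia, where CMTime lives
import UIKit

// CMTime constants are now static members rather than kCMTime* globals.
let start: CMTime = .zero
let unset: CMTime = .negativeInfinity

// CMTimeMakeWithSeconds and CMTimeMakeWithEpoch gained argument labels.
let fragmentInterval = CMTimeMakeWithSeconds(1.0, preferredTimescale: 1000)
let stamped = CMTimeMakeWithEpoch(value: 1, timescale: 1000, epoch: 0)

// UIImagePNGRepresentation/UIImageJPEGRepresentation became UIImage methods.
func encodePNG(_ image: UIImage) -> Data? {
    return image.pngData() ?? image.jpegData(compressionQuality: 0.8)
}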