Support Swift 3

This commit is contained in:
dimohamdy 2016-08-19 04:04:13 +03:00
parent 389ec550b3
commit 0267cc2664
3 changed files with 229 additions and 212 deletions

View File

@ -201,7 +201,7 @@
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0700;
LastUpgradeCheck = 0700;
LastUpgradeCheck = 0800;
ORGANIZATIONNAME = imaginaryCloud;
TargetAttributes = {
454C1F4019E82E2500C81915 = {
@ -209,6 +209,7 @@
};
D71DE8801AD677A7001E62F1 = {
CreatedOnToolsVersion = 6.3;
LastSwiftMigration = 0800;
};
};
};
@ -313,8 +314,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_ENTITLEMENTS[sdk=iphoneos*]" = "";
@ -324,6 +327,7 @@
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
@ -358,8 +362,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@ -367,6 +373,7 @@
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
@ -376,6 +383,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
@ -408,6 +416,7 @@
D71DE89B1AD677A8001E62F1 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEFINES_MODULE = YES;
@ -426,6 +435,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)";
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@ -434,6 +444,7 @@
D71DE89C1AD677A8001E62F1 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
@ -449,6 +460,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)";
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0700"
LastUpgradeVersion = "0800"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
@ -23,21 +23,21 @@
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
buildConfiguration = "Debug"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
@ -55,10 +55,10 @@
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
buildConfiguration = "Release"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference

View File

@ -8,44 +8,44 @@
import UIKit
import AVFoundation
import AssetsLibrary
//import AssetsLibrary
import Photos
public enum CameraState {
case Ready, AccessDenied, NoDeviceFound, NotDetermined
case ready, accessDenied, noDeviceFound, notDetermined
}
public enum CameraDevice {
case Front, Back
case front, back
}
public enum CameraFlashMode: Int {
case Off, On, Auto
case off, on, auto
}
public enum CameraOutputMode {
case StillImage, VideoWithMic, VideoOnly
case stillImage, videoWithMic, videoOnly
}
public enum CameraOutputQuality: Int {
case Low, Medium, High
case low, medium, high
}
/// Class for handling iDevices custom camera usage
public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate {
open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate {
// MARK: - Public properties
/// Capture session to customize camera settings.
public var captureSession: AVCaptureSession?
open var captureSession: AVCaptureSession?
/// Property to determine if the manager should show the error for the user. If you want to show the errors yourself set this to false. If you want to add custom error UI set showErrorBlock property. Default value is false.
public var showErrorsToUsers = false
open var showErrorsToUsers = false
/// Property to determine if the manager should show the camera permission popup immediately when it's needed or you want to show it manually. Default value is true. Be careful because using the camera requires permission; if you set this value to false and don't ask manually you won't be able to use the camera.
public var showAccessPermissionPopupAutomatically = true
open var showAccessPermissionPopupAutomatically = true
/// A block creating UI to present error message to the user. This can be customised to be presented on the Window root view controller, or to pass in the viewController which will present the UIAlertController, for example.
public var showErrorBlock:(erTitle: String, erMessage: String) -> Void = { (erTitle: String, erMessage: String) -> Void in
open var showErrorBlock:(_ erTitle: String, _ erMessage: String) -> Void = { (erTitle: String, erMessage: String) -> Void in
// var alertController = UIAlertController(title: erTitle, message: erMessage, preferredStyle: .Alert)
// alertController.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction) -> Void in }))
@ -56,10 +56,10 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// Property to determine if manager should write the resources to the phone library. Default value is true.
public var writeFilesToPhoneLibrary = true
open var writeFilesToPhoneLibrary = true
/// Property to determine if manager should follow device orientation. Default value is true.
public var shouldRespondToOrientationChanges = true {
open var shouldRespondToOrientationChanges = true {
didSet {
if shouldRespondToOrientationChanges {
_startFollowingDeviceOrientation()
@ -70,18 +70,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// The Bool property to determine if the camera is ready to use.
public var cameraIsReady: Bool {
open var cameraIsReady: Bool {
get {
return cameraIsSetup
}
}
/// The Bool property to determine if current device has front camera.
public var hasFrontCamera: Bool = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
open var hasFrontCamera: Bool = {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == .Front) {
if (captureDevice.position == .front) {
return true
}
}
@ -89,11 +89,11 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}()
/// The Bool property to determine if current device has flash.
public var hasFlash: Bool = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
open var hasFlash: Bool = {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == .Back) {
if (captureDevice.position == .back) {
return captureDevice.hasFlash
}
}
@ -101,7 +101,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}()
/// Property to change camera device between front and back.
public var cameraDevice = CameraDevice.Back {
open var cameraDevice = CameraDevice.back {
didSet {
if cameraIsSetup {
if cameraDevice != oldValue {
@ -114,7 +114,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// Property to change camera flash mode.
public var flashMode = CameraFlashMode.Off {
open var flashMode = CameraFlashMode.off {
didSet {
if cameraIsSetup {
if flashMode != oldValue {
@ -125,7 +125,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// Property to change camera output quality.
public var cameraOutputQuality = CameraOutputQuality.High {
open var cameraOutputQuality = CameraOutputQuality.high {
didSet {
if cameraIsSetup {
if cameraOutputQuality != oldValue {
@ -136,7 +136,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// Property to change camera output.
public var cameraOutputMode = CameraOutputMode.StillImage {
open var cameraOutputMode = CameraOutputMode.stillImage {
didSet {
if cameraIsSetup {
if cameraOutputMode != oldValue {
@ -149,53 +149,53 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
/// Property to check video recording duration when in progress
public var recordedDuration : CMTime { return movieOutput?.recordedDuration ?? kCMTimeZero }
open var recordedDuration : CMTime { return movieOutput?.recordedDuration ?? kCMTimeZero }
/// Property to check video recording file size when in progress
public var recordedFileSize : Int64 { return movieOutput?.recordedFileSize ?? 0 }
open var recordedFileSize : Int64 { return movieOutput?.recordedFileSize ?? 0 }
// MARK: - Private properties
private weak var embeddingView: UIView?
private var videoCompletion: ((videoURL: NSURL?, error: NSError?) -> Void)?
fileprivate weak var embeddingView: UIView?
fileprivate var videoCompletion: ((_ videoURL: URL?, _ error: NSError?) -> Void)?
private var sessionQueue: dispatch_queue_t = dispatch_queue_create("CameraSessionQueue", DISPATCH_QUEUE_SERIAL)
fileprivate var sessionQueue: DispatchQueue = DispatchQueue(label: "CameraSessionQueue", attributes: [])
private lazy var frontCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Front}.first
fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .front}.first
}()
private lazy var backCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Back}.first
fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .back}.first
}()
private lazy var mic: AVCaptureDevice? = {
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
fileprivate lazy var mic: AVCaptureDevice? = {
return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
}()
private var stillImageOutput: AVCaptureStillImageOutput?
private var movieOutput: AVCaptureMovieFileOutput?
private var previewLayer: AVCaptureVideoPreviewLayer?
private var library: ALAssetsLibrary?
fileprivate var stillImageOutput: AVCaptureStillImageOutput?
fileprivate var movieOutput: AVCaptureMovieFileOutput?
fileprivate var previewLayer: AVCaptureVideoPreviewLayer?
fileprivate var library: PHPhotoLibrary?
private var cameraIsSetup = false
private var cameraIsObservingDeviceOrientation = false
fileprivate var cameraIsSetup = false
fileprivate var cameraIsObservingDeviceOrientation = false
private var zoomScale = CGFloat(1.0)
private var beginZoomScale = CGFloat(1.0)
private var maxZoomScale = CGFloat(1.0)
fileprivate var zoomScale = CGFloat(1.0)
fileprivate var beginZoomScale = CGFloat(1.0)
fileprivate var maxZoomScale = CGFloat(1.0)
private var tempFilePath: NSURL = {
let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
fileprivate var tempFilePath: URL = {
let tempPath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tempMovie").appendingPathExtension("mp4").absoluteString
if FileManager.default.fileExists(atPath: tempPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(tempPath)
try FileManager.default.removeItem(atPath: tempPath)
} catch { }
}
return NSURL(string: tempPath)!
return URL(string: tempPath)!
}()
@ -210,19 +210,19 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
:returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined.
*/
public func addPreviewLayerToView(view: UIView) -> CameraState {
open func addPreviewLayerToView(_ view: UIView) -> CameraState {
return addPreviewLayerToView(view, newCameraOutputMode: cameraOutputMode)
}
public func addPreviewLayerToView(view: UIView, newCameraOutputMode: CameraOutputMode) -> CameraState {
open func addPreviewLayerToView(_ view: UIView, newCameraOutputMode: CameraOutputMode) -> CameraState {
return addLayerPreviewToView(view, newCameraOutputMode: newCameraOutputMode, completion: nil)
}
@available(*, unavailable, renamed="addLayerPreviewToView")
public func addPreviewLayerToView(view: UIView, newCameraOutputMode: CameraOutputMode, completition: (Void -> Void)?) -> CameraState {
@available(*, unavailable, renamed: "addLayerPreviewToView")
open func addPreviewLayerToView(_ view: UIView, newCameraOutputMode: CameraOutputMode, completition: ((Void) -> Void)?) -> CameraState {
return addLayerPreviewToView(view, newCameraOutputMode: newCameraOutputMode, completion: completition)
}
public func addLayerPreviewToView(view: UIView, newCameraOutputMode: CameraOutputMode, completion: (Void -> Void)?) -> CameraState {
open func addLayerPreviewToView(_ view: UIView, newCameraOutputMode: CameraOutputMode, completion: ((Void) -> Void)?) -> CameraState {
if _canLoadCamera() {
if let _ = embeddingView {
if let validPreviewLayer = previewLayer {
@ -248,24 +248,24 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return _checkIfCameraIsAvailable()
}
@available(*, unavailable, renamed="askUserForCameraPermission")
public func askUserForCameraPermissions(completition: Bool -> Void) {}
@available(*, unavailable, renamed: "askUserForCameraPermission")
open func askUserForCameraPermissions(_ completition: (Bool) -> Void) {}
/**
Asks the user for camera permissions. Only works if the permissions are not yet determined. Note that it'll also automatically ask about the microphone permissions if you selected VideoWithMic output.
:param: completion Completion block with the result of permission request
*/
public func askUserForCameraPermission(completion: Bool -> Void) {
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo, completionHandler: { (alowedAccess) -> Void in
if self.cameraOutputMode == .VideoWithMic {
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeAudio, completionHandler: { (alowedAccess) -> Void in
dispatch_sync(dispatch_get_main_queue(), { () -> Void in
open func askUserForCameraPermission(_ completion: @escaping (Bool) -> Void) {
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { (alowedAccess) -> Void in
if self.cameraOutputMode == .videoWithMic {
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeAudio, completionHandler: { (alowedAccess) -> Void in
DispatchQueue.main.sync(execute: { () -> Void in
completion(alowedAccess)
})
})
} else {
dispatch_sync(dispatch_get_main_queue(), { () -> Void in
DispatchQueue.main.sync(execute: { () -> Void in
completion(alowedAccess)
})
@ -276,7 +276,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
/**
Stops running capture session but all setup devices, inputs and outputs stay for further reuse.
*/
public func stopCaptureSession() {
open func stopCaptureSession() {
captureSession?.stopRunning()
_stopFollowingDeviceOrientation()
}
@ -284,9 +284,9 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
/**
Resumes capture session.
*/
public func resumeCaptureSession() {
open func resumeCaptureSession() {
if let validCaptureSession = captureSession {
if !validCaptureSession.running && cameraIsSetup {
if !validCaptureSession.isRunning && cameraIsSetup {
validCaptureSession.startRunning()
_startFollowingDeviceOrientation()
}
@ -308,9 +308,9 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
/**
Stops running capture session and removes all setup devices, inputs and outputs.
*/
public func stopAndRemoveCaptureSession() {
open func stopAndRemoveCaptureSession() {
stopCaptureSession()
cameraDevice = .Back
cameraDevice = .back
cameraIsSetup = false
previewLayer = nil
captureSession = nil
@ -321,15 +321,15 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
movieOutput = nil
}
@available(*, unavailable, renamed="capturePictureWithCompletion")
public func capturePictureWithCompletition(imageCompletition: (UIImage?, NSError?) -> Void) {}
@available(*, unavailable, renamed: "capturePictureWithCompletion")
open func capturePictureWithCompletition(_ imageCompletition: (UIImage?, NSError?) -> Void) {}
/**
Captures still image from currently running capture session.
:param: imageCompletion Completion block containing the captured UIImage
*/
public func capturePictureWithCompletion(imageCompletion: (UIImage?, NSError?) -> Void) {
open func capturePictureWithCompletion(_ imageCompletion: @escaping (UIImage?, NSError?) -> Void) {
self.capturePictureDataWithCompletion { data, error in
guard error == nil, let imageData = data else {
@ -339,53 +339,54 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
if self.writeFilesToPhoneLibrary == true, let library = self.library {
library.writeImageDataToSavedPhotosAlbum(imageData, metadata:nil, completionBlock: { picUrl, error in
guard error != nil else {
return
}
dispatch_async(dispatch_get_main_queue(), {
self._show(NSLocalizedString("Error", comment:""), message: error.localizedDescription)
})
})
library.performChanges({
PHAssetChangeRequest.creationRequestForAsset(from: UIImage(data: imageData)!)
}, completionHandler: { success, error in
guard error != nil else {
return
}
DispatchQueue.main.async(execute: {
self._show(NSLocalizedString("Error", comment:""), message: (error?.localizedDescription)!)
})
})
}
imageCompletion(UIImage(data: imageData), nil)
}
}
@available(*, unavailable, renamed="capturePictureWithCompletion")
public func capturePictureDataWithCompletition(imageCompletition: (NSData?, NSError?) -> Void) {}
@available(*, unavailable, renamed: "capturePictureWithCompletion")
open func capturePictureDataWithCompletition(_ imageCompletition: (Data?, NSError?) -> Void) {}
/**
Captures still image from currently running capture session.
:param: imageCompletion Completion block containing the captured imageData
*/
public func capturePictureDataWithCompletion(imageCompletion: (NSData?, NSError?) -> Void) {
open func capturePictureDataWithCompletion(_ imageCompletion: @escaping (Data?, NSError?) -> Void) {
guard cameraIsSetup else {
_show(NSLocalizedString("No capture session setup", comment:""), message: NSLocalizedString("I can't take any picture", comment:""))
return
}
guard cameraOutputMode == .StillImage else {
guard cameraOutputMode == .stillImage else {
_show(NSLocalizedString("Capture session output mode video", comment:""), message: NSLocalizedString("I can't take any picture", comment:""))
return
}
dispatch_async(sessionQueue, {
self._getStillImageOutput().captureStillImageAsynchronouslyFromConnection(self._getStillImageOutput().connectionWithMediaType(AVMediaTypeVideo), completionHandler: { [unowned self] sample, error in
sessionQueue.async(execute: {
self._getStillImageOutput().captureStillImageAsynchronously(from: self._getStillImageOutput().connection(withMediaType: AVMediaTypeVideo), completionHandler: { [unowned self] sample, error in
guard error == nil else {
dispatch_async(dispatch_get_main_queue(), {
self._show(NSLocalizedString("Error", comment:""), message: error.localizedDescription)
DispatchQueue.main.async(execute: {
self._show(NSLocalizedString("Error", comment:""), message: (error?.localizedDescription)!)
})
imageCompletion(nil, error)
imageCompletion(nil, error as NSError?)
return
}
@ -402,23 +403,23 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
/**
Starts recording a video with or without voice as in the session preset.
*/
public func startRecordingVideo() {
if cameraOutputMode != .StillImage {
_getMovieOutput().startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
open func startRecordingVideo() {
if cameraOutputMode != .stillImage {
_getMovieOutput().startRecording(toOutputFileURL: tempFilePath, recordingDelegate: self)
} else {
_show(NSLocalizedString("Capture session output still image", comment:""), message: NSLocalizedString("I can only take pictures", comment:""))
}
}
@available(*, unavailable, renamed="stopVideoRecording")
public func stopRecordingVideo(completition:(videoURL: NSURL?, error: NSError?) -> Void) {}
@available(*, unavailable, renamed: "stopVideoRecording")
open func stopRecordingVideo(_ completition:(_ videoURL: URL?, _ error: NSError?) -> Void) {}
/**
Stop recording a video. Save it to the cameraRoll and give back the url.
*/
public func stopVideoRecording(completion:(videoURL: NSURL?, error: NSError?) -> Void) {
open func stopVideoRecording(_ completion:((_ videoURL: URL?, _ error: NSError?) -> Void)?) {
if let runningMovieOutput = movieOutput {
if runningMovieOutput.recording {
if runningMovieOutput.isRecording {
videoCompletion = completion
runningMovieOutput.stopRecording()
}
@ -430,7 +431,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
:returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined
*/
public func currentCameraStatus() -> CameraState {
open func currentCameraStatus() -> CameraState {
return _checkIfCameraIsAvailable()
}
@ -439,7 +440,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
:returns: Current flash mode: Off / On / Auto
*/
public func changeFlashMode() -> CameraFlashMode {
open func changeFlashMode() -> CameraFlashMode {
flashMode = CameraFlashMode(rawValue: (flashMode.rawValue+1)%3)!
return flashMode
}
@ -449,40 +450,44 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
:returns: Current quality mode: Low / Medium / High
*/
public func changeQualityMode() -> CameraOutputQuality {
open func changeQualityMode() -> CameraOutputQuality {
cameraOutputQuality = CameraOutputQuality(rawValue: (cameraOutputQuality.rawValue+1)%3)!
return cameraOutputQuality
}
// MARK: - AVCaptureFileOutputRecordingDelegate
public func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
open func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
captureSession?.beginConfiguration()
if flashMode != .Off {
if flashMode != .off {
_updateTorch(flashMode)
}
captureSession?.commitConfiguration()
}
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
_updateTorch(.Off)
open func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
_updateTorch(.off)
if (error != nil) {
_show(NSLocalizedString("Unable to save video to the iPhone", comment:""), message: error.localizedDescription)
} else {
if let validLibrary = library {
if writeFilesToPhoneLibrary {
validLibrary.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
if (error != nil) {
self._show(NSLocalizedString("Unable to save video to the iPhone.", comment:""), message: error!.localizedDescription)
self._executeVideoCompletionWithURL(nil, error: error)
} else {
if let validAssetURL = assetURL {
self._executeVideoCompletionWithURL(validAssetURL, error: error)
validLibrary.performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}, completionHandler: { success, error in
if (error != nil) {
self._show(NSLocalizedString("Unable to save video to the iPhone.", comment:""), message: error!.localizedDescription)
self._executeVideoCompletionWithURL(nil, error: error as NSError?)
} else {
if let validAssetURL = outputFileURL {
self._executeVideoCompletionWithURL(validAssetURL, error: error as NSError?)
}
}
}
})
} else {
_executeVideoCompletionWithURL(outputFileURL, error: error)
_executeVideoCompletionWithURL(outputFileURL, error: error as NSError?)
}
}
}
@ -490,15 +495,15 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
// MARK: - UIGestureRecognizerDelegate
private func attachZoom(view: UIView) {
fileprivate func attachZoom(_ view: UIView) {
let pinch = UIPinchGestureRecognizer(target: self, action: #selector(CameraManager._zoomStart(_:)))
view.addGestureRecognizer(pinch)
pinch.delegate = self
}
public func gestureRecognizerShouldBegin(gestureRecognizer: UIGestureRecognizer) -> Bool {
open func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
if gestureRecognizer.isKindOfClass(UIPinchGestureRecognizer) {
if gestureRecognizer.isKind(of: UIPinchGestureRecognizer.self) {
beginZoomScale = zoomScale;
}
@ -506,18 +511,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
@objc
private func _zoomStart(recognizer: UIPinchGestureRecognizer) {
fileprivate func _zoomStart(_ recognizer: UIPinchGestureRecognizer) {
guard let view = embeddingView,
previewLayer = previewLayer
let previewLayer = previewLayer
else { return }
var allTouchesOnPreviewLayer = true
let numTouch = recognizer.numberOfTouches()
let numTouch = recognizer.numberOfTouches
for i in 0 ..< numTouch {
let location = recognizer.locationOfTouch(i, inView: view)
let convertedTouch = previewLayer.convertPoint(location, fromLayer: previewLayer.superlayer)
if !previewLayer.containsPoint(convertedTouch) {
let location = recognizer.location(ofTouch: i, in: view)
let convertedTouch = previewLayer.convert(location, from: previewLayer.superlayer)
if !previewLayer.contains(convertedTouch) {
allTouchesOnPreviewLayer = false
break
}
@ -527,7 +532,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
private func _zoom(scale: CGFloat) {
fileprivate func _zoom(_ scale: CGFloat) {
do {
let captureDevice = AVCaptureDevice.devices().first as? AVCaptureDevice
try captureDevice?.lockForConfiguration()
@ -545,12 +550,12 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
// MARK: - CameraManager()
private func _updateTorch(flashMode: CameraFlashMode) {
fileprivate func _updateTorch(_ flashMode: CameraFlashMode) {
captureSession?.beginConfiguration()
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == AVCaptureDevicePosition.Back) {
if (captureDevice.position == AVCaptureDevicePosition.back) {
let avTorchMode = AVCaptureTorchMode(rawValue: flashMode.rawValue)
if (captureDevice.isTorchModeSupported(avTorchMode!)) {
do {
@ -567,18 +572,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
private func _executeVideoCompletionWithURL(url: NSURL?, error: NSError?) {
fileprivate func _executeVideoCompletionWithURL(_ url: URL?, error: NSError?) {
if let validCompletion = videoCompletion {
validCompletion(videoURL: url, error: error)
validCompletion(url, error)
videoCompletion = nil
}
}
private func _getMovieOutput() -> AVCaptureMovieFileOutput {
fileprivate func _getMovieOutput() -> AVCaptureMovieFileOutput {
var shouldReinitializeMovieOutput = movieOutput == nil
if !shouldReinitializeMovieOutput {
if let connection = movieOutput!.connectionWithMediaType(AVMediaTypeVideo) {
shouldReinitializeMovieOutput = shouldReinitializeMovieOutput || !connection.active
if let connection = movieOutput!.connection(withMediaType: AVMediaTypeVideo) {
shouldReinitializeMovieOutput = shouldReinitializeMovieOutput || !connection.isActive
}
}
@ -597,11 +602,11 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return movieOutput!
}
private func _getStillImageOutput() -> AVCaptureStillImageOutput {
fileprivate func _getStillImageOutput() -> AVCaptureStillImageOutput {
var shouldReinitializeStillImageOutput = stillImageOutput == nil
if !shouldReinitializeStillImageOutput {
if let connection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo) {
shouldReinitializeStillImageOutput = shouldReinitializeStillImageOutput || !connection.active
if let connection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
shouldReinitializeStillImageOutput = shouldReinitializeStillImageOutput || !connection.isActive
}
}
if shouldReinitializeStillImageOutput {
@ -618,26 +623,26 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return stillImageOutput!
}
@objc private func _orientationChanged() {
@objc fileprivate func _orientationChanged() {
var currentConnection: AVCaptureConnection?;
switch cameraOutputMode {
case .StillImage:
currentConnection = stillImageOutput?.connectionWithMediaType(AVMediaTypeVideo)
case .VideoOnly, .VideoWithMic:
currentConnection = _getMovieOutput().connectionWithMediaType(AVMediaTypeVideo)
case .stillImage:
currentConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo)
case .videoOnly, .videoWithMic:
currentConnection = _getMovieOutput().connection(withMediaType: AVMediaTypeVideo)
}
if let validPreviewLayer = previewLayer {
if let validPreviewLayerConnection = validPreviewLayer.connection {
if validPreviewLayerConnection.supportsVideoOrientation {
if validPreviewLayerConnection.isVideoOrientationSupported {
validPreviewLayerConnection.videoOrientation = _currentVideoOrientation()
}
}
if let validOutputLayerConnection = currentConnection {
if validOutputLayerConnection.supportsVideoOrientation {
if validOutputLayerConnection.isVideoOrientationSupported {
validOutputLayerConnection.videoOrientation = _currentVideoOrientation()
}
}
dispatch_async(dispatch_get_main_queue(), { () -> Void in
DispatchQueue.main.async(execute: { () -> Void in
if let validEmbeddingView = self.embeddingView {
validPreviewLayer.frame = validEmbeddingView.bounds
}
@ -645,26 +650,26 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
/// Maps the current device orientation to the matching capture video orientation.
///
/// Landscape is intentionally swapped (device `.landscapeLeft` -> video
/// `.landscapeRight` and vice versa) because UIDevice and AVFoundation define
/// landscape relative to opposite reference points.
/// Face-up/face-down/unknown all fall back to `.portrait`.
fileprivate func _currentVideoOrientation() -> AVCaptureVideoOrientation {
    switch UIDevice.current.orientation {
    case .landscapeLeft:
        return .landscapeRight
    case .landscapeRight:
        return .landscapeLeft
    default:
        return .portrait
    }
}
/// Returns `true` when the camera can be loaded now: access is already granted,
/// or authorization is still undetermined and we are allowed to trigger the
/// system permission prompt automatically.
fileprivate func _canLoadCamera() -> Bool {
    let currentCameraState = _checkIfCameraIsAvailable()
    return currentCameraState == .ready || (currentCameraState == .notDetermined && showAccessPermissionPopupAutomatically)
}
private func _setupCamera(completion: Void -> Void) {
fileprivate func _setupCamera(_ completion: @escaping (Void) -> Void) {
captureSession = AVCaptureSession()
dispatch_async(sessionQueue, {
sessionQueue.async(execute: {
if let validCaptureSession = self.captureSession {
validCaptureSession.beginConfiguration()
validCaptureSession.sessionPreset = AVCaptureSessionPresetHigh
@ -685,24 +690,24 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
})
}
/// Starts observing device-orientation notifications so preview and output
/// connections can be rotated via `_orientationChanged()`.
/// Idempotent: does nothing if observation is disabled or already active.
fileprivate func _startFollowingDeviceOrientation() {
    if shouldRespondToOrientationChanges && !cameraIsObservingDeviceOrientation {
        NotificationCenter.default.addObserver(self, selector: #selector(CameraManager._orientationChanged), name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
        cameraIsObservingDeviceOrientation = true
    }
}
/// Stops observing device-orientation notifications.
/// Idempotent: does nothing if observation is not currently active.
fileprivate func _stopFollowingDeviceOrientation() {
    if cameraIsObservingDeviceOrientation {
        NotificationCenter.default.removeObserver(self, name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
        cameraIsObservingDeviceOrientation = false
    }
}
private func _addPreviewLayerToView(view: UIView) {
fileprivate func _addPreviewLayerToView(_ view: UIView) {
embeddingView = view
attachZoom(view)
dispatch_async(dispatch_get_main_queue(), { () -> Void in
DispatchQueue.main.async(execute: { () -> Void in
guard let _ = self.previewLayer else {
return
}
@ -712,54 +717,54 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
})
}
/// Recomputes the maximum pinch-zoom factor for the currently selected camera
/// and resets the gesture baseline scale to 1.0.
///
/// Replaces the previous force-unwrap of `activeFormat.videoMaxZoomFactor`
/// with a `?? 1.0` fallback so a missing device no longer crashes; 1.0 (no
/// zoom) is the safe neutral value.
fileprivate func _setupMaxZoomScale() {
    var maxZoom = CGFloat(1.0)
    beginZoomScale = CGFloat(1.0)

    if cameraDevice == .back {
        maxZoom = backCameraDevice?.activeFormat.videoMaxZoomFactor ?? 1.0
    } else if cameraDevice == .front {
        maxZoom = frontCameraDevice?.activeFormat.videoMaxZoomFactor ?? 1.0
    }

    maxZoomScale = maxZoom
}
/// Determines the overall camera availability state.
///
/// - Returns: `.ready` when hardware exists and access is authorized,
///   `.notDetermined` when the user has not been asked yet, `.accessDenied`
///   when access was refused, or `.noDeviceFound` when there is no camera.
///   For the two failure states an error popup is also shown (when enabled).
///
/// NOTE(review): "grant acces" in the denied message is a typo, but the string
/// doubles as the NSLocalizedString lookup key, so it is kept byte-identical
/// to avoid breaking existing localizations.
fileprivate func _checkIfCameraIsAvailable() -> CameraState {
    let deviceHasCamera = UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.rear) || UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.front)
    if deviceHasCamera {
        let authorizationStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)
        let userAgreedToUseIt = authorizationStatus == .authorized
        if userAgreedToUseIt {
            return .ready
        } else if authorizationStatus == AVAuthorizationStatus.notDetermined {
            return .notDetermined
        } else {
            _show(NSLocalizedString("Camera access denied", comment:""), message:NSLocalizedString("You need to go to settings app and grant acces to the camera device to use it.", comment:""))
            return .accessDenied
        }
    } else {
        _show(NSLocalizedString("Camera unavailable", comment:""), message:NSLocalizedString("The device does not have a camera.", comment:""))
        return .noDeviceFound
    }
}
private func _setupOutputMode(newCameraOutputMode: CameraOutputMode, oldCameraOutputMode: CameraOutputMode?) {
fileprivate func _setupOutputMode(_ newCameraOutputMode: CameraOutputMode, oldCameraOutputMode: CameraOutputMode?) {
captureSession?.beginConfiguration()
if let cameraOutputToRemove = oldCameraOutputMode {
// remove current setting
switch cameraOutputToRemove {
case .StillImage:
case .stillImage:
if let validStillImageOutput = stillImageOutput {
captureSession?.removeOutput(validStillImageOutput)
}
case .VideoOnly, .VideoWithMic:
case .videoOnly, .videoWithMic:
if let validMovieOutput = movieOutput {
captureSession?.removeOutput(validMovieOutput)
}
if cameraOutputToRemove == .VideoWithMic {
if cameraOutputToRemove == .videoWithMic {
_removeMicInput()
}
}
@ -767,7 +772,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
// configure new devices
switch newCameraOutputMode {
case .StillImage:
case .stillImage:
if (stillImageOutput == nil) {
_setupOutputs()
}
@ -778,10 +783,10 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
}
case .VideoOnly, .VideoWithMic:
case .videoOnly, .videoWithMic:
captureSession?.addOutput(_getMovieOutput())
if newCameraOutputMode == .VideoWithMic {
if newCameraOutputMode == .videoWithMic {
if let validMic = _deviceInputFromDevice(mic) {
captureSession?.addInput(validMic)
}
@ -792,7 +797,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
_orientationChanged()
}
private func _setupOutputs() {
fileprivate func _setupOutputs() {
if (stillImageOutput == nil) {
stillImageOutput = AVCaptureStillImageOutput()
}
@ -801,35 +806,35 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
movieOutput!.movieFragmentInterval = kCMTimeInvalid
}
if library == nil {
library = ALAssetsLibrary()
library = PHPhotoLibrary.shared()
}
}
/// Creates the video preview layer for the current capture session, using
/// aspect-fill gravity so the preview fills its bounds without letterboxing.
/// Does nothing if no capture session exists yet.
fileprivate func _setupPreviewLayer() {
    if let validCaptureSession = captureSession {
        previewLayer = AVCaptureVideoPreviewLayer(session: validCaptureSession)
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    }
}
private func _updateCameraDevice(deviceType: CameraDevice) {
fileprivate func _updateCameraDevice(_ deviceType: CameraDevice) {
if let validCaptureSession = captureSession {
validCaptureSession.beginConfiguration()
let inputs = validCaptureSession.inputs as! [AVCaptureInput]
for input in inputs {
if let deviceInput = input as? AVCaptureDeviceInput {
if deviceInput.device == backCameraDevice && cameraDevice == .Front {
if deviceInput.device == backCameraDevice && cameraDevice == .front {
validCaptureSession.removeInput(deviceInput)
break;
} else if deviceInput.device == frontCameraDevice && cameraDevice == .Back {
} else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
validCaptureSession.removeInput(deviceInput)
break;
}
}
}
switch cameraDevice {
case .Front:
case .front:
if hasFrontCamera {
if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice) {
if !inputs.contains(validFrontDevice) {
@ -837,7 +842,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
}
case .Back:
case .back:
if let validBackDevice = _deviceInputFromDevice(backCameraDevice) {
if !inputs.contains(validBackDevice) {
validCaptureSession.addInput(validBackDevice)
@ -848,12 +853,12 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
private func _updateFlasMode(flashMode: CameraFlashMode) {
fileprivate func _updateFlasMode(_ flashMode: CameraFlashMode) {
captureSession?.beginConfiguration()
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == AVCaptureDevicePosition.Back) {
if (captureDevice.position == AVCaptureDevicePosition.back) {
let avFlashMode = AVCaptureFlashMode(rawValue: flashMode.rawValue)
if (captureDevice.isFlashModeSupported(avFlashMode!)) {
do {
@ -869,16 +874,16 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
captureSession?.commitConfiguration()
}
private func _updateCameraQualityMode(newCameraOutputQuality: CameraOutputQuality) {
fileprivate func _updateCameraQualityMode(_ newCameraOutputQuality: CameraOutputQuality) {
if let validCaptureSession = captureSession {
var sessionPreset = AVCaptureSessionPresetLow
switch (newCameraOutputQuality) {
case CameraOutputQuality.Low:
case CameraOutputQuality.low:
sessionPreset = AVCaptureSessionPresetLow
case CameraOutputQuality.Medium:
case CameraOutputQuality.medium:
sessionPreset = AVCaptureSessionPresetMedium
case CameraOutputQuality.High:
if cameraOutputMode == .StillImage {
case CameraOutputQuality.high:
if cameraOutputMode == .stillImage {
sessionPreset = AVCaptureSessionPresetPhoto
} else {
sessionPreset = AVCaptureSessionPresetHigh
@ -896,7 +901,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
private func _removeMicInput() {
fileprivate func _removeMicInput() {
guard let inputs = captureSession?.inputs as? [AVCaptureInput] else { return }
for input in inputs {
@ -909,15 +914,15 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}
}
/// Forwards an error title/message to `showErrorBlock` on the main queue,
/// unless user-facing error display has been disabled via `showErrorsToUsers`.
///
/// - Parameters:
///   - title: Short, already-localized error title.
///   - message: Already-localized error description.
fileprivate func _show(_ title: String, message: String) {
    if showErrorsToUsers {
        DispatchQueue.main.async(execute: { () -> Void in
            self.showErrorBlock(title, message)
        })
    }
}
private func _deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
fileprivate func _deviceInputFromDevice(_ device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
guard let validDevice = device else { return nil }
do {
return try AVCaptureDeviceInput(device: validDevice)