Compare commits

...

4 Commits

Author SHA1 Message Date
Ricardo Torrão 22f3e5320e more Swift 3.0 changes 2016-09-16 15:05:26 +01:00
Ricardo Torrão 8f31fba846 Merge pull request #61 from dimohamdy/swift-3.0
update for Swift 3.0
2016-09-16 12:27:46 +01:00
dimohamdy 206bce0e94 update demo to support swift 3 2016-08-19 04:08:26 +03:00
dimohamdy 0267cc2664 support swift 3 2016-08-19 04:04:13 +03:00
10 changed files with 467 additions and 410 deletions

View File

@@ -2,8 +2,29 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/). This project adheres to [Semantic Versioning](http://semver.org/).
## [2.2.2](https://github.com/imaginary-cloud/CameraManager/tree/2.2.2) - 2016-03-07 ## [3.0.0](https://github.com/imaginary-cloud/CameraManager/tree/3.0.0) - 2016-09-16
### Changed
- Syntax update for Swift 3.0.
## [2.2.4](https://github.com/imaginary-cloud/CameraManager/tree/2.2.4) - 2016-07-06
### Added
- Add error checking.
### Changed
- Fixes completion typos and suggests renamed functions.
## [2.2.3](https://github.com/imaginary-cloud/CameraManager/tree/2.2.3) - 2016-05-12
### Changed
- Fixed zoom in StillImage Mode.
- Minor refactoring
## [2.2.2](https://github.com/imaginary-cloud/CameraManager/tree/2.2.2) - 2016-03-07
### Added ### Added
- `CHANGELOG.md` file. - `CHANGELOG.md` file.

View File

@@ -1,12 +1,12 @@
Pod::Spec.new do |s| Pod::Spec.new do |s|
s.name = "CameraManager" s.name = "CameraManager"
s.version = "2.2.4" s.version = "3.0.0"
s.summary = "This is a simple Swift class to provide all the configurations you need to create custom camera view in your app. Just drag, drop and use." s.summary = "This is a simple Swift class to provide all the configurations you need to create custom camera view in your app. Just drag, drop and use."
s.requires_arc = true s.requires_arc = true
s.homepage = "https://github.com/imaginary-cloud/CameraManager" s.homepage = "https://github.com/imaginary-cloud/CameraManager"
s.license = 'MIT' s.license = 'MIT'
s.author = { "torrao" => "rtorrao@imaginarycloud.com" } s.author = { "torrao" => "rtorrao@imaginarycloud.com" }
s.source = { :git => "https://github.com/imaginary-cloud/CameraManager.git", :tag => "2.2.4" } s.source = { :git => "https://github.com/imaginary-cloud/CameraManager.git", :tag => "3.0.0" }
s.social_media_url = 'http://www.imaginarycloud.com/' s.social_media_url = 'http://www.imaginarycloud.com/'
s.platform = :ios, '8.0' s.platform = :ios, '8.0'
s.source_files = 'camera/CameraManager.swift' s.source_files = 'camera/CameraManager.swift'

View File

@@ -14,6 +14,14 @@ The easiest way to install the CameraManager is with: [CocoaPods](http://cocoapo
### Podfile ### Podfile
If you want Swift 3.0 syntax use:
```ruby
use_frameworks!
pod 'CameraManager', '~> 3.0'
```
If you want Swift 2.0 syntax use: If you want Swift 2.0 syntax use:
```ruby ```ruby
@@ -52,6 +60,12 @@ let package = Package(
Add the following line to your Cartfile: Add the following line to your Cartfile:
If you want Swift 3.0 syntax use:
```
github "imaginary-cloud/CameraManager" >= 3.0
```
If you want Swift 2.0 syntax use: If you want Swift 2.0 syntax use:
``` ```

View File

@@ -201,14 +201,17 @@
isa = PBXProject; isa = PBXProject;
attributes = { attributes = {
LastSwiftUpdateCheck = 0700; LastSwiftUpdateCheck = 0700;
LastUpgradeCheck = 0700; LastUpgradeCheck = 0800;
ORGANIZATIONNAME = imaginaryCloud; ORGANIZATIONNAME = imaginaryCloud;
TargetAttributes = { TargetAttributes = {
454C1F4019E82E2500C81915 = { 454C1F4019E82E2500C81915 = {
CreatedOnToolsVersion = 6.0.1; CreatedOnToolsVersion = 6.0.1;
DevelopmentTeam = QM7HJTY23M;
LastSwiftMigration = 0800;
}; };
D71DE8801AD677A7001E62F1 = { D71DE8801AD677A7001E62F1 = {
CreatedOnToolsVersion = 6.3; CreatedOnToolsVersion = 6.3;
LastSwiftMigration = 0800;
}; };
}; };
}; };
@@ -313,8 +316,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_ENTITLEMENTS[sdk=iphoneos*]" = ""; "CODE_SIGN_ENTITLEMENTS[sdk=iphoneos*]" = "";
@@ -324,6 +329,7 @@
ENABLE_TESTABILITY = YES; ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99; GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO; GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0; GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = ( GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1", "DEBUG=1",
@@ -358,8 +364,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@@ -367,6 +375,7 @@
ENABLE_NS_ASSERTIONS = NO; ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99; GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNDECLARED_SELECTOR = YES;
@@ -376,6 +385,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 8.0; IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO; MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos; SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
TARGETED_DEVICE_FAMILY = "1,2"; TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES; VALIDATE_PRODUCT = YES;
}; };
@@ -385,11 +395,13 @@
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = camera/Info.plist; INFOPLIST_FILE = camera/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0; IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = cameraDemo; PRODUCT_BUNDLE_IDENTIFIER = cameraDemo;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
}; };
name = Debug; name = Debug;
}; };
@@ -397,17 +409,20 @@
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = camera/Info.plist; INFOPLIST_FILE = camera/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0; IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = cameraDemo; PRODUCT_BUNDLE_IDENTIFIER = cameraDemo;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
}; };
name = Release; name = Release;
}; };
D71DE89B1AD677A8001E62F1 /* Debug */ = { D71DE89B1AD677A8001E62F1 /* Debug */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
CURRENT_PROJECT_VERSION = 1; CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEFINES_MODULE = YES; DEFINES_MODULE = YES;
@@ -426,6 +441,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
VERSIONING_SYSTEM = "apple-generic"; VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = ""; VERSION_INFO_PREFIX = "";
}; };
@@ -434,6 +450,7 @@
D71DE89C1AD677A8001E62F1 /* Release */ = { D71DE89C1AD677A8001E62F1 /* Release */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
COPY_PHASE_STRIP = NO; COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1; CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
@@ -449,6 +466,7 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_BUNDLE_IDENTIFIER = "com.imaginarycloud.$(PRODUCT_NAME:rfc1034identifier)";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
VERSIONING_SYSTEM = "apple-generic"; VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = ""; VERSION_INFO_PREFIX = "";
}; };

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<Scheme <Scheme
LastUpgradeVersion = "0700" LastUpgradeVersion = "0800"
version = "1.3"> version = "1.3">
<BuildAction <BuildAction
parallelizeBuildables = "YES" parallelizeBuildables = "YES"
@@ -23,21 +23,21 @@
</BuildActionEntries> </BuildActionEntries>
</BuildAction> </BuildAction>
<TestAction <TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES" shouldUseLaunchSchemeArgsEnv = "YES">
buildConfiguration = "Debug">
<Testables> <Testables>
</Testables> </Testables>
<AdditionalOptions> <AdditionalOptions>
</AdditionalOptions> </AdditionalOptions>
</TestAction> </TestAction>
<LaunchAction <LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0" launchStyle = "0"
useCustomWorkingDirectory = "NO" useCustomWorkingDirectory = "NO"
buildConfiguration = "Debug"
ignoresPersistentStateOnLaunch = "NO" ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES" debugDocumentVersioning = "YES"
debugServiceExtension = "internal" debugServiceExtension = "internal"
@@ -55,10 +55,10 @@
</AdditionalOptions> </AdditionalOptions>
</LaunchAction> </LaunchAction>
<ProfileAction <ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES" shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = "" savedToolIdentifier = ""
useCustomWorkingDirectory = "NO" useCustomWorkingDirectory = "NO"
buildConfiguration = "Release"
debugDocumentVersioning = "YES"> debugDocumentVersioning = "YES">
<MacroExpansion> <MacroExpansion>
<BuildableReference <BuildableReference

View File

@@ -14,30 +14,30 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow? var window: UIWindow?
func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch. // Override point for customization after application launch.
return true return true
} }
func applicationWillResignActive(application: UIApplication) { func applicationWillResignActive(_ application: UIApplication) {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
} }
func applicationDidEnterBackground(application: UIApplication) { func applicationDidEnterBackground(_ application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
} }
func applicationWillEnterForeground(application: UIApplication) { func applicationWillEnterForeground(_ application: UIApplication) {
// Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
} }
func applicationDidBecomeActive(application: UIApplication) { func applicationDidBecomeActive(_ application: UIApplication) {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
} }
func applicationWillTerminate(application: UIApplication) { func applicationWillTerminate(_ application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
} }

View File

@@ -8,58 +8,57 @@
import UIKit import UIKit
import AVFoundation import AVFoundation
import AssetsLibrary import Photos
public enum CameraState { public enum CameraState {
case Ready, AccessDenied, NoDeviceFound, NotDetermined case ready, accessDenied, noDeviceFound, notDetermined
} }
public enum CameraDevice { public enum CameraDevice {
case Front, Back case front, back
} }
public enum CameraFlashMode: Int { public enum CameraFlashMode: Int {
case Off, On, Auto case off, on, auto
} }
public enum CameraOutputMode { public enum CameraOutputMode {
case StillImage, VideoWithMic, VideoOnly case stillImage, videoWithMic, videoOnly
} }
public enum CameraOutputQuality: Int { public enum CameraOutputQuality: Int {
case Low, Medium, High case low, medium, high
} }
/// Class for handling iDevices custom camera usage /// Class for handling iDevices custom camera usage
public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate { open class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate {
// MARK: - Public properties // MARK: - Public properties
/// Capture session to customize camera settings. /// Capture session to customize camera settings.
public var captureSession: AVCaptureSession? open var captureSession: AVCaptureSession?
/// Property to determine if the manager should show the error for the user. If you want to show the errors yourself set this to false. If you want to add custom error UI set showErrorBlock property. Default value is false. /// Property to determine if the manager should show the error for the user. If you want to show the errors yourself set this to false. If you want to add custom error UI set showErrorBlock property. Default value is false.
public var showErrorsToUsers = false open var showErrorsToUsers = false
/// Property to determine if the manager should show the camera permission popup immediatly when it's needed or you want to show it manually. Default value is true. Be carful cause using the camera requires permission, if you set this value to false and don't ask manually you won't be able to use the camera. /// Property to determine if the manager should show the camera permission popup immediatly when it's needed or you want to show it manually. Default value is true. Be carful cause using the camera requires permission, if you set this value to false and don't ask manually you won't be able to use the camera.
public var showAccessPermissionPopupAutomatically = true open var showAccessPermissionPopupAutomatically = true
/// A block creating UI to present error message to the user. This can be customised to be presented on the Window root view controller, or to pass in the viewController which will present the UIAlertController, for example. /// A block creating UI to present error message to the user. This can be customised to be presented on the Window root view controller, or to pass in the viewController which will present the UIAlertController, for example.
public var showErrorBlock:(erTitle: String, erMessage: String) -> Void = { (erTitle: String, erMessage: String) -> Void in open var showErrorBlock:(_ erTitle: String, _ erMessage: String) -> Void = { (erTitle: String, erMessage: String) -> Void in
// var alertController = UIAlertController(title: erTitle, message: erMessage, preferredStyle: .Alert) // var alertController = UIAlertController(title: erTitle, message: erMessage, preferredStyle: .Alert)
// alertController.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction) -> Void in })) // alertController.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction) -> Void in }))
// //
// if let topController = UIApplication.sharedApplication().keyWindow?.rootViewController { // if let topController = UIApplication.sharedApplication().keyWindow?.rootViewController {
// topController.presentViewController(alertController, animated: true, completion:nil) // topController.presentViewController(alertController, animated: true, completion:nil)
// } // }
} }
/// Property to determine if manager should write the resources to the phone library. Default value is true. /// Property to determine if manager should write the resources to the phone library. Default value is true.
public var writeFilesToPhoneLibrary = true open var writeFilesToPhoneLibrary = true
/// Property to determine if manager should follow device orientation. Default value is true. /// Property to determine if manager should follow device orientation. Default value is true.
public var shouldRespondToOrientationChanges = true { open var shouldRespondToOrientationChanges = true {
didSet { didSet {
if shouldRespondToOrientationChanges { if shouldRespondToOrientationChanges {
_startFollowingDeviceOrientation() _startFollowingDeviceOrientation()
@@ -70,18 +69,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/// The Bool property to determine if the camera is ready to use. /// The Bool property to determine if the camera is ready to use.
public var cameraIsReady: Bool { open var cameraIsReady: Bool {
get { get {
return cameraIsSetup return cameraIsSetup
} }
} }
/// The Bool property to determine if current device has front camera. /// The Bool property to determine if current device has front camera.
public var hasFrontCamera: Bool = { open var hasFrontCamera: Bool = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices { for device in devices! {
let captureDevice = device as! AVCaptureDevice let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == .Front) { if (captureDevice.position == .front) {
return true return true
} }
} }
@@ -89,19 +88,19 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}() }()
/// The Bool property to determine if current device has flash. /// The Bool property to determine if current device has flash.
public var hasFlash: Bool = { open var hasFlash: Bool = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices { for device in devices! {
let captureDevice = device as! AVCaptureDevice let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == .Back) { if (captureDevice.position == .back) {
return captureDevice.hasFlash return captureDevice.hasFlash
} }
} }
return false return false
}() }()
/// Property to change camera device between front and back. /// Property to change camera device between front and back.
public var cameraDevice = CameraDevice.Back { open var cameraDevice = CameraDevice.back {
didSet { didSet {
if cameraIsSetup { if cameraIsSetup {
if cameraDevice != oldValue { if cameraDevice != oldValue {
@@ -114,7 +113,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/// Property to change camera flash mode. /// Property to change camera flash mode.
public var flashMode = CameraFlashMode.Off { open var flashMode = CameraFlashMode.off {
didSet { didSet {
if cameraIsSetup { if cameraIsSetup {
if flashMode != oldValue { if flashMode != oldValue {
@@ -125,7 +124,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/// Property to change camera output quality. /// Property to change camera output quality.
public var cameraOutputQuality = CameraOutputQuality.High { open var cameraOutputQuality = CameraOutputQuality.high {
didSet { didSet {
if cameraIsSetup { if cameraIsSetup {
if cameraOutputQuality != oldValue { if cameraOutputQuality != oldValue {
@@ -136,7 +135,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/// Property to change camera output. /// Property to change camera output.
public var cameraOutputMode = CameraOutputMode.StillImage { open var cameraOutputMode = CameraOutputMode.stillImage {
didSet { didSet {
if cameraIsSetup { if cameraIsSetup {
if cameraOutputMode != oldValue { if cameraOutputMode != oldValue {
@@ -149,80 +148,75 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/// Property to check video recording duration when in progress /// Property to check video recording duration when in progress
public var recordedDuration : CMTime { return movieOutput?.recordedDuration ?? kCMTimeZero } open var recordedDuration : CMTime { return movieOutput?.recordedDuration ?? kCMTimeZero }
/// Property to check video recording file size when in progress /// Property to check video recording file size when in progress
public var recordedFileSize : Int64 { return movieOutput?.recordedFileSize ?? 0 } open var recordedFileSize : Int64 { return movieOutput?.recordedFileSize ?? 0 }
// MARK: - Private properties // MARK: - Private properties
private weak var embeddingView: UIView? fileprivate weak var embeddingView: UIView?
private var videoCompletion: ((videoURL: NSURL?, error: NSError?) -> Void)? fileprivate var videoCompletion: ((_ videoURL: URL?, _ error: NSError?) -> Void)?
private var sessionQueue: dispatch_queue_t = dispatch_queue_create("CameraSessionQueue", DISPATCH_QUEUE_SERIAL) fileprivate var sessionQueue: DispatchQueue = DispatchQueue(label: "CameraSessionQueue", attributes: [])
private lazy var frontCameraDevice: AVCaptureDevice? = { fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice] let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Front}.first return devices.filter{$0.position == .front}.first
}() }()
private lazy var backCameraDevice: AVCaptureDevice? = { fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice] let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Back}.first return devices.filter{$0.position == .back}.first
}() }()
private lazy var mic: AVCaptureDevice? = { fileprivate lazy var mic: AVCaptureDevice? = {
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
}() }()
private var stillImageOutput: AVCaptureStillImageOutput? fileprivate var stillImageOutput: AVCaptureStillImageOutput?
private var movieOutput: AVCaptureMovieFileOutput? fileprivate var movieOutput: AVCaptureMovieFileOutput?
private var previewLayer: AVCaptureVideoPreviewLayer? fileprivate var previewLayer: AVCaptureVideoPreviewLayer?
private var library: ALAssetsLibrary? fileprivate var library: PHPhotoLibrary?
private var cameraIsSetup = false fileprivate var cameraIsSetup = false
private var cameraIsObservingDeviceOrientation = false fileprivate var cameraIsObservingDeviceOrientation = false
private var zoomScale = CGFloat(1.0) fileprivate var zoomScale = CGFloat(1.0)
private var beginZoomScale = CGFloat(1.0) fileprivate var beginZoomScale = CGFloat(1.0)
private var maxZoomScale = CGFloat(1.0) fileprivate var maxZoomScale = CGFloat(1.0)
private var tempFilePath: NSURL = { fileprivate var tempFilePath: URL = {
let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString let tempPath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tempMovie").appendingPathExtension("mp4").absoluteString
if NSFileManager.defaultManager().fileExistsAtPath(tempPath) { if FileManager.default.fileExists(atPath: tempPath) {
do { do {
try NSFileManager.defaultManager().removeItemAtPath(tempPath) try FileManager.default.removeItem(atPath: tempPath)
} catch { } } catch { }
} }
return NSURL(string: tempPath)! return URL(string: tempPath)!
}() }()
// MARK: - CameraManager // MARK: - CameraManager
/** /**
Inits a capture session and adds a preview layer to the given view. Preview layer bounds will automaticaly be set to match given view. Default session is initialized with still image output. Inits a capture session and adds a preview layer to the given view. Preview layer bounds will automaticaly be set to match given view. Default session is initialized with still image output.
:param: view The view you want to add the preview layer to :param: view The view you want to add the preview layer to
:param: cameraOutputMode The mode you want capturesession to run image / video / video and microphone :param: cameraOutputMode The mode you want capturesession to run image / video / video and microphone
:param: completion Optional completion block :param: completion Optional completion block
:returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined. :returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined.
*/ */
public func addPreviewLayerToView(view: UIView) -> CameraState { open func addPreviewLayerToView(_ view: UIView) -> CameraState {
return addPreviewLayerToView(view, newCameraOutputMode: cameraOutputMode) return addPreviewLayerToView(view, newCameraOutputMode: cameraOutputMode)
} }
public func addPreviewLayerToView(view: UIView, newCameraOutputMode: CameraOutputMode) -> CameraState { open func addPreviewLayerToView(_ view: UIView, newCameraOutputMode: CameraOutputMode) -> CameraState {
return addLayerPreviewToView(view, newCameraOutputMode: newCameraOutputMode, completion: nil) return addLayerPreviewToView(view, newCameraOutputMode: newCameraOutputMode, completion: nil)
} }
@available(*, unavailable, renamed="addLayerPreviewToView") open func addLayerPreviewToView(_ view: UIView, newCameraOutputMode: CameraOutputMode, completion: ((Void) -> Void)?) -> CameraState {
public func addPreviewLayerToView(view: UIView, newCameraOutputMode: CameraOutputMode, completition: (Void -> Void)?) -> CameraState {
return addLayerPreviewToView(view, newCameraOutputMode: newCameraOutputMode, completion: completition)
}
public func addLayerPreviewToView(view: UIView, newCameraOutputMode: CameraOutputMode, completion: (Void -> Void)?) -> CameraState {
if _canLoadCamera() { if _canLoadCamera() {
if let _ = embeddingView { if let _ = embeddingView {
if let validPreviewLayer = previewLayer { if let validPreviewLayer = previewLayer {
@@ -248,24 +242,21 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return _checkIfCameraIsAvailable() return _checkIfCameraIsAvailable()
} }
@available(*, unavailable, renamed="askUserForCameraPermission")
public func askUserForCameraPermissions(completition: Bool -> Void) {}
/** /**
Asks the user for camera permissions. Only works if the permissions are not yet determined. Note that it'll also automaticaly ask about the microphone permissions if you selected VideoWithMic output. Asks the user for camera permissions. Only works if the permissions are not yet determined. Note that it'll also automaticaly ask about the microphone permissions if you selected VideoWithMic output.
:param: completion Completion block with the result of permission request :param: completion Completion block with the result of permission request
*/ */
public func askUserForCameraPermission(completion: Bool -> Void) { open func askUserForCameraPermission(_ completion: @escaping (Bool) -> Void) {
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo, completionHandler: { (alowedAccess) -> Void in AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { (alowedAccess) -> Void in
if self.cameraOutputMode == .VideoWithMic { if self.cameraOutputMode == .videoWithMic {
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeAudio, completionHandler: { (alowedAccess) -> Void in AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeAudio, completionHandler: { (alowedAccess) -> Void in
dispatch_sync(dispatch_get_main_queue(), { () -> Void in DispatchQueue.main.sync(execute: { () -> Void in
completion(alowedAccess) completion(alowedAccess)
}) })
}) })
} else { } else {
dispatch_sync(dispatch_get_main_queue(), { () -> Void in DispatchQueue.main.sync(execute: { () -> Void in
completion(alowedAccess) completion(alowedAccess)
}) })
@ -274,19 +265,19 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/** /**
Stops running capture session but all setup devices, inputs and outputs stay for further reuse. Stops running capture session but all setup devices, inputs and outputs stay for further reuse.
*/ */
public func stopCaptureSession() { open func stopCaptureSession() {
captureSession?.stopRunning() captureSession?.stopRunning()
_stopFollowingDeviceOrientation() _stopFollowingDeviceOrientation()
} }
/** /**
Resumes capture session. Resumes capture session.
*/ */
public func resumeCaptureSession() { open func resumeCaptureSession() {
if let validCaptureSession = captureSession { if let validCaptureSession = captureSession {
if !validCaptureSession.running && cameraIsSetup { if !validCaptureSession.isRunning && cameraIsSetup {
validCaptureSession.startRunning() validCaptureSession.startRunning()
_startFollowingDeviceOrientation() _startFollowingDeviceOrientation()
} }
@ -306,11 +297,11 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/** /**
Stops running capture session and removes all setup devices, inputs and outputs. Stops running capture session and removes all setup devices, inputs and outputs.
*/ */
public func stopAndRemoveCaptureSession() { open func stopAndRemoveCaptureSession() {
stopCaptureSession() stopCaptureSession()
cameraDevice = .Back cameraDevice = .back
cameraIsSetup = false cameraIsSetup = false
previewLayer = nil previewLayer = nil
captureSession = nil captureSession = nil
@ -321,15 +312,12 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
movieOutput = nil movieOutput = nil
} }
@available(*, unavailable, renamed="capturePictureWithCompletion")
public func capturePictureWithCompletition(imageCompletition: (UIImage?, NSError?) -> Void) {}
/** /**
Captures still image from currently running capture session. Captures still image from currently running capture session.
:param: imageCompletion Completion block containing the captured UIImage :param: imageCompletion Completion block containing the captured UIImage
*/ */
public func capturePictureWithCompletion(imageCompletion: (UIImage?, NSError?) -> Void) { open func capturePictureWithCompletion(_ imageCompletion: @escaping (UIImage?, NSError?) -> Void) {
self.capturePictureDataWithCompletion { data, error in self.capturePictureDataWithCompletion { data, error in
guard error == nil, let imageData = data else { guard error == nil, let imageData = data else {
@ -339,53 +327,49 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
if self.writeFilesToPhoneLibrary == true, let library = self.library { if self.writeFilesToPhoneLibrary == true, let library = self.library {
library.writeImageDataToSavedPhotosAlbum(imageData, metadata:nil, completionBlock: { picUrl, error in
guard error != nil else { library.performChanges({
return PHAssetChangeRequest.creationRequestForAsset(from: UIImage(data: imageData)!)
} }, completionHandler: { success, error in
guard error != nil else {
dispatch_async(dispatch_get_main_queue(), { return
self._show(NSLocalizedString("Error", comment:""), message: error.localizedDescription) }
})
DispatchQueue.main.async(execute: {
self._show(NSLocalizedString("Error", comment:""), message: (error?.localizedDescription)!)
})
}) })
} }
imageCompletion(UIImage(data: imageData), nil) imageCompletion(UIImage(data: imageData), nil)
} }
} }
@available(*, unavailable, renamed="capturePictureWithCompletion")
public func capturePictureDataWithCompletition(imageCompletition: (NSData?, NSError?) -> Void) {}
/** /**
Captures still image from currently running capture session. Captures still image from currently running capture session.
:param: imageCompletion Completion block containing the captured imageData :param: imageCompletion Completion block containing the captured imageData
*/ */
public func capturePictureDataWithCompletion(imageCompletion: (NSData?, NSError?) -> Void) { open func capturePictureDataWithCompletion(_ imageCompletion: @escaping (Data?, NSError?) -> Void) {
guard cameraIsSetup else { guard cameraIsSetup else {
_show(NSLocalizedString("No capture session setup", comment:""), message: NSLocalizedString("I can't take any picture", comment:"")) _show(NSLocalizedString("No capture session setup", comment:""), message: NSLocalizedString("I can't take any picture", comment:""))
return return
} }
guard cameraOutputMode == .StillImage else { guard cameraOutputMode == .stillImage else {
_show(NSLocalizedString("Capture session output mode video", comment:""), message: NSLocalizedString("I can't take any picture", comment:"")) _show(NSLocalizedString("Capture session output mode video", comment:""), message: NSLocalizedString("I can't take any picture", comment:""))
return return
} }
dispatch_async(sessionQueue, { sessionQueue.async(execute: {
self._getStillImageOutput().captureStillImageAsynchronouslyFromConnection(self._getStillImageOutput().connectionWithMediaType(AVMediaTypeVideo), completionHandler: { [unowned self] sample, error in self._getStillImageOutput().captureStillImageAsynchronously(from: self._getStillImageOutput().connection(withMediaType: AVMediaTypeVideo), completionHandler: { [unowned self] sample, error in
guard error == nil else { guard error == nil else {
dispatch_async(dispatch_get_main_queue(), { DispatchQueue.main.async(execute: {
self._show(NSLocalizedString("Error", comment:""), message: error.localizedDescription) self._show(NSLocalizedString("Error", comment:""), message: (error?.localizedDescription)!)
}) })
imageCompletion(nil, error) imageCompletion(nil, error as NSError?)
return return
} }
@ -394,31 +378,28 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
imageCompletion(imageData, nil) imageCompletion(imageData, nil)
}) })
}) })
} }
/** /**
Starts recording a video with or without voice as in the session preset. Starts recording a video with or without voice as in the session preset.
*/ */
public func startRecordingVideo() { open func startRecordingVideo() {
if cameraOutputMode != .StillImage { if cameraOutputMode != .stillImage {
_getMovieOutput().startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self) _getMovieOutput().startRecording(toOutputFileURL: tempFilePath, recordingDelegate: self)
} else { } else {
_show(NSLocalizedString("Capture session output still image", comment:""), message: NSLocalizedString("I can only take pictures", comment:"")) _show(NSLocalizedString("Capture session output still image", comment:""), message: NSLocalizedString("I can only take pictures", comment:""))
} }
} }
@available(*, unavailable, renamed="stopVideoRecording")
public func stopRecordingVideo(completition:(videoURL: NSURL?, error: NSError?) -> Void) {}
/** /**
Stop recording a video. Save it to the cameraRoll and give back the url. Stop recording a video. Save it to the cameraRoll and give back the url.
*/ */
public func stopVideoRecording(completion:(videoURL: NSURL?, error: NSError?) -> Void) { open func stopVideoRecording(_ completion:((_ videoURL: URL?, _ error: NSError?) -> Void)?) {
if let runningMovieOutput = movieOutput { if let runningMovieOutput = movieOutput {
if runningMovieOutput.recording { if runningMovieOutput.isRecording {
videoCompletion = completion videoCompletion = completion
runningMovieOutput.stopRecording() runningMovieOutput.stopRecording()
} }
@ -426,79 +407,96 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
/** /**
Current camera status. Current camera status.
:returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined :returns: Current state of the camera: Ready / AccessDenied / NoDeviceFound / NotDetermined
*/ */
public func currentCameraStatus() -> CameraState { open func currentCameraStatus() -> CameraState {
return _checkIfCameraIsAvailable() return _checkIfCameraIsAvailable()
} }
/** /**
Change current flash mode to next value from available ones. Change current flash mode to next value from available ones.
:returns: Current flash mode: Off / On / Auto :returns: Current flash mode: Off / On / Auto
*/ */
public func changeFlashMode() -> CameraFlashMode { open func changeFlashMode() -> CameraFlashMode {
flashMode = CameraFlashMode(rawValue: (flashMode.rawValue+1)%3)! flashMode = CameraFlashMode(rawValue: (flashMode.rawValue+1)%3)!
return flashMode return flashMode
} }
/** /**
Change current output quality mode to next value from available ones. Change current output quality mode to next value from available ones.
:returns: Current quality mode: Low / Medium / High :returns: Current quality mode: Low / Medium / High
*/ */
public func changeQualityMode() -> CameraOutputQuality { open func changeQualityMode() -> CameraOutputQuality {
cameraOutputQuality = CameraOutputQuality(rawValue: (cameraOutputQuality.rawValue+1)%3)! cameraOutputQuality = CameraOutputQuality(rawValue: (cameraOutputQuality.rawValue+1)%3)!
return cameraOutputQuality return cameraOutputQuality
} }
// MARK: - AVCaptureFileOutputRecordingDelegate // MARK: - AVCaptureFileOutputRecordingDelegate
public func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) { open func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
captureSession?.beginConfiguration() captureSession?.beginConfiguration()
if flashMode != .Off { if flashMode != .off {
_updateTorch(flashMode) _updateTorch(flashMode)
} }
captureSession?.commitConfiguration() captureSession?.commitConfiguration()
} }
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { open func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
_updateTorch(.Off) _updateTorch(.off)
if (error != nil) { if (error != nil) {
_show(NSLocalizedString("Unable to save video to the iPhone", comment:""), message: error.localizedDescription) _show(NSLocalizedString("Unable to save video to the iPhone", comment:""), message: error.localizedDescription)
} else { } else {
if let validLibrary = library {
if writeFilesToPhoneLibrary { if writeFilesToPhoneLibrary {
validLibrary.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
if (error != nil) { if PHPhotoLibrary.authorizationStatus() == .authorized {
self._show(NSLocalizedString("Unable to save video to the iPhone.", comment:""), message: error!.localizedDescription) saveVideoToLibrary(outputFileURL)
self._executeVideoCompletionWithURL(nil, error: error) }
} else { else {
if let validAssetURL = assetURL { PHPhotoLibrary.requestAuthorization({ (autorizationStatus) in
self._executeVideoCompletionWithURL(validAssetURL, error: error) if autorizationStatus == .authorized {
} self.saveVideoToLibrary(outputFileURL)
} }
}) })
} else {
_executeVideoCompletionWithURL(outputFileURL, error: error)
} }
} else {
_executeVideoCompletionWithURL(outputFileURL, error: error as NSError?)
} }
} }
} }
fileprivate func saveVideoToLibrary(_ fileURL: URL) {
if let validLibrary = library {
validLibrary.performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
}, completionHandler: { success, error in
if (error != nil) {
self._show(NSLocalizedString("Unable to save video to the iPhone.", comment:""), message: error!.localizedDescription)
self._executeVideoCompletionWithURL(nil, error: error as NSError?)
} else {
self._executeVideoCompletionWithURL(fileURL, error: error as NSError?)
}
})
}
}
// MARK: - UIGestureRecognizerDelegate // MARK: - UIGestureRecognizerDelegate
private func attachZoom(view: UIView) { fileprivate func attachZoom(_ view: UIView) {
let pinch = UIPinchGestureRecognizer(target: self, action: #selector(CameraManager._zoomStart(_:))) let pinch = UIPinchGestureRecognizer(target: self, action: #selector(CameraManager._zoomStart(_:)))
view.addGestureRecognizer(pinch) view.addGestureRecognizer(pinch)
pinch.delegate = self pinch.delegate = self
} }
public func gestureRecognizerShouldBegin(gestureRecognizer: UIGestureRecognizer) -> Bool { open func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
if gestureRecognizer.isKindOfClass(UIPinchGestureRecognizer) { if gestureRecognizer.isKind(of: UIPinchGestureRecognizer.self) {
beginZoomScale = zoomScale; beginZoomScale = zoomScale;
} }
@ -506,18 +504,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
@objc @objc
private func _zoomStart(recognizer: UIPinchGestureRecognizer) { fileprivate func _zoomStart(_ recognizer: UIPinchGestureRecognizer) {
guard let view = embeddingView, guard let view = embeddingView,
previewLayer = previewLayer let previewLayer = previewLayer
else { return } else { return }
var allTouchesOnPreviewLayer = true var allTouchesOnPreviewLayer = true
let numTouch = recognizer.numberOfTouches() let numTouch = recognizer.numberOfTouches
for i in 0 ..< numTouch { for i in 0 ..< numTouch {
let location = recognizer.locationOfTouch(i, inView: view) let location = recognizer.location(ofTouch: i, in: view)
let convertedTouch = previewLayer.convertPoint(location, fromLayer: previewLayer.superlayer) let convertedTouch = previewLayer.convert(location, from: previewLayer.superlayer)
if !previewLayer.containsPoint(convertedTouch) { if !previewLayer.contains(convertedTouch) {
allTouchesOnPreviewLayer = false allTouchesOnPreviewLayer = false
break break
} }
@ -527,7 +525,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
private func _zoom(scale: CGFloat) { fileprivate func _zoom(_ scale: CGFloat) {
do { do {
let captureDevice = AVCaptureDevice.devices().first as? AVCaptureDevice let captureDevice = AVCaptureDevice.devices().first as? AVCaptureDevice
try captureDevice?.lockForConfiguration() try captureDevice?.lockForConfiguration()
@ -545,12 +543,12 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
// MARK: - CameraManager() // MARK: - CameraManager()
private func _updateTorch(flashMode: CameraFlashMode) { fileprivate func _updateTorch(_ flashMode: CameraFlashMode) {
captureSession?.beginConfiguration() captureSession?.beginConfiguration()
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices { for device in devices! {
let captureDevice = device as! AVCaptureDevice let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == AVCaptureDevicePosition.Back) { if (captureDevice.position == AVCaptureDevicePosition.back) {
let avTorchMode = AVCaptureTorchMode(rawValue: flashMode.rawValue) let avTorchMode = AVCaptureTorchMode(rawValue: flashMode.rawValue)
if (captureDevice.isTorchModeSupported(avTorchMode!)) { if (captureDevice.isTorchModeSupported(avTorchMode!)) {
do { do {
@ -567,18 +565,18 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
private func _executeVideoCompletionWithURL(url: NSURL?, error: NSError?) { fileprivate func _executeVideoCompletionWithURL(_ url: URL?, error: NSError?) {
if let validCompletion = videoCompletion { if let validCompletion = videoCompletion {
validCompletion(videoURL: url, error: error) validCompletion(url, error)
videoCompletion = nil videoCompletion = nil
} }
} }
private func _getMovieOutput() -> AVCaptureMovieFileOutput { fileprivate func _getMovieOutput() -> AVCaptureMovieFileOutput {
var shouldReinitializeMovieOutput = movieOutput == nil var shouldReinitializeMovieOutput = movieOutput == nil
if !shouldReinitializeMovieOutput { if !shouldReinitializeMovieOutput {
if let connection = movieOutput!.connectionWithMediaType(AVMediaTypeVideo) { if let connection = movieOutput!.connection(withMediaType: AVMediaTypeVideo) {
shouldReinitializeMovieOutput = shouldReinitializeMovieOutput || !connection.active shouldReinitializeMovieOutput = shouldReinitializeMovieOutput || !connection.isActive
} }
} }
@ -597,11 +595,11 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return movieOutput! return movieOutput!
} }
private func _getStillImageOutput() -> AVCaptureStillImageOutput { fileprivate func _getStillImageOutput() -> AVCaptureStillImageOutput {
var shouldReinitializeStillImageOutput = stillImageOutput == nil var shouldReinitializeStillImageOutput = stillImageOutput == nil
if !shouldReinitializeStillImageOutput { if !shouldReinitializeStillImageOutput {
if let connection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo) { if let connection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
shouldReinitializeStillImageOutput = shouldReinitializeStillImageOutput || !connection.active shouldReinitializeStillImageOutput = shouldReinitializeStillImageOutput || !connection.isActive
} }
} }
if shouldReinitializeStillImageOutput { if shouldReinitializeStillImageOutput {
@ -618,26 +616,26 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
return stillImageOutput! return stillImageOutput!
} }
@objc private func _orientationChanged() { @objc fileprivate func _orientationChanged() {
var currentConnection: AVCaptureConnection?; var currentConnection: AVCaptureConnection?;
switch cameraOutputMode { switch cameraOutputMode {
case .StillImage: case .stillImage:
currentConnection = stillImageOutput?.connectionWithMediaType(AVMediaTypeVideo) currentConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo)
case .VideoOnly, .VideoWithMic: case .videoOnly, .videoWithMic:
currentConnection = _getMovieOutput().connectionWithMediaType(AVMediaTypeVideo) currentConnection = _getMovieOutput().connection(withMediaType: AVMediaTypeVideo)
} }
if let validPreviewLayer = previewLayer { if let validPreviewLayer = previewLayer {
if let validPreviewLayerConnection = validPreviewLayer.connection { if let validPreviewLayerConnection = validPreviewLayer.connection {
if validPreviewLayerConnection.supportsVideoOrientation { if validPreviewLayerConnection.isVideoOrientationSupported {
validPreviewLayerConnection.videoOrientation = _currentVideoOrientation() validPreviewLayerConnection.videoOrientation = _currentVideoOrientation()
} }
} }
if let validOutputLayerConnection = currentConnection { if let validOutputLayerConnection = currentConnection {
if validOutputLayerConnection.supportsVideoOrientation { if validOutputLayerConnection.isVideoOrientationSupported {
validOutputLayerConnection.videoOrientation = _currentVideoOrientation() validOutputLayerConnection.videoOrientation = _currentVideoOrientation()
} }
} }
dispatch_async(dispatch_get_main_queue(), { () -> Void in DispatchQueue.main.async(execute: { () -> Void in
if let validEmbeddingView = self.embeddingView { if let validEmbeddingView = self.embeddingView {
validPreviewLayer.frame = validEmbeddingView.bounds validPreviewLayer.frame = validEmbeddingView.bounds
} }
@ -645,26 +643,26 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
private func _currentVideoOrientation() -> AVCaptureVideoOrientation { fileprivate func _currentVideoOrientation() -> AVCaptureVideoOrientation {
switch UIDevice.currentDevice().orientation { switch UIDevice.current.orientation {
case .LandscapeLeft: case .landscapeLeft:
return .LandscapeRight return .landscapeRight
case .LandscapeRight: case .landscapeRight:
return .LandscapeLeft return .landscapeLeft
default: default:
return .Portrait return .portrait
} }
} }
private func _canLoadCamera() -> Bool { fileprivate func _canLoadCamera() -> Bool {
let currentCameraState = _checkIfCameraIsAvailable() let currentCameraState = _checkIfCameraIsAvailable()
return currentCameraState == .Ready || (currentCameraState == .NotDetermined && showAccessPermissionPopupAutomatically) return currentCameraState == .ready || (currentCameraState == .notDetermined && showAccessPermissionPopupAutomatically)
} }
private func _setupCamera(completion: Void -> Void) { fileprivate func _setupCamera(_ completion: @escaping (Void) -> Void) {
captureSession = AVCaptureSession() captureSession = AVCaptureSession()
dispatch_async(sessionQueue, { sessionQueue.async(execute: {
if let validCaptureSession = self.captureSession { if let validCaptureSession = self.captureSession {
validCaptureSession.beginConfiguration() validCaptureSession.beginConfiguration()
validCaptureSession.sessionPreset = AVCaptureSessionPresetHigh validCaptureSession.sessionPreset = AVCaptureSessionPresetHigh
@ -685,24 +683,24 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}) })
} }
private func _startFollowingDeviceOrientation() { fileprivate func _startFollowingDeviceOrientation() {
if shouldRespondToOrientationChanges && !cameraIsObservingDeviceOrientation { if shouldRespondToOrientationChanges && !cameraIsObservingDeviceOrientation {
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(CameraManager._orientationChanged), name: UIDeviceOrientationDidChangeNotification, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(CameraManager._orientationChanged), name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
cameraIsObservingDeviceOrientation = true cameraIsObservingDeviceOrientation = true
} }
} }
private func _stopFollowingDeviceOrientation() { fileprivate func _stopFollowingDeviceOrientation() {
if cameraIsObservingDeviceOrientation { if cameraIsObservingDeviceOrientation {
NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceOrientationDidChangeNotification, object: nil) NotificationCenter.default.removeObserver(self, name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
cameraIsObservingDeviceOrientation = false cameraIsObservingDeviceOrientation = false
} }
} }
private func _addPreviewLayerToView(view: UIView) { fileprivate func _addPreviewLayerToView(_ view: UIView) {
embeddingView = view embeddingView = view
attachZoom(view) attachZoom(view)
dispatch_async(dispatch_get_main_queue(), { () -> Void in DispatchQueue.main.async(execute: { () -> Void in
guard let _ = self.previewLayer else { guard let _ = self.previewLayer else {
return return
} }
@ -712,54 +710,54 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
}) })
} }
private func _setupMaxZoomScale() { fileprivate func _setupMaxZoomScale() {
var maxZoom = CGFloat(1.0) var maxZoom = CGFloat(1.0)
beginZoomScale = CGFloat(1.0) beginZoomScale = CGFloat(1.0)
if cameraDevice == .Back { if cameraDevice == .back {
maxZoom = (backCameraDevice?.activeFormat.videoMaxZoomFactor)! maxZoom = (backCameraDevice?.activeFormat.videoMaxZoomFactor)!
} }
else if cameraDevice == .Front { else if cameraDevice == .front {
maxZoom = (frontCameraDevice?.activeFormat.videoMaxZoomFactor)! maxZoom = (frontCameraDevice?.activeFormat.videoMaxZoomFactor)!
} }
maxZoomScale = maxZoom maxZoomScale = maxZoom
} }
private func _checkIfCameraIsAvailable() -> CameraState { fileprivate func _checkIfCameraIsAvailable() -> CameraState {
let deviceHasCamera = UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.Rear) || UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.Front) let deviceHasCamera = UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.rear) || UIImagePickerController.isCameraDeviceAvailable(UIImagePickerControllerCameraDevice.front)
if deviceHasCamera { if deviceHasCamera {
let authorizationStatus = AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo) let authorizationStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)
let userAgreedToUseIt = authorizationStatus == .Authorized let userAgreedToUseIt = authorizationStatus == .authorized
if userAgreedToUseIt { if userAgreedToUseIt {
return .Ready return .ready
} else if authorizationStatus == AVAuthorizationStatus.NotDetermined { } else if authorizationStatus == AVAuthorizationStatus.notDetermined {
return .NotDetermined return .notDetermined
} else { } else {
_show(NSLocalizedString("Camera access denied", comment:""), message:NSLocalizedString("You need to go to settings app and grant acces to the camera device to use it.", comment:"")) _show(NSLocalizedString("Camera access denied", comment:""), message:NSLocalizedString("You need to go to settings app and grant acces to the camera device to use it.", comment:""))
return .AccessDenied return .accessDenied
} }
} else { } else {
_show(NSLocalizedString("Camera unavailable", comment:""), message:NSLocalizedString("The device does not have a camera.", comment:"")) _show(NSLocalizedString("Camera unavailable", comment:""), message:NSLocalizedString("The device does not have a camera.", comment:""))
return .NoDeviceFound return .noDeviceFound
} }
} }
private func _setupOutputMode(newCameraOutputMode: CameraOutputMode, oldCameraOutputMode: CameraOutputMode?) { fileprivate func _setupOutputMode(_ newCameraOutputMode: CameraOutputMode, oldCameraOutputMode: CameraOutputMode?) {
captureSession?.beginConfiguration() captureSession?.beginConfiguration()
if let cameraOutputToRemove = oldCameraOutputMode { if let cameraOutputToRemove = oldCameraOutputMode {
// remove current setting // remove current setting
switch cameraOutputToRemove { switch cameraOutputToRemove {
case .StillImage: case .stillImage:
if let validStillImageOutput = stillImageOutput { if let validStillImageOutput = stillImageOutput {
captureSession?.removeOutput(validStillImageOutput) captureSession?.removeOutput(validStillImageOutput)
} }
case .VideoOnly, .VideoWithMic: case .videoOnly, .videoWithMic:
if let validMovieOutput = movieOutput { if let validMovieOutput = movieOutput {
captureSession?.removeOutput(validMovieOutput) captureSession?.removeOutput(validMovieOutput)
} }
if cameraOutputToRemove == .VideoWithMic { if cameraOutputToRemove == .videoWithMic {
_removeMicInput() _removeMicInput()
} }
} }
@ -767,7 +765,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
// configure new devices // configure new devices
switch newCameraOutputMode { switch newCameraOutputMode {
case .StillImage: case .stillImage:
if (stillImageOutput == nil) { if (stillImageOutput == nil) {
_setupOutputs() _setupOutputs()
} }
@ -778,10 +776,10 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
} }
case .VideoOnly, .VideoWithMic: case .videoOnly, .videoWithMic:
captureSession?.addOutput(_getMovieOutput()) captureSession?.addOutput(_getMovieOutput())
if newCameraOutputMode == .VideoWithMic { if newCameraOutputMode == .videoWithMic {
if let validMic = _deviceInputFromDevice(mic) { if let validMic = _deviceInputFromDevice(mic) {
captureSession?.addInput(validMic) captureSession?.addInput(validMic)
} }
@ -792,7 +790,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
_orientationChanged() _orientationChanged()
} }
private func _setupOutputs() { fileprivate func _setupOutputs() {
if (stillImageOutput == nil) { if (stillImageOutput == nil) {
stillImageOutput = AVCaptureStillImageOutput() stillImageOutput = AVCaptureStillImageOutput()
} }
@ -801,35 +799,35 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
movieOutput!.movieFragmentInterval = kCMTimeInvalid movieOutput!.movieFragmentInterval = kCMTimeInvalid
} }
if library == nil { if library == nil {
library = ALAssetsLibrary() library = PHPhotoLibrary.shared()
} }
} }
private func _setupPreviewLayer() { fileprivate func _setupPreviewLayer() {
if let validCaptureSession = captureSession { if let validCaptureSession = captureSession {
previewLayer = AVCaptureVideoPreviewLayer(session: validCaptureSession) previewLayer = AVCaptureVideoPreviewLayer(session: validCaptureSession)
previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
} }
} }
private func _updateCameraDevice(deviceType: CameraDevice) { fileprivate func _updateCameraDevice(_ deviceType: CameraDevice) {
if let validCaptureSession = captureSession { if let validCaptureSession = captureSession {
validCaptureSession.beginConfiguration() validCaptureSession.beginConfiguration()
let inputs = validCaptureSession.inputs as! [AVCaptureInput] let inputs = validCaptureSession.inputs as! [AVCaptureInput]
for input in inputs { for input in inputs {
if let deviceInput = input as? AVCaptureDeviceInput { if let deviceInput = input as? AVCaptureDeviceInput {
if deviceInput.device == backCameraDevice && cameraDevice == .Front { if deviceInput.device == backCameraDevice && cameraDevice == .front {
validCaptureSession.removeInput(deviceInput) validCaptureSession.removeInput(deviceInput)
break; break;
} else if deviceInput.device == frontCameraDevice && cameraDevice == .Back { } else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
validCaptureSession.removeInput(deviceInput) validCaptureSession.removeInput(deviceInput)
break; break;
} }
} }
} }
switch cameraDevice { switch cameraDevice {
case .Front: case .front:
if hasFrontCamera { if hasFrontCamera {
if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice) { if let validFrontDevice = _deviceInputFromDevice(frontCameraDevice) {
if !inputs.contains(validFrontDevice) { if !inputs.contains(validFrontDevice) {
@ -837,7 +835,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
} }
case .Back: case .back:
if let validBackDevice = _deviceInputFromDevice(backCameraDevice) { if let validBackDevice = _deviceInputFromDevice(backCameraDevice) {
if !inputs.contains(validBackDevice) { if !inputs.contains(validBackDevice) {
validCaptureSession.addInput(validBackDevice) validCaptureSession.addInput(validBackDevice)
@ -848,12 +846,12 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
private func _updateFlasMode(flashMode: CameraFlashMode) { fileprivate func _updateFlasMode(_ flashMode: CameraFlashMode) {
captureSession?.beginConfiguration() captureSession?.beginConfiguration()
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices { for device in devices! {
let captureDevice = device as! AVCaptureDevice let captureDevice = device as! AVCaptureDevice
if (captureDevice.position == AVCaptureDevicePosition.Back) { if (captureDevice.position == AVCaptureDevicePosition.back) {
let avFlashMode = AVCaptureFlashMode(rawValue: flashMode.rawValue) let avFlashMode = AVCaptureFlashMode(rawValue: flashMode.rawValue)
if (captureDevice.isFlashModeSupported(avFlashMode!)) { if (captureDevice.isFlashModeSupported(avFlashMode!)) {
do { do {
@ -869,16 +867,16 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
captureSession?.commitConfiguration() captureSession?.commitConfiguration()
} }
private func _updateCameraQualityMode(newCameraOutputQuality: CameraOutputQuality) { fileprivate func _updateCameraQualityMode(_ newCameraOutputQuality: CameraOutputQuality) {
if let validCaptureSession = captureSession { if let validCaptureSession = captureSession {
var sessionPreset = AVCaptureSessionPresetLow var sessionPreset = AVCaptureSessionPresetLow
switch (newCameraOutputQuality) { switch (newCameraOutputQuality) {
case CameraOutputQuality.Low: case CameraOutputQuality.low:
sessionPreset = AVCaptureSessionPresetLow sessionPreset = AVCaptureSessionPresetLow
case CameraOutputQuality.Medium: case CameraOutputQuality.medium:
sessionPreset = AVCaptureSessionPresetMedium sessionPreset = AVCaptureSessionPresetMedium
case CameraOutputQuality.High: case CameraOutputQuality.high:
if cameraOutputMode == .StillImage { if cameraOutputMode == .stillImage {
sessionPreset = AVCaptureSessionPresetPhoto sessionPreset = AVCaptureSessionPresetPhoto
} else { } else {
sessionPreset = AVCaptureSessionPresetHigh sessionPreset = AVCaptureSessionPresetHigh
@ -896,7 +894,7 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
private func _removeMicInput() { fileprivate func _removeMicInput() {
guard let inputs = captureSession?.inputs as? [AVCaptureInput] else { return } guard let inputs = captureSession?.inputs as? [AVCaptureInput] else { return }
for input in inputs { for input in inputs {
@ -909,15 +907,15 @@ public class CameraManager: NSObject, AVCaptureFileOutputRecordingDelegate, UIGe
} }
} }
private func _show(title: String, message: String) { fileprivate func _show(_ title: String, message: String) {
if showErrorsToUsers { if showErrorsToUsers {
dispatch_async(dispatch_get_main_queue(), { () -> Void in DispatchQueue.main.async(execute: { () -> Void in
self.showErrorBlock(erTitle: title, erMessage: message) self.showErrorBlock(title, message)
}) })
} }
} }
private func _deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? { fileprivate func _deviceInputFromDevice(_ device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
guard let validDevice = device else { return nil } guard let validDevice = device else { return nil }
do { do {
return try AVCaptureDeviceInput(device: validDevice) return try AVCaptureDeviceInput(device: validDevice)

View File

@ -15,7 +15,7 @@ class ImageViewController: UIViewController {
override func viewDidLoad() { override func viewDidLoad() {
super.viewDidLoad() super.viewDidLoad()
self.navigationController?.navigationBar.hidden = false self.navigationController?.navigationBar.isHidden = false
if let validImage = self.image { if let validImage = self.image {
self.imageView.image = validImage self.imageView.image = validImage

View File

@ -2,6 +2,12 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"> <plist version="1.0">
<dict> <dict>
<key>NSPhotoLibraryUsageDescription</key>
<string>CameraManager will access your photo library to save pictures/video.</string>
<key>NSMicrophoneUsageDescription</key>
<string>CameraManager will use your microphone to record audio for videos.</string>
<key>NSCameraUsageDescription</key>
<string>CameraManager will use your camera to take pictures/video.</string>
<key>CFBundleDevelopmentRegion</key> <key>CFBundleDevelopmentRegion</key>
<string>en</string> <string>en</string>
<key>CFBundleExecutable</key> <key>CFBundleExecutable</key>

View File

@ -33,32 +33,32 @@ class ViewController: UIViewController {
cameraManager.showAccessPermissionPopupAutomatically = false cameraManager.showAccessPermissionPopupAutomatically = false
askForPermissionsButton.hidden = true askForPermissionsButton.isHidden = true
askForPermissionsLabel.hidden = true askForPermissionsLabel.isHidden = true
let currentCameraState = cameraManager.currentCameraStatus() let currentCameraState = cameraManager.currentCameraStatus()
if currentCameraState == .NotDetermined { if currentCameraState == .notDetermined {
askForPermissionsButton.hidden = false askForPermissionsButton.isHidden = false
askForPermissionsLabel.hidden = false askForPermissionsLabel.isHidden = false
} else if (currentCameraState == .Ready) { } else if (currentCameraState == .ready) {
addCameraToView() addCameraToView()
} }
if !cameraManager.hasFlash { if !cameraManager.hasFlash {
flashModeButton.enabled = false flashModeButton.isEnabled = false
flashModeButton.setTitle("No flash", forState: UIControlState.Normal) flashModeButton.setTitle("No flash", for: UIControlState())
} }
} }
override func viewWillAppear(animated: Bool) { override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated) super.viewWillAppear(animated)
navigationController?.navigationBar.hidden = true navigationController?.navigationBar.isHidden = true
cameraManager.resumeCaptureSession() cameraManager.resumeCaptureSession()
} }
override func viewWillDisappear(animated: Bool) { override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated) super.viewWillDisappear(animated)
cameraManager.stopCaptureSession() cameraManager.stopCaptureSession()
} }
@ -66,42 +66,42 @@ class ViewController: UIViewController {
// MARK: - ViewController // MARK: - ViewController
private func addCameraToView() fileprivate func addCameraToView()
{ {
cameraManager.addPreviewLayerToView(cameraView, newCameraOutputMode: CameraOutputMode.VideoWithMic) cameraManager.addPreviewLayerToView(cameraView, newCameraOutputMode: CameraOutputMode.videoWithMic)
cameraManager.showErrorBlock = { [weak self] (erTitle: String, erMessage: String) -> Void in cameraManager.showErrorBlock = { [weak self] (erTitle: String, erMessage: String) -> Void in
let alertController = UIAlertController(title: erTitle, message: erMessage, preferredStyle: .Alert) let alertController = UIAlertController(title: erTitle, message: erMessage, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction) -> Void in })) alertController.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.default, handler: { (alertAction) -> Void in }))
self?.presentViewController(alertController, animated: true, completion: nil) self?.present(alertController, animated: true, completion: nil)
} }
} }
// MARK: - @IBActions // MARK: - @IBActions
@IBAction func changeFlashMode(sender: UIButton) @IBAction func changeFlashMode(_ sender: UIButton)
{ {
switch (cameraManager.changeFlashMode()) { switch (cameraManager.changeFlashMode()) {
case .Off: case .off:
sender.setTitle("Flash Off", forState: UIControlState.Normal) sender.setTitle("Flash Off", for: UIControlState())
case .On: case .on:
sender.setTitle("Flash On", forState: UIControlState.Normal) sender.setTitle("Flash On", for: UIControlState())
case .Auto: case .auto:
sender.setTitle("Flash Auto", forState: UIControlState.Normal) sender.setTitle("Flash Auto", for: UIControlState())
} }
} }
@IBAction func recordButtonTapped(sender: UIButton) { @IBAction func recordButtonTapped(_ sender: UIButton) {
switch (cameraManager.cameraOutputMode) { switch (cameraManager.cameraOutputMode) {
case .StillImage: case .stillImage:
cameraManager.capturePictureWithCompletion({ (image, error) -> Void in cameraManager.capturePictureWithCompletion({ (image, error) -> Void in
if let errorOccured = error { if let errorOccured = error {
self.cameraManager.showErrorBlock(erTitle: "Error occurred", erMessage: errorOccured.localizedDescription) self.cameraManager.showErrorBlock("Error occurred", errorOccured.localizedDescription)
} }
else { else {
let vc: ImageViewController? = self.storyboard?.instantiateViewControllerWithIdentifier("ImageVC") as? ImageViewController let vc: ImageViewController? = self.storyboard?.instantiateViewController(withIdentifier: "ImageVC") as? ImageViewController
if let validVC: ImageViewController = vc { if let validVC: ImageViewController = vc {
if let capturedImage = image { if let capturedImage = image {
validVC.image = capturedImage validVC.image = capturedImage
@ -110,51 +110,51 @@ class ViewController: UIViewController {
} }
} }
}) })
case .VideoWithMic, .VideoOnly: case .videoWithMic, .videoOnly:
sender.selected = !sender.selected sender.isSelected = !sender.isSelected
sender.setTitle(" ", forState: UIControlState.Selected) sender.setTitle(" ", for: UIControlState.selected)
sender.backgroundColor = sender.selected ? UIColor.redColor() : UIColor.greenColor() sender.backgroundColor = sender.isSelected ? UIColor.red : UIColor.green
if sender.selected { if sender.isSelected {
cameraManager.startRecordingVideo() cameraManager.startRecordingVideo()
} else { } else {
cameraManager.stopVideoRecording({ (videoURL, error) -> Void in cameraManager.stopVideoRecording({ (videoURL, error) -> Void in
if let errorOccured = error { if let errorOccured = error {
self.cameraManager.showErrorBlock(erTitle: "Error occurred", erMessage: errorOccured.localizedDescription) self.cameraManager.showErrorBlock("Error occurred", errorOccured.localizedDescription)
} }
}) })
} }
} }
} }
@IBAction func outputModeButtonTapped(sender: UIButton) { @IBAction func outputModeButtonTapped(_ sender: UIButton) {
cameraManager.cameraOutputMode = cameraManager.cameraOutputMode == CameraOutputMode.VideoWithMic ? CameraOutputMode.StillImage : CameraOutputMode.VideoWithMic cameraManager.cameraOutputMode = cameraManager.cameraOutputMode == CameraOutputMode.videoWithMic ? CameraOutputMode.stillImage : CameraOutputMode.videoWithMic
switch (cameraManager.cameraOutputMode) { switch (cameraManager.cameraOutputMode) {
case .StillImage: case .stillImage:
cameraButton.selected = false cameraButton.isSelected = false
cameraButton.backgroundColor = UIColor.greenColor() cameraButton.backgroundColor = UIColor.green
sender.setTitle("Image", forState: UIControlState.Normal) sender.setTitle("Image", for: UIControlState())
case .VideoWithMic, .VideoOnly: case .videoWithMic, .videoOnly:
sender.setTitle("Video", forState: UIControlState.Normal) sender.setTitle("Video", for: UIControlState())
} }
} }
@IBAction func changeCameraDevice(sender: UIButton) { @IBAction func changeCameraDevice(_ sender: UIButton) {
cameraManager.cameraDevice = cameraManager.cameraDevice == CameraDevice.Front ? CameraDevice.Back : CameraDevice.Front cameraManager.cameraDevice = cameraManager.cameraDevice == CameraDevice.front ? CameraDevice.back : CameraDevice.front
switch (cameraManager.cameraDevice) { switch (cameraManager.cameraDevice) {
case .Front: case .front:
sender.setTitle("Front", forState: UIControlState.Normal) sender.setTitle("Front", for: UIControlState())
case .Back: case .back:
sender.setTitle("Back", forState: UIControlState.Normal) sender.setTitle("Back", for: UIControlState())
} }
} }
@IBAction func askForCameraPermissions(sender: UIButton) { @IBAction func askForCameraPermissions(_ sender: UIButton) {
cameraManager.askUserForCameraPermission({ permissionGranted in cameraManager.askUserForCameraPermission({ permissionGranted in
self.askForPermissionsButton.hidden = true self.askForPermissionsButton.isHidden = true
self.askForPermissionsLabel.hidden = true self.askForPermissionsLabel.isHidden = true
self.askForPermissionsButton.alpha = 0 self.askForPermissionsButton.alpha = 0
self.askForPermissionsLabel.alpha = 0 self.askForPermissionsLabel.alpha = 0
if permissionGranted { if permissionGranted {
@ -163,15 +163,15 @@ class ViewController: UIViewController {
}) })
} }
@IBAction func changeCameraQuality(sender: UIButton) { @IBAction func changeCameraQuality(_ sender: UIButton) {
switch (cameraManager.changeQualityMode()) { switch (cameraManager.changeQualityMode()) {
case .High: case .high:
sender.setTitle("High", forState: UIControlState.Normal) sender.setTitle("High", for: UIControlState())
case .Low: case .low:
sender.setTitle("Low", forState: UIControlState.Normal) sender.setTitle("Low", for: UIControlState())
case .Medium: case .medium:
sender.setTitle("Medium", forState: UIControlState.Normal) sender.setTitle("Medium", for: UIControlState())
} }
} }
} }