
add CameraModel and CameraService for interacting with the camera

Suhail Saqan 2023-09-19 13:33:41 -07:00 committed by William Casarin
parent 7744787c51
commit 472f81b311
4 changed files with 859 additions and 0 deletions
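
Example usage (not part of this commit): a minimal, hypothetical SwiftUI sketch of how a view could drive the new CameraModel. The view name and layout are illustrative only; a real screen would also wrap camera.session in an AVCaptureVideoPreviewLayer-backed preview view.

import SwiftUI

struct CameraSketchView: View {
    @StateObject private var camera = CameraModel()

    var body: some View {
        VStack(spacing: 16) {
            // A preview layer for camera.session would normally go here.
            Button(camera.isFlashOn ? "Flash: on" : "Flash: off") {
                camera.switchFlash()
            }
            Button("Capture photo") {
                camera.capturePhoto()
            }
            .disabled(camera.isCameraButtonDisabled)
        }
        .onAppear { camera.configure() }      // requests permission, then configures the session
        .onDisappear { camera.stop() }
    }
}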

damus.xcodeproj/project.pbxproj

@@ -380,6 +380,9 @@
4FE60CDD295E1C5E00105A1F /* Wallet.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4FE60CDC295E1C5E00105A1F /* Wallet.swift */; };
50088DA129E8271A008A1FDF /* WebSocket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 50088DA029E8271A008A1FDF /* WebSocket.swift */; };
501F8C802A0220E1001AFC1D /* KeychainStorage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 501F8C7F2A0220E1001AFC1D /* KeychainStorage.swift */; };
BA3759922ABCCEBA0018D73B /* CameraService+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */; };
BA3759932ABCCEBA0018D73B /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759902ABCCEBA0018D73B /* CameraModel.swift */; };
BA3759942ABCCEBA0018D73B /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759912ABCCEBA0018D73B /* CameraService.swift */; };
501F8C822A0224EB001AFC1D /* KeychainStorageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 501F8C812A0224EB001AFC1D /* KeychainStorageTests.swift */; };
504323A72A34915F006AE6DC /* RelayModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 504323A62A34915F006AE6DC /* RelayModel.swift */; };
504323A92A3495B6006AE6DC /* RelayModelCache.swift in Sources */ = {isa = PBXBuildFile; fileRef = 504323A82A3495B6006AE6DC /* RelayModelCache.swift */; };
@@ -934,6 +937,9 @@
4C9B0DED2A65A75F00CBDA21 /* AttrStringTestExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttrStringTestExtensions.swift; sourceTree = "<group>"; };
4C9B0DF22A65C46800CBDA21 /* ProfileEditButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileEditButton.swift; sourceTree = "<group>"; };
4C9BB83029C0ED4F00FC4E37 /* DisplayName.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DisplayName.swift; sourceTree = "<group>"; };
BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraService+Extensions.swift"; sourceTree = "<group>"; };
BA3759902ABCCEBA0018D73B /* CameraModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = "<group>"; };
BA3759912ABCCEBA0018D73B /* CameraService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = "<group>"; };
4C9BB83329C12D9900FC4E37 /* EventProfileName.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EventProfileName.swift; sourceTree = "<group>"; };
4C9F18E129AA9B6C008C55EC /* CustomizeZapView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomizeZapView.swift; sourceTree = "<group>"; };
4C9F18E329ABDE6D008C55EC /* MaybeAnonPfpView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MaybeAnonPfpView.swift; sourceTree = "<group>"; };
@@ -2279,6 +2285,9 @@
BA3759882ABCCDE30018D73B /* Camera */ = {
isa = PBXGroup;
children = (
BA3759902ABCCEBA0018D73B /* CameraModel.swift */,
BA3759912ABCCEBA0018D73B /* CameraService.swift */,
BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */,
BA3759892ABCCDE30018D73B /* ImageResizer.swift */,
BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */,
BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */,
@@ -2642,6 +2651,7 @@
4C363A8428233689006E126D /* Parser.swift in Sources */,
3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */,
4CE4F9E328528C5200C00DD9 /* AddRelayView.swift in Sources */,
BA3759922ABCCEBA0018D73B /* CameraService+Extensions.swift in Sources */,
4C363A9A28283854006E126D /* Reply.swift in Sources */,
BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */,
4CFF8F6729CC9E3A008DB934 /* ImageView.swift in Sources */,
@@ -2679,6 +2689,7 @@
4C198DF529F88D2E004C165C /* ImageMetadata.swift in Sources */,
4CCEB7AE29B53D260078AA28 /* SearchingEventView.swift in Sources */,
4CF0ABE929844AF100D66079 /* AnyCodable.swift in Sources */,
BA3759932ABCCEBA0018D73B /* CameraModel.swift in Sources */,
4C0A3F8F280F640A000448DE /* ThreadModel.swift in Sources */,
4C3AC79F2833115300E1F516 /* FollowButtonView.swift in Sources */,
4C4E137B2A76D5FB00BDD832 /* MuteThreadNotify.swift in Sources */,
@@ -2788,6 +2799,7 @@
4C5F9114283D694D0052CD1C /* FollowTarget.swift in Sources */,
4CF0ABD629817F5B00D66079 /* ReportView.swift in Sources */,
4C1A9A2729DDE31900516EAC /* TranslationSettingsView.swift in Sources */,
BA3759942ABCCEBA0018D73B /* CameraService.swift in Sources */,
4CB8838629656C8B00DC99E7 /* NIP05.swift in Sources */,
4CF0ABD82981980C00D66079 /* Lists.swift in Sources */,
F71694EA2A662232001F4053 /* SuggestedUsersView.swift in Sources */,

CameraModel.swift

@@ -0,0 +1,122 @@
//
// CameraModel.swift
// damus
//
// Created by Suhail Saqan on 8/5/23.
//
import Foundation
import AVFoundation
import Combine
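/// UI-facing model for the camera screen: mirrors CameraService state into @Published properties and forwards user actions (capture, record, flip, zoom, flash) to the underlying service.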
final class CameraModel: ObservableObject {
private let service = CameraService()
@Published var showAlertError = false
@Published var isFlashOn = false
@Published var willCapturePhoto = false
@Published var isCameraButtonDisabled = false
@Published var isPhotoProcessing = false
@Published var isRecording = false
@Published var captureMode: CameraMediaType = .image
@Published public var mediaItems: [MediaItem] = []
@Published var thumbnail: Thumbnail!
var alertError: AlertError!
var session: AVCaptureSession
private var subscriptions = Set<AnyCancellable>()
init() {
self.session = service.session
service.$shouldShowAlertView.sink { [weak self] (val) in
self?.alertError = self?.service.alertError
self?.showAlertError = val
}
.store(in: &self.subscriptions)
service.$flashMode.sink { [weak self] (mode) in
self?.isFlashOn = mode == .on
}
.store(in: &self.subscriptions)
service.$willCapturePhoto.sink { [weak self] (val) in
self?.willCapturePhoto = val
}
.store(in: &self.subscriptions)
service.$isCameraButtonDisabled.sink { [weak self] (val) in
self?.isCameraButtonDisabled = val
}
.store(in: &self.subscriptions)
service.$isPhotoProcessing.sink { [weak self] (val) in
self?.isPhotoProcessing = val
}
.store(in: &self.subscriptions)
service.$isRecording.sink { [weak self] (val) in
self?.isRecording = val
}
.store(in: &self.subscriptions)
service.$captureMode.sink { [weak self] (mode) in
self?.captureMode = mode
}
.store(in: &self.subscriptions)
service.$mediaItems.sink { [weak self] (items) in
self?.mediaItems = items
}
.store(in: &self.subscriptions)
service.$thumbnail.sink { [weak self] (thumbnail) in
guard let pic = thumbnail else { return }
self?.thumbnail = pic
}
.store(in: &self.subscriptions)
}
func configure() {
service.checkForPermissions()
service.configure()
}
func stop() {
service.stop()
}
func capturePhoto() {
service.capturePhoto()
}
func startRecording() {
service.startRecording()
}
func stopRecording() {
service.stopRecording()
}
func flipCamera() {
service.changeCamera()
}
func zoom(with factor: CGFloat) {
service.set(zoom: factor)
}
func switchFlash() {
service.flashMode = service.flashMode == .on ? .off : .on
}
}

CameraService+Extensions.swift

@@ -0,0 +1,32 @@
//
// CameraService+Extensions.swift
// damus
//
// Created by Suhail Saqan on 8/5/23.
//
import Foundation
import UIKit
import AVFoundation
extension AVCaptureVideoOrientation {
init?(deviceOrientation: UIDeviceOrientation) {
switch deviceOrientation {
case .portrait: self = .portrait
case .portraitUpsideDown: self = .portraitUpsideDown
case .landscapeLeft: self = .landscapeRight
case .landscapeRight: self = .landscapeLeft
default: return nil
}
}
init?(interfaceOrientation: UIInterfaceOrientation) {
switch interfaceOrientation {
case .portrait: self = .portrait
case .portraitUpsideDown: self = .portraitUpsideDown
case .landscapeLeft: self = .landscapeLeft
case .landscapeRight: self = .landscapeRight
default: return nil
}
}
}

CameraService.swift

@@ -0,0 +1,693 @@
//
// CameraService.swift
// damus
//
// Created by Suhail Saqan on 8/5/23.
//
import Foundation
import Combine
import AVFoundation
import Photos
import UIKit
public struct Thumbnail: Identifiable, Equatable {
public var id: String
public var type: CameraMediaType
public var url: URL
public init(id: String = UUID().uuidString, type: CameraMediaType, url: URL) {
self.id = id
self.type = type
self.url = url
}
public var thumbnailImage: UIImage? {
switch type {
case .image:
return ImageResizer(targetWidth: 100).resize(at: url)
case .video:
return generateVideoThumbnail(for: url)
}
}
}
public struct AlertError {
public var title: String = ""
public var message: String = ""
public var primaryButtonTitle = "Accept"
public var secondaryButtonTitle: String?
public var primaryAction: (() -> ())?
public var secondaryAction: (() -> ())?
public init(title: String = "", message: String = "", primaryButtonTitle: String = "Accept", secondaryButtonTitle: String? = nil, primaryAction: (() -> ())? = nil, secondaryAction: (() -> ())? = nil) {
self.title = title
self.message = message
self.primaryButtonTitle = primaryButtonTitle
self.secondaryButtonTitle = secondaryButtonTitle
self.primaryAction = primaryAction
self.secondaryAction = secondaryAction
}
}
func generateVideoThumbnail(for videoURL: URL) -> UIImage? {
let asset = AVAsset(url: videoURL)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
do {
let cgImage = try imageGenerator.copyCGImage(at: .zero, actualTime: nil)
return UIImage(cgImage: cgImage)
} catch {
print("Error generating thumbnail: \(error)")
return nil
}
}
public enum CameraMediaType {
case image
case video
}
public struct MediaItem {
let url: URL
let type: CameraMediaType
}
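/// Owns the AVCaptureSession: permission checks, session configuration, camera switching, photo capture, and movie recording all run on a dedicated sessionQueue, with results published for CameraModel to observe.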
public class CameraService: NSObject, Identifiable {
public let session = AVCaptureSession()
public var isSessionRunning = false
public var isConfigured = false
var setupResult: SessionSetupResult = .success
public var alertError: AlertError = AlertError()
@Published public var flashMode: AVCaptureDevice.FlashMode = .off
@Published public var shouldShowAlertView = false
@Published public var isPhotoProcessing = false
@Published public var captureMode: CameraMediaType = .image
@Published public var isRecording: Bool = false
@Published public var willCapturePhoto = false
@Published public var isCameraButtonDisabled = false
@Published public var isCameraUnavailable = false
@Published public var thumbnail: Thumbnail?
@Published public var mediaItems: [MediaItem] = []
public let sessionQueue = DispatchQueue(label: "io.damus.camera")
@objc dynamic public var videoDeviceInput: AVCaptureDeviceInput!
@objc dynamic public var audioDeviceInput: AVCaptureDeviceInput!
public let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified)
public let photoOutput = AVCapturePhotoOutput()
public let movieOutput = AVCaptureMovieFileOutput()
var videoCaptureProcessor: VideoCaptureProcessor?
var photoCaptureProcessor: PhotoCaptureProcessor?
public var keyValueObservations = [NSKeyValueObservation]()
override public init() {
super.init()
DispatchQueue.main.async {
self.isCameraButtonDisabled = true
self.isCameraUnavailable = true
}
}
enum SessionSetupResult {
case success
case notAuthorized
case configurationFailed
}
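// Configuration runs once, asynchronously on sessionQueue; CameraModel.configure() calls checkForPermissions() first so setupResult reflects the authorization status.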
public func configure() {
if !self.isSessionRunning && !self.isConfigured {
sessionQueue.async {
self.configureSession()
}
}
}
public func checkForPermissions() {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in
if !granted {
self.setupResult = .notAuthorized
}
self.sessionQueue.resume()
})
default:
setupResult = .notAuthorized
DispatchQueue.main.async {
self.alertError = AlertError(title: "Camera Access", message: "Damus needs camera and microphone access. Enable in settings.", primaryButtonTitle: "Go to settings", secondaryButtonTitle: nil, primaryAction: {
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!,
options: [:], completionHandler: nil)
}, secondaryAction: nil)
self.shouldShowAlertView = true
self.isCameraUnavailable = true
self.isCameraButtonDisabled = true
}
}
}
private func configureSession() {
if setupResult != .success {
return
}
session.beginConfiguration()
session.sessionPreset = .high
// Add video input.
do {
var defaultVideoDevice: AVCaptureDevice?
if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
// Prefer the rear wide angle camera when it is available.
defaultVideoDevice = backCameraDevice
} else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
// If the rear wide angle camera isn't available, default to the front wide angle camera.
defaultVideoDevice = frontCameraDevice
}
guard let videoDevice = defaultVideoDevice else {
print("Default video device is unavailable.")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
print("Couldn't add video device input to the session.")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
print("Default audio device is unavailable.")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
self.audioDeviceInput = audioDeviceInput
} else {
print("Couldn't add audio device input to the session.")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
// Add video output
if session.canAddOutput(movieOutput) {
session.addOutput(movieOutput)
} else {
print("Could not add movie output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
} catch {
print("Couldn't create video device input: \(error)")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
// Add the photo output.
if session.canAddOutput(photoOutput) {
session.addOutput(photoOutput)
photoOutput.maxPhotoQualityPrioritization = .quality
} else {
print("Could not add photo output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
session.commitConfiguration()
self.isConfigured = true
self.start()
}
private func resumeInterruptedSession() {
sessionQueue.async {
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
if !self.session.isRunning {
DispatchQueue.main.async {
self.alertError = AlertError(title: "Camera Error", message: "Unable to resume camera", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil)
self.shouldShowAlertView = true
self.isCameraUnavailable = true
self.isCameraButtonDisabled = true
}
} else {
DispatchQueue.main.async {
self.isCameraUnavailable = false
self.isCameraButtonDisabled = false
}
}
}
}
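// Swaps between the front and back wide angle cameras on sessionQueue, moving the subject-area-change observer to the new device and keeping the photo output's stabilization and quality settings.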
public func changeCamera() {
DispatchQueue.main.async {
self.isCameraButtonDisabled = true
}
sessionQueue.async {
let currentVideoDevice = self.videoDeviceInput.device
let currentPosition = currentVideoDevice.position
let preferredPosition: AVCaptureDevice.Position
let preferredDeviceType: AVCaptureDevice.DeviceType
switch currentPosition {
case .unspecified, .front:
preferredPosition = .back
preferredDeviceType = .builtInWideAngleCamera
case .back:
preferredPosition = .front
preferredDeviceType = .builtInWideAngleCamera
@unknown default:
print("Unknown capture position. Defaulting to back, dual-camera.")
preferredPosition = .back
preferredDeviceType = .builtInWideAngleCamera
}
let devices = self.videoDeviceDiscoverySession.devices
var newVideoDevice: AVCaptureDevice? = nil
if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) {
newVideoDevice = device
} else if let device = devices.first(where: { $0.position == preferredPosition }) {
newVideoDevice = device
}
if let videoDevice = newVideoDevice {
do {
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
self.session.beginConfiguration()
self.session.removeInput(self.videoDeviceInput)
if self.session.canAddInput(videoDeviceInput) {
NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice)
NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: videoDeviceInput.device)
self.session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
self.session.addInput(self.videoDeviceInput)
}
if let connection = self.photoOutput.connection(with: .video) {
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
}
self.photoOutput.maxPhotoQualityPrioritization = .quality
self.session.commitConfiguration()
} catch {
print("Error occurred while creating video device input: \(error)")
}
}
DispatchQueue.main.async {
self.isCameraButtonDisabled = false
}
}
}
public func focus(with focusMode: AVCaptureDevice.FocusMode, exposureMode: AVCaptureDevice.ExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) {
sessionQueue.async {
guard let device = self.videoDeviceInput?.device else { return }
do {
try device.lockForConfiguration()
if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) {
device.focusPointOfInterest = devicePoint
device.focusMode = focusMode
}
if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
device.exposurePointOfInterest = devicePoint
device.exposureMode = exposureMode
}
device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
device.unlockForConfiguration()
} catch {
print("Could not lock device for configuration: \(error)")
}
}
}
public func focus(at focusPoint: CGPoint) {
guard let device = self.videoDeviceInput?.device else { return }
do {
try device.lockForConfiguration()
if device.isFocusPointOfInterestSupported {
device.focusPointOfInterest = focusPoint
device.exposurePointOfInterest = focusPoint
device.exposureMode = .continuousAutoExposure
device.focusMode = .continuousAutoFocus
device.unlockForConfiguration()
}
}
catch {
print(error.localizedDescription)
}
}
@objc public func stop(completion: (() -> ())? = nil) {
sessionQueue.async {
if self.isSessionRunning {
if self.setupResult == .success {
self.session.stopRunning()
self.isSessionRunning = self.session.isRunning
print("CAMERA STOPPED")
self.removeObservers()
if !self.session.isRunning {
DispatchQueue.main.async {
self.isCameraButtonDisabled = true
self.isCameraUnavailable = true
completion?()
}
}
}
}
}
}
@objc public func start() {
sessionQueue.async {
if !self.isSessionRunning && self.isConfigured {
switch self.setupResult {
case .success:
self.addObservers()
self.session.startRunning()
print("CAMERA RUNNING")
self.isSessionRunning = self.session.isRunning
if self.session.isRunning {
DispatchQueue.main.async {
self.isCameraButtonDisabled = false
self.isCameraUnavailable = false
}
}
case .notAuthorized:
print("Application not authorized to use camera")
DispatchQueue.main.async {
self.isCameraButtonDisabled = true
self.isCameraUnavailable = true
}
case .configurationFailed:
DispatchQueue.main.async {
self.alertError = AlertError(title: "Camera Error", message: "Camera configuration failed. Either your device camera is not available or other application is using it", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil)
self.shouldShowAlertView = true
self.isCameraButtonDisabled = true
self.isCameraUnavailable = true
}
}
}
}
}
public func set(zoom: CGFloat) {
let device = self.videoDeviceInput.device
// Clamp the requested factor to the range supported by the active format.
let factor = min(max(zoom, 1), device.activeFormat.videoMaxZoomFactor)
do {
try device.lockForConfiguration()
device.videoZoomFactor = factor
device.unlockForConfiguration()
}
catch {
print(error.localizedDescription)
}
}
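// Captures a still photo on sessionQueue: prefers HEVC when available, applies the current flash mode, and hands the settings to PhotoCaptureProcessor, whose completion saves the data via savePhoto(data:) and updates mediaItems and the thumbnail.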
public func capturePhoto() {
if self.setupResult != .configurationFailed {
let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait
self.isCameraButtonDisabled = true
sessionQueue.async {
if let photoOutputConnection = self.photoOutput.connection(with: .video) {
photoOutputConnection.videoOrientation = videoPreviewLayerOrientation
}
var photoSettings = AVCapturePhotoSettings()
// Capture HEVC (HEIF) photos when the codec is supported.
if self.photoOutput.availablePhotoCodecTypes.contains(.hevc) {
photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
}
if self.videoDeviceInput.device.isFlashAvailable {
photoSettings.flashMode = self.flashMode
}
if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty {
photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!]
}
photoSettings.photoQualityPrioritization = .speed
if self.photoCaptureProcessor == nil {
self.photoCaptureProcessor = PhotoCaptureProcessor(with: photoSettings, photoOutput: self.photoOutput, willCapturePhotoAnimation: {
DispatchQueue.main.async {
self.willCapturePhoto.toggle()
self.willCapturePhoto.toggle()
}
}, completionHandler: { (photoCaptureProcessor) in
if let data = photoCaptureProcessor.photoData {
let url = self.savePhoto(data: data)
if let unwrappedURL = url {
self.thumbnail = Thumbnail(type: .image, url: unwrappedURL)
}
} else {
print("Data for photo not found")
}
self.isCameraButtonDisabled = false
}, photoProcessingHandler: { animate in
self.isPhotoProcessing = animate
})
}
self.photoCaptureProcessor?.capturePhoto(settings: photoSettings)
}
}
}
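// Starts movie recording through VideoCaptureProcessor; when recording finishes, the output file is appended to mediaItems, used for the thumbnail, and captureMode falls back to .image.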
public func startRecording() {
if self.setupResult != .configurationFailed {
let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait
self.isCameraButtonDisabled = true
sessionQueue.async {
if let videoOutputConnection = self.movieOutput.connection(with: .video) {
videoOutputConnection.videoOrientation = videoPreviewLayerOrientation
var videoSettings = [String: Any]()
if self.movieOutput.availableVideoCodecTypes.contains(.hevc) {
videoSettings[AVVideoCodecKey] = AVVideoCodecType.hevc
self.movieOutput.setOutputSettings(videoSettings, for: videoOutputConnection)
}
}
if self.videoCaptureProcessor == nil {
self.videoCaptureProcessor = VideoCaptureProcessor(movieOutput: self.movieOutput, beginHandler: {
self.isRecording = true
}, completionHandler: { (videoCaptureProcessor, outputFileURL) in
self.isCameraButtonDisabled = false
self.captureMode = .image
self.mediaItems.append(MediaItem(url: outputFileURL, type: .video))
self.thumbnail = Thumbnail(type: .video, url: outputFileURL)
}, videoProcessingHandler: { animate in
self.isPhotoProcessing = animate
})
}
self.videoCaptureProcessor?.startCapture(session: self.session)
}
}
}
func stopRecording() {
if let videoCaptureProcessor = self.videoCaptureProcessor {
isRecording = false
videoCaptureProcessor.stopCapture()
}
}
func savePhoto(imageType: String = "jpeg", data: Data) -> URL? {
guard let uiImage = UIImage(data: data) else {
print("Error converting media data to UIImage")
return nil
}
guard let compressedData = uiImage.jpegData(compressionQuality: 0.8) else {
print("Error converting UIImage to JPEG data")
return nil
}
let temporaryDirectory = NSTemporaryDirectory()
let tempFileName = "\(UUID().uuidString).\(imageType)"
let tempFileURL = URL(fileURLWithPath: temporaryDirectory).appendingPathComponent(tempFileName)
do {
try compressedData.write(to: tempFileURL)
self.mediaItems.append(MediaItem(url: tempFileURL, type: .image))
return tempFileURL
} catch {
print("Error saving image data to temporary URL: \(error.localizedDescription)")
}
return nil
}
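// Observers: a KVO observation of the device's systemPressureState plus notifications for subject-area changes, UI focus requests, runtime errors, and session interruptions; stop() tears these down via removeObservers().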
private func addObservers() {
let systemPressureStateObservation = observe(\.videoDeviceInput.device.systemPressureState, options: .new) { _, change in
guard let systemPressureState = change.newValue else { return }
self.setRecommendedFrameRateRangeForPressureState(systemPressureState: systemPressureState)
}
keyValueObservations.append(systemPressureStateObservation)
// NotificationCenter.default.addObserver(self, selector: #selector(self.onOrientationChange), name: UIDevice.orientationDidChangeNotification, object: nil)
NotificationCenter.default.addObserver(self,
selector: #selector(subjectAreaDidChange),
name: .AVCaptureDeviceSubjectAreaDidChange,
object: videoDeviceInput.device)
NotificationCenter.default.addObserver(self, selector: #selector(uiRequestedNewFocusArea), name: .init(rawValue: "UserDidRequestNewFocusPoint"), object: nil)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionRuntimeError),
name: .AVCaptureSessionRuntimeError,
object: session)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionWasInterrupted),
name: .AVCaptureSessionWasInterrupted,
object: session)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionInterruptionEnded),
name: .AVCaptureSessionInterruptionEnded,
object: session)
}
private func removeObservers() {
NotificationCenter.default.removeObserver(self)
for keyValueObservation in keyValueObservations {
keyValueObservation.invalidate()
}
keyValueObservations.removeAll()
}
@objc private func uiRequestedNewFocusArea(notification: NSNotification) {
guard let userInfo = notification.userInfo as? [String: Any], let devicePoint = userInfo["devicePoint"] as? CGPoint else { return }
self.focus(at: devicePoint)
}
@objc
private func subjectAreaDidChange(notification: NSNotification) {
let devicePoint = CGPoint(x: 0.5, y: 0.5)
focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false)
}
@objc
private func sessionRuntimeError(notification: NSNotification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return }
print("Capture session runtime error: \(error)")
if error.code == .mediaServicesWereReset {
sessionQueue.async {
if self.isSessionRunning {
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
}
}
}
}
private func setRecommendedFrameRateRangeForPressureState(systemPressureState: AVCaptureDevice.SystemPressureState) {
let pressureLevel = systemPressureState.level
if pressureLevel == .serious || pressureLevel == .critical {
do {
try self.videoDeviceInput.device.lockForConfiguration()
print("WARNING: Reached elevated system pressure level: \(pressureLevel). Throttling frame rate.")
self.videoDeviceInput.device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 20)
self.videoDeviceInput.device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 15)
self.videoDeviceInput.device.unlockForConfiguration()
} catch {
print("Could not lock device for configuration: \(error)")
}
} else if pressureLevel == .shutdown {
print("Session stopped running due to shutdown system pressure level.")
}
}
@objc
private func sessionWasInterrupted(notification: NSNotification) {
DispatchQueue.main.async {
self.isCameraUnavailable = true
}
if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
let reasonIntegerValue = userInfoValue.integerValue,
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
print("Capture session was interrupted with reason \(reason)")
if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient {
print("Session stopped running due to video devies in use by another client.")
} else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps {
print("Session stopped running due to video devies is not available with multiple foreground apps.")
} else if reason == .videoDeviceNotAvailableDueToSystemPressure {
print("Session stopped running due to shutdown system pressure level.")
}
}
}
@objc
private func sessionInterruptionEnded(notification: NSNotification) {
print("Capture session interruption ended")
DispatchQueue.main.async {
self.isCameraUnavailable = false
}
}
}