From 71a7ba18961f68d96cd33c5345aaf37040058a95 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Thu, 10 Aug 2023 03:10:13 -0500 Subject: [PATCH] redid everything, much faster --- damus.xcodeproj/project.pbxproj | 50 +- .../xcshareddata/swiftpm/Package.resolved | 39 + damus/Models/Camera/CameraModel.swift | 122 ++++ .../Camera/CameraService+Extensions.swift | 33 + damus/Models/Camera/CameraService.swift | 686 ++++++++++++++++++ damus/Models/Camera/ImageResizer.swift | 50 ++ .../Models/Camera/PhotoCaptureProcessor.swift | 91 +++ .../Models/Camera/VideoCaptureProcessor.swift | 78 ++ damus/Views/Camera/CameraPreview.swift | 123 ++-- damus/Views/Camera/CameraView.swift | 300 ++++---- damus/Views/PostView.swift | 36 +- 11 files changed, 1397 insertions(+), 211 deletions(-) create mode 100644 damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved create mode 100644 damus/Models/Camera/CameraModel.swift create mode 100644 damus/Models/Camera/CameraService+Extensions.swift create mode 100644 damus/Models/Camera/CameraService.swift create mode 100644 damus/Models/Camera/ImageResizer.swift create mode 100644 damus/Models/Camera/PhotoCaptureProcessor.swift create mode 100644 damus/Models/Camera/VideoCaptureProcessor.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 4e48135e9a..cf42d47385 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -377,6 +377,12 @@ 9609F058296E220800069BF3 /* BannerImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9609F057296E220800069BF3 /* BannerImageView.swift */; }; 9C83F89329A937B900136C08 /* TextViewWrapper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9C83F89229A937B900136C08 /* TextViewWrapper.swift */; }; 9CA876E229A00CEA0003B9A3 /* AttachMediaUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */; }; + BA27222E2A806E39004CDF52 /* VideoCaptureProcessor.swift in Sources 
*/ = {isa = PBXBuildFile; fileRef = BA27222D2A806E39004CDF52 /* VideoCaptureProcessor.swift */; }; + BA3BF2892A7F156B00600232 /* PhotoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */; }; + BA3BF28A2A7F156B00600232 /* ImageResizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2852A7F156900600232 /* ImageResizer.swift */; }; + BA3BF28B2A7F156B00600232 /* CameraService+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */; }; + BA3BF28C2A7F156B00600232 /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2872A7F156A00600232 /* CameraService.swift */; }; + BA3BF28F2A7F1B2D00600232 /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */; }; BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* UserSettingsStore.swift */; }; @@ -931,6 +937,12 @@ 9609F057296E220800069BF3 /* BannerImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BannerImageView.swift; sourceTree = ""; }; 9C83F89229A937B900136C08 /* TextViewWrapper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextViewWrapper.swift; sourceTree = ""; }; 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttachMediaUtility.swift; sourceTree = ""; }; + BA27222D2A806E39004CDF52 /* VideoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.swift; path = VideoCaptureProcessor.swift; sourceTree = ""; }; + BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureProcessor.swift; sourceTree = ""; }; + BA3BF2852A7F156900600232 /* ImageResizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageResizer.swift; sourceTree = ""; }; + BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraService+Extensions.swift"; sourceTree = ""; }; + BA3BF2872A7F156A00600232 /* CameraService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = ""; }; + BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = ""; }; BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; @@ -1077,6 +1089,7 @@ 4C0A3F8D280F63FF000448DE /* Models */ = { isa = PBXGroup; children = ( + BA3BF2832A7F151300600232 /* Camera */, 4C190F1E2A535FC200027FD5 /* Zaps */, 4C54AA0829A55416003E4487 /* Notifications */, 3AA247FC297E3CFF0090C62D /* RepostsModel.swift */, @@ -1868,15 +1881,17 @@ path = Extensions; sourceTree = ""; }; - F71694E82A66221E001F4053 /* Onboarding */ = { + BA3BF2832A7F151300600232 /* Camera */ = { isa = PBXGroup; children = ( - 
F71694E92A662232001F4053 /* SuggestedUsersView.swift */, - F71694F12A67314D001F4053 /* SuggestedUserView.swift */, - F71694EB2A662292001F4053 /* SuggestedUsersViewModel.swift */, - F71694ED2A6624F9001F4053 /* suggested_users.json */, + BA27222D2A806E39004CDF52 /* VideoCaptureProcessor.swift */, + BA3BF2872A7F156A00600232 /* CameraService.swift */, + BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */, + BA3BF2852A7F156900600232 /* ImageResizer.swift */, + BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */, + BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */, ); - path = Onboarding; + path = Camera; sourceTree = ""; }; BA8A4F0D2A2D95880045C48C /* Camera */ = { @@ -1888,6 +1903,17 @@ path = Camera; sourceTree = ""; }; + F71694E82A66221E001F4053 /* Onboarding */ = { + isa = PBXGroup; + children = ( + F71694E92A662232001F4053 /* SuggestedUsersView.swift */, + F71694F12A67314D001F4053 /* SuggestedUserView.swift */, + F71694EB2A662292001F4053 /* SuggestedUsersViewModel.swift */, + F71694ED2A6624F9001F4053 /* suggested_users.json */, + ); + path = Onboarding; + sourceTree = ""; + }; F7F0BA23297892AE009531F3 /* Modifiers */ = { isa = PBXGroup; children = ( @@ -2127,6 +2153,7 @@ 4C190F252A547D2000027FD5 /* LoadScript.swift in Sources */, 4C59B98C2A76C2550032FFEB /* ProfileUpdatedNotify.swift in Sources */, 4C363A8C28236B92006E126D /* PubkeyView.swift in Sources */, + BA3BF28A2A7F156B00600232 /* ImageResizer.swift in Sources */, 4CDA128A29E9D10C0006FA5A /* SignalView.swift in Sources */, 4C12535C2A76CA540004F4B8 /* LoginNotify.swift in Sources */, 4C5C7E68284ED36500A22DF5 /* SearchHomeModel.swift in Sources */, @@ -2201,6 +2228,7 @@ 4CA2EFA0280E37AC0044ACD8 /* TimelineView.swift in Sources */, 4C30AC7629A5770900E2BD5A /* NotificationItemView.swift in Sources */, 4C86F7C42A76C44C00EC0817 /* ZappingNotify.swift in Sources */, + BA27222E2A806E39004CDF52 /* VideoCaptureProcessor.swift in Sources */, 4C363A8428233689006E126D /* Parser.swift in Sources */, 
3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */, 4CE4F9E328528C5200C00DD9 /* AddRelayView.swift in Sources */, @@ -2294,6 +2322,7 @@ 4C4E137D2A76D63600BDD832 /* UnmuteThreadNotify.swift in Sources */, 4CE4F0F829DB7399005914DB /* ThiccDivider.swift in Sources */, 4CE0E2B629A3ED5500DB4CA2 /* InnerTimelineView.swift in Sources */, + BA3BF28C2A7F156B00600232 /* CameraService.swift in Sources */, 4C363A8828236948006E126D /* BlocksView.swift in Sources */, 4C06670628FCB08600038D2A /* ImageCarousel.swift in Sources */, 3A23838E2A297DD200E5AA2E /* ZapButtonModel.swift in Sources */, @@ -2301,6 +2330,7 @@ 4C1D4FB12A7958E60024F453 /* VersionInfo.swift in Sources */, 5053ACA72A56DF3B00851AE3 /* DeveloperSettingsView.swift in Sources */, F79C7FAD29D5E9620000F946 /* EditPictureControl.swift in Sources */, + BA3BF2892A7F156B00600232 /* PhotoCaptureProcessor.swift in Sources */, 4C9F18E229AA9B6C008C55EC /* CustomizeZapView.swift in Sources */, 4C2859602A12A2BE004746F7 /* SupporterBadge.swift in Sources */, 4C1A9A2A29DDF54400516EAC /* DamusVideoPlayer.swift in Sources */, @@ -2373,6 +2403,7 @@ 4C3EA66028FF5E7700C48A62 /* node_id.c in Sources */, 4C687C212A5F7ED00092C550 /* DamusBackground.swift in Sources */, 4CA352A02A76AE80003BB08B /* Notify.swift in Sources */, + BA3BF28B2A7F156B00600232 /* CameraService+Extensions.swift in Sources */, 4CE6DEE727F7A08100C66700 /* damusApp.swift in Sources */, 4C1253582A76C9060004F4B8 /* PresentSheetNotify.swift in Sources */, 4C363A962827096D006E126D /* PostBlock.swift in Sources */, @@ -2417,6 +2448,7 @@ 5C513FBA297F72980072348F /* CustomPicker.swift in Sources */, 4C1253622A76D00B0004F4B8 /* PostNotify.swift in Sources */, 4CACA9D5280C31E100D9BBE8 /* ReplyView.swift in Sources */, + BA3BF28F2A7F1B2D00600232 /* CameraModel.swift in Sources */, F7908E92298B0F0700AB113A /* RelayDetailView.swift in Sources */, 4C9147002A2A891E00DDEA40 /* error.c in Sources */, 4CE879552996BAB900F758CC /* RelayPaidDetail.swift in Sources */, @@ 
-2764,7 +2796,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; @@ -2790,7 +2822,7 @@ "$(PROJECT_DIR)", ); MARKETING_VERSION = 1.6; - PRODUCT_BUNDLE_IDENTIFIER = com.jb55.damus2; + PRODUCT_BUNDLE_IDENTIFIER = com.suhail.damus2; PRODUCT_NAME = "$(TARGET_NAME)"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = YES; @@ -2813,7 +2845,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; diff --git a/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved new file mode 100644 index 0000000000..c8409240c6 --- /dev/null +++ b/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -0,0 +1,39 @@ +{ + "pins" : [ + { + "identity" : "gsplayer", + "kind" : "remoteSourceControl", + "location" : "https://github.com/wxxsw/GSPlayer", + "state" : { + "revision" : "aa6dad7943d52f5207f7fcc2ad3e4274583443b8", + "version" : "0.2.26" + } + }, + { + "identity" : "kingfisher", + "kind" : "remoteSourceControl", + "location" : "https://github.com/onevcat/Kingfisher", + "state" : { + "revision" : "415b1d97fb38bda1e5a6b2dde63354720832110b", + "version" : "7.6.1" + } + }, + { + "identity" : "secp256k1.swift", + "kind" : "remoteSourceControl", + "location" : "https://github.com/jb55/secp256k1.swift", + "state" : { + "revision" : "40b4b38b3b1c83f7088c76189a742870e0ca06a9" + } + }, + { + "identity" : "swift-markdown-ui", + "kind" : "remoteSourceControl", + "location" : "https://github.com/damus-io/swift-markdown-ui", + 
"state" : { + "revision" : "76bb7971da7fbf429de1c84f1244adf657242fee" + } + } + ], + "version" : 2 +} diff --git a/damus/Models/Camera/CameraModel.swift b/damus/Models/Camera/CameraModel.swift new file mode 100644 index 0000000000..73def5730d --- /dev/null +++ b/damus/Models/Camera/CameraModel.swift @@ -0,0 +1,122 @@ +// +// CameraModel.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import AVFoundation +import Combine + +final class CameraModel: ObservableObject { + private let service = CameraService() + + @Published var showAlertError = false + + @Published var isFlashOn = false + + @Published var willCapturePhoto = false + + @Published var isCameraButtonDisabled = false + + @Published var isPhotoProcessing = false + + @Published var isRecording = false + + @Published var captureMode: CameraMediaType = .image + + @Published public var mediaItems: [MediaItem] = [] + + @Published var thumbnail: Thumbnail! + + var alertError: AlertError! + + var session: AVCaptureSession + + private var subscriptions = Set() + + init() { + self.session = service.session + + service.$shouldShowAlertView.sink { [weak self] (val) in + self?.alertError = self?.service.alertError + self?.showAlertError = val + } + .store(in: &self.subscriptions) + + service.$flashMode.sink { [weak self] (mode) in + self?.isFlashOn = mode == .on + } + .store(in: &self.subscriptions) + + service.$willCapturePhoto.sink { [weak self] (val) in + self?.willCapturePhoto = val + } + .store(in: &self.subscriptions) + + service.$isCameraButtonDisabled.sink { [weak self] (val) in + self?.isCameraButtonDisabled = val + } + .store(in: &self.subscriptions) + + service.$isPhotoProcessing.sink { [weak self] (val) in + self?.isPhotoProcessing = val + } + .store(in: &self.subscriptions) + + service.$isRecording.sink { [weak self] (val) in + self?.isRecording = val + } + .store(in: &self.subscriptions) + + service.$captureMode.sink { [weak self] (mode) in + self?.captureMode = mode + } 
+ .store(in: &self.subscriptions) + + service.$mediaItems.sink { [weak self] (mode) in + self?.mediaItems = mode + } + .store(in: &self.subscriptions) + + service.$thumbnail.sink { [weak self] (thumbnail) in + guard let pic = thumbnail else { return } + self?.thumbnail = pic + } + .store(in: &self.subscriptions) + } + + func configure() { + service.checkForPermissions() + service.configure() + } + + func stop() { + service.stop() + } + + func capturePhoto() { + service.capturePhoto() + } + + func startRecording() { + service.startRecording() + } + + func stopRecording() { + service.stopRecording() + } + + func flipCamera() { + service.changeCamera() + } + + func zoom(with factor: CGFloat) { + service.set(zoom: factor) + } + + func switchFlash() { + service.flashMode = service.flashMode == .on ? .off : .on + } +} diff --git a/damus/Models/Camera/CameraService+Extensions.swift b/damus/Models/Camera/CameraService+Extensions.swift new file mode 100644 index 0000000000..4f0d50d783 --- /dev/null +++ b/damus/Models/Camera/CameraService+Extensions.swift @@ -0,0 +1,33 @@ +// +// CameraService+Extensions.swift +// Campus +// +// Created by Rolando Rodriguez on 1/11/20. +// Copyright © 2020 Rolando Rodriguez. All rights reserved. 
+// + +import Foundation +import UIKit +import AVFoundation + +extension AVCaptureVideoOrientation { + init?(deviceOrientation: UIDeviceOrientation) { + switch deviceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeRight + case .landscapeRight: self = .landscapeLeft + default: return nil + } + } + + init?(interfaceOrientation: UIInterfaceOrientation) { + switch interfaceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeLeft + case .landscapeRight: self = .landscapeRight + default: return nil + } + } +} diff --git a/damus/Models/Camera/CameraService.swift b/damus/Models/Camera/CameraService.swift new file mode 100644 index 0000000000..a924cc252d --- /dev/null +++ b/damus/Models/Camera/CameraService.swift @@ -0,0 +1,686 @@ +// +// CameraService.swift +// Campus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import Combine +import AVFoundation +import Photos +import UIKit + +// MARK: Class Camera Service, handles setup of AVFoundation needed for a basic camera app. +public struct Thumbnail: Identifiable, Equatable { + public var id: String + public var type: CameraMediaType + public var url: URL + + public init(id: String = UUID().uuidString, type: CameraMediaType, url: URL) { + self.id = id + self.type = type + self.url = url + } + + public var thumbnailImage: UIImage? { + switch type { + case .image: + return ImageResizer(targetWidth: 100).resize(at: url) + case .video: + return generateVideoThumbnail(for: url) + } + } +} + +public struct AlertError { + public var title: String = "" + public var message: String = "" + public var primaryButtonTitle = "Accept" + public var secondaryButtonTitle: String? + public var primaryAction: (() -> ())? + public var secondaryAction: (() -> ())? 
+ + public init(title: String = "", message: String = "", primaryButtonTitle: String = "Accept", secondaryButtonTitle: String? = nil, primaryAction: (() -> ())? = nil, secondaryAction: (() -> ())? = nil) { + self.title = title + self.message = message + self.primaryAction = primaryAction + self.primaryButtonTitle = primaryButtonTitle + self.secondaryAction = secondaryAction + } +} + +func generateVideoThumbnail(for videoURL: URL) -> UIImage? { + let asset = AVAsset(url: videoURL) + let imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator.appliesPreferredTrackTransform = true + + do { + let cgImage = try imageGenerator.copyCGImage(at: .zero, actualTime: nil) + return UIImage(cgImage: cgImage) + } catch { + print("Error generating thumbnail: \(error)") + return nil + } +} + +public struct MediaItem { + let url: URL + let type: CameraMediaType +} + +public class CameraService: NSObject, Identifiable { + public let session = AVCaptureSession() + + public var isSessionRunning = false + public var isConfigured = false + var setupResult: SessionSetupResult = .success + + public var alertError: AlertError = AlertError() + + @Published public var flashMode: AVCaptureDevice.FlashMode = .off + @Published public var shouldShowAlertView = false + @Published public var isPhotoProcessing = false + @Published public var captureMode: CameraMediaType = .image + @Published public var isRecording: Bool = false + + @Published public var willCapturePhoto = false + @Published public var isCameraButtonDisabled = false + @Published public var isCameraUnavailable = false + @Published public var thumbnail: Thumbnail? + @Published public var mediaItems: [MediaItem] = [] + + public let sessionQueue = DispatchQueue(label: "io.damus.camera") + + @objc dynamic public var videoDeviceInput: AVCaptureDeviceInput! + @objc dynamic public var audioDeviceInput: AVCaptureDeviceInput! 
+ + public let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified) + + public let photoOutput = AVCapturePhotoOutput() + + public let movieOutput = AVCaptureMovieFileOutput() + + var videoCaptureProcessor: VideoCaptureProcessor? + var photoCaptureProcessor: PhotoCaptureProcessor? + + public var keyValueObservations = [NSKeyValueObservation]() + + override public init() { + super.init() + + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + + enum SessionSetupResult { + case success + case notAuthorized + case configurationFailed + } + + public func configure() { + if !self.isSessionRunning && !self.isConfigured { + sessionQueue.async { + self.configureSession() + } + } + } + + public func checkForPermissions() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + break + case .notDetermined: + sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in + if !granted { + self.setupResult = .notAuthorized + } + self.sessionQueue.resume() + }) + + default: + setupResult = .notAuthorized + + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Access", message: "Campus no tiene permiso para usar la cámara, por favor cambia la configruación de privacidad", primaryButtonTitle: "Configuración", secondaryButtonTitle: nil, primaryAction: { + UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, + options: [:], completionHandler: nil) + + }, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } + } + + private func configureSession() { + if setupResult != .success { + return + } + + session.beginConfiguration() + + session.sessionPreset = .high + + // Add video input. 
+ do { + var defaultVideoDevice: AVCaptureDevice? + + if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) { + // If a rear dual camera is not available, default to the rear wide angle camera. + defaultVideoDevice = backCameraDevice + } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) { + // If the rear wide angle camera isn't available, default to the front wide angle camera. + defaultVideoDevice = frontCameraDevice + } + + guard let videoDevice = defaultVideoDevice else { + print("Default video device is unavailable.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + if session.canAddInput(videoDeviceInput) { + session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + print("Couldn't add video device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let audioDevice = AVCaptureDevice.default(for: .audio) + let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!) + + if session.canAddInput(audioDeviceInput) { + session.addInput(audioDeviceInput) + self.audioDeviceInput = audioDeviceInput + } else { + print("Couldn't add audio device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add video output + if session.canAddOutput(movieOutput) { + session.addOutput(movieOutput) + } else { + print("Could not add movie output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + } catch { + print("Couldn't create video device input: \(error)") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add the photo output. 
+ if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + + photoOutput.isHighResolutionCaptureEnabled = true + photoOutput.maxPhotoQualityPrioritization = .quality + + } else { + print("Could not add photo output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + session.commitConfiguration() + self.isConfigured = true + + self.start() + } + + private func resumeInterruptedSession() { + sessionQueue.async { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + if !self.session.isRunning { + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Unable to resume camera", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } else { + DispatchQueue.main.async { + self.isCameraUnavailable = false + self.isCameraButtonDisabled = false + } + } + } + } + + public func changeCamera() { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + } + + sessionQueue.async { + let currentVideoDevice = self.videoDeviceInput.device + let currentPosition = currentVideoDevice.position + + let preferredPosition: AVCaptureDevice.Position + let preferredDeviceType: AVCaptureDevice.DeviceType + + switch currentPosition { + case .unspecified, .front: + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + + case .back: + preferredPosition = .front + preferredDeviceType = .builtInWideAngleCamera + + @unknown default: + print("Unknown capture position. Defaulting to back, dual-camera.") + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + } + let devices = self.videoDeviceDiscoverySession.devices + var newVideoDevice: AVCaptureDevice? 
= nil + + if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) { + newVideoDevice = device + } else if let device = devices.first(where: { $0.position == preferredPosition }) { + newVideoDevice = device + } + + if let videoDevice = newVideoDevice { + do { + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + self.session.beginConfiguration() + + self.session.removeInput(self.videoDeviceInput) + + if self.session.canAddInput(videoDeviceInput) { + NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice) + NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: videoDeviceInput.device) + + self.session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + self.session.addInput(self.videoDeviceInput) + } + + if let connection = self.photoOutput.connection(with: .video) { + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = .auto + } + } + + self.photoOutput.maxPhotoQualityPrioritization = .quality + + self.session.commitConfiguration() + } catch { + print("Error occurred while creating video device input: \(error)") + } + } + + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + } + } + } + + public func focus(with focusMode: AVCaptureDevice.FocusMode, exposureMode: AVCaptureDevice.ExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) { + sessionQueue.async { + guard let device = self.videoDeviceInput?.device else { return } + do { + try device.lockForConfiguration() + + if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) { + device.focusPointOfInterest = devicePoint + device.focusMode = focusMode + } + + if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) { + 
device.exposurePointOfInterest = devicePoint + device.exposureMode = exposureMode + } + + device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange + device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } + } + + + public func focus(at focusPoint: CGPoint) { + let device = self.videoDeviceInput.device + do { + try device.lockForConfiguration() + if device.isFocusPointOfInterestSupported { + device.focusPointOfInterest = focusPoint + device.exposurePointOfInterest = focusPoint + device.exposureMode = .continuousAutoExposure + device.focusMode = .continuousAutoFocus + device.unlockForConfiguration() + } + } + catch { + print(error.localizedDescription) + } + } + + @objc public func stop(completion: (() -> ())? = nil) { + sessionQueue.async { + if self.isSessionRunning { + if self.setupResult == .success { + self.session.stopRunning() + self.isSessionRunning = self.session.isRunning + print("CAMERA STOPPED") + self.removeObservers() + + if !self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + completion?() + } + } + } + } + } + } + + @objc public func start() { + sessionQueue.async { + if !self.isSessionRunning && self.isConfigured { + switch self.setupResult { + case .success: + self.addObservers() + self.session.startRunning() + print("CAMERA RUNNING") + self.isSessionRunning = self.session.isRunning + + if self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + self.isCameraUnavailable = false + } + } + + case .notAuthorized: + print("Application not authorized to use camera") + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + + case .configurationFailed: + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Camera configuration failed. 
Either your device camera is not available or other application is using it", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + } + } + } + + public func set(zoom: CGFloat) { + let factor = zoom < 1 ? 1 : zoom + let device = self.videoDeviceInput.device + + do { + try device.lockForConfiguration() + device.videoZoomFactor = factor + device.unlockForConfiguration() + } + catch { + print(error.localizedDescription) + } + } + + public func capturePhoto() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video) { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + var photoSettings = AVCapturePhotoSettings() + + // Capture HEIF photos when supported. Enable according to user settings and high-resolution photos. + if (self.photoOutput.availablePhotoCodecTypes.contains(.hevc)) { + photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + } + + if self.videoDeviceInput.device.isFlashAvailable { + photoSettings.flashMode = self.flashMode + } + + photoSettings.isHighResolutionPhotoEnabled = true + if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty { + photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!] 
+ } + + photoSettings.photoQualityPrioritization = .speed + + if self.photoCaptureProcessor == nil { + self.photoCaptureProcessor = PhotoCaptureProcessor(with: photoSettings, photoOutput: self.photoOutput, willCapturePhotoAnimation: { + DispatchQueue.main.async { + self.willCapturePhoto.toggle() + self.willCapturePhoto.toggle() + } + }, completionHandler: { (photoCaptureProcessor) in + if let data = photoCaptureProcessor.photoData { + let url = self.savePhoto(data: data) + if let unwrappedURL = url { + self.thumbnail = Thumbnail(type: .image, url: unwrappedURL) + } + } else { + print("Data for photo not found") + } + + self.isCameraButtonDisabled = false + }, photoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.photoCaptureProcessor?.capturePhoto(settings: photoSettings) + } + } + } + + public func startRecording() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let videoOutputConnection = self.movieOutput.connection(with: .video) { + videoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + + let videoSettings = AVCaptureVideoDataOutput() + + if self.videoCaptureProcessor == nil { + self.videoCaptureProcessor = VideoCaptureProcessor(with: videoSettings, movieOutput: self.movieOutput, beginHandler: { + self.isRecording = true + }, completionHandler: { (videoCaptureProcessor, outputFileURL) in + self.isCameraButtonDisabled = false + self.captureMode = .image + + self.mediaItems.append(MediaItem(url: outputFileURL, type: .video)) + self.thumbnail = Thumbnail(type: .video, url: outputFileURL) + }, videoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.videoCaptureProcessor?.startCapture(session: self.session) + } + } + } + + func stopRecording() { + if let videoCaptureProcessor = self.videoCaptureProcessor { + isRecording = false + 
videoCaptureProcessor.stopCapture() + } + } + + func savePhoto(imageType: String = "jpeg", data: Data) -> URL? { + guard let uiImage = UIImage(data: data) else { + print("Error converting media data to UIImage") + return nil + } + + guard let compressedData = uiImage.jpegData(compressionQuality: 0.8) else { + print("Error converting UIImage to JPEG data") + return nil + } + + let temporaryDirectory = NSTemporaryDirectory() + let tempFileName = "\(UUID().uuidString).\(imageType)" + let tempFileURL = URL(fileURLWithPath: temporaryDirectory).appendingPathComponent(tempFileName) + + do { + try compressedData.write(to: tempFileURL) + self.mediaItems.append(MediaItem(url: tempFileURL, type: .image)) + return tempFileURL + } catch { + print("Error saving image data to temporary URL: \(error.localizedDescription)") + } + return nil + } + + private func addObservers() { + let systemPressureStateObservation = observe(\.videoDeviceInput.device.systemPressureState, options: .new) { _, change in + guard let systemPressureState = change.newValue else { return } + self.setRecommendedFrameRateRangeForPressureState(systemPressureState: systemPressureState) + } + keyValueObservations.append(systemPressureStateObservation) + +// NotificationCenter.default.addObserver(self, selector: #selector(self.onOrientationChange), name: UIDevice.orientationDidChangeNotification, object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(subjectAreaDidChange), + name: .AVCaptureDeviceSubjectAreaDidChange, + object: videoDeviceInput.device) + + NotificationCenter.default.addObserver(self, selector: #selector(uiRequestedNewFocusArea), name: .init(rawValue: "UserDidRequestNewFocusPoint"), object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionRuntimeError), + name: .AVCaptureSessionRuntimeError, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionWasInterrupted), + name: 
.AVCaptureSessionWasInterrupted, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionInterruptionEnded), + name: .AVCaptureSessionInterruptionEnded, + object: session) + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self) + + for keyValueObservation in keyValueObservations { + keyValueObservation.invalidate() + } + keyValueObservations.removeAll() + } + + @objc private func uiRequestedNewFocusArea(notification: NSNotification) { + guard let userInfo = notification.userInfo as? [String: Any], let devicePoint = userInfo["devicePoint"] as? CGPoint else { return } + self.focus(at: devicePoint) + } + + @objc + private func subjectAreaDidChange(notification: NSNotification) { + let devicePoint = CGPoint(x: 0.5, y: 0.5) + focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false) + } + + @objc + private func sessionRuntimeError(notification: NSNotification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return } + + print("Capture session runtime error: \(error)") + + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + } + } + } + + private func setRecommendedFrameRateRangeForPressureState(systemPressureState: AVCaptureDevice.SystemPressureState) { + let pressureLevel = systemPressureState.level + if pressureLevel == .serious || pressureLevel == .critical { + do { + try self.videoDeviceInput.device.lockForConfiguration() + print("WARNING: Reached elevated system pressure level: \(pressureLevel). 
Throttling frame rate.") + self.videoDeviceInput.device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 20) + self.videoDeviceInput.device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 15) + self.videoDeviceInput.device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } else if pressureLevel == .shutdown { + print("Session stopped running due to shutdown system pressure level.") + } + } + + @objc + private func sessionWasInterrupted(notification: NSNotification) { + DispatchQueue.main.async { + self.isCameraUnavailable = true + } + + if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, + let reasonIntegerValue = userInfoValue.integerValue, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) { + print("Capture session was interrupted with reason \(reason)") + + if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient { + print("Session stopped running due to video devices in use by another client.") + } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps { + print("Session stopped running because the video device is not available with multiple foreground apps.") + } else if reason == .videoDeviceNotAvailableDueToSystemPressure { + print("Session stopped running due to shutdown system pressure level.") + } + } + } + + @objc + private func sessionInterruptionEnded(notification: NSNotification) { + print("Capture session interruption ended") + DispatchQueue.main.async { + self.isCameraUnavailable = false + } + } +} diff --git a/damus/Models/Camera/ImageResizer.swift b/damus/Models/Camera/ImageResizer.swift new file mode 100644 index 0000000000..d912cc14d1 --- /dev/null +++ b/damus/Models/Camera/ImageResizer.swift @@ -0,0 +1,50 @@ +// +// ImageResizer.swift +// Campus +// +// Created by Rolando Rodriguez on 12/21/19. +// Copyright © 2019 Rolando Rodriguez. 
All rights reserved. +// + +import Foundation +import UIKit + +public enum ImageResizingError: Error { + case cannotRetrieveFromURL + case cannotRetrieveFromData +} + +public struct ImageResizer { + public var targetWidth: CGFloat + + public init(targetWidth: CGFloat) { + self.targetWidth = targetWidth + } + + public func resize(at url: URL) -> UIImage? { + guard let image = UIImage(contentsOfFile: url.path) else { + return nil + } + + return self.resize(image: image) + } + + public func resize(image: UIImage) -> UIImage { + let originalSize = image.size + let targetSize = CGSize(width: targetWidth, height: targetWidth*originalSize.height/originalSize.width) + let renderer = UIGraphicsImageRenderer(size: targetSize) + return renderer.image { (context) in + image.draw(in: CGRect(origin: .zero, size: targetSize)) + } + } +} + +public struct MemorySizer { + public static func size(of data: Data) -> String { + let bcf = ByteCountFormatter() + bcf.allowedUnits = [.useMB] + bcf.countStyle = .file + let string = bcf.string(fromByteCount: Int64(data.count)) + return string + } +} diff --git a/damus/Models/Camera/PhotoCaptureProcessor.swift b/damus/Models/Camera/PhotoCaptureProcessor.swift new file mode 100644 index 0000000000..9d5956daa0 --- /dev/null +++ b/damus/Models/Camera/PhotoCaptureProcessor.swift @@ -0,0 +1,91 @@ +// +// PhotoCaptureProcessor.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import Photos + +class PhotoCaptureProcessor: NSObject { + private(set) var requestedPhotoSettings: AVCapturePhotoSettings + private(set) var photoOutput: AVCapturePhotoOutput? + + lazy var context = CIContext() + var photoData: Data? + private var maxPhotoProcessingTime: CMTime? 
+ + private let willCapturePhotoAnimation: () -> Void + private let completionHandler: (PhotoCaptureProcessor) -> Void + private let photoProcessingHandler: (Bool) -> Void + + init(with requestedPhotoSettings: AVCapturePhotoSettings, + photoOutput: AVCapturePhotoOutput?, + willCapturePhotoAnimation: @escaping () -> Void, + completionHandler: @escaping (PhotoCaptureProcessor) -> Void, + photoProcessingHandler: @escaping (Bool) -> Void) { + self.requestedPhotoSettings = requestedPhotoSettings + self.willCapturePhotoAnimation = willCapturePhotoAnimation + self.completionHandler = completionHandler + self.photoProcessingHandler = photoProcessingHandler + self.photoOutput = photoOutput + } + + func capturePhoto(settings: AVCapturePhotoSettings) { + if let photoOutput = self.photoOutput { + photoOutput.capturePhoto(with: settings, delegate: self) + } + } +} + +extension PhotoCaptureProcessor: AVCapturePhotoCaptureDelegate { + func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) { + maxPhotoProcessingTime = resolvedSettings.photoProcessingTimeRange.start + resolvedSettings.photoProcessingTimeRange.duration + } + + func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) { + DispatchQueue.main.async { + self.willCapturePhotoAnimation() + } + + guard let maxPhotoProcessingTime = maxPhotoProcessingTime else { + return + } + + DispatchQueue.main.async { + self.photoProcessingHandler(true) + } + + let oneSecond = CMTime(seconds: 2, preferredTimescale: 1) + if maxPhotoProcessingTime > oneSecond { + DispatchQueue.main.async { + self.photoProcessingHandler(true) + } + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) 
{ + DispatchQueue.main.async { + self.photoProcessingHandler(false) + } + + if let error = error { + print("Error capturing photo: \(error)") + } else { + photoData = photo.fileDataRepresentation() + + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) { + if let error = error { + print("Error capturing photo: \(error)") + return + } + + DispatchQueue.main.async { + self.completionHandler(self) + } + } +} diff --git a/damus/Models/Camera/VideoCaptureProcessor.swift b/damus/Models/Camera/VideoCaptureProcessor.swift new file mode 100644 index 0000000000..81820d287f --- /dev/null +++ b/damus/Models/Camera/VideoCaptureProcessor.swift @@ -0,0 +1,78 @@ +// +// VideoCaptureProcessor.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import AVFoundation +import Photos + +class VideoCaptureProcessor: NSObject { + private(set) var requestedVideoSettings: AVCaptureVideoDataOutput + private(set) var movieOutput: AVCaptureMovieFileOutput? + + private let beginHandler: () -> Void + private let completionHandler: (VideoCaptureProcessor, URL) -> Void + private let videoProcessingHandler: (Bool) -> Void + private var session: AVCaptureSession? 
+ + init(with requestedVideoSettings: AVCaptureVideoDataOutput, + movieOutput: AVCaptureMovieFileOutput?, + beginHandler: @escaping () -> Void, + completionHandler: @escaping (VideoCaptureProcessor, URL) -> Void, + videoProcessingHandler: @escaping (Bool) -> Void) { + self.requestedVideoSettings = requestedVideoSettings + self.beginHandler = beginHandler + self.completionHandler = completionHandler + self.videoProcessingHandler = videoProcessingHandler + self.movieOutput = movieOutput + } + + func startCapture(session: AVCaptureSession) { + if let movieOutput = self.movieOutput, session.isRunning { + let outputFileURL = uniqueOutputFileURL() + movieOutput.startRecording(to: outputFileURL, recordingDelegate: self) + } + } + + func stopCapture() { + if let movieOutput = self.movieOutput { + if movieOutput.isRecording { + movieOutput.stopRecording() + } + } + } + + private func uniqueOutputFileURL() -> URL { + let tempDirectory = FileManager.default.temporaryDirectory + let fileName = UUID().uuidString + ".mov" + return tempDirectory.appendingPathComponent(fileName) + } +} + +extension VideoCaptureProcessor: AVCaptureFileOutputRecordingDelegate { + + func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + self.beginHandler() + } + + func fileOutput(_ output: AVCaptureFileOutput, willFinishRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + DispatchQueue.main.async { + self.videoProcessingHandler(true) + } + } + + func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) 
{ + if let error = error { + print("Error capturing video: \(error)") + return + } + + DispatchQueue.main.async { + self.completionHandler(self, outputFileURL) + self.videoProcessingHandler(false) + } + } +} diff --git a/damus/Views/Camera/CameraPreview.swift b/damus/Views/Camera/CameraPreview.swift index 677f175c84..d6b284cc02 100644 --- a/damus/Views/Camera/CameraPreview.swift +++ b/damus/Views/Camera/CameraPreview.swift @@ -1,83 +1,88 @@ // // CameraPreview.swift -// damus +// Campus // -// Created by Suhail Saqan on 6/4/23. +// Created by Rolando Rodriguez on 12/17/19. +// Copyright © 2019 Rolando Rodriguez. All rights reserved. // -import Foundation -import SwiftUI -import AVFoundation import UIKit -import AVKit +import AVFoundation +import SwiftUI public struct CameraPreview: UIViewRepresentable { - @EnvironmentObject var camera: CameraViewModel - - public func makeUIView(context: Context) -> UIView { - let view = UIView(frame: UIScreen.main.bounds) - - let previewLayer = AVCaptureVideoPreviewLayer(session: camera.session) - previewLayer.frame = view.frame - previewLayer.videoGravity = .resizeAspectFill - view.layer.addSublayer(previewLayer) + public class VideoPreviewView: UIView { + public override class var layerClass: AnyClass { + AVCaptureVideoPreviewLayer.self + } - DispatchQueue.global(qos: .userInteractive).async { - camera.session.startRunning() + var videoPreviewLayer: AVCaptureVideoPreviewLayer { + return layer as! 
AVCaptureVideoPreviewLayer } - DispatchQueue.main.async { - camera.preview = previewLayer + let focusView: UIView = { + let focusView = UIView(frame: CGRect(x: 0, y: 0, width: 30, height: 30)) + focusView.layer.borderColor = UIColor.white.cgColor + focusView.layer.borderWidth = 1.5 + focusView.layer.cornerRadius = 15 + focusView.layer.opacity = 0 + focusView.backgroundColor = .clear + return focusView + }() + + @objc func focusAndExposeTap(gestureRecognizer: UITapGestureRecognizer) { + let layerPoint = gestureRecognizer.location(in: gestureRecognizer.view) + let devicePoint = videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: layerPoint) + + self.focusView.layer.frame = CGRect(origin: layerPoint, size: CGSize(width: 30, height: 30)) + + + NotificationCenter.default.post(.init(name: .init("UserDidRequestNewFocusPoint"), object: nil, userInfo: ["devicePoint": devicePoint] as [AnyHashable: Any])) + + UIView.animate(withDuration: 0.3, animations: { + self.focusView.layer.opacity = 1 + }) { (completed) in + if completed { + UIView.animate(withDuration: 0.3) { + self.focusView.layer.opacity = 0 + } + } + } } - return view + public override func layoutSubviews() { + super.layoutSubviews() + + self.layer.addSublayer(focusView.layer) + + let gRecognizer = UITapGestureRecognizer(target: self, action: #selector(VideoPreviewView.focusAndExposeTap(gestureRecognizer:))) + self.addGestureRecognizer(gRecognizer) + } } - public func updateUIView(_ uiView: UIView, context: Context) { + public let session: AVCaptureSession + + public init(session: AVCaptureSession) { + self.session = session } -} - -public struct CameraContentPreview: View { - let url: URL? - public var body: some View { - ZStack { - if url != nil { - if checkURL() == "video" { - let player = AVPlayer(url: url!) - AVPlayerControllerRepresented(player: player) - .onAppear { - player.play() - }.onDisappear { - player.pause() - } - } else { - Image(uiImage: UIImage(contentsOfFile: url!.path)!) 
- .resizable() - } - } - }.background(Color.black) + public func makeUIView(context: Context) -> VideoPreviewView { + let viewFinder = VideoPreviewView() + viewFinder.backgroundColor = .black + viewFinder.videoPreviewLayer.cornerRadius = 0 + viewFinder.videoPreviewLayer.session = session + viewFinder.videoPreviewLayer.connection?.videoOrientation = .portrait + return viewFinder } - func checkURL() -> String { - if url!.absoluteString.hasSuffix(".mov") { - return "video" - } - return "photo" + public func updateUIView(_ uiView: VideoPreviewView, context: Context) { + } } -struct AVPlayerControllerRepresented : UIViewControllerRepresentable { - var player : AVPlayer - - func makeUIViewController(context: Context) -> AVPlayerViewController { - let controller = AVPlayerViewController() - controller.player = player - controller.showsPlaybackControls = false - return controller - } - - func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) { - +struct CameraPreview_Previews: PreviewProvider { + static var previews: some View { + CameraPreview(session: AVCaptureSession()) + .frame(height: 300) } } diff --git a/damus/Views/Camera/CameraView.swift b/damus/Views/Camera/CameraView.swift index 4b9c8d5a15..ee59782b97 100644 --- a/damus/Views/Camera/CameraView.swift +++ b/damus/Views/Camera/CameraView.swift @@ -2,154 +2,202 @@ // CameraView.swift // damus // -// Created by Suhail Saqan on 6/4/23. +// Created by Suhail Saqan on 8/5/23. // import SwiftUI +import Combine +import AVFoundation -public struct CameraView: View { +struct CameraView: View { + let damus_state: DamusState + let action: (([MediaItem]) -> Void) + @Environment(\.presentationMode) var presentationMode - @StateObject var camera = CameraViewModel() - let action: ((URL, CameraMediaType?) 
-> Void) - var videoAllowed: Bool - let maxVideoDuration: Int + @StateObject var model: CameraModel + + @State var currentZoomFactor: CGFloat = 1.0 - public init(action: @escaping ((URL, CameraMediaType?) -> Void), - useMediaContent: String = "Upload", - videoAllowed: Bool = true, - maxVideoDuration: Int = 15) { - + public init(damus_state: DamusState, action: @escaping (([MediaItem]) -> Void)) { + self.damus_state = damus_state self.action = action - self.videoAllowed = videoAllowed - self.maxVideoDuration = maxVideoDuration + _model = StateObject(wrappedValue: CameraModel()) } - - public var body: some View { - ZStack { - if camera.isTaken { - CameraContentPreview(url: camera.previewURL) - .ignoresSafeArea(.all, edges: .all) + + var captureButton: some View { + Button { + if model.isRecording { + model.stopRecording() } else { - CameraPreview() - .environmentObject(camera) - .ignoresSafeArea(.all, edges: .all) + print("capturing photo1") + model.capturePhoto() } - - VStack { - if camera.isTaken { - HStack { - Button { - camera.retakePic() - } label: { - Image(systemName: "chevron.backward") - .font(.system(size: 24)) - .foregroundColor(.white) - .padding() - }.padding(.leading) - Spacer() - }.padding(.top) - } + } label: { + ZStack { + Circle() + .fill( model.isRecording ? .red : .white) + .frame(width: model.isRecording ? 85 : 65, height: model.isRecording ? 85 : 65, alignment: .center) - if !camera.isTaken && !camera.isRecording { - HStack { - Button { - presentationMode.wrappedValue.dismiss() - } label: { - Image(systemName: "xmark") - .font(.system(size: 24)) - .foregroundColor(.white) - .padding() - }.padding(.leading) - + Circle() + .stroke( model.isRecording ? .red : .white, lineWidth: 4) + .frame(width: model.isRecording ? 95 : 75, height: model.isRecording ? 
95 : 75, alignment: .center) + } + }.simultaneousGesture( + LongPressGesture(minimumDuration: 0.5).onEnded({ value in + if (!model.isCameraButtonDisabled) { + withAnimation { + model.captureMode = .video + model.startRecording() + } + } + }) + ).buttonStyle(.plain) + } + + var capturedPhotoThumbnail: some View { + ZStack { + if model.thumbnail != nil { + Image(uiImage: model.thumbnail.thumbnailImage!) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 60, height: 60) + .clipShape(RoundedRectangle(cornerRadius: 10, style: .continuous)) + .animation(.spring()) + } else { + RoundedRectangle(cornerRadius: 10) + .frame(width: 60, height: 60, alignment: .center) + .foregroundColor(.black) + } + if model.isPhotoProcessing { + ProgressView() + .progressViewStyle(CircularProgressViewStyle(tint: .white)) + } + } + } + + var flipCameraButton: some View { + Button(action: { + model.flipCamera() + }, label: { + Circle() + .foregroundColor(Color.gray.opacity(0.2)) + .frame(width: 45, height: 45, alignment: .center) + .overlay( + Image(systemName: "camera.rotate.fill") + .foregroundColor(.white)) + }) + } + + var body: some View { + NavigationView { + GeometryReader { reader in + ZStack { + Color.black.edgesIgnoringSafeArea(.all) + + VStack { Spacer() - Button { - if camera.position == .back { - camera.position = .front - } else { - camera.position = .back + HStack { + Button { + presentationMode.wrappedValue.dismiss() + model.stop() + } label: { + Image(systemName: "xmark") + .font(.system(size: 24)) + .foregroundColor(.white) } - camera.setUp() - } label: { - Image(systemName: "arrow.triangle.2.circlepath.camera.fill") - .font(.system(size: 24)) - .foregroundColor(.white) - .padding() - }.padding(.trailing) - }.padding(.top) - } - - Spacer() - - HStack { - if camera.isTaken { + Spacer() + + Button(action: { + model.switchFlash() + }, label: { + Image(systemName: model.isFlashOn ? 
"bolt.fill" : "bolt.slash.fill") + .font(.system(size: 24)) + }) + .accentColor(model.isFlashOn ? .yellow : .white) + } + .padding(.horizontal, 30) + Spacer() - Button { - if let url = camera.previewURL { - action(url, camera.mediaType) - presentationMode.wrappedValue.dismiss() - } - } label: { - Text("Upload") - .foregroundColor(.black) - .font(.subheadline) - .kerning(0.12) - .padding(.vertical, 10) - .padding(.horizontal, 20) - .background(Color.white) - .clipShape(Capsule()) - .padding() - }.padding(.horizontal) - } else { - Button { - if camera.video { - camera.stopRecording() - } else { - camera.takePic() - } - } label: { - ZStack { - Circle() - .fill( camera.video ? .red : .white) - .frame(width: camera.video ? 85 : 65, height: camera.video ? 85 : 65) - - Circle() - .stroke( camera.video ? .red : .white, lineWidth: 4) - .frame(width: camera.video ? 95 : 75, height: camera.video ? 95 : 75) + CameraPreview(session: model.session) + .frame(maxWidth: .infinity) + .gesture( + DragGesture().onChanged({ (val) in + // Only accept vertical drag + if abs(val.translation.height) > abs(val.translation.width) { + // Get the percentage of vertical screen space covered by drag + let percentage: CGFloat = -(val.translation.height / reader.size.height) + // Calculate new zoom factor + let calc = currentZoomFactor + percentage + // Limit zoom factor to a maximum of 5x and a minimum of 1x + let zoomFactor: CGFloat = min(max(calc, 1), 5) + // Store the newly calculated zoom factor + currentZoomFactor = zoomFactor + // Sets the zoom factor to the capture device session + model.zoom(with: zoomFactor) + } + }) + ) + .onAppear { + model.configure() } - }.simultaneousGesture( - LongPressGesture(minimumDuration: 0.5).onEnded({ value in - if camera.recordPermission == .granted && videoAllowed{ - withAnimation { - camera.video = true - camera.setUp() - camera.startRecording() + .alert(isPresented: $model.showAlertError, content: { + Alert(title: Text(model.alertError.title), message: 
Text(model.alertError.message), dismissButton: .default(Text(model.alertError.primaryButtonTitle), action: { + model.alertError.primaryAction?() + })) + }) + .overlay( + Group { + if model.willCapturePhoto { + Color.black } } - }) - ).buttonStyle(.plain) + ) + + Spacer() + + HStack { +// NavigationLink(destination: ImageCarousel(state: damus_state, evid: "qwdqwdwq", urls: model.mediaItems)) { +// capturedPhotoThumbnail +// } + NavigationLink(destination: Text(model.mediaItems.map { $0.url.absoluteString }.joined(separator: ", "))) { + capturedPhotoThumbnail + } + + Spacer() + + captureButton + + Spacer() + + flipCameraButton + + Button { + action(model.mediaItems) + presentationMode.wrappedValue.dismiss() + model.stop() + } label: { + Text("Upload") + .foregroundColor(.black) + .font(.subheadline) + .kerning(0.12) + .padding(.vertical, 10) + .padding(.horizontal, 20) + .background(Color.white) + .clipShape(Capsule()) + .padding() + } + .padding(.horizontal) + + } + .padding(.horizontal, 30) + + Spacer() } - }.frame(height: 95) - .padding(.bottom) - } - }.onAppear { - camera.checkPermission() - camera.checkAudioPermission() - }.alert(isPresented: $camera.alert) { - Alert(title: Text(NSLocalizedString("youFoundInterlocutor", comment: "")), - primaryButton: .default(Text(NSLocalizedString("goToSettings", comment: "")), action: { - UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!) 
- }), - secondaryButton: .cancel(Text(NSLocalizedString("cancel", comment: "")))) - }.onReceive(Timer.publish(every: 0.01, on: .main, in: .common).autoconnect()) { _ in - if camera.recordedDuration <= Double(maxVideoDuration) && camera.isRecording { - camera.recordedDuration += 0.01 - } - if camera.recordedDuration >= Double(maxVideoDuration) && camera.isRecording { - camera.stopRecording() + } } } } diff --git a/damus/Views/PostView.swift b/damus/Views/PostView.swift index 7cad34918f..d2fde5597f 100644 --- a/damus/Views/PostView.swift +++ b/damus/Views/PostView.swift @@ -56,7 +56,7 @@ struct PostView: View { @State var newCursorIndex: Int? @State var postTextViewCanScroll: Bool = true - @State var mediaToUpload: MediaUpload? = nil + @State var mediaToUpload: [MediaUpload] = [] @StateObject var image_upload: ImageUploadModel = ImageUploadModel() @StateObject var tagModel: TagModel = TagModel() @@ -325,6 +325,15 @@ struct PostView: View { pks.append(pk) } } + + func addToMediaToUpload(mediaItem: MediaItem) { + switch mediaItem.type { + case .image: + mediaToUpload.append(.image(mediaItem.url)) + case .video: + mediaToUpload.append(.video(mediaItem.url)) + } + } var body: some View { GeometryReader { (deviceSize: GeometryProxy) in @@ -363,14 +372,14 @@ struct PostView: View { } .sheet(isPresented: $attach_media) { ImagePicker(uploader: damus_state.settings.default_media_uploader, sourceType: .photoLibrary, pubkey: damus_state.pubkey, image_upload_confirm: $image_upload_confirm) { img in - self.mediaToUpload = .image(img) + self.mediaToUpload.append(.image(img)) } onVideoPicked: { url in - self.mediaToUpload = .video(url) + self.mediaToUpload.append(.video(url)) } .alert(NSLocalizedString("Are you sure you want to upload this media?", comment: "Alert message asking if the user wants to upload media."), isPresented: $image_upload_confirm) { Button(NSLocalizedString("Upload", comment: "Button to proceed with uploading."), role: .none) { - if let mediaToUpload { - 
self.handle_upload(media: mediaToUpload) + if !mediaToUpload.isEmpty { + self.handle_upload(media: mediaToUpload[0]) self.attach_media = false } } @@ -378,19 +387,12 @@ struct PostView: View { } } .fullScreenCover(isPresented: $attach_camera) { - CameraView(action: { url, type in - if let mediaType = type { - switch mediaType { - case .image: - self.mediaToUpload = .image(url) - case .video: - self.mediaToUpload = .video(url) - } - } else { - print("The media type is not set") + CameraView(damus_state: damus_state, action: { items in + for item in items { + addToMediaToUpload(mediaItem: item) } - if let mediaToUpload { - self.handle_upload(media: mediaToUpload) + for media in mediaToUpload { + self.handle_upload(media: media) } }) }