iOS – Swift camera magnification gesture doesn't work correctly


I have a video recorder in Swift built with AVFoundation. I tried to manually register magnification gestures and zoom the camera in and out. It seems my calculation of the zoom level is off when the gesture begins: the zoom in/out is not smooth and is quite buggy. If I repeatedly zoom in and out without releasing, the zoom is applied to the camera smoothly (not perfectly smooth, but it works). However, when I begin a new gesture, the camera either zooms in completely or zooms out completely, and that is essentially the problem I'm facing.

import SwiftUI
import SwiftUI
import AVKit
import AVFoundation

/// Root camera screen. Hosts the live preview and translates a SwiftUI
/// `MagnificationGesture` into incremental zoom deltas for the camera.
struct HomeStory: View {
    @StateObject var cameraModel = CameraViewModel()
    @GestureState private var scale: CGFloat = 1.0
    // Last magnification value reported during the CURRENT gesture.
    // Must be reset to 1.0 when the gesture ends (see .onEnded below).
    @State private var previousScale: CGFloat = 1.0

    var body: some View {
        ZStack(alignment: .bottom) {
            CameraStoryView()
                .environmentObject(cameraModel)
                .clipShape(RoundedRectangle(cornerRadius: 30, style: .continuous))
                .gesture(MagnificationGesture()
                    .updating($scale, body: { (value, state, _) in
                        state = value
                    })
                    .onChanged { value in
                        // Incremental change since the last onChanged of this
                        // gesture; multiplying the device's current zoom factor
                        // by this delta keeps the zoom continuous.
                        let delta = value / previousScale
                        cameraModel.zoom(delta)
                        previousScale = value
                    }
                    // BUG FIX: MagnificationGesture values are relative to 1.0
                    // at the START of each gesture. Without this reset, the first
                    // onChanged of the next gesture divides by the stale value of
                    // the previous gesture, producing a huge/tiny delta — the
                    // "camera jumps fully in or out" behavior described above.
                    .onEnded { _ in
                        previousScale = 1.0
                    }
                )
        }
    }
}

/// Thin wrapper that measures the available space and forwards it to the
/// UIKit-backed `CameraPreview`.
struct CameraStoryView: View {
    @EnvironmentObject var cameraModel: CameraViewModel

    var body: some View {
        GeometryReader { proxy in
            let size = proxy.size

            // `dimension` is the preview's target size (keeps the existing
            // CameraPreview initializer label).
            CameraPreview(dimension: size)
                .environmentObject(cameraModel)
        }
    }
}

/// UIKit bridge hosting the `AVCaptureVideoPreviewLayer` for the shared session.
struct CameraPreview: UIViewRepresentable {
    @EnvironmentObject var cameraModel: CameraViewModel
    /// Target size for the preview layer, supplied by the SwiftUI layout.
    var dimension: CGSize

    func makeUIView(context: Context) -> UIView {
        let view = UIView()

        cameraModel.preview = AVCaptureVideoPreviewLayer(session: cameraModel.session)
        cameraModel.preview.frame.size = dimension
        cameraModel.preview.videoGravity = .resizeAspectFill
        view.layer.addSublayer(cameraModel.preview)

        // startRunning() blocks until capture starts — keep it off the main thread.
        DispatchQueue.global(qos: .userInitiated).async {
            cameraModel.session.startRunning()
        }

        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) { }
}


/// Owns the `AVCaptureSession`, handles photo/video capture, camera flipping,
/// pinch-zoom, and post-recording merging of clips into a single preview asset.
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate, AVCapturePhotoCaptureDelegate {
    // MARK: - Session state
    @Published var session = AVCaptureSession()
    @Published var alert = false
    @Published var output = AVCaptureMovieFileOutput()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var isRecording: Bool = false
    @Published var recordedURLs: [URL] = []
    @Published var previewURL: URL?
    @Published var showPreview: Bool = false
    @Published var recordedDuration: CGFloat = 0
    @Published var maxDuration: CGFloat = 20

    // MARK: - Photo state
    @Published var capturedImage: UIImage?
    @Published var photoOutput = AVCapturePhotoOutput()
    @Published var flashMode: AVCaptureDevice.FlashMode = .off
    var currentCameraPosition: AVCaptureDevice.Position = .back

    /// Multiplies the active camera's zoom factor by `delta`, clamped to
    /// [1.0, min(3.0, hardware max)]. Called repeatedly while a pinch gesture
    /// is in progress, so `delta` is an incremental ratio (close to 1.0).
    func zoom(_ delta: CGFloat) {
        // Resolve the device for the side that is currently live so zoom works
        // for both the front and the back camera (no duplicated branches).
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video,
                                                   position: currentCameraPosition) else { return }
        do {
            try device.lockForConfiguration()
            defer { device.unlockForConfiguration() }

            // Respect the hardware limit as well as the app's 3x cap.
            let upperBound = min(3.0, device.maxAvailableVideoZoomFactor)
            let newZoomFactor = min(max(device.videoZoomFactor * delta, 1.0), upperBound)
            device.videoZoomFactor = newZoomFactor
        } catch {
            print("Error zooming camera: \(error.localizedDescription)")
        }
    }

    /// Swaps the session's video input between the front and back cameras.
    func flipCamera() {
        // Find all available wide-angle video devices.
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                mediaType: .video,
                                                                position: .unspecified)
        let videoDevices = discoverySession.devices

        // Need at least two cameras to flip between.
        guard videoDevices.count > 1 else { return }

        // Current input (assumed to be the first session input — set in setUp()).
        guard let currentVideoInput = session.inputs.first as? AVCaptureDeviceInput else { return }

        let newCameraPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back

        if let newCamera = videoDevices.first(where: { $0.position == newCameraPosition }) {
            do {
                let newVideoInput = try AVCaptureDeviceInput(device: newCamera)

                // Batch the input swap so the session reconfigures atomically.
                session.beginConfiguration()
                session.removeInput(currentVideoInput)

                if session.canAddInput(newVideoInput) {
                    session.addInput(newVideoInput)
                    currentCameraPosition = newCameraPosition
                } else {
                    // Restore the previous input so the session keeps working.
                    print("Failed to add new camera input")
                    if session.canAddInput(currentVideoInput) {
                        session.addInput(currentVideoInput)
                    }
                }
                session.commitConfiguration()
            } catch {
                print("Error creating new camera input: \(error.localizedDescription)")
            }
        }
    }

    /// Triggers a still-photo capture; the result arrives in `photoOutput(_:didFinishProcessingPhoto:error:)`.
    func takePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        self.photoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    // MARK: - AVCapturePhotoCaptureDelegate
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            print("Error capturing photo: \(error.localizedDescription)")
            return
        }

        if let imageData = photo.fileDataRepresentation(), let image = UIImage(data: imageData) {
            self.capturedImage = image
        }
    }

    /// Checks camera authorization, requesting access if undetermined, and
    /// configures the session once permitted.
    func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            setUp()
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    self.setUp()
                }
            }
        case .denied:
            // Drives an alert in the UI telling the user to enable access.
            self.alert.toggle()
        default:
            return
        }
    }

    /// Configures the capture session: back camera + microphone inputs,
    /// movie-file and photo outputs.
    func setUp() {
        self.session.beginConfiguration()
        // Guarantee the configuration block is always closed, even on error.
        defer { self.session.commitConfiguration() }

        do {
            // Avoid force-unwrapping devices that may be unavailable (e.g. Simulator).
            guard let cameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
                  let audioDevice = AVCaptureDevice.default(for: .audio) else {
                print("Required capture devices are unavailable")
                return
            }
            let videoInput = try AVCaptureDeviceInput(device: cameraDevice)
            let audioInput = try AVCaptureDeviceInput(device: audioDevice)

            // MARK: Audio + video inputs
            if self.session.canAddInput(videoInput) && self.session.canAddInput(audioInput) {
                self.session.addInput(videoInput)
                self.session.addInput(audioInput)
            }

            if self.session.canAddOutput(self.output) {
                self.session.addOutput(self.output)
            }

            if self.session.canAddOutput(self.photoOutput) {
                self.session.addOutput(self.photoOutput)
            }
        } catch {
            print(error.localizedDescription)
        }
    }

    /// Starts recording a clip to a unique temporary file.
    func startRecording() {
        // MARK: Temporary URL for recording video
        let tempURL = NSTemporaryDirectory() + "\(Date()).mov"
        output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
        isRecording = true
    }

    /// Stops the current recording; the finished file arrives via the delegate.
    func stopRecording() {
        output.stopRecording()
        isRecording = false
    }

    // MARK: - AVCaptureFileOutputRecordingDelegate
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print(error.localizedDescription)
            return
        }

        // Clip recorded successfully.
        print(outputFileURL)
        self.recordedURLs.append(outputFileURL)

        // A single clip needs no merging — preview it directly.
        if self.recordedURLs.count == 1 {
            self.previewURL = outputFileURL
            return
        }

        // Convert every recorded URL into an asset and merge them.
        let assets = recordedURLs.compactMap { url -> AVURLAsset in
            AVURLAsset(url: url)
        }

        self.previewURL = nil
        Task {
            await mergeVideos(assets: assets) { exporter in
                exporter.exportAsynchronously {
                    if exporter.status == .failed {
                        // Surface the failure instead of silently dropping it.
                        print(exporter.error ?? "Export failed with unknown error")
                    } else if let finalURL = exporter.outputURL {
                        print(finalURL)
                        DispatchQueue.main.async {
                            self.previewURL = finalURL
                        }
                    }
                }
            }
        }
    }

    /// Concatenates the given clips into one composition and hands a configured
    /// `AVAssetExportSession` to `completion` (caller starts the export).
    /// - Parameters:
    ///   - assets: Clips to merge, in playback order.
    ///   - completion: Receives the ready-to-run export session.
    func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) async {
        let composition = AVMutableComposition()
        var lastTime: CMTime = .zero

        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }

        for asset in assets {
            do {
                // Load duration once per asset; only advance `lastTime` when the
                // insert succeeds so a failed clip doesn't leave a gap.
                let duration = try await asset.load(.duration)
                let videoTracks = try await asset.loadTracks(withMediaType: .video)
                try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: duration), of: videoTracks[0], at: lastTime)

                // Safety check: only insert audio if the clip actually has it.
                let audioTracks = try await asset.loadTracks(withMediaType: .audio)
                if !audioTracks.isEmpty {
                    try audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: duration), of: audioTracks[0], at: lastTime)
                }

                lastTime = CMTimeAdd(lastTime, duration)
            } catch {
                print(error.localizedDescription)
            }
        }

        // MARK: Temporary output URL
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory() + "Reel-\(Date()).mp4")

        // The captured video is rotated; rotate 90° and translate to bring it
        // back to its original (portrait) orientation.
        let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

        // MARK: Transform
        var transform = CGAffineTransform.identity
        transform = transform.rotated(by: 90 * (.pi / 180))
        transform = transform.translatedBy(x: 0, y: -videoTrack.naturalSize.height)
        layerInstructions.setTransform(transform, at: .zero)

        let instructions = AVMutableVideoCompositionInstruction()
        instructions.timeRange = CMTimeRange(start: .zero, duration: lastTime)
        instructions.layerInstructions = [layerInstructions]

        let videoComposition = AVMutableVideoComposition()
        // Swap width/height because of the 90° rotation above.
        videoComposition.renderSize = CGSize(width: videoTrack.naturalSize.height, height: videoTrack.naturalSize.width)
        videoComposition.instructions = [instructions]
        videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)

        guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputFileType = .mp4
        exporter.outputURL = tempURL
        exporter.videoComposition = videoComposition
        completion(exporter)
    }
}

Related Articles

LEAVE A REPLY

Please enter your comment!
Please enter your name here

Latest Articles