App crashing while taking both video and photo from same AVCaptureSession?

I am trying to build an app with AVFoundation that captures audio, video, and photos using the same AVCaptureSession.

Here is how I set up the camera and the capture session.

func setUpCaptureSession(){
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
}

func setUpDevice(){
    let deviceDiscoverSession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
    let devices = deviceDiscoverSession.devices
    for device in devices{
        if device.position == AVCaptureDevice.Position.back{
            backCamera = device
        }else{
            frontCamera = device
        }
    }

    currentCamera = backCamera
}

func setUpInputOutput(){
    do{
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)

        photoOutput = AVCapturePhotoOutput()
        photoOutput?.isHighResolutionCaptureEnabled = true
        self.captureSession.addOutput(photoOutput!)

        if #available(iOS 11.0, *) {
            photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey : AVVideoCodecType.jpeg])], completionHandler: nil)
        } else {
            // Fallback on earlier versions
            photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])], completionHandler: nil)
        }
    }catch{
        print(error)
    }
}

func setUpPreviewLayer(){
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
    cameraPreviewLayer?.frame = self.cameraView.frame
    self.cameraView.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}

func startRunningCaptureSession(){
    captureSession.startRunning()

    // The session starts in photo mode by default, so attach the double-tap gesture for taking photos
    let doubleTap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(sender:)))
    doubleTap.numberOfTapsRequired = 2
    self.view.addGestureRecognizer(doubleTap)
}
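
As a side note (not part of the original question): the snippet above mutates the session without checking canAddInput/canAddOutput, and it never adds a microphone even though the app is meant to capture audio as well. Below is a minimal sketch of a more defensive version, assuming the same captureSession, currentCamera and photoOutput properties; the method name setUpInputOutputSafely is made up for illustration.

func setUpInputOutputSafely() {
    captureSession.beginConfiguration()
    defer { captureSession.commitConfiguration() }

    do {
        // Only add the camera input if the session accepts it.
        let cameraInput = try AVCaptureDeviceInput(device: currentCamera!)
        if captureSession.canAddInput(cameraInput) {
            captureSession.addInput(cameraInput)
        }

        // The question mentions audio as well, so wire up the microphone too.
        if let microphone = AVCaptureDevice.default(for: .audio) {
            let microphoneInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(microphoneInput) {
                captureSession.addInput(microphoneInput)
            }
        }

        // Photo output, added only when the session can take it.
        let output = AVCapturePhotoOutput()
        output.isHighResolutionCaptureEnabled = true
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
            photoOutput = output
        }
    } catch {
        print(error)
    }
}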

To take a photo with the camera (this works; the photo is stored in the gallery), I did the following:

@objc func cameraAction(sender: UIButton){

    if(imageSetAction()){
        cameraImage.image = UIImage(named:"photo")
        cameraLabel.textColor = ConstantColors.selectedTextColor
        currentSelected = sender.tag

        let doubleTap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(sender:)))
        doubleTap.numberOfTapsRequired = 2
        self.view.addGestureRecognizer(doubleTap)
    }

}

@objc func takePhoto(sender: Any){

    guard let capturePhotoOutput = self.photoOutput else {
        return
    }
    let photoSettings = AVCapturePhotoSettings()
    photoSettings.isAutoStillImageStabilizationEnabled = true
    photoSettings.isHighResolutionPhotoEnabled = true
    photoSettings.flashMode = .auto
    capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)

}

func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

    guard error == nil,
        let photoSampleBuffer = photoSampleBuffer else {
            print("Error capturing photo: \(String(describing: error))")
            return
    }

    guard let imageData =
        AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else {
            return
    }

    let capturedImage = UIImage.init(data: imageData , scale: 1.0)
    if let image = capturedImage {
        //UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        let vc = UIStoryboard.init(name:"ImageEdit", bundle: Bundle.main).instantiateViewController(withIdentifier:"imageEdit") as? ImageEdit
        vc?.imagetoEdit = image
        self.navigationController?.pushViewController(vc!, animated: true)

    }

}
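
Note that jpegPhotoDataRepresentation(forJPEGSampleBuffer:previewPhotoSampleBuffer:) and the sample-buffer based delegate callback above are deprecated as of iOS 11. If the app targets iOS 11 or later, a sketch of the equivalent callback based on AVCapturePhoto could look like this (same ImageEdit navigation as above, only the decoding changes):

@available(iOS 11.0, *)
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    // AVCapturePhoto hands back encoded image data directly; no sample-buffer conversion needed.
    guard error == nil, let imageData = photo.fileDataRepresentation() else {
        print("Error capturing photo: \(String(describing: error))")
        return
    }

    if let image = UIImage(data: imageData, scale: 1.0) {
        let vc = UIStoryboard(name: "ImageEdit", bundle: Bundle.main).instantiateViewController(withIdentifier: "imageEdit") as? ImageEdit
        vc?.imagetoEdit = image
        self.navigationController?.pushViewController(vc!, animated: true)
    }
}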

But when I try to record a video from the same capture session, I get the error "NSInvalidArgumentException, reason: '*** -[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] No active/enabled connections'". Below is the code I tried.

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    print("completed")
}

@objc func videoAction(sender: UIButton){

    if(imageSetAction()){
        videoImage.image = UIImage(named:"video")
        videoLabel.textColor = ConstantColors.selectedTextColor
        currentSelected = sender.tag

        captureSession.removeOutput(photoOutput!)

        self.movieFileOutput = AVCaptureMovieFileOutput()
        self.captureSession.addOutput(movieFileOutput!)

        let longPressGesture = UILongPressGestureRecognizer.init(target: self, action: #selector(handleLongPress))
        self.view.addGestureRecognizer(longPressGesture);
    }
}

@objc func handleLongPress(gestureRecognizer: UILongPressGestureRecognizer) {

    if gestureRecognizer.state == UIGestureRecognizerState.began {
        debugPrint("long press started")
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
        let filePath = documentsURL.appendingPathComponent("tempMovie.mp4")
        if FileManager.default.fileExists(atPath: filePath.absoluteString) {
            do {
                try FileManager.default.removeItem(at: filePath)
            }
            catch {
                // exception while deleting old cached file
                // ignore error if any
            }
        }
        movieFileOutput?.startRecording(to: filePath, recordingDelegate: self)
    }
    else if gestureRecognizer.state == UIGestureRecognizerState.ended {
        debugPrint("longpress ended")
        movieFileOutput?.stopRecording()
    }
}
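
For reference, the exception above is thrown by startRecording(to:recordingDelegate:) when the movie output has no active video connection. Guarding the call (a sketch, not from the original post) at least turns the crash into a log message while the session configuration is being debugged:

// Sketch: only start recording when the movie output has an active, enabled video connection.
if let movieOutput = movieFileOutput,
   let connection = movieOutput.connection(with: .video),
   connection.isActive, connection.isEnabled {
    movieOutput.startRecording(to: filePath, recordingDelegate: self)
} else {
    debugPrint("Movie output has no active/enabled video connection; check the session preset and outputs")
}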

Any help on this is appreciated.


You can try changing the preset in videoAction like this:

captureSession.sessionPreset = AVCaptureSession.Preset.high

You can see a similar question here:
AVCaptureMovieFileOutput – no active/enabled connections
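
Putting that suggestion together with the original videoAction, a sketch of the reconfiguration (one possible arrangement, assuming the same properties as in the question and not tested against the asker's project) would switch the preset inside a beginConfiguration()/commitConfiguration() block before adding the movie output:

@objc func videoAction(sender: UIButton) {

    if imageSetAction() {
        videoImage.image = UIImage(named: "video")
        videoLabel.textColor = ConstantColors.selectedTextColor
        currentSelected = sender.tag

        captureSession.beginConfiguration()
        // The .photo preset gives AVCaptureMovieFileOutput no active connection, so switch to .high first.
        captureSession.sessionPreset = .high
        captureSession.removeOutput(photoOutput!)

        let movieOutput = AVCaptureMovieFileOutput()
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
            movieFileOutput = movieOutput
        }
        captureSession.commitConfiguration()

        let longPressGesture = UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress))
        self.view.addGestureRecognizer(longPressGesture)
    }
}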

