Renaming FrameHandler to CameraModel. Renaming FrameView to CameraView. Refactoring the ViewModel: retrieving the photo output is in progress, removing the permissionGranted property (its value is now used directly), and adding a permissionAlert property for the UI.

Maxime Delporte 2025-05-06 19:57:53 +02:00
parent 0190c52bd5
commit 79748524c6
3 changed files with 26 additions and 18 deletions

FrameHandler.swift → CameraModel.swift

@@ -1,5 +1,5 @@
//
- // FrameHandler.swift
+ // CameraModel.swift
// macamera
//
// Created by Maxime on 06/05/2025.
@@ -10,11 +10,13 @@
import AVFoundation
import CoreImage
- class FrameHandler: NSObject, ObservableObject {
+ class CameraModel: NSObject, ObservableObject {
@Published var frame: CGImage?
- private var permissionGranted: Bool = true
+ @Published var permissionAlert = false
+ @Published var output = AVCapturePhotoOutput()
private let captureSession = AVCaptureSession()
private let context = CIContext()
@@ -22,37 +24,43 @@ class FrameHandler: NSObject, ObservableObject {
super.init()
Task.detached(priority: .background) {
- self.permissionGranted = await self.checkPermission()
- self.setupCaptureSession()
+ let accessIsGranted = await self.checkPermission()
+ self.setupCaptureSession(accessIsGranted: accessIsGranted)
}
}
}
// MARK: - Convenience Methods
- extension FrameHandler {
+ extension CameraModel {
private func checkPermission() async -> Bool {
switch AVCaptureDevice.authorizationStatus(for: .video) {
- case .notDetermined:
- return await AVCaptureDevice.requestAccess(for: .video)
case .authorized:
return true
+ case .notDetermined:
+ let isGranted = await AVCaptureDevice.requestAccess(for: .video)
+ if isGranted == false {
+ permissionAlert.toggle()
+ }
+ return isGranted
default:
+ permissionAlert.toggle()
return false
}
}
- private func setupCaptureSession() {
- let videoOutput = AVCaptureVideoDataOutput()
- guard permissionGranted else { return }
+ private func setupCaptureSession(accessIsGranted: Bool) {
+ guard accessIsGranted else { return }
guard let videoDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) else { return }
guard let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else { return }
guard captureSession.canAddInput(videoDeviceInput) else { return }
captureSession.addInput(videoDeviceInput)
+ let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sampleBufferQueue"))
captureSession.addOutput(videoOutput)
@@ -64,7 +72,7 @@ extension FrameHandler {
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
- extension FrameHandler: AVCaptureVideoDataOutputSampleBufferDelegate {
+ extension CameraModel: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let cgImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
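The commit adds a published output of type AVCapturePhotoOutput but, per the commit message, retrieving photos from it is still in progress and nothing in this diff attaches or uses it. Below is a minimal sketch of where that could go next; the helpers attachPhotoOutput and capturePhoto and the AVCapturePhotoCaptureDelegate conformance are assumptions, not part of this commit, and the extension would need to live in CameraModel.swift to reach the private captureSession.

import AVFoundation

// Hypothetical sketch, not part of this commit.
// Must live in CameraModel.swift so the extension can see the private captureSession.
extension CameraModel: AVCapturePhotoCaptureDelegate {

    // Assumed helper: add the published photo output to the session once it is configured.
    func attachPhotoOutput() {
        guard captureSession.canAddOutput(output) else { return }
        captureSession.addOutput(output)
    }

    // Assumed helper: trigger a still capture; the delegate callback below receives the result.
    func capturePhoto() {
        output.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil, let data = photo.fileDataRepresentation() else { return }
        // Hand the encoded image data to the UI or persist it here.
        print("Captured photo: \(data.count) bytes")
    }
}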

FrameView.swift → CameraView.swift

@@ -1,5 +1,5 @@
//
- // FrameView.swift
+ // CameraView.swift
// macamera
//
// Created by Maxime on 06/05/2025.
@@ -7,7 +7,7 @@
import SwiftUI
- struct FrameView: View {
+ struct CameraView: View {
var image: CGImage?
private let label = Text("frame")
@@ -22,5 +22,5 @@ struct FrameView: View {
}
#Preview {
- FrameView()
+ CameraView()
}

ContentView.swift

@@ -8,10 +8,10 @@
import SwiftUI
struct ContentView: View {
- @StateObject private var model = FrameHandler()
+ @StateObject private var model = CameraModel()
var body: some View {
- FrameView(image: model.frame)
+ CameraView(image: model.frame)
.ignoresSafeArea()
}
}
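The diff does not yet show how the new permissionAlert flag reaches the UI. A minimal sketch, assuming SwiftUI's alert(_:isPresented:actions:) modifier, of how the existing ContentView body could surface it; the alert title and button are placeholders, not part of this commit.

import SwiftUI

// Hypothetical sketch, not part of this commit: a possible ContentView body
// that presents an alert when CameraModel toggles permissionAlert after
// camera access is denied.
struct ContentView: View {
    @StateObject private var model = CameraModel()

    var body: some View {
        CameraView(image: model.frame)
            .ignoresSafeArea()
            // $model.permissionAlert projects a Binding<Bool> for the alert.
            .alert("Camera access is required", isPresented: $model.permissionAlert) {
                Button("OK", role: .cancel) { }
            }
    }
}

Since permissionAlert is toggled from a detached background task in checkPermission, the update would likely need to hop to the main actor before SwiftUI observes it.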