Updating README with inspirations. Updating the way the camera is presented. Creating PhotoPreviewView, allowing us to see the captured picture.

Maxime Delporte 2025-05-06 23:56:35 +02:00
parent 79748524c6
commit c3c58b7b39
8 changed files with 229 additions and 123 deletions

README.md

@@ -1,4 +1,15 @@
# macamera
Personal Camera.
AVFoundation Exploration.
I've found three ways to do the same thing (showing a live camera feed in SwiftUI):
- Original inspiration: https://github.com/daved01/LiveCameraSwiftUI/tree/main
- Then: https://www.youtube.com/watch?v=T7R7rNOIjfc
- Finally: https://www.youtube.com/watch?v=ZmPJBiwgZoQ
The last one is the best for my needs.
The goal right now is to produce a working app.
Some questions remain open.
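Concretely, the change of approach in this commit is from converting every camera frame to a `CGImage` by hand (the deleted `CameraModel` below) to handing the capture session to an `AVCaptureVideoPreviewLayer` and letting it render. A minimal sketch of the preview-layer idea, using hypothetical `PreviewView` / `CameraPreview` names rather than the commit's own `CameraService` / `CameraView`:

```swift
import SwiftUI
import UIKit
import AVFoundation

// Sketch only: a UIView backed by an AVCaptureVideoPreviewLayer, wrapped for SwiftUI.
// The AVCaptureSession is assumed to be configured and started elsewhere.
final class PreviewView: UIView {
    override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }
    var previewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
}

struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession

    func makeUIView(context: Context) -> PreviewView {
        let view = PreviewView()
        view.previewLayer.session = session
        view.previewLayer.videoGravity = .resizeAspectFill
        return view
    }

    func updateUIView(_ uiView: PreviewView, context: Context) { }
}
```

Because the preview layer is the view's backing layer here, it resizes with the view for free, which is the main practical difference from publishing a `CGImage` per frame.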

CameraModel.swift (deleted)

@@ -1,92 +0,0 @@
//
// CameraModel.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//
// Original repository: https://github.com/daved01/LiveCameraSwiftUI/tree/main

import AVFoundation
import CoreImage

class CameraModel: NSObject, ObservableObject {

    @Published var frame: CGImage?
    @Published var permissionAlert = false
    @Published var output = AVCapturePhotoOutput()

    private let captureSession = AVCaptureSession()
    private let context = CIContext()

    override init() {
        super.init()
        Task.detached(priority: .background) {
            let accessIsGranted = await self.checkPermission()
            self.setupCaptureSession(accessIsGranted: accessIsGranted)
        }
    }
}

// MARK: - Convenience Methods

extension CameraModel {

    private func checkPermission() async -> Bool {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            return true
        case .notDetermined:
            let isGranted = await AVCaptureDevice.requestAccess(for: .video)
            if isGranted == false {
                permissionAlert.toggle()
            }
            return isGranted
        default:
            permissionAlert.toggle()
            return false
        }
    }

    private func setupCaptureSession(accessIsGranted: Bool) {
        guard accessIsGranted else { return }
        guard let videoDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) else { return }
        guard let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else { return }
        guard captureSession.canAddInput(videoDeviceInput) else { return }

        captureSession.addInput(videoDeviceInput)

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sampleBufferQueue"))
        captureSession.addOutput(videoOutput)
        videoOutput.connection(with: .video)?.videoRotationAngle = 90

        captureSession.startRunning()
    }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

extension CameraModel: AVCaptureVideoDataOutputSampleBufferDelegate {

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let cgImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
        // UI updates must be performed on the main queue.
        DispatchQueue.main.async { [unowned self] in
            self.frame = cgImage
        }
    }

    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> CGImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return cgImage
    }
}

CameraView.swift (deleted)

@@ -1,26 +0,0 @@
//
// CameraView.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//

import SwiftUI

struct CameraView: View {

    var image: CGImage?
    private let label = Text("frame")

    var body: some View {
        if let image = image {
            Image(image, scale: 1.0, orientation: .up, label: label)
        } else {
            Color.black
        }
    }
}

#Preview {
    CameraView()
}

ContentView.swift

@@ -8,11 +8,18 @@
 import SwiftUI
 struct ContentView: View {
-    @StateObject private var model = CameraModel()
+    @State private var capturedImage: UIImage?
     var body: some View {
-        CameraView(image: model.frame)
+        if capturedImage == nil {
+            CustomCameraView(capturedImage: $capturedImage)
+                .ignoresSafeArea()
+        } else {
+            PhotoPreviewView(capturedImage: $capturedImage)
+                .ignoresSafeArea()
+        }
     }
 }

CameraService.swift (new)

@@ -0,0 +1,74 @@
//
// CameraService.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//

import AVFoundation
import Foundation

class CameraService {
    var session: AVCaptureSession?
    var delegate: AVCapturePhotoCaptureDelegate?

    let output = AVCapturePhotoOutput()
    let previewLayer = AVCaptureVideoPreviewLayer()
}

// MARK: - Public Interface

extension CameraService {

    func start(delegate: AVCapturePhotoCaptureDelegate) async {
        self.delegate = delegate
        let accessIsGranted = await self.checkPermission()
        self.setupCaptureSession(accessIsGranted: accessIsGranted)
    }

    func capturePhoto(with settings: AVCapturePhotoSettings = AVCapturePhotoSettings()) {
        guard let delegate = delegate else { return }
        output.capturePhoto(with: settings, delegate: delegate)
    }
}

// MARK: - Initialization

extension CameraService {

    private func checkPermission() async -> Bool {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            return true
        case .notDetermined:
            return await AVCaptureDevice.requestAccess(for: .video)
        default:
            return false
        }
    }

    private func setupCaptureSession(accessIsGranted: Bool) {
        guard accessIsGranted,
              let dualCamera = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back),
              let input = try? AVCaptureDeviceInput(device: dualCamera) else {
            return
        }

        let session = AVCaptureSession()
        guard session.canAddInput(input), session.canAddOutput(output) else {
            return
        }

        session.addInput(input)
        session.addOutput(output)
        output.connection(with: .video)?.videoRotationAngle = 90

        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.session = session

        session.startRunning()
        self.session = session
    }
}
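CameraService is driven in two steps: start(delegate:) requests permission, builds the session, and starts it running, then capturePhoto() hands the shot to whatever AVCapturePhotoCaptureDelegate was registered. A hedged sketch of that call order, with a hypothetical PhotoHandler standing in for the commit's real delegate (CameraView.Coordinator, further below):

```swift
import AVFoundation

// Hypothetical driver, only to illustrate the call order into CameraService.
final class PhotoHandler: NSObject, AVCapturePhotoCaptureDelegate {
    let service = CameraService()

    func startAndShoot() async {
        await service.start(delegate: self)  // permission check + session setup + startRunning()
        service.capturePhoto()               // in the app this is triggered by the shutter button
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: (any Error)?) {
        guard error == nil, let data = photo.fileDataRepresentation() else { return }
        print("Captured \(data.count) bytes")
    }
}
```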

CustomCameraView.swift (new)

@@ -0,0 +1,44 @@
//
// CustomCameraView.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//

import SwiftUI

struct CustomCameraView: View {

    let cameraService = CameraService()
    @Binding var capturedImage: UIImage?

    var body: some View {
        ZStack {
            CameraView(cameraService: cameraService, didFinishProcessingPhoto: { result in
                switch result {
                case .success(let photo):
                    if let data = photo.fileDataRepresentation() {
                        capturedImage = UIImage(data: data)
                    } else {
                        print("Error: no image data found.")
                    }
                case .failure(let error):
                    print(error.localizedDescription)
                }
            })

            VStack {
                Spacer()

                Button(action: {
                    cameraService.capturePhoto()
                }, label: {
                    Image(systemName: "circle")
                        .font(.system(size: 80))
                        .foregroundStyle(.white)
                })
                .padding(.bottom, 42)
            }
        }
    }
}

CameraView.swift (new)

@@ -0,0 +1,54 @@
//
// CameraView.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//

import SwiftUI
import UIKit
import AVFoundation

struct CameraView: UIViewControllerRepresentable {

    typealias UIViewControllerType = UIViewController
    typealias ProcessingPhotoResult = Result<AVCapturePhoto, Error>

    let cameraService: CameraService
    let didFinishProcessingPhoto: (ProcessingPhotoResult) -> Void

    func makeUIViewController(context: Context) -> UIViewController {
        Task.detached {
            await cameraService.start(delegate: context.coordinator)
        }

        let viewController = UIViewController()
        viewController.view.backgroundColor = .black
        viewController.view.layer.addSublayer(cameraService.previewLayer)
        cameraService.previewLayer.frame = viewController.view.bounds
        return viewController
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(self, didFinishProcessingPhoto: didFinishProcessingPhoto)
    }

    func updateUIViewController(_ uiViewController: UIViewController, context: Context) { }

    class Coordinator: NSObject, AVCapturePhotoCaptureDelegate {

        let parent: CameraView
        private var didFinishProcessingPhoto: (ProcessingPhotoResult) -> Void

        init(_ parent: CameraView, didFinishProcessingPhoto: @escaping (ProcessingPhotoResult) -> Void) {
            self.parent = parent
            self.didFinishProcessingPhoto = didFinishProcessingPhoto
        }

        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: (any Error)?) {
            if let error = error {
                didFinishProcessingPhoto(.failure(error))
                return
            }

            didFinishProcessingPhoto(.success(photo))
        }
    }
}
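One open point in the representable above: previewLayer.frame is set once from the controller's initial bounds and never updated, so the layer will not track later size changes (rotation, for instance). If that matters, updateUIViewController could keep it in sync; a sketch of that, assuming the layer should simply fill the root view:

```swift
func updateUIViewController(_ uiViewController: UIViewController, context: Context) {
    // Sketch (not part of this commit): keep the preview layer sized to the hosting view.
    CATransaction.begin()
    CATransaction.setDisableActions(true)  // avoid implicit animation while resizing
    cameraService.previewLayer.frame = uiViewController.view.bounds
    CATransaction.commit()
}
```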

PhotoPreviewView.swift (new)

@@ -0,0 +1,34 @@
//
// PhotoPreviewView.swift
// macamera
//
// Created by Maxime on 06/05/2025.
//

import SwiftUI

struct PhotoPreviewView: View {

    @Binding var capturedImage: UIImage?

    var body: some View {
        if let capturedImage = capturedImage {
            ZStack(alignment: .topLeading) {
                Image(uiImage: capturedImage)
                    .resizable()
                    .scaledToFill()

                Button(action: {
                    self.capturedImage = nil
                }, label: {
                    Image(systemName: "xmark.circle.fill")
                        .font(.system(size: 42))
                        .foregroundStyle(.white)
                })
                .padding(.leading, 42)
                .padding(.top, 62)
            }
            .ignoresSafeArea()
        }
    }
}