У меня есть DataModel:
Код: Выделить всё
final class DataModel: ObservableObject {
let camera = Camera()
@Published var frame: Image?
var isPhotosLoaded = false
init() {
print("DataModel init")
Task {
await handleCameraPreviews()
}
}
deinit {
print("DataModel deinit")
}
func handleCameraPreviews() async {
let imageStream = camera.previewStream
.map { $0.image }
for await image in imageStream {
Task { @MainActor in
// CIFilters to come...
frame = image
}
}
}
}
Код: Выделить всё
class Camera: NSObject {
...
deinit() {
print("Camera > deinit")
}
private var addToPreviewStream: ((CIImage) -> Void)?
lazy var previewStream: AsyncStream = {
AsyncStream { continuation in
addToPreviewStream = { ciImage in
if !self.isPreviewPaused {
continuation.yield(ciImage)
}
}
}
}()
...
}
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
}
}
Каждый раз, когда я перехожу ко второму виду, я никогда не вижу «deinit», напечатанного ни из DataModel(), ни из Camera(). И когда я возвращаюсь к представлению камеры, я вижу, что DataModel() и Camera() печатают «init».
Я признаю, что очень мало понимаю в управлении памятью, но подозреваю, что существует сильный цикл сохранения.
Когда я закомментирую функцию handleCameraPreviews(), проблема исчезает. Я подозреваю, что именно отсюда происходит цикл сохранения или какая-то другая утечка, потому что оба объекта инициализируют и деинитируют, как и ожидалось.
Я попробовал обновить handleCameraPreviews(), чтобы добавить [weak self]:
Код: Выделить всё
init() {
print("DataModel init")
cameraTask = Task {
await camera.start()
}
previewTask = Task { [weak self] in
await self?.handleCameraPreviews()
}
}
func handleCameraPreviews() async {
let stream = camera.previewStream
for await frame in stream {
try Task.checkCancellation()
await MainActor.run { [weak self] in
guard let self else { return }
self.frame = frame.image
}
}
Спасибо!
Изменить 01:
В соответствии с просьбой, вот полный код:
Camera()
Код: Выделить всё
import AVFoundation
import CoreImage
import UIKit
import os.log
class Camera: NSObject {
private let captureSession = AVCaptureSession()
private var isCaptureSessionConfigured = false
private var deviceInput: AVCaptureDeviceInput?
private var photoOutput: AVCapturePhotoOutput?
private var videoOutput: AVCaptureVideoDataOutput?
private var sessionQueue: DispatchQueue!
private var captureSessionNotConfigured: Bool {
!isCaptureSessionConfigured
}
private var captureDevice: AVCaptureDevice? {
didSet {
guard let captureDevice = captureDevice else { return }
logger.debug("Using capture device: \(captureDevice.localizedName)")
}
}
var sessionIsRunning: Bool {
captureSession.isRunning
}
var sessionIsNotRunning: Bool {
!captureSession.isRunning
}
var isPreviewPaused = false
private var addToPreviewStream: ((CIImage) -> Void)?
lazy var previewStream: AsyncStream = {
AsyncStream { continuation in
addToPreviewStream = { ciImage in
if !self.isPreviewPaused {
continuation.yield(ciImage)
}
}
}
}()
override init() {
super.init()
initialize()
}
deinit {
logger.debug(">>> Camera deinit")
}
private func initialize() {
logger.debug(">>> Camera Initialize")
sessionQueue = DispatchQueue(label: "session queue")
captureDevice = AVCaptureDevice.default(for: .video)
}
func start() async {
let authorized = await checkAuthorization()
guard authorized else {
logger.error("Camera access was not authorized.")
return
}
if isCaptureSessionConfigured && sessionIsNotRunning {
sessionQueue.async { [weak self] in
self?.captureSession.startRunning()
}
return
}
// not configured, so configure now
sessionQueue.async { [weak self] in
self?.configureCaptureSession { success in
guard success else { return }
self?.captureSession.startRunning()
}
}
}
func stop() {
guard isCaptureSessionConfigured else { return }
if sessionIsRunning {
sessionQueue.async {
self.captureSession.stopRunning()
}
}
}
private func configureCaptureSession(completionHandler: (_ success: Bool) -> Void) {
var success = false
self.captureSession.beginConfiguration()
defer {
self.captureSession.commitConfiguration()
completionHandler(success)
}
guard
let captureDevice = captureDevice,
let deviceInput = try? AVCaptureDeviceInput(device: captureDevice)
else {
logger.error("Failed to obtain video input.")
return
}
let photoOutput = AVCapturePhotoOutput()
captureSession.sessionPreset = AVCaptureSession.Preset.photo
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "VideoDataOutputQueue"))
guard captureSession.canAddInput(deviceInput) else {
logger.error("Unable to add device input to capture session.")
return
}
guard captureSession.canAddOutput(photoOutput) else {
logger.error("Unable to add photo output to capture session.")
return
}
guard captureSession.canAddOutput(videoOutput) else {
logger.error("Unable to add video output to capture session.")
return
}
captureSession.addInput(deviceInput)
captureSession.addOutput(photoOutput)
captureSession.addOutput(videoOutput)
self.deviceInput = deviceInput
self.photoOutput = photoOutput
self.videoOutput = videoOutput
photoOutput.maxPhotoQualityPrioritization = .speed
updateVideoOutputConnection()
isCaptureSessionConfigured = true
success = true
}
private func checkAuthorization() async -> Bool {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
logger.debug("Camera access authorized.")
return true
case .notDetermined:
logger.debug("Camera access not determined.")
sessionQueue.suspend()
let status = await AVCaptureDevice.requestAccess(for: .video)
sessionQueue.resume()
return status
case .denied:
logger.debug("Camera access denied.")
return false
case .restricted:
logger.debug("Camera library access restricted.")
return false
@unknown default:
return false
}
}
private func updateVideoOutputConnection() {
if let videoOutput = videoOutput,
let videoOutputConnection = videoOutput.connection(with: .video) {
let angle = 90.0
if videoOutputConnection.isVideoRotationAngleSupported(angle) {
videoOutputConnection.videoRotationAngle = angle
}
}
}
func takePhoto() {
guard let photoOutput = self.photoOutput else { return }
sessionQueue.async {
var photoSettings = AVCapturePhotoSettings()
if photoOutput.availablePhotoCodecTypes.contains(.hevc) {
photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
}
photoOutput.capturePhoto(with: photoSettings, delegate: self)
}
}
}
extension Camera: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
}
}
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
}
}
fileprivate let logger = Logger(subsystem: "com.apple.swiftplaygroundscontent.capturingphotos", category: "Camera")
Код: Выделить всё
import Combine
import AVFoundation
import SwiftUI
import os.log
final class DataModel: ObservableObject {
let camera = Camera()
@Published var frame: Image?
var isPhotosLoaded = false
init() {
print("DataModel init")
Task {
await handleCameraPreviews()
}
}
deinit {
print("DataModel deinit")
}
func handleCameraPreviews() async {
let imageStream = camera.previewStream
.map { $0.image }
for await image in imageStream {
Task { @MainActor in
frame = image
}
}
}
}
fileprivate extension CIImage {
var image: Image? {
let ciContext = CIContext()
guard let cgImage = ciContext.createCGImage(self, from: self.extent) else { return nil }
return Image(decorative: cgImage, scale: 1, orientation: .up)
}
}
fileprivate extension Image.Orientation {
init(_ cgImageOrientation: CGImagePropertyOrientation) {
switch cgImageOrientation {
case .up: self = .up
case .upMirrored: self = .upMirrored
case .down: self = .down
case .downMirrored: self = .downMirrored
case .left: self = .left
case .leftMirrored: self = .leftMirrored
case .right: self = .right
case .rightMirrored: self = .rightMirrored
}
}
}
fileprivate let logger = Logger(subsystem: "com.apple.swiftplaygroundscontent.capturingphotos", category: "DataModel")
Код: Выделить всё
struct ContentView: View {
enum Screen {
case camera
case settings
}
@State var currentView = Screen.camera
var body: some View {
VStack {
VStack {
switch currentView {
case .camera: CameraView()
case .settings: SettingsView()
}
}.frame(maxHeight: .infinity)
HStack {
Button("Camera") {
currentView = .camera
}.padding()
Button("Settings") {
currentView = .settings
}.padding()
}
}
}
}
Код: Выделить всё
struct CameraView: View {
@StateObject private var model = DataModel()
var body: some View {
VStack {
if let frame = model.frame {
frame
.resizable()
.scaledToFit()
}
}
.task {
await model.camera.start()
}
}
}
Код: Выделить всё
struct SettingsView: View {
var body: some View {
Text("Settings View")
}
}

Подробнее здесь: https://stackoverflow.com/questions/798 ... p-tutorial
Мобильная версия