iOS AVMultiCamPiP: проблема видеозаписи при использовании моста React Native

Программируем под IOS
Ответить
Anonymous
 iOS AVMultiCamPiP: проблема видеозаписи при использовании моста React Native

Сообщение Anonymous »

На данный момент я разрабатываю одновременную запись с передней и задней камер, и я нашёл этот код. Ниже привожу фрагмент; полный код можно посмотреть по этому URL (https://github.com/lax/learn-ios-swift- ... ller.swift). Вот мой класс: class MyCameraView: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

// MARK: View Controller Life Cycle

override func viewDidLoad() {
super.viewDidLoad()

// Allow users to double tap to switch between the front and back cameras being in a PiP
let togglePiPDoubleTapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(togglePiP))
togglePiPDoubleTapGestureRecognizer.numberOfTapsRequired = 2
view.addGestureRecognizer(togglePiPDoubleTapGestureRecognizer)

// Disable UI. Enable the UI later, if and only if the session starts running.
recordButton.isEnabled = false

// Set up the back and front video preview views.
backCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)
frontCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)

// Store the back and front video preview layers so we can connect them to their inputs
backCameraVideoPreviewLayer = backCameraVideoPreviewView.videoPreviewLayer
frontCameraVideoPreviewLayer = frontCameraVideoPreviewView.videoPreviewLayer

// Store the location of the pip's frame in relation to the full screen video preview
updateNormalizedPiPFrame()

UIDevice.current.beginGeneratingDeviceOrientationNotifications()

/*
Configure the capture session.
In general it is not safe to mutate an AVCaptureSession or any of its
inputs, outputs, or connections from multiple threads at the same time.

Don't do this on the main queue, because AVCaptureMultiCamSession.startRunning()
is a blocking call, which can take a long time. Dispatch session setup
to the sessionQueue so as not to block the main queue, which keeps the UI responsive.
*/
sessionQueue.async {
self.configureSession()
}

// Keep the screen awake
UIApplication.shared.isIdleTimerDisabled = true
}

override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)

sessionQueue.async {
switch self.setupResult {
case .success:
// Only setup observers and start the session running if setup succeeded.
self.addObservers()
self.session.startRunning()
self.isSessionRunning = self.session.isRunning

case .notAuthorized:
DispatchQueue.main.async {
let changePrivacySetting = "\(Bundle.main.applicationName) doesn't have permission to use the camera, please change privacy settings"
let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)

alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))

alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
style: .`default`,
handler: { _ in
if let settingsURL = URL(string: UIApplication.openSettingsURLString) {
UIApplication.shared.open(settingsURL,
options: [:],
completionHandler: nil)
}
}))

self.present(alertController, animated: true, completion: nil)
}

case .configurationFailed:
DispatchQueue.main.async {
let alertMsg = "Alert message when something goes wrong during capture session configuration"
let message = NSLocalizedString("Unable to capture media", comment: alertMsg)
let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)

alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))

self.present(alertController, animated: true, completion: nil)
}

case .multiCamNotSupported:
DispatchQueue.main.async {
let alertMessage = "Alert message when multi cam is not supported"
let message = NSLocalizedString("Multi Cam Not Supported", comment: alertMessage)
let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)

self.present(alertController, animated: true, completion: nil)
}
}
}
}

override func viewWillDisappear(_ animated: Bool) {
sessionQueue.async {
if self.setupResult == .success {
self.session.stopRunning()
self.isSessionRunning = self.session.isRunning
self.removeObservers()
}
}

super.viewWillDisappear(animated)
}

@objc // Expose to Objective-C for use with #selector()
private func didEnterBackground(notification: NSNotification) {
// Free up resources.
dataOutputQueue.async {
self.renderingEnabled = false
self.videoMixer.reset()
self.currentPiPSampleBuffer = nil
}
}

@objc // Expose to Objective-C for use with #selector()
func willEnterForground(notification: NSNotification) {
dataOutputQueue.async {
self.renderingEnabled = true
}
}

// MARK: KVO and Notifications

private var sessionRunningContext = 0

private var keyValueObservations = [NSKeyValueObservation]()

private func addObservers() {
let keyValueObservation = session.observe(\.isRunning, options: .new) { _, change in
guard let isSessionRunning = change.newValue else { return }

DispatchQueue.main.async {
self.recordButton.isEnabled = isSessionRunning
}
}
keyValueObservations.append(keyValueObservation)

let systemPressureStateObservation = observe(\.self.backCameraDeviceInput?.device.systemPressureState, options: .new) { _, change in
guard let systemPressureState = change.newValue as? AVCaptureDevice.SystemPressureState else { return }
self.setRecommendedFrameRateRangeForPressureState(systemPressureState)
}
keyValueObservations.append(systemPressureStateObservation)

NotificationCenter.default.addObserver(self,
selector: #selector(didEnterBackground),
name: UIApplication.didEnterBackgroundNotification,
object: nil)

NotificationCenter.default.addObserver(self,
selector: #selector(willEnterForground),
name: UIApplication.willEnterForegroundNotification,
object: nil)

NotificationCenter.default.addObserver(self,
selector: #selector(sessionRuntimeError),
name: .AVCaptureSessionRuntimeError,
object: session)

// A session can run only when the app is full screen. It will be interrupted in a multi-app layout.
// Add observers to handle these session interruptions and inform the user.
// See AVCaptureSessionWasInterruptedNotification for other interruption reasons.

NotificationCenter.default.addObserver(self,
selector: #selector(sessionWasInterrupted),
name: .AVCaptureSessionWasInterrupted,
object: session)

NotificationCenter.default.addObserver(self,
selector: #selector(sessionInterruptionEnded),
name: .AVCaptureSessionInterruptionEnded,
object: session)
}

private func removeObservers() {
for keyValueObservation in keyValueObservations {
keyValueObservation.invalidate()
}

keyValueObservations.removeAll()
}

// MARK: Video Preview PiP Management

private var pipDevicePosition: AVCaptureDevice.Position = .front

private var normalizedPipFrame = CGRect.zero

@IBOutlet private var frontCameraPiPConstraints: [NSLayoutConstraint]!

@IBOutlet private var backCameraPiPConstraints: [NSLayoutConstraint]!

@objc // Expose to Objective-C for use with #selector()
private func togglePiP() {
// Disable animations so the views move immediately
CATransaction.begin()
UIView.setAnimationsEnabled(false)
CATransaction.setDisableActions(true)

if pipDevicePosition == .front {
NSLayoutConstraint.deactivate(frontCameraPiPConstraints)
NSLayoutConstraint.activate(backCameraPiPConstraints)
view.sendSubviewToBack(frontCameraVideoPreviewView)
pipDevicePosition = .back
} else {
NSLayoutConstraint.deactivate(backCameraPiPConstraints)
NSLayoutConstraint.activate(frontCameraPiPConstraints)
view.sendSubviewToBack(backCameraVideoPreviewView)
pipDevicePosition = .front
}

CATransaction.commit()
UIView.setAnimationsEnabled(true)
CATransaction.setDisableActions(false)
}

private func updateNormalizedPiPFrame() {
let fullScreenVideoPreviewView: PreviewView
let pipVideoPreviewView: PreviewView

if pipDevicePosition == .back {
fullScreenVideoPreviewView = frontCameraVideoPreviewView
pipVideoPreviewView = backCameraVideoPreviewView
} else if pipDevicePosition == .front {
fullScreenVideoPreviewView = backCameraVideoPreviewView
pipVideoPreviewView = frontCameraVideoPreviewView
} else {
fatalError("Unexpected pip device position: \(pipDevicePosition)")
}

let pipFrameInFullScreenVideoPreview = pipVideoPreviewView.convert(pipVideoPreviewView.bounds, to: fullScreenVideoPreviewView)
let normalizedTransform = CGAffineTransform(scaleX: 1.0 / fullScreenVideoPreviewView.frame.width, y: 1.0 / fullScreenVideoPreviewView.frame.height)

normalizedPipFrame = pipFrameInFullScreenVideoPreview.applying(normalizedTransform)
}

// MARK: Capture Session Management

@IBOutlet private var resumeButton: UIButton!

@IBOutlet private var cameraUnavailableLabel: UILabel!

private enum SessionSetupResult {
case success
case notAuthorized
case configurationFailed
case multiCamNotSupported
}

private let session = AVCaptureMultiCamSession()

private var isSessionRunning = false

private let sessionQueue = DispatchQueue(label: "session queue") // Communicate with the session and other session objects on this queue.

private let dataOutputQueue = DispatchQueue(label: "data output queue")

private var setupResult: SessionSetupResult = .success

@objc dynamic private(set) var backCameraDeviceInput: AVCaptureDeviceInput?

private let backCameraVideoDataOutput = AVCaptureVideoDataOutput()

@IBOutlet private var backCameraVideoPreviewView: PreviewView!

private weak var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?

private var frontCameraDeviceInput: AVCaptureDeviceInput?

private let frontCameraVideoDataOutput = AVCaptureVideoDataOutput()

@IBOutlet private var frontCameraVideoPreviewView: PreviewView!

private weak var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?

private var microphoneDeviceInput: AVCaptureDeviceInput?

private let backMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()

private let frontMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()

// Must be called on the session queue
private func configureSession() {
guard setupResult == .success else { return }

guard AVCaptureMultiCamSession.isMultiCamSupported else {
print("MultiCam not supported on this device")
setupResult = .multiCamNotSupported
return
}

// When using AVCaptureMultiCamSession, it is best to manually add connections from AVCaptureInputs to AVCaptureOutputs
session.beginConfiguration()
defer {
session.commitConfiguration()
if setupResult == .success {
checkSystemCost()
}
}

guard configureBackCamera() else {
setupResult = .configurationFailed
return
}

guard configureFrontCamera() else {
setupResult = .configurationFailed
return
}

guard configureMicrophone() else {
setupResult = .configurationFailed
return
}
}

private func configureBackCamera() -> Bool {
session.beginConfiguration()
defer {
session.commitConfiguration()
}

// Find the back camera
guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
print("Could not find the back camera")
return false
}

// Add the back camera input to the session
do {
backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)

guard let backCameraDeviceInput = backCameraDeviceInput,
session.canAddInput(backCameraDeviceInput) else {
print("Could not add back camera device input")
return false
}
session.addInputWithNoConnections(backCameraDeviceInput)
} catch {
print("Could not create back camera device input: \(error)")
return false
}

// Find the back camera device input's video port
guard let backCameraDeviceInput = backCameraDeviceInput,
let backCameraVideoPort = backCameraDeviceInput.ports(for: .video,
sourceDeviceType: backCamera.deviceType,
sourceDevicePosition: backCamera.position).first else {
print("Could not find the back camera device input's video port")
return false
}

// Add the back camera video data output
guard session.canAddOutput(backCameraVideoDataOutput) else {
print("Could not add the back camera video data output")
return false
}
session.addOutputWithNoConnections(backCameraVideoDataOutput)
// Check if CVPixelFormat Lossy or Lossless Compression is supported

if backCameraVideoDataOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossy_32BGRA) {
// Set the Lossy format
print("Selecting lossy pixel format")
backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_Lossy_32BGRA)]
} else if backCameraVideoDataOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_32BGRA) {
// Set the Lossless format
print("Selecting a lossless pixel format")
backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_Lossless_32BGRA)]
} else {
// Set to the fallback format
print("Selecting a 32BGRA pixel format")
backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
}

backCameraVideoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)

// Connect the back camera device input to the back camera video data output
let backCameraVideoDataOutputConnection = AVCaptureConnection(inputPorts: [backCameraVideoPort], output: backCameraVideoDataOutput)
guard session.canAddConnection(backCameraVideoDataOutputConnection) else {
print("Could not add a connection to the back camera video data output")
return false
}
session.addConnection(backCameraVideoDataOutputConnection)
backCameraVideoDataOutputConnection.videoOrientation = .portrait

// Connect the back camera device input to the back camera video preview layer
guard let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer else {
return false
}
let backCameraVideoPreviewLayerConnection = AVCaptureConnection(inputPort: backCameraVideoPort, videoPreviewLayer: backCameraVideoPreviewLayer)
guard session.canAddConnection(backCameraVideoPreviewLayerConnection) else {
print("Could not add a connection to the back camera video preview layer")
return false
}
session.addConnection(backCameraVideoPreviewLayerConnection)

return true
}
< /code>
, а затем для моста React Native я сделал этот класс:
import Foundation

@objc(MyCameraViewManager)
public class MyCameraViewManager: RCTViewManager {
public override func view() -> UIView {
return MyCameraView()
}
}
< /code>
Но когда я пытаюсь запустить этот код, получая эту ошибку. Как это исправить? Введите описание изображения здесь

Подробнее здесь: https://stackoverflow.com/questions/795 ... e-bridging
Ответить

Быстрый ответ

Изменение регистра текста: 
Смайлики
:) :( :oops: :roll: :wink: :muza: :clever: :sorry: :angel: :read: *x)
Ещё смайлики…
   
К этому ответу прикреплено по крайней мере одно вложение.

Если вы не хотите добавлять вложения, оставьте поля пустыми.

Максимально разрешённый размер вложения: 15 МБ.

Вернуться в «IOS»