Код: Выделить всё
import Foundation
import CoreMedia
import AVFoundation
import ffmpegkit
/// Decodes a UDP video stream with FFmpeg into raw NV12 frames via a named pipe
/// and forwards each frame to a delegate as it becomes available.
final class FFmpegBufferProcessor: AnyBufferProcessor {

    weak var delegate: BufferProcessorDelegate?

    /// FIFO that the FFmpeg session writes raw decoded frames into.
    private var pipePath: String = NSTemporaryDirectory() + "ffmpeg_pipe"
    /// True while a decode session is running. NOTE(review): read and written
    /// from multiple queues without synchronization — consider an atomic/lock.
    private var isProcessing: Bool = false
    private var videoWidth = 1920
    private var videoHeight = 1080
    private let pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange

    init() {
        setupPipe()
    }

    deinit {
        cleanupPipe()
    }

    /// Creates the FIFO, removing any stale file left behind by a previous run.
    private func setupPipe() {
        do {
            if FileManager.default.fileExists(atPath: pipePath) {
                try FileManager.default.removeItem(atPath: pipePath)
            }
            // Pass the Swift String directly: it bridges to `const char *`.
            // The previous `pipePath.cString(using: .utf8)` produced an
            // Optional `[CChar]?`, which is unsafe to push through the C bridge.
            let result = mkfifo(pipePath, 0o644)
            if result != 0 {
                print("\(#function); Pipe creation failed.")
                return
            }
        } catch {
            print("\(#function); Setup pipe error: \(error.localizedDescription)")
        }
    }

    /// Removes the FIFO created in `setupPipe()`.
    private func cleanupPipe() {
        do {
            try FileManager.default.removeItem(atPath: pipePath)
        } catch {
            print("\(#function); Cleanup pipe error: \(error.localizedDescription)")
        }
    }

    /// Starts an FFmpeg session that decodes `udpURL` to raw NV12 into the pipe,
    /// then begins draining the pipe on a background queue.
    /// - Parameter udpURL: The UDP stream URL (e.g. `udp://0.0.0.0:8554`).
    func startProcessingStream(from udpURL: String) {
        guard !isProcessing else {
            print("\(#function); Already processing stream.")
            return
        }
        isProcessing = true
        // `-y` is required: the FIFO already exists on disk, and without it
        // FFmpeg refuses to "overwrite" the output path in non-interactive
        // mode and exits before writing a single frame.
        let command = "-y -i \(udpURL) -f rawvideo -pix_fmt nv12 \(pipePath)"
        FFmpegKit.executeAsync(command) { [weak self] session in
            let returnCode = session?.getReturnCode()
            if ReturnCode.isSuccess(returnCode) {
                print("\(#function); FFmpeg session completed.")
            } else {
                print("\(#function); FFmpeg session error: \(String(describing: session?.getFailStackTrace())).")
            }
            self?.isProcessing = false
        }
        readFromPipe()
    }

    /// Stops the read loop and cancels any running FFmpeg session.
    func stopProcessingStream() {
        isProcessing = false
        FFmpegKit.cancel()
    }
}
// MARK: - Private methods
private extension FFmpegBufferProcessor {
func readFromPipe() {
DispatchQueue.global(qos: .background).async { [unowned self] in
guard let fileHandle = FileHandle(forReadingAtPath: self.pipePath) else {
print("\(#function); Fail to read file handle from pipe path.")
return
}
autoreleasepool {
while self.isProcessing {
let frameSize = self.videoWidth * self.videoHeight * 3 / 2
let rawData = fileHandle.readData(ofLength: frameSize)
if rawData.isEmpty {
print("\(#function); Pipe closed / no more data to read.")
break
}
self.handleRawFrameData(rawData)
}
fileHandle.closeFile()
}
}
}
func handleRawFrameData(_ data: Data) {
let width = 1920
let height = 1080
// Creating the Pixel Buffer (if possible)
guard let pixelBuffer = createPixelBuffer(from: data, width: width, height: height) else {
print("\(#function); Failed to create pixel buffer")
return
}
var timing = CMSampleTimingInfo(duration: CMTime(value: 1, timescale: 30), presentationTimeStamp: .zero, decodeTimeStamp: .invalid)
// Creating the Sample Buffer (if possible)
guard let sampleBuffer = createSampleBuffer(from: pixelBuffer, timing: &timing) else {
print("\(#function); Failed to create sample buffer")
return
}
delegate?.bufferProcessor(self, didOutput: sampleBuffer)
}
}
[img]https://i.sstatic.net/mk5efYDs.png[/img]
Также небольшое примечание: я использую AVSampleBufferDisplayLayer для постановки в очередь и отображения буферов, но, очевидно, он не отображается.
Что мне делать, чтобы это исправить? Или, может быть, есть какой-то другой способ получить буферы кадров с камеры GoPro и показать их в iOS? Любая помощь будет оценена по достоинству. Спасибо.
Подробнее здесь: https://stackoverflow.com/questions/792 ... pro-camera
Мобильная версия