// У меня есть две проблемы с отображением видеоизображения:
// в основном представлении и в окне Picture-in-Picture (PiP).
// Renders raw decoded video frames (BGRA bytes supplied via `userData`) through an
// AVSampleBufferDisplayLayer, and exposes the same layer to Picture-in-Picture.
class VideoRenderer : UIView {
// Layer that frames are enqueued on and that backs the PiP content source.
let displayLayer = AVSampleBufferDisplayLayer()
private var pipController: AVPictureInPictureController?
// Decoder-side state: raw image pointer, dimensions, last render timestamp (C struct).
var userData = mydata_t()
// Player that PiP play/pause requests are forwarded to.
weak var delegate : VLCPlayer?
// Count of frames enqueued so far.
private var frameIndex: Int64 = 0
// Estimated frames per second, updated from measured inter-frame deltas.
private var fps: Int32 = 25
// Maximum number of frame-time samples kept for the FPS estimate.
private let maxFrameWindow = 60
// Sliding window of inter-frame arrival times (seconds).
private var frameTimes: [Double] = []
// Control timebase that tells the display layer when to present each sample.
private var timebase: CMTimebase?
// NOTE(review): this makes the view's *backing* layer an AVSampleBufferDisplayLayer,
// yet setupViewAndPiP() adds a *second* AVSampleBufferDisplayLayer (`displayLayer`)
// as a sublayer. Frames are enqueued only on the sublayer, so the backing layer
// stays empty — likely the cause of the reported display problems. Consider using
// `layer as! AVSampleBufferDisplayLayer` as the single display layer instead.
override class var layerClass: AnyClass {
return AVSampleBufferDisplayLayer.self
}
// Programmatic initializer: configures the display layer, timebase, and PiP controller.
override init(frame: CGRect) {
super.init(frame: frame)
setupViewAndPiP()
}
// Storyboard/XIB initializer: same one-time setup as init(frame:).
required init?(coder: NSCoder) {
super.init(coder: coder)
setupViewAndPiP()
}
/// Keeps the video layer pinned to the view's bounds on every layout pass.
override func layoutSubviews() {
    super.layoutSubviews()
    // Disable implicit CAAnimation so the video layer follows the view's
    // bounds immediately during rotation/resize instead of lagging behind
    // with an animated frame change.
    CATransaction.begin()
    CATransaction.setDisableActions(true)
    displayLayer.frame = bounds
    CATransaction.commit()
    print("Change view: \(bounds.width)x\(bounds.height)")
}
/// Re-attaches and re-sizes the display layer whenever the view lands in a window
/// (the sublayer can be detached while the view is off-screen).
override func didMoveToWindow() {
    super.didMoveToWindow()
    print("Window attached: \(window != nil)")
    guard window != nil else { return }
    displayLayer.frame = bounds
    // Only add once; repeated addSublayer calls would reorder the layer tree.
    if displayLayer.superlayer == nil {
        layer.addSublayer(displayLayer)
    }
}
/// Creates a host-clock-driven control timebase for the display layer.
/// The timebase starts paused at t = 0; `resumeTimebase()` starts it running.
private func setupTimebase() {
    let status = CMTimebaseCreateWithSourceClock(
        allocator: kCFAllocatorDefault,
        sourceClock: CMClockGetHostTimeClock(),
        timebaseOut: &timebase
    )
    // Original code ignored the OSStatus and contained a redundant
    // self-assignment (`timebase = tb`); both fixed here.
    guard status == noErr, let tb = timebase else {
        print("Failed to create CMTimebase, status: \(status)")
        return
    }
    CMTimebaseSetTime(tb, time: .zero)
    CMTimebaseSetRate(tb, rate: 0.0) // paused until playback actually starts
    displayLayer.controlTimebase = tb
}
/// One-time setup: attaches the display layer, creates the control timebase,
/// and (where supported) wires the layer into a Picture-in-Picture controller.
private func setupViewAndPiP() {
    // NOTE: the original print string was truncated in the source; restored here.
    print("setupViewAndPiP: configuring display layer and PiP")
    displayLayer.frame = bounds
    displayLayer.videoGravity = .resizeAspect
    displayLayer.drawsAsynchronously = true
    // displayLayer.backgroundColor = UIColor.black.cgColor
    // NOTE(review): `layerClass` already makes the backing layer an
    // AVSampleBufferDisplayLayer; adding this second layer on top of it is
    // probably unintended — verify which layer should actually render.
    layer.addSublayer(displayLayer)
    setupTimebase()
    guard AVPictureInPictureController.isPictureInPictureSupported() else {
        print("PiP not supported on this device")
        return
    }
    // Sample-buffer-backed PiP (iOS 15+): the system mirrors `displayLayer`
    // into the PiP window and drives playback through `playbackDelegate`.
    let contentSource = AVPictureInPictureController.ContentSource(
        sampleBufferDisplayLayer: displayLayer,
        playbackDelegate: self
    )
    pipController = AVPictureInPictureController(contentSource: contentSource)
    pipController?.delegate = self
    pipController?.requiresLinearPlayback = true
}
/// Starts the control timebase running (rate 1.0 = real-time presentation).
func resumeTimebase() {
    guard let tb = timebase else { return }
    CMTimebaseSetRate(tb, rate: 1.0)
}

/// Freezes the control timebase (rate 0.0); enqueued frames stay pending.
func pauseTimebase() {
    guard let tb = timebase else { return }
    CMTimebaseSetRate(tb, rate: 0.0)
}
/// Requests Picture-in-Picture, but only once the controller reports it is possible.
/// The start call is hopped to the main queue as AVKit requires.
func startPiP() {
    guard pipController?.isPictureInPicturePossible == true else { return }
    DispatchQueue.main.async { [weak self] in
        self?.pipController?.startPictureInPicture()
    }
}
// MARK: - Rendering
/// Wraps the decoder's current BGRA frame (`userData.img`) in a CVPixelBuffer,
/// stamps it with the control-timebase time, and enqueues it on the display layer.
/// Also maintains a rolling FPS estimate from inter-frame arrival deltas.
/// Silently drops the frame when the layer is not ready for more media data.
internal func render() {
    guard
        let controlTimebase = timebase,
        let img = userData.img,
        displayLayer.isReadyForMoreMediaData else {
        // NOTE: the original print string was truncated in the source; restored here.
        print("render: skipped (no timebase/image, or layer not ready)")
        return
    }
    let currentTime = CMTimebaseGetTime(controlTimebase)

    // ---- FPS estimation from inter-frame arrival times -------------------
    let now = CFAbsoluteTimeGetCurrent()
    let delta = now - userData.lastRenderTime
    userData.lastRenderTime = now
    // Filter out outliers (stalls, duplicate callbacks, sub-5ms jitter).
    if delta > 0.005 && delta < 1.0 {
        frameTimes.append(delta)
        // Consistency fix: use the declared window constant instead of the magic 60.
        if frameTimes.count > maxFrameWindow { // keep a max history
            frameTimes.removeFirst()
        }
        let avgFrameTime = frameTimes.reduce(0, +) / Double(frameTimes.count)
        let estimatedFPS = Int32(1.0 / avgFrameTime)
        if estimatedFPS > 0 {
            fps = estimatedFPS
        }
    }
    print("render: frame \(frameIndex), estimated fps \(fps)")

    // ---- Wrap raw BGRA bytes in a CVPixelBuffer --------------------------
    let width = Int(userData.width)
    let height = Int(userData.height)
    var pixelBuffer: CVPixelBuffer?
    let attrs: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true,
        kCVPixelBufferWidthKey as String: width,
        kCVPixelBufferHeightKey as String: height,
        kCVPixelBufferBytesPerRowAlignmentKey as String: width * 4,
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    // NOTE(review): CVPixelBufferCreateWithBytes does NOT copy `img`, and the
    // release callback is nil — the decoder must keep this buffer alive (and
    // unmodified) until the frame has been displayed. Verify the producer's
    // buffer lifetime, or copy into a pooled CVPixelBuffer instead.
    let status = CVPixelBufferCreateWithBytes(
        kCFAllocatorDefault,
        width,
        height,
        kCVPixelFormatType_32BGRA,
        img,
        width * 4, // assumes tightly-packed rows (4 bytes/pixel) — TODO confirm pitch
        nil,
        nil,
        attrs as CFDictionary,
        &pixelBuffer
    )
    guard status == kCVReturnSuccess, let pb = pixelBuffer else { return }

    // ---- Build the timed sample buffer -----------------------------------
    // Duration is .invalid: the display layer presents the frame at its PTS
    // and holds it until the next frame arrives.
    var timingInfo = CMSampleTimingInfo(
        duration: .invalid,
        presentationTimeStamp: currentTime,
        decodeTimeStamp: .invalid
    )
    // Format description is derived directly from the image buffer.
    var formatDesc: CMVideoFormatDescription?
    CMVideoFormatDescriptionCreateForImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: pb,
        formatDescriptionOut: &formatDesc
    )
    guard let format = formatDesc else { return }
    print("render: enqueueing \(width)x\(height) frame at \(currentTime.seconds)")
    var sampleBuffer: CMSampleBuffer?
    CMSampleBufferCreateForImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: pb,
        dataReady: true,
        makeDataReadyCallback: nil,
        refcon: nil,
        formatDescription: format,
        sampleTiming: &timingInfo,
        sampleBufferOut: &sampleBuffer
    )
    if let sb = sampleBuffer {
        if CMSampleBufferIsValid(sb) {
            if CMSampleBufferGetPresentationTimeStamp(sb) == .invalid {
                print("Invalid video timestamp")
            }
            // Enqueue on the main queue; recover the layer first if it failed.
            DispatchQueue.main.async { [weak self] in
                guard let self = self else { return }
                if self.displayLayer.status == .failed {
                    self.displayLayer.flush()
                }
                self.displayLayer.enqueue(sb)
            }
            frameIndex += 1
        } else {
            print("Sample buffer is invalid!!!!")
        }
    }
}
// MARK: - AVPictureInPictureSampleBufferPlaybackDelegate
// Playback delegate that lets the PiP window control the (VLC-backed) player.
extension VideoRenderer: AVPictureInPictureSampleBufferPlaybackDelegate {
    /// The PiP window was resized; a real player could switch stream quality here.
    /// NOTE: the original print string was truncated in the source; restored here.
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
        print("PiP render size: \(newRenderSize.width)x\(newRenderSize.height)")
    }
    /// Skip requested from the PiP UI. Seeking is not implemented for this live source.
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime) async {
        print("PiP skip by \(skipInterval.seconds)s (not supported)")
    }
    /// Play/pause tapped in the PiP window — forward to the player.
    func pictureInPictureController(_ controller: AVPictureInPictureController, setPlaying playing: Bool) {
        print("PiP wants to: \(playing ? "play" : "pause")")
        delegate?.setPlaying(setPlaying: playing)
        // You can trigger libvlc_media_player_pause() here if needed
    }
    /// An infinite time range marks the content as live (no scrubber in the PiP UI).
    func pictureInPictureControllerTimeRangeForPlayback(_ controller: AVPictureInPictureController) -> CMTimeRange {
        print("PiP -> pictureInPictureControllerTimeRangeForPlayback")
        return CMTimeRange(start: .negativeInfinity, duration: .positiveInfinity)
    }
    /// Reports whether playback is currently *paused*.
    func pictureInPictureControllerIsPlaybackPaused(_ controller: AVPictureInPictureController) -> Bool {
        print("PiP -> pictureInPictureControllerIsPlaybackPaused - Start")
        if let isPlaying = delegate?.isPlaying() {
            print("PiP -> pictureInPictureControllerIsPlaybackPaused - status: \(isPlaying ? "play" : "pause")")
            // BUG FIX: this callback asks "is playback PAUSED?"; the original
            // returned `isPlaying`, which inverted the PiP play/pause button state.
            return !isPlaying
        } else {
            // No player attached — report "not paused" so the PiP UI shows the pause glyph.
            return false
        }
    }
}
// Lifecycle delegate: the truncated print strings from the source are restored
// as `\(#function)` traces, matching the style already used in the error callback.
extension VideoRenderer: AVPictureInPictureControllerDelegate {
    /// PiP is closing and the app UI should be restored before completion.
    func pictureInPictureController(_ controller: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) {
        // Handle PiP exit (like showing UI again)
        print("PiP -> pictureInPictureController - Start")
        completionHandler(true)
    }
    func pictureInPictureControllerWillStartPictureInPicture(_ controller: AVPictureInPictureController) {
        print("\(#function)")
    }
    func pictureInPictureControllerDidStartPictureInPicture(_ controller: AVPictureInPictureController) {
        print("\(#function)")
    }
    func pictureInPictureControllerWillStopPictureInPicture(_ controller: AVPictureInPictureController) {
        print("\(#function)")
    }
    func pictureInPictureControllerDidStopPictureInPicture(_ controller: AVPictureInPictureController) {
        print("\(#function)")
    }
    /// PiP failed to start (e.g. entitlement/audio-session problems) — log the error.
    func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
        print("\(#function)")
        print("pip error: \(error)")
    }
}
// Подробнее здесь: https://stackoverflow.com/questions/… (URL обрезан при извлечении текста — восстановить по заголовку вопроса "…r-with-pip")