作者:手机用户2602891283 | 来源:互联网 | 2023-01-30 22:15
1> Guig..:
等待您的videoItem准备播放
向其添加输出
添加一个周期性时间观察者（periodic time observer），让它大约在每一帧触发一次回调
提取新的像素缓冲区,然后根据需要在Vision / CoreML中对其进行处理:
如果使用 Vision 框架处理连续的视频帧序列，应使用 VNSequenceRequestHandler 而不是 VNImageRequestHandler。
import UIKit
import AVFoundation
import CoreML
import Vision
/// Plays a local video with AVPlayer and extracts per-frame pixel buffers
/// through an AVPlayerItemVideoOutput for downstream CoreML / Vision work.
class ViewController: UIViewController {
    var player: AVPlayer!
    var videoOutput: AVPlayerItemVideoOutput?
    // Token returned by addPeriodicTimeObserver(forInterval:queue:using:);
    // kept so the observer can be removed in deinit (required by AVPlayer).
    private var timeObserverToken: Any?

    override func viewDidLoad() {
        super.viewDidLoad()
        // NOTE(review): `localURL` is declared elsewhere in the project.
        let player = AVPlayer(url: localURL)
        player.play()

        // Watch the item's status so the video output can be attached once
        // the item is actually ready to play (adding earlier can fail).
        player.currentItem?.addObserver(
            self,
            forKeyPath: #keyPath(AVPlayerItem.status),
            options: [.initial, .old, .new],
            context: nil)

        // Fire roughly once per frame at 30 fps. Capture self weakly:
        // the player retains this closure and self retains the player,
        // so a strong capture would create a retain cycle.
        timeObserverToken = player.addPeriodicTimeObserver(
            forInterval: CMTime(value: 1, timescale: 30),
            queue: DispatchQueue(label: "videoProcessing", qos: .background),
            using: { [weak self] _ in
                self?.doThingsWithFaces()
            })
        self.player = player
    }

    deinit {
        // Balance the observers registered in viewDidLoad; AVPlayer requires
        // periodic time observers to be removed explicitly.
        if let token = timeObserverToken {
            player?.removeTimeObserver(token)
        }
        player?.currentItem?.removeObserver(self, forKeyPath: #keyPath(AVPlayerItem.status))
    }

    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
        guard let keyPath = keyPath, let item = object as? AVPlayerItem else {
            // Not a notification we registered for — forward to super,
            // as the KVO contract requires.
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
            return
        }
        switch keyPath {
        case #keyPath(AVPlayerItem.status):
            if item.status == .readyToPlay {
                setUpOutput()
            }
        default:
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }

    /// Attaches an AVPlayerItemVideoOutput to the current item, once, and
    /// only after the item has reached .readyToPlay.
    func setUpOutput() {
        guard videoOutput == nil, let videoItem = player.currentItem else { return }
        // Adding an output before the item is ready can silently fail;
        // see https://forums.developer.apple.com/thread/27589#128476
        guard videoItem.status == .readyToPlay else { return }

        // BiPlanar 4:2:0 is the format Vision / CoreML pipelines consume
        // most efficiently for camera/video content.
        let pixelBuffAttributes = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
        ] as [String: Any]

        let videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: pixelBuffAttributes)
        videoItem.add(videoOutput)
        self.videoOutput = videoOutput
    }

    /// Returns the pixel buffer for the item's current time, or nil when no
    /// new frame is available since the last copy.
    func getNewFrame() -> CVPixelBuffer? {
        guard let videoOutput = videoOutput, let currentItem = player.currentItem else { return nil }
        let time = currentItem.currentTime()
        guard videoOutput.hasNewPixelBuffer(forItemTime: time) else { return nil }
        return videoOutput.copyPixelBuffer(forItemTime: time, itemTimeForDisplay: nil)
    }

    /// Per-frame hook invoked by the periodic time observer.
    func doThingsWithFaces() {
        guard let buffer = getNewFrame() else { return }
        // some CoreML / Vision things on that.
        // There are numerous examples with this
    }
}