I'm trying to use AVPlayer to capture frames from a remotely playing livestream. Eventually I want to convert these frames to UIImages to be displayed. The code I have right now isn't working because pixelBuffer never gets a value. When I print itemTime, its value is continuously 0, which I think might be the cause of the issue. I'd appreciate any help with getting this to work.
import SwiftUI
import UIKit
import RealityKit
import RealityKitContent
import AVFoundation
import AVKit
class ViewController: UIViewController {
    let player = AVPlayer(url: URL(string: {webrtc stream link})!)
    let videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)])

    override func viewDidLoad() {
        print("doing viewDidLoad")
        super.viewDidLoad()
        player.currentItem!.add(videoOutput)
        player.play()
        let displayLink = CADisplayLink(target: self, selector: #selector(displayLinkDidRefresh(link:)))
        displayLink.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
    }

    @objc func displayLinkDidRefresh(link: CADisplayLink) {
        let itemTime = videoOutput.itemTime(forHostTime: CACurrentMediaTime())
        if videoOutput.hasNewPixelBuffer(forItemTime: itemTime) {
            if let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: itemTime, itemTimeForDisplay: nil) {
                print("pixelBuffer \(pixelBuffer)") // yay, pixel buffer
                let image = CIImage(cvImageBuffer: pixelBuffer) // or maybe CIImage?
                print("CIImage \(image)")
            }
        }
    }
}
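
For reference, once a pixel buffer actually comes through, this is roughly how I plan to turn it into a UIImage. Just a sketch: I'm assuming the CIImage → CIContext → CGImage route, and the function name is my own.

import UIKit
import CoreImage

// Reusing a single CIContext, since creating one per frame is expensive.
let ciContext = CIContext()

// Sketch: convert a BGRA CVPixelBuffer into a UIImage via Core Image.
func uiImage(from pixelBuffer: CVPixelBuffer) -> UIImage? {
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}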
struct ImmersiveView: View {
    var body: some View {
        RealityView { content in
            if let scene = try? await Entity(named: "Immersive", in: realityKitContentBundle) {
                content.add(scene)
            }
            let viewcontroller = ViewController()
            viewcontroller.viewDidLoad()
        }
    }
}
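
And once the conversion works, I'm imagining the display side in SwiftUI would look something like this (hypothetical FrameView; how the latest UIImage actually gets delivered to it is still the open question):

import SwiftUI
import UIKit

// Hypothetical view: shows the most recently converted frame.
struct FrameView: View {
    // Placeholder for however the latest UIImage ends up being delivered.
    let currentFrame: UIImage?

    var body: some View {
        if let frame = currentFrame {
            Image(uiImage: frame)
                .resizable()
                .scaledToFit()
        } else {
            Text("Waiting for frames…")
        }
    }
}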