// LiveApple/LiveApple/PlayerController.swift

//
// PlayerController.swift
// LiveApple
//
// Created by Shadowfacts on 9/24/22.
//
import Foundation
import AVKit
import ActivityKit
import AudioToolbox
import MediaPlayer
@MainActor
class PlayerController: ObservableObject {
    /// Decodes raw sample buffers out of the bundled "badapple" asset.
    private let reader: AVAssetReader
    /// Serial queue for the blocking `copyNextSampleBuffer()` loop, kept off the main actor.
    private let readQueue = DispatchQueue(label: "PlayerController reading", qos: .userInitiated)
    private let asset: AVAsset
    /// Kept around for audio playback; video frames are emitted manually (see `start()`).
    let player: AVPlayer
    /// Every decoded frame paired with its presentation timestamp, in presentation order.
    @Published private(set) var frames: [(CMTime, Frame)] = []
    /// Index of the last emitted frame; lets `emitFrame(for:)` search forward only,
    /// assuming playback time is monotonically increasing.
    private var lastPlayedFrameIndex = 0
    /// The frame the UI should currently display.
    @Published var currentFrame: Frame?
    var activity: Activity<BadAppleAttributes>?
    /// 30fps repeating timer driving manual frame emission; invalidated by `stop()`.
    private var timer: Timer?
    var initializedNowPlaying = false

    init() {
        // Force-unwrap / try! are acceptable here: the resource is bundled with the
        // app, so a missing or unreadable asset is a programmer error.
        let url = Bundle.main.url(forResource: "badapple", withExtension: "mp4")!
        asset = AVURLAsset(url: url)
        player = AVPlayer(playerItem: AVPlayerItem(asset: asset))
        reader = try! AVAssetReader(asset: asset)
        player.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 30), queue: .main) { [weak self] time in
            self?.emitFrame(for: time)
        }
    }

    /// Decodes the entire video track into `frames` up front, then starts a 30fps
    /// timer that emits frames based on wall-clock time elapsed since decoding finished.
    func start() async {
        let track = try! await asset.loadTracks(withMediaType: .video).first!
        let trackOutput = AVAssetReaderTrackOutput(track: track, outputSettings: [
            kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        ])
        reader.add(trackOutput)
        let date = Date()
        print("start reading")
        // Bridge the synchronous, blocking decode loop into async via a continuation;
        // it resumes exactly once, when the track is exhausted.
        let decoded = await withCheckedContinuation { (continuation: CheckedContinuation<[(CMTime, Frame)], Never>) in
            self.readQueue.async {
                guard self.reader.startReading() else {
                    // Include the reader's error so a decode failure is diagnosable.
                    fatalError("AVAssetReader failed to start: \(String(describing: self.reader.error))")
                }
                var decodedFrames: [(CMTime, Frame)] = []
                while let buffer = trackOutput.copyNextSampleBuffer() {
                    decodedFrames.append((buffer.presentationTimeStamp, Frame(pixelBuffer: buffer.imageBuffer!)))
                }
                continuation.resume(returning: decodedFrames)
            }
        }
        print("done reading after: \(-date.timeIntervalSinceNow)")
        self.frames = decoded
        // using the AVPlayer gives you sound
        // self.player.play()
        // doing it manually lets you update while in the background
        let start = Date()
        // Guard against start() being called twice stacking two live timers.
        timer?.invalidate()
        // [weak self]: the previous [unowned self] could crash if the controller was
        // deallocated while this never-invalidated repeating timer was still firing.
        timer = .scheduledTimer(withTimeInterval: 1/30, repeats: true, block: { [weak self] _ in
            guard let self else { return }
            let diff = -start.timeIntervalSinceNow
            let cmTime = CMTime(value: CMTimeValue(diff * 1000), timescale: 1000)
            Task {
                await self.emitFrame(for: cmTime)
            }
        })
    }

    /// Stops playback: tears down the emission timer (previously leaked, so frames
    /// kept emitting after stop) and cancels the asset reader.
    func stop() {
        timer?.invalidate()
        timer = nil
        reader.cancelReading()
    }

    /// Publishes the first frame whose presentation timestamp is at or after `time`,
    /// searching forward from the last emitted frame.
    func emitFrame(for time: CMTime) {
        // Note: firstIndex on an ArraySlice returns an index into the base array,
        // so it is safe to store and to subscript `frames` with directly.
        if let index = frames[lastPlayedFrameIndex...].firstIndex(where: { $0.0 >= time }) {
            lastPlayedFrameIndex = index
            let (_, frame) = frames[index]
            // print("playing frame at \(frames[index].0)")
            currentFrame = frame
//        if !initializedNowPlaying {
//            initializedNowPlaying = true
//            let artwork = MPMediaItemArtwork(boundsSize: CGSize(width: 60, height: 45)) { _ in
//                UIImage(cgImage: frame.createImage())
//            }
//            let center = MPNowPlayingInfoCenter.default()
//            let duration = Double(asset.duration.value) / Double(asset.duration.timescale)
//            center.nowPlayingInfo = [
//                MPMediaItemPropertyTitle: "Bad Apple!",
//                MPMediaItemPropertyArtist: "ZUN",
//                MPMediaItemPropertyAlbumArtist: "ZUN",
//                MPMediaItemPropertyAlbumTitle: "asdf",
//                MPMediaItemPropertyArtwork: artwork,
//                MPMediaItemPropertyPlaybackDuration: duration as NSNumber,
//                MPNowPlayingInfoPropertyMediaType: MPMediaType.music.rawValue,
//                MPNowPlayingInfoPropertyAssetURL: Bundle.main.url(forResource: "badapple", withExtension: "mp4")!,
//                MPNowPlayingInfoPropertyIsLiveStream: false,
//                MPNowPlayingInfoPropertyPlaybackRate: 1.0,
//                MPNowPlayingInfoPropertyDefaultPlaybackRate: 1.0,
//            ]
//        }
//
//        let artwork = MPMediaItemArtwork(boundsSize: CGSize(width: 60, height: 45)) { size in
//            UIImage(cgImage: frame.createImage())
//        }
//        var info = MPNowPlayingInfoCenter.default().nowPlayingInfo!
//        info[MPMediaItemPropertyArtwork] = artwork
//        info[MPMediaItemPropertyTitle] = index.description
//        MPNowPlayingInfoCenter.default().nowPlayingInfo = info
        } else {
            print("no frame")
        }
    }
}