//
//  VideoPlayerContentView.swift
//  WeSports
//
//  Created by Jure Cular on 07.06.2022.
//  Copyright © 2022 WeSports. All rights reserved.
//

import AVFoundation
import RxCocoa
import RxRelay
import RxSwift
import UIKit

final class VideoPlayerContentView: UIView, VideoPlayerViewProtocol {
    let muteVideoRelay = BehaviorRelay<Bool>(value: true)

    // MARK: - Private Properties -

    private let currentTimeRelay = BehaviorRelay<Double>(value: 0)
    private let playerStatusRelay = BehaviorRelay<VideoPlayerStatus>(value: .loading)

    private let bufferQueue: DispatchQueue = .init(label: "com.sportening.sample-buffer-queue")

    private let displayLayer: AVSampleBufferDisplayLayer? = AVSampleBufferDisplayLayer()
    private let audioRenderer: AVSampleBufferAudioRenderer? = AVSampleBufferAudioRenderer()
    private let synchronizer = AVSampleBufferRenderSynchronizer()
    private let sampleBufferProvider = SampleBufferProvider()

    private let disposeBag = DisposeBag()
    private var periodicObserverToken: Any?

    // MARK: - Init -

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupRenderers()
    }

    @available(*, unavailable)
    required init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // MARK: - UIView -

    override func layoutSubviews() {
        super.layoutSubviews()
        if let displayLayer = displayLayer, displayLayer.bounds != bounds {
            displayLayer.bounds = bounds
            // `position` is expressed in the superlayer's coordinate space,
            // so center the layer within this view's bounds
            displayLayer.position = CGPoint(x: bounds.midX, y: bounds.midY)
        }
    }

    // MARK: - VideoPlayerViewProtocol -

    func set(url: URL) {
        bufferQueue.async {
            self.bind()
            self.sampleBufferProvider.setup(for: url)
        }
    }

    // MARK: - Private Functions -

    private func pauseSynchronizer() {
        // Synchronizes renderers and stops rendering audio and video
        synchronizer.rate = 0
    }

    private func startSynchronizer() {
        // Synchronizes renderers and starts rendering audio and video at normal rate/speed
        synchronizer.rate = 1
    }

    private func setupRenderers() {
        periodicObserverToken = synchronizer.addPeriodicTimeObserver(
            forInterval: CMTime(seconds: 1, preferredTimescale: 1),
            queue: bufferQueue
        ) { [weak self] time in
            self?.currentTimeRelay.accept(time.seconds)
        }

        if let displayLayer = displayLayer {
            synchronizer.addRenderer(displayLayer)
            layer.addSublayer(displayLayer)
        }

        if let audioRenderer = audioRenderer {
            synchronizer.addRenderer(audioRenderer)
        }
    }

    private func startRequestingVideo() {
        if sampleBufferProvider.areVideoSamplesAvailable {
            guard let displayLayer = displayLayer else { return }
            // If the display layer is in failed status, flush it and remove the image
            if displayLayer.status == AVQueuedSampleBufferRenderingStatus.failed {
                displayLayer.flushAndRemoveImage()
            }
            displayLayer.requestMediaDataWhenReady(on: bufferQueue) { [weak self] in
                guard let self = self, let displayLayer = self.displayLayer else { return }
                if displayLayer.status == AVQueuedSampleBufferRenderingStatus.failed {
                    displayLayer.flush()
                }
                if let sampleBuffer = self.sampleBufferProvider.requestVideoBuffer() {
                    displayLayer.enqueue(sampleBuffer)
                }
            }
        }
    }

    private func startRequestingAudio() {
        if sampleBufferProvider.areAudioSamplesAvailable {
            guard let audioRenderer = audioRenderer else { return }
            // If the audio renderer is in failed status, flush its sample buffers
            if audioRenderer.status == .failed {
                audioRenderer.flush()
            }
            audioRenderer.requestMediaDataWhenReady(on: bufferQueue) { [weak self] in
                guard let self = self, let audioRenderer = self.audioRenderer else { return }
                if audioRenderer.status == .failed {
                    audioRenderer.flush()
                }
                if let sampleBuffer = self.sampleBufferProvider.requestAudioBuffer() {
                    audioRenderer.enqueue(sampleBuffer)
                }
            }
        }
    }
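    /*
     `startRequestingVideo()` and `startRequestingAudio()` above follow the
     standard AVQueuedSampleBufferRendering request/enqueue pattern. As a
     minimal standalone sketch, where `queue` and `nextBuffer()` are
     illustrative stand-ins rather than members of this class:

         let layer = AVSampleBufferDisplayLayer()
         let synchronizer = AVSampleBufferRenderSynchronizer()
         synchronizer.addRenderer(layer)
         layer.requestMediaDataWhenReady(on: queue) {
             while layer.isReadyForMoreMediaData, let buffer = nextBuffer() {
                 layer.enqueue(buffer)
             }
         }
         synchronizer.rate = 1 // start playback

     This class enqueues a single buffer per callback instead of draining a
     loop; the system keeps invoking the block while the renderer stays ready.
     */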
    private func stopRequestingVideo() {
        guard let displayLayer = displayLayer else { return }
        displayLayer.stopRequestingMediaData()
        displayLayer.flush()
    }

    private func stopRequestingAudio() {
        guard let audioRenderer = audioRenderer else { return }
        audioRenderer.stopRequestingMediaData()
        audioRenderer
            .flush(fromSourceTime: sampleBufferProvider.lastSampleBufferTime) { [weak audioRenderer] didSucceed in
                if !didSucceed {
                    audioRenderer?.flush()
                }
            }
    }

    // MARK: - Setup Bindings -

    // swiftlint:disable cyclomatic_complexity
    private func bind() {
        muteVideoRelay
            .bind { [weak self] isMuted in
                guard let self = self else { return }
                self.bufferQueue.async {
                    self.audioRenderer?.isMuted = isMuted
                }
            }
            .disposed(by: disposeBag)

        // Observe the provider status to know when to start and stop rendering
        sampleBufferProvider.status
            .bind { [weak self] status in
                guard let self = self else { return }
                self.bufferQueue.async {
                    switch status {
                    case .readyToProvide:
                        // Set the initial display buffer
                        self.setDisplayBuffer(self.sampleBufferProvider.requestVideoBuffer())
                    case .providing:
                        self.startSynchronizer()
                        self.startRequestingAudio()
                        self.startRequestingVideo()
                    case .paused, .stopped:
                        self.stopRequestingVideo()
                        self.stopRequestingAudio()
                        self.pauseSynchronizer()
                    case .finished:
                        self.stopRequestingVideo()
                        self.stopRequestingAudio()
                        self.finished()
                    case .failed:
                        self.stopRequestingAudio()
                        self.stopRequestingVideo()
                    default:
                        break
                    }
                }
            }
            .disposed(by: disposeBag)

        sampleBufferProvider.status
            .map { status -> VideoPlayerStatus in
                switch status {
                case .loading: return .loading
                case .readyToProvide: return .readyToPlay
                case .providing: return .playing
                case .paused: return .paused
                case .stopped: return .stopped
                case .finished: return .finished
                case .failed: return .failed
                }
            }
            .bind(to: playerStatusRelay)
            .disposed(by: disposeBag)

        bindNotifications()
        bindDisplayLayer()
        bindAudioRenderer()
    }
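    /*
     For reference, the two status types mapped above are assumed to be
     simple enums along these lines; their actual definitions live elsewhere
     in the project:

         enum SampleBufferProviderStatus {
             case loading, readyToProvide, providing, paused, stopped, finished, failed
         }

         enum VideoPlayerStatus {
             case loading, readyToPlay, playing, paused, stopped, finished, failed
         }
     */

    private func bindNotifications() {
        // When the app goes to the background or resigns active while the video
        // is playing, we need to stop requesting sample buffers.
        NotificationCenter.default.rx
            .notification(UIApplication.willResignActiveNotification)
            .withLatestFrom(sampleBufferProvider.status)
            .filter { $0 == .providing }
            .subscribe(onNext: { [weak self] _ in
                guard let self = self else { return }
                self.bufferQueue.async {
                    self.sampleBufferProvider.stop()
                }
            })
            .disposed(by: disposeBag)

        // Once the app is active/in the foreground, requesting buffers needs to
        // be restarted, and the asset reader needs to be restarted too. If the
        // reader isn't restarted, sample buffers that have been read but not yet
        // rendered would be lost (noticeable with audio).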
        Observable.merge(
            NotificationCenter.default.rx
                .notification(UIApplication.didBecomeActiveNotification),
            NotificationCenter.default.rx
                .notification(UIApplication.willEnterForegroundNotification)
        )
        .withLatestFrom(sampleBufferProvider.status)
        .filter { $0 == .stopped }
        .subscribe(onNext: { [weak self] _ in
            guard let self = self else { return }
            self.bufferQueue.async {
                // Restart reading the asset when the app comes back from the
                // background, because audio sample buffers might have been read
                // all the way to the end
                self.sampleBufferProvider.restartReadingAsset()
                self.sampleBufferProvider.startProvidingSampleBuffers()
            }
        })
        .disposed(by: disposeBag)
    }

    private func bindDisplayLayer() {
        // When both YoutubeVideoPlayer and this view are on screen,
        // the display layer might fail rendering:
        // - when the YouTube video starts playing while unmuted
        // - when the YouTube video is paused and this view starts playing while unmuted
        // - when the YouTube video is paused, this view is playing, and the video gets unmuted
        // In those cases we catch the status change and, depending on the current
        // view status, re-render the last frame and possibly restart requesting video
        if let displayLayer = displayLayer {
            displayLayer
                .rx.observeWeakly(AVQueuedSampleBufferRenderingStatus.self, "status")
                .withLatestFrom(sampleBufferProvider.status)
                .subscribe(onNext: { [weak self] status in
                    guard let self = self,
                          let displayLayer = self.displayLayer,
                          displayLayer.status == .failed else { return }
                    self.bufferQueue.async {
                        // The display layer has failed presenting,
                        // so first we flush it
                        displayLayer.flush()
                        switch status {
                        case .providing:
                            // If the video was playing, start requesting
                            // sample buffers again and resume playback
                            self.startRequestingVideo()
                            self.startSynchronizer()
                        case .paused, .finished:
                            // If the video was paused or finished, take the
                            // last sample buffer and set it for display
                            self.setDisplayBuffer(self.sampleBufferProvider.lastVideoSampleBuffer)
                        default:
                            break
                        }
                    }
                })
                .disposed(by: disposeBag)
        }
    }

    private func bindAudioRenderer() {
        if let audioRenderer = audioRenderer {
            audioRenderer
                .rx.observeWeakly(AVQueuedSampleBufferRenderingStatus.self, "status")
                .withLatestFrom(sampleBufferProvider.status)
                .subscribe(onNext: { [weak self] status in
                    guard let self = self,
                          let audioRenderer = self.audioRenderer,
                          audioRenderer.status == .failed else { return }
                    self.bufferQueue.async {
                        // The audio renderer has failed playing sample buffers,
                        // so first we flush them
                        audioRenderer.flush()
                        // If audio was playing, start requesting sample
                        // buffers again and resume playback
                        switch status {
                        case .providing:
                            self.startRequestingAudio()
                            self.startSynchronizer()
                        default:
                            break
                        }
                    }
                })
                .disposed(by: disposeBag)
        }
    }
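    /*
     Both bindings above observe the renderers' KVO-compliant `status`
     property through RxCocoa's `observeWeakly`. The equivalent with plain
     Swift KVO, as a rough sketch (the observation token must be retained
     by its owner):

         var statusObservation: NSKeyValueObservation?
         statusObservation = displayLayer.observe(\.status) { layer, _ in
             guard layer.status == .failed else { return }
             layer.flush()
             // re-enqueue the last frame or restart requesting, as above
         }
     */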
    private func setDisplayBuffer(_ sampleBuffer: CMSampleBuffer?) {
        guard let displayLayer = displayLayer,
              displayLayer.isReadyForMoreMediaData,
              let sampleBuffer = sampleBuffer else { return }
        displayLayer.enqueue(sampleBuffer)
    }

    private func finished() {
        if let periodicObserverToken = periodicObserverToken {
            synchronizer.removeTimeObserver(periodicObserverToken)
            self.periodicObserverToken = nil
        }
        // Set the rate and time to 0, then flush the renderers for replayability
        synchronizer.setRate(0, time: .zero)
        audioRenderer?.flush()
        displayLayer?.flush()
    }
}

extension VideoPlayerContentView: VideoPlayerProtocol {
    var currentTime: Observable<Double> {
        currentTimeRelay.asObservable()
    }

    var playerStatus: Observable<VideoPlayerStatus> {
        return playerStatusRelay.asObservable()
    }

    func play() {
        bufferQueue.async {
            self.playerStatus
                .take(1)
                .flatMapCompletable { [weak self] status in
                    guard let self = self else { return .empty() }
                    if case .finished = status {
                        // Replay from the beginning
                        return self.play(from: .zero)
                    } else if case .paused = status {
                        // Resume from the synchronizer's current time
                        let time = self.synchronizer.currentTime()
                        return self.play(from: time)
                    } else if case .readyToPlay = status {
                        self.sampleBufferProvider.startProvidingSampleBuffers()
                    }
                    return .empty()
                }
                .subscribe()
                .disposed(by: self.disposeBag)
        }
    }

    func pause() {
        bufferQueue.async {
            self.sampleBufferProvider.pause()
        }
    }

    private func play(from time: CMTime) -> Completable {
        sampleBufferProvider.prepareToRead(from: time)
        return playerStatus
            .filter { $0 == .readyToPlay }
            .take(1)
            .flatMapCompletable { [weak self] _ in
                guard let self = self else { return .empty() }
                self.sampleBufferProvider.startProvidingSampleBuffers()
                return .empty()
            }
    }
}
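/*
 Example usage (a minimal sketch; `videoURL` and the hosting view hierarchy
 are assumptions, not part of this file):

     let playerView = VideoPlayerContentView(frame: view.bounds)
     view.addSubview(playerView)
     playerView.set(url: videoURL)
     playerView.muteVideoRelay.accept(false) // unmute
     playerView.play()
 */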