I want to use a landscape video as the animated background texture for a UIView.
The video should always fill the full height of the screen, resized at its original aspect ratio and **cropped on the right** if necessary.
AVPlayer
and AVPlayerLayer
seem to make hard assumptions that if I insist on not wanting the video full frame, I must be interested in just the central portion of the video: I can either have the video fitted to the width (because that's the larger dimension) or fitted to the height but centred.
I can't find a way to override this. Am I missing something?
The closest I can get is to display the video in a layer sized to the same dimensions as the video... this retains the aspect ratio and aligns top left, but won't stretch to full height.
import SwiftUI
import AVFoundation
class CroppedVideoViewModel: ObservableObject {
    /// Natural pixel size of the loaded video's first video track.
    /// Remains `.zero` until `loadVideo(url:)` completes.
    @Published var videoSize: CGSize = .zero

    /// Reads the first video track's natural size from the asset at `url`
    /// and publishes it on the main thread.
    ///
    /// Uses the asynchronous `loadTracks(withMediaType:)` / `load(.naturalSize)`
    /// API instead of the deprecated synchronous `tracks(withMediaType:)`,
    /// which blocks the calling thread on media I/O.
    func loadVideo(url: URL) {
        let asset = AVURLAsset(url: url)
        Task {
            do {
                let tracks = try await asset.loadTracks(withMediaType: .video)
                let size = try await tracks.first?.load(.naturalSize) ?? .zero
                await MainActor.run {
                    self.videoSize = size
                }
            } catch {
                print("Error loading video dimensions: \(error)")
            }
        }
    }
}
struct ContentView: View {
    @StateObject var viewModel = CroppedVideoViewModel()

    var body: some View {
        ZStack {
            if viewModel.videoSize != .zero {
                // Scale the video so it fills the full screen height at its
                // original aspect ratio; the resulting width may exceed the
                // screen width.
                let aspectRatio = viewModel.videoSize.width / viewModel.videoSize.height
                let screenHeight = UIScreen.main.bounds.height
                let scaledWidth = screenHeight * aspectRatio
                CroppedVideoRepresentable(viewModel: viewModel)
                    .frame(width: scaledWidth, height: screenHeight)
                    // Anchor the LEFT edge at x == 0 so any horizontal
                    // overflow hangs off (and is cropped on) the right only.
                    // Centering on the screen's midpoint, as before, cropped
                    // both sides equally instead of just the right.
                    .position(x: scaledWidth / 2, y: screenHeight / 2)
            }
        }
        .onAppear {
            if let url = Bundle.main.url(forResource: "example", withExtension: "MP4") {
                viewModel.loadVideo(url: url)
            } else {
                print("Video not found!")
            }
        }
    }
}
/// Bridges the UIKit-backed `CroppedVideo` view into SwiftUI.
struct CroppedVideoRepresentable: UIViewRepresentable {
    @ObservedObject var viewModel: CroppedVideoViewModel

    /// Builds the backing view, initially sized to the video's natural dimensions.
    func makeUIView(context: Context) -> UIView {
        let naturalFrame = CGRect(origin: .zero, size: viewModel.videoSize)
        return CroppedVideo(viewModel: viewModel, frame: naturalFrame)
    }

    /// No incremental updates needed; the view owns its player lifecycle.
    func updateUIView(_ uiView: UIView, context: Context) {}
}
class CroppedVideo: UIView {
    // Plain stored reference. `@ObservedObject` is a SwiftUI dynamic property
    // and has no effect inside a UIView, so it was removed.
    let viewModel: CroppedVideoViewModel
    private var videoLayer: AVPlayerLayer!

    /// Creates the view, adds a player layer sized to the video's natural
    /// dimensions (anchored top-left), and starts playback of the bundled video.
    init(viewModel: CroppedVideoViewModel, frame: CGRect) {
        self.viewModel = viewModel
        super.init(frame: frame)
        backgroundColor = .black

        videoLayer = AVPlayerLayer()
        // Top-left anchored frame at the video's natural size; whatever
        // extends past this view's bounds is effectively cropped on the
        // right/bottom.
        videoLayer.frame = CGRect(origin: .zero, size: viewModel.videoSize)
        layer.addSublayer(videoLayer)

        if let url = Bundle.main.url(forResource: "example", withExtension: "MP4") {
            loadAndPlayVideo(url: url)
        } else {
            // Previously this failed silently; report like the other code paths.
            print("Video not found!")
        }
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Attaches a player for `url` to the layer and begins playback.
    private func loadAndPlayVideo(url: URL) {
        let player = AVPlayer(url: url)
        videoLayer.player = player
        player.play()
    }
}
Fwiw I was able to simplify the approach, improve the performance and refine the behaviour (thanks to Matt for getting me on the right track). Here's the full code in case it helps anybody else.
import SwiftUI
import UIKit
import AVFoundation
class VideoLayerView: UIView {
    private var playerLayer: AVPlayerLayer?
    private var player: AVPlayer?
    // Natural pixel size of the video track; .zero until loaded asynchronously.
    private var videoSize: CGSize = .zero

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupVideo()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupVideo()
    }

    /// Loads "example.MP4" from the bundle, creates the player and layer,
    /// fetches the track's natural size asynchronously, and starts playback.
    private func setupVideo() {
        guard let videoURL = Bundle.main.url(forResource: "example", withExtension: "MP4") else {
            print("Could not find example.MP4 in bundle")
            return
        }

        let asset = AVURLAsset(url: videoURL)
        let newPlayer = AVPlayer(playerItem: AVPlayerItem(asset: asset))
        let newLayer = AVPlayerLayer(player: newPlayer)
        player = newPlayer
        playerLayer = newLayer

        // Load the natural size off the main thread, then lay out once known.
        Task {
            do {
                let tracks = try await asset.loadTracks(withMediaType: .video)
                if let videoTrack = tracks.first {
                    let size = try await videoTrack.load(.naturalSize)
                    await MainActor.run {
                        self.videoSize = size
                        self.updateVideoLayout()
                    }
                }
            } catch {
                print("Error loading video dimensions: \(error)")
            }
        }

        // .resizeAspect keeps the aspect ratio within the (deliberately
        // oversized) layer frame computed in updateVideoLayout().
        newLayer.videoGravity = .resizeAspect
        // Local constant avoids the force unwrap the original used here.
        layer.addSublayer(newLayer)

        NotificationCenter.default.addObserver(
            self,
            selector: #selector(playerDidFinishPlaying),
            name: .AVPlayerItemDidPlayToEndTime,
            object: newPlayer.currentItem
        )
        newPlayer.play()
    }

    @objc private func playerDidFinishPlaying() {
        print("Video finished playing")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        updateVideoLayout()
    }

    /// Sizes the player layer so the video fills the view's height (or width,
    /// whichever direction overflows), anchored top-left so the overflow is
    /// cropped on the right/bottom rather than centred.
    private func updateVideoLayout() {
        guard let playerLayer,
              videoSize.width > 0, videoSize.height > 0,
              bounds.width > 0, bounds.height > 0 else { return }  // avoid /0 before first layout

        let videoAspectRatio = videoSize.width / videoSize.height
        let viewAspectRatio = bounds.width / bounds.height

        let targetSize: CGSize
        if videoAspectRatio > viewAspectRatio {
            // Video is wider than the view: fill the height, overflow right.
            targetSize = CGSize(width: bounds.height * videoAspectRatio,
                                height: bounds.height)
        } else {
            // Video is taller than the view: fill the width, overflow bottom.
            targetSize = CGSize(width: bounds.width,
                                height: bounds.width / videoAspectRatio)
        }

        // Standalone CALayers implicitly animate frame changes; disable the
        // implicit action so the layer tracks layout passes (e.g. rotation)
        // without a visible lag.
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        playerLayer.frame = CGRect(origin: .zero, size: targetSize)
        CATransaction.commit()
    }

    /// Rewinds to the start and resumes playback.
    func restartVideo() {
        player?.seek(to: .zero)
        player?.play()
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
class SequenceView: UIView {
    private var videoLayerView: VideoLayerView!
    private var currentOrientation: UIDeviceOrientation = .unknown

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupView()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupView()
    }

    /// Pins a full-bleed `VideoLayerView` and subscribes to orientation changes.
    private func setupView() {
        backgroundColor = .white

        videoLayerView = VideoLayerView()
        videoLayerView.translatesAutoresizingMaskIntoConstraints = false
        addSubview(videoLayerView)
        NSLayoutConstraint.activate([
            videoLayerView.topAnchor.constraint(equalTo: topAnchor),
            videoLayerView.leadingAnchor.constraint(equalTo: leadingAnchor),
            videoLayerView.trailingAnchor.constraint(equalTo: trailingAnchor),
            videoLayerView.bottomAnchor.constraint(equalTo: bottomAnchor)
        ])

        // orientationDidChangeNotification is only posted while the device is
        // generating orientation notifications; without this call the observer
        // below may never fire.
        UIDevice.current.beginGeneratingDeviceOrientationNotifications()
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(orientationDidChange),
            name: UIDevice.orientationDidChangeNotification,
            object: nil
        )
        currentOrientation = UIDevice.current.orientation
    }

    /// Restarts the video whenever the device settles in a new orientation.
    /// NOTE(review): .faceUp/.faceDown also pass this filter and will trigger
    /// a restart; use `isValidInterfaceOrientation` if that proves unwanted.
    @objc private func orientationDidChange() {
        let newOrientation = UIDevice.current.orientation
        if newOrientation != currentOrientation && newOrientation != .unknown {
            currentOrientation = newOrientation
            DispatchQueue.main.async { [weak self] in
                self?.videoLayerView.restartVideo()
            }
        }
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
        // Balance beginGeneratingDeviceOrientationNotifications() in setupView().
        UIDevice.current.endGeneratingDeviceOrientationNotifications()
    }
}
/// Bridges the UIKit `SequenceView` into the SwiftUI hierarchy.
struct SequenceViewWrapper: UIViewRepresentable {
    func makeUIView(context: Context) -> SequenceView {
        SequenceView()
    }

    func updateUIView(_ uiView: SequenceView, context: Context) {
        // The UIKit view is self-contained; nothing to sync from SwiftUI.
    }
}
/// Root view: the video background extends beneath the safe areas.
struct ContentView: View {
    var body: some View {
        SequenceViewWrapper()
            .ignoresSafeArea(.all)
    }
}