import SwiftUI
import AVFoundation
import AVKit
/// Plays the video at `videoURL` with custom transport controls
/// (skip-back / play-pause / skip-forward), a scrubbing slider, and
/// elapsed/total time labels.
struct VideoPlayerView: View {
    let videoURL: URL
    @StateObject private var playerManager = VideoPlayerManager()

    var body: some View {
        VStack {
            VideoPlayer(player: playerManager.player)
                .frame(height: 300)
                .onAppear {
                    playerManager.loadVideo(url: videoURL)
                }
                .onDisappear {
                    playerManager.pause()
                }

            // Custom controls
            HStack(spacing: 30) {
                Button(action: playerManager.rewind) {
                    Image(systemName: "gobackward.15")
                }
                Button(action: playerManager.togglePlayPause) {
                    Image(systemName: playerManager.isPlaying ? "pause.fill" : "play.fill")
                }
                .font(.title)
                Button(action: playerManager.forward) {
                    Image(systemName: "goforward.15")
                }
            }
            .padding()

            // Progress slider.
            // BUG FIX: until the asset's duration loads, `duration` is 0 and the
            // original range `0...duration` was the degenerate `0...0`, which makes
            // Slider produce NaN/invalid-range runtime warnings. Clamp the upper
            // bound to at least 1 so the range is always valid.
            Slider(
                value: $playerManager.currentTime,
                in: 0...max(playerManager.duration, 1)
            ) { editing in
                // Seek only when the user releases the thumb, not on every drag tick.
                if !editing {
                    playerManager.seek(to: playerManager.currentTime)
                }
            }
            .padding(.horizontal)

            HStack {
                Text(playerManager.currentTime.formatTime())
                Spacer()
                Text(playerManager.duration.formatTime())
            }
            .font(.caption)
            .padding(.horizontal)
        }
    }
}
/// Owns the `AVPlayer`, publishes playback state for SwiftUI, and wraps
/// transport commands (play / pause / seek / 15-second skips).
class VideoPlayerManager: ObservableObject {
    @Published var player: AVPlayer?
    @Published var isPlaying = false
    /// Current playback position in seconds (driven by the periodic observer).
    @Published var currentTime: Double = 0
    /// Asset duration in seconds; stays 0 until loaded (or for indefinite assets).
    @Published var duration: Double = 0
    // Token returned by addPeriodicTimeObserver; must be removed from the same
    // player it was added to.
    private var timeObserver: Any?

    /// Replaces any current player with one for `url` and begins observing
    /// playback progress and the asset's duration.
    func loadVideo(url: URL) {
        // BUG FIX: if a video was already loaded, the old periodic observer must
        // be removed from the *old* player before `player` is replaced. The
        // original skipped this, leaking the observer — and `deinit` would then
        // try to remove it from the new player, which AVFoundation rejects.
        removeTimeObserver()

        let playerItem = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: playerItem)
        // Reset published state for the fresh item.
        isPlaying = false
        currentTime = 0
        duration = 0

        // Observe playback progress every 0.5 s on the main queue.
        let interval = CMTime(seconds: 0.5, preferredTimescale: 600)
        timeObserver = player?.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
            self?.currentTime = time.seconds
        }

        // Load the duration asynchronously.
        playerItem.asset.loadValuesAsynchronously(forKeys: ["duration"]) { [weak self] in
            DispatchQueue.main.async {
                guard let assetDuration = self?.player?.currentItem?.asset.duration else { return }
                let seconds = assetDuration.seconds
                // BUG FIX: live/indefinite assets report a non-finite duration;
                // publishing NaN would poison the Slider range and time label.
                if seconds.isFinite {
                    self?.duration = seconds
                }
            }
        }
    }

    /// Switches between playing and paused states.
    func togglePlayPause() {
        isPlaying ? pause() : play()
    }

    func play() {
        player?.play()
        isPlaying = true
    }

    func pause() {
        player?.pause()
        isPlaying = false
    }

    /// Seeks to `time` (seconds), clamped to the known playable range.
    func seek(to time: Double) {
        // Clamp to [0, duration] when the duration is known; otherwise just
        // forbid negative targets.
        let upperBounded = duration > 0 ? min(time, duration) : time
        let cmTime = CMTime(seconds: max(0, upperBounded), preferredTimescale: 600)
        player?.seek(to: cmTime)
    }

    /// Skips back 15 seconds (not below zero).
    func rewind() {
        seek(to: max(0, currentTime - 15))
    }

    /// Skips forward 15 seconds (not past the end).
    func forward() {
        seek(to: min(duration, currentTime + 15))
    }

    /// Detaches the periodic time observer from the player it was added to.
    private func removeTimeObserver() {
        if let observer = timeObserver {
            player?.removeTimeObserver(observer)
            timeObserver = nil
        }
    }

    deinit {
        removeTimeObserver()
    }
}
extension Double {
    /// Formats a duration in seconds as "M:SS" (e.g. 125 -> "2:05"), adding an
    /// hours field for durations of an hour or more (e.g. 3725 -> "1:02:05").
    ///
    /// BUG FIX: the original crashed on non-finite input — `Int(Double.nan)`
    /// traps at runtime — and NaN durations are reachable here because
    /// AVFoundation reports an indefinite duration for live or not-yet-loaded
    /// assets. Non-finite and non-positive values now render as "0:00".
    func formatTime() -> String {
        guard isFinite, self > 0 else { return "0:00" }
        let total = Int(self)
        let hours = total / 3600
        let minutes = (total % 3600) / 60
        let seconds = total % 60
        if hours > 0 {
            return String(format: "%d:%02d:%02d", hours, minutes, seconds)
        }
        return String(format: "%d:%02d", minutes, seconds)
    }
}
AVFoundation provides comprehensive audio and video capabilities. AVPlayer plays media from URLs or local files with playback controls. AVPlayerLayer renders video content in views. Player state can be observed with KVO or Combine to track playback progress, buffering, and errors. AVPlayerItem represents the asset being played, providing duration and status. For background audio, configure the audio session with AVAudioSession. Picture-in-picture requires AVPictureInPictureController. Custom controls can replace the system controls for branded experiences. For recording, AVCaptureSession manages camera and microphone input. AVFoundation also handles audio processing, speech synthesis, and QR code scanning.