diff --git a/Demo/Resources/Localizable.xcstrings b/Demo/Resources/Localizable.xcstrings index 8a078b8ed..7a2ba93ec 100644 --- a/Demo/Resources/Localizable.xcstrings +++ b/Demo/Resources/Localizable.xcstrings @@ -180,6 +180,9 @@ }, "Simulate memory warning" : { + }, + "Skip" : { + }, "Smart navigation" : { diff --git a/Demo/Sources/Examples/ExamplesViewModel.swift b/Demo/Sources/Examples/ExamplesViewModel.swift index b3e481316..1daa94b9e 100644 --- a/Demo/Sources/Examples/ExamplesViewModel.swift +++ b/Demo/Sources/Examples/ExamplesViewModel.swift @@ -19,7 +19,8 @@ final class ExamplesViewModel: ObservableObject { URLTemplate.dvrVideoHLS, URLTemplate.liveTimestampVideoHLS, URLTemplate.onDemandAudioMP3, - URLTemplate.liveAudioMP3 + URLTemplate.liveAudioMP3, + URLTemplate.timeRangesVideo ]) let urnMedias = Template.medias(from: [ diff --git a/Demo/Sources/Model/Media.swift b/Demo/Sources/Model/Media.swift index edc541fc3..38be868cf 100644 --- a/Demo/Sources/Model/Media.swift +++ b/Demo/Sources/Model/Media.swift @@ -30,6 +30,7 @@ struct Media: Hashable { let type: `Type` let isMonoscopic: Bool let startTime: CMTime + let timeRanges: [TimeRange] init( title: String, @@ -38,7 +39,8 @@ struct Media: Hashable { image: UIImage? 
= nil, type: `Type`, isMonoscopic: Bool = false, - startTime: CMTime = .zero + startTime: CMTime = .zero, + timeRanges: [TimeRange] = [] ) { self.title = title self.subtitle = subtitle @@ -47,6 +49,7 @@ struct Media: Hashable { self.type = type self.isMonoscopic = isMonoscopic self.startTime = startTime + self.timeRanges = timeRanges } init(from template: Template, startTime: CMTime = .zero) { @@ -56,7 +59,8 @@ struct Media: Hashable { imageUrl: template.imageUrl, type: template.type, isMonoscopic: template.isMonoscopic, - startTime: startTime + startTime: startTime, + timeRanges: template.timeRanges ) } @@ -91,7 +95,7 @@ extension Media { .map { image in .simple( url: url, - metadata: Media(title: title, subtitle: subtitle, image: image, type: type), + metadata: Media(title: title, subtitle: subtitle, image: image, type: type, timeRanges: timeRanges), configuration: configuration ) }, @@ -115,6 +119,6 @@ extension Media { extension Media: AssetMetadata { var playerMetadata: PlayerMetadata { - .init(title: title, subtitle: subtitle, image: image) + .init(title: title, subtitle: subtitle, image: image, timeRanges: timeRanges) } } diff --git a/Demo/Sources/Model/Template.swift b/Demo/Sources/Model/Template.swift index 31a9c512e..365b3ce2c 100644 --- a/Demo/Sources/Model/Template.swift +++ b/Demo/Sources/Model/Template.swift @@ -5,6 +5,7 @@ // import AVFoundation +import PillarboxPlayer private let kAppleImageUrl = URL("https://www.apple.com/newsroom/images/default/apple-logo-og.jpg?202312141200") private let kBitmovinImageUrl = URL(""" @@ -65,6 +66,16 @@ enum URLTemplate { imageUrl: "https://img.rts.ch/articles/2017/image/cxsqgp-25867841.image?w=640&h=640", type: .url("http://stream.srg-ssr.ch/m/couleur3/mp3_128") ) + static let timeRangesVideo = Template( + title: "Bip", + subtitle: "Content with opening and closing credits", + imageUrl: "https://www.rts.ch/2023/05/01/10/22/10253916.image/16x9", + type: 
.url("https://rts-vod-amd.akamaized.net/ch/13986102/d13bcd9d-7030-3f5a-b28c-f9abfa6795b8/master.m3u8"), + timeRanges: [ + .init(kind: .credits(.opening), start: .init(value: 3, timescale: 1), end: .init(value: 7, timescale: 1)), + .init(kind: .credits(.closing), start: .init(value: 163, timescale: 1), end: .init(value: 183_680, timescale: 1000)) + ] + ) static let appleBasic_4_3_HLS = Template( title: "Apple Basic 4:3", subtitle: "4x3 aspect ratio, H.264 @ 30Hz", @@ -321,13 +332,22 @@ struct Template: Hashable { let imageUrl: URL? let type: Media.`Type` let isMonoscopic: Bool + let timeRanges: [TimeRange] - init(title: String, subtitle: String? = nil, imageUrl: URL? = nil, type: Media.`Type`, isMonoscopic: Bool = false) { + init( + title: String, + subtitle: String? = nil, + imageUrl: URL? = nil, + type: Media.`Type`, + isMonoscopic: Bool = false, + timeRanges: [TimeRange] = [] + ) { self.title = title self.subtitle = subtitle self.imageUrl = imageUrl self.type = type self.isMonoscopic = isMonoscopic + self.timeRanges = timeRanges } static func medias(from templates: [Self]) -> [Media] { diff --git a/Demo/Sources/Players/PlaybackView.swift b/Demo/Sources/Players/PlaybackView.swift index 9620ac681..92b510c0e 100644 --- a/Demo/Sources/Players/PlaybackView.swift +++ b/Demo/Sources/Players/PlaybackView.swift @@ -17,10 +17,10 @@ private struct MainView: View { @Binding var layout: PlaybackView.Layout let isMonoscopic: Bool let supportsPictureInPicture: Bool + let progressTracker: ProgressTracker @StateObject private var visibilityTracker = VisibilityTracker() - @State private var progressTracker = ProgressTracker(interval: CMTime(value: 1, timescale: 1)) @State private var layoutInfo: LayoutInfo = .none @State private var selectedGravity: AVLayerVideoGravity = .resizeAspect @State private var isInteracting = false @@ -42,8 +42,6 @@ private struct MainView: View { .statusBarHidden(isFullScreen ? 
isUserInterfaceHidden : false) .animation(.defaultLinear, value: isUserInterfaceHidden) .bind(visibilityTracker, to: player) - .bind(progressTracker, to: player) - ._debugBodyCounter() } private var isFullScreen: Bool { @@ -122,6 +120,18 @@ private struct MainView: View { @ViewBuilder private func bottomBar() -> some View { + VStack(spacing: 20) { + skipButton() + bottomControls() + } + .animation(.linear(duration: 0.2), values: isUserInterfaceHidden, isInteracting) + .padding(.horizontal) + .padding(.vertical, 10) + .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .bottom) + } + + @ViewBuilder + private func bottomControls() -> some View { VStack(spacing: 0) { HStack(alignment: .bottom) { metadata() @@ -129,6 +139,7 @@ private struct MainView: View { bottomButtons() } } + HStack(spacing: 20) { TimeBar(player: player, visibilityTracker: visibilityTracker, isInteracting: $isInteracting) if !isFullScreen { @@ -138,10 +149,6 @@ private struct MainView: View { } .preventsTouchPropagation() .opacity(isUserInterfaceHidden ? 0 : 1) - .animation(.linear(duration: 0.2), values: isUserInterfaceHidden, isInteracting) - .padding(.horizontal) - .padding(.vertical, 10) - .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .bottom) } @ViewBuilder @@ -225,6 +232,13 @@ private struct MainView: View { } } + @ViewBuilder + private func skipButton() -> some View { + SkipButton(player: player, progressTacker: progressTracker) + .padding(.trailing, 20) + .frame(maxWidth: .infinity, alignment: .trailing) + } + @ViewBuilder private func image(name: String) -> some View { Image(systemName: name) @@ -238,6 +252,39 @@ private struct MainView: View { } } +private struct SkipButton: View { + let player: Player + @ObservedObject var progressTacker: ProgressTracker + + private var skippableTimeRange: TimeRange? 
{ + player.skippableTimeRange(at: progressTacker.time) + } + + var body: some View { + Button(action: skip) { + Text("Skip") + .font(.footnote) + .foregroundStyle(.white) + .padding(.vertical, 5) + .padding(.horizontal, 10) + .background { + RoundedRectangle(cornerRadius: 2) + .fill(Color(uiColor: UIColor.darkGray)) + RoundedRectangle(cornerRadius: 2) + .stroke(lineWidth: 2.0) + .foregroundStyle(.gray) + } + } + .opacity(skippableTimeRange != nil ? 1 : 0) + .animation(.easeInOut, value: skippableTimeRange) + } + + private func skip() { + guard let skippableTimeRange else { return } + player.seek(to: skippableTimeRange.end) + } +} + private struct ControlsView: View { @ObservedObject var player: Player @ObservedObject var progressTracker: ProgressTracker @@ -526,6 +573,34 @@ private struct TimeSlider: View { } } +#else + +private struct MainSystemView: View { + let player: Player + let supportsPictureInPicture: Bool + @ObservedObject var progressTracker: ProgressTracker + + private var contextualActions: [ContextualAction] { + if let skippableTimeRange = player.skippableTimeRange(at: progressTracker.time) { + return [ + .init(title: "Skip") { + player.seek(to: skippableTimeRange.end) + } + ] + } + else { + return [] + } + } + + var body: some View { + SystemVideoView(player: player) + .supportsPictureInPicture(supportsPictureInPicture) + .contextualActions(contextualActions) + .ignoresSafeArea() + } +} + #endif // Behavior: h-hug, v-hug @@ -609,6 +684,7 @@ struct PlaybackView: View { @ObservedObject private var player: Player @Binding private var layout: Layout + @State private var progressTracker = ProgressTracker(interval: CMTime(value: 1, timescale: 1)) private var isMonoscopic = false private var supportsPictureInPicture = false @@ -632,6 +708,7 @@ struct PlaybackView: View { } } .background(.black) + .bind(progressTracker, to: player) } init(player: Player, layout: Binding = .constant(.inline)) { @@ -647,7 +724,8 @@ struct PlaybackView: View { player: player, 
layout: $layout, isMonoscopic: isMonoscopic, - supportsPictureInPicture: supportsPictureInPicture + supportsPictureInPicture: supportsPictureInPicture, + progressTracker: progressTracker ) #else if isMonoscopic { @@ -656,12 +734,15 @@ .ignoresSafeArea() } else { - SystemVideoView(player: player) - .supportsPictureInPicture(supportsPictureInPicture) - .ignoresSafeArea() + MainSystemView( + player: player, + supportsPictureInPicture: supportsPictureInPicture, + progressTracker: progressTracker + ) } #endif } + ._debugBodyCounter() } } @@ -686,6 +767,14 @@ } } +private extension Player { + func skippableTimeRange(at time: CMTime) -> TimeRange? { + metadata.timeRanges.first { timeRange in + timeRange.containsTime(time) + } + } +} + #Preview { PlaybackView(player: Player(item: Media(from: URLTemplate.onDemandVideoLocalHLS).playerItem())) } diff --git a/Sources/CoreBusiness/Model/BlockingReason.swift b/Sources/CoreBusiness/Model/MediaComposition+BlockingReason.swift similarity index 100% rename from Sources/CoreBusiness/Model/BlockingReason.swift rename to Sources/CoreBusiness/Model/MediaComposition+BlockingReason.swift diff --git a/Sources/CoreBusiness/Model/Chapter.swift b/Sources/CoreBusiness/Model/MediaComposition+Chapter.swift similarity index 91% rename from Sources/CoreBusiness/Model/Chapter.swift rename to Sources/CoreBusiness/Model/MediaComposition+Chapter.swift index a5d00897a..36f276510 100644 --- a/Sources/CoreBusiness/Model/Chapter.swift +++ b/Sources/CoreBusiness/Model/MediaComposition+Chapter.swift @@ -17,6 +17,7 @@ public extension MediaComposition { case _segments = "segmentList" case _markIn = "fullLengthMarkIn" case _markOut = "fullLengthMarkOut" + case _timeIntervals = "timeIntervalList" case blockingReason = "blockReason" case contentType = "type" case date @@ -77,6 +78,11 @@ _analyticsMetadata ?? [:] } + /// The time intervals associated with the chapter.
+ public var timeIntervals: [TimeInterval] { + _timeIntervals ?? [] + } + /// Time range associated with the chapter. public var timeRange: CMTimeRange { guard let _markIn, let _markOut else { return .zero } @@ -98,6 +104,9 @@ public extension MediaComposition { // swiftlint:disable:next discouraged_optional_collection private let _resources: [Resource]? + // swiftlint:disable:next discouraged_optional_collection + private let _timeIntervals: [TimeInterval]? + private let _markIn: Int64? private let _markOut: Int64? } diff --git a/Sources/CoreBusiness/Model/ContentType.swift b/Sources/CoreBusiness/Model/MediaComposition+ContentType.swift similarity index 100% rename from Sources/CoreBusiness/Model/ContentType.swift rename to Sources/CoreBusiness/Model/MediaComposition+ContentType.swift diff --git a/Sources/CoreBusiness/Model/Episode.swift b/Sources/CoreBusiness/Model/MediaComposition+Episode.swift similarity index 100% rename from Sources/CoreBusiness/Model/Episode.swift rename to Sources/CoreBusiness/Model/MediaComposition+Episode.swift diff --git a/Sources/CoreBusiness/Model/MediaType.swift b/Sources/CoreBusiness/Model/MediaComposition+MediaType.swift similarity index 100% rename from Sources/CoreBusiness/Model/MediaType.swift rename to Sources/CoreBusiness/Model/MediaComposition+MediaType.swift diff --git a/Sources/CoreBusiness/Model/Resource.swift b/Sources/CoreBusiness/Model/MediaComposition+Resource.swift similarity index 100% rename from Sources/CoreBusiness/Model/Resource.swift rename to Sources/CoreBusiness/Model/MediaComposition+Resource.swift diff --git a/Sources/CoreBusiness/Model/Segment.swift b/Sources/CoreBusiness/Model/MediaComposition+Segment.swift similarity index 100% rename from Sources/CoreBusiness/Model/Segment.swift rename to Sources/CoreBusiness/Model/MediaComposition+Segment.swift diff --git a/Sources/CoreBusiness/Model/Show.swift b/Sources/CoreBusiness/Model/MediaComposition+Show.swift similarity index 100% rename from 
Sources/CoreBusiness/Model/Show.swift rename to Sources/CoreBusiness/Model/MediaComposition+Show.swift diff --git a/Sources/CoreBusiness/Model/MediaComposition+TimeInterval.swift b/Sources/CoreBusiness/Model/MediaComposition+TimeInterval.swift new file mode 100644 index 000000000..88f0f1fa1 --- /dev/null +++ b/Sources/CoreBusiness/Model/MediaComposition+TimeInterval.swift @@ -0,0 +1,41 @@ +// +// Copyright (c) SRG SSR. All rights reserved. +// +// License information is available from the LICENSE file. +// + +import CoreMedia + +public extension MediaComposition { + /// A time interval. + struct TimeInterval: Decodable { + enum CodingKeys: String, CodingKey { + case kind = "type" + case _markIn = "markIn" + case _markOut = "markOut" + } + + /// A kind of time interval. + public enum Kind: String, Decodable { + /// Opening credits. + case openingCredits = "OPENING_CREDITS" + + /// Closing credits. + case closingCredits = "CLOSING_CREDITS" + } + + /// The kind of interval. + public let kind: Kind + + /// The associated time range.
+ public var timeRange: CMTimeRange { + CMTimeRange( + start: .init(value: _markIn, timescale: 1000), + end: .init(value: _markOut, timescale: 1000) + ) + } + + private let _markIn: Int64 + private let _markOut: Int64 + } +} diff --git a/Sources/CoreBusiness/Model/MediaMetadata.swift b/Sources/CoreBusiness/Model/MediaMetadata.swift index 5be330aef..5988e9b3c 100644 --- a/Sources/CoreBusiness/Model/MediaMetadata.swift +++ b/Sources/CoreBusiness/Model/MediaMetadata.swift @@ -70,7 +70,8 @@ extension MediaMetadata: AssetMetadata { description: description, image: artworkImage(for: mediaComposition.mainChapter), episodeInformation: episodeInformation, - chapters: chapters + chapters: chapters, + timeRanges: timeRanges ) } @@ -126,6 +127,17 @@ extension MediaMetadata: AssetMetadata { } } + private var timeRanges: [TimeRange] { + mediaComposition.mainChapter.timeIntervals.map { interval in + switch interval.kind { + case .openingCredits: + TimeRange(kind: .credits(.opening), start: interval.timeRange.start, end: interval.timeRange.end) + case .closingCredits: + TimeRange(kind: .credits(.closing), start: interval.timeRange.start, end: interval.timeRange.end) + } + } + } + private func image(for chapter: MediaComposition.Chapter) -> UIImage? { imageCatalog.image(for: chapter.urn) } diff --git a/Sources/Player/Types/PlayerMetadata.swift b/Sources/Player/Types/PlayerMetadata.swift index 6d08a964a..58b423504 100644 --- a/Sources/Player/Types/PlayerMetadata.swift +++ b/Sources/Player/Types/PlayerMetadata.swift @@ -36,9 +36,12 @@ public struct PlayerMetadata: Equatable { /// Episode information associated with the content. public let episodeInformation: EpisodeInformation? - /// Chapter associated with the content. + /// Chapters associated with the content. public let chapters: [Chapter] + /// Time ranges associated with the content. + public let timeRanges: [TimeRange] + var episodeDescription: String? 
{ switch episodeInformation { case let .long(season: season, episode: episode): @@ -85,6 +88,7 @@ public struct PlayerMetadata: Equatable { /// - image: The image associated with the content. /// - episodeInformation: Episode information associated with the content. /// - chapters: Chapter associated with the content. + /// - timeRanges: Time ranges associated with the content. public init( identifier: String? = nil, title: String? = nil, @@ -92,7 +96,8 @@ public struct PlayerMetadata: Equatable { description: String? = nil, image: UIImage? = nil, episodeInformation: EpisodeInformation? = nil, - chapters: [Chapter] = [] + chapters: [Chapter] = [], + timeRanges: [TimeRange] = [] ) { self.identifier = identifier self.title = title @@ -101,5 +106,6 @@ public struct PlayerMetadata: Equatable { self.image = image self.episodeInformation = episodeInformation self.chapters = chapters + self.timeRanges = timeRanges } } diff --git a/Sources/Player/Types/QueuePlayer.swift b/Sources/Player/Types/QueuePlayer.swift index feee0f14b..6b0f1f583 100644 --- a/Sources/Player/Types/QueuePlayer.swift +++ b/Sources/Player/Types/QueuePlayer.swift @@ -14,7 +14,7 @@ class QueuePlayer: AVQueuePlayer { // Starting with iOS 17 accessing media selection criteria might be slow. Use a cache for the lifetime of the // player. - var mediaSelectionCriteria: [AVMediaCharacteristic: AVPlayerMediaSelectionCriteria?] = [:] + private var mediaSelectionCriteria: [AVMediaCharacteristic: AVPlayerMediaSelectionCriteria?] = [:] private var targetSeek: Seek? { pendingSeeks.last diff --git a/Sources/Player/Types/TimeRange.swift b/Sources/Player/Types/TimeRange.swift new file mode 100644 index 000000000..d4eb13b05 --- /dev/null +++ b/Sources/Player/Types/TimeRange.swift @@ -0,0 +1,77 @@ +// +// Copyright (c) SRG SSR. All rights reserved. +// +// License information is available from the LICENSE file. +// + +import CoreMedia + +/// Represents a time range. 
+public struct TimeRange: Hashable, Equatable { + /// The range type. + public enum Kind: Hashable, Equatable { + /// Credits. + case credits(Credits) + } + + /// The credits type. + public enum Credits { + /// Opening. + case opening + /// Closing. + case closing + } + + private let timeRange: CMTimeRange + + /// The kind of the time range. + public let kind: Kind + + /// The start time of the time range. + public var start: CMTime { + timeRange.start + } + + /// The end time of the time range. + public var end: CMTime { + timeRange.end + } + + /// The duration of the time range. + public var duration: CMTime { + timeRange.duration + } + + private init(kind: Kind, timeRange: CMTimeRange) { + self.kind = kind + self.timeRange = timeRange + } + + /// Creates a time range with a start time and duration. + /// + /// - Parameters: + /// - kind: The kind of the time range. + /// - start: The start time of the time range. + /// - duration: The duration of the time range. + public init(kind: Kind, start: CMTime, duration: CMTime) { + self.init(kind: kind, timeRange: .init(start: start, duration: duration)) + } + + /// Creates a time range from a start and end time. + /// + /// - Parameters: + /// - kind: The kind of the time range. + /// - start: The start time of the time range. + /// - end: The end time of the time range. + public init(kind: Kind, start: CMTime, end: CMTime) { + self.init(kind: kind, timeRange: .init(start: start, end: end)) + } + + /// Returns a Boolean value that indicates whether the time range contains a time. + /// + /// - Parameter time: A time value to test for in the time range. + /// - Returns: true if the time range contains the time value; otherwise, false.
+ public func containsTime(_ time: CMTime) -> Bool { + timeRange.containsTime(time) + } +} diff --git a/Sources/Player/UserInterface/BasicSystemVideoView.swift b/Sources/Player/UserInterface/BasicSystemVideoView.swift index d82b1137d..d57082b5c 100644 --- a/Sources/Player/UserInterface/BasicSystemVideoView.swift +++ b/Sources/Player/UserInterface/BasicSystemVideoView.swift @@ -10,6 +10,7 @@ import SwiftUI struct BasicSystemVideoView: UIViewControllerRepresentable { let player: Player let gravity: AVLayerVideoGravity + let contextualActions: [UIAction] #if os(tvOS) func makeCoordinator() -> AVPlayerViewControllerSpeedCoordinator { @@ -27,6 +28,7 @@ struct BasicSystemVideoView: UIViewControllerRepresentable { uiViewController.player = player.systemPlayer uiViewController.videoGravity = gravity #if os(tvOS) + uiViewController.contextualActions = contextualActions context.coordinator.player = player context.coordinator.controller = uiViewController #endif diff --git a/Sources/Player/UserInterface/ContextualAction.swift b/Sources/Player/UserInterface/ContextualAction.swift new file mode 100644 index 000000000..5d5c05e17 --- /dev/null +++ b/Sources/Player/UserInterface/ContextualAction.swift @@ -0,0 +1,31 @@ +// +// Copyright (c) SRG SSR. All rights reserved. +// +// License information is available from the LICENSE file. +// + +import UIKit + +/// Actions to present contextually during playback. +public struct ContextualAction { + /// Short display title. + public let title: String + + /// Image that can appear next to this action. + public let image: UIImage? + + /// The handler to invoke. + public let handler: () -> Void + + /// Creates a new contextual action. + /// + /// - Parameters: + /// - title: Short display title. + /// - image: Image that can appear next to this action. + /// - handler: The handler to invoke. + public init(title: String, image: UIImage? 
= nil, handler: @escaping () -> Void) { + self.title = title + self.image = image + self.handler = handler + } +} diff --git a/Sources/Player/UserInterface/PictureInPictureSupportingSystemVideoView.swift b/Sources/Player/UserInterface/PictureInPictureSupportingSystemVideoView.swift index 324180301..65e08d190 100644 --- a/Sources/Player/UserInterface/PictureInPictureSupportingSystemVideoView.swift +++ b/Sources/Player/UserInterface/PictureInPictureSupportingSystemVideoView.swift @@ -28,6 +28,7 @@ struct PictureInPictureSupportingSystemVideoView: UIViewControllerRepresentable let player: Player let gravity: AVLayerVideoGravity + let contextualActions: [UIAction] static func dismantleUIViewController(_ uiViewController: AVPlayerViewController, coordinator: Coordinator) { PictureInPicture.shared.system.relinquish(for: uiViewController) @@ -50,6 +51,7 @@ struct PictureInPictureSupportingSystemVideoView: UIViewControllerRepresentable uiViewController.player = player.systemPlayer uiViewController.videoGravity = gravity #if os(tvOS) + uiViewController.contextualActions = contextualActions context.coordinator.player = player context.coordinator.controller = uiViewController #endif diff --git a/Sources/Player/UserInterface/SystemVideoView.swift b/Sources/Player/UserInterface/SystemVideoView.swift index 892c277ff..ddb052ac8 100644 --- a/Sources/Player/UserInterface/SystemVideoView.swift +++ b/Sources/Player/UserInterface/SystemVideoView.swift @@ -13,14 +13,15 @@ public struct SystemVideoView: View { private var gravity: AVLayerVideoGravity = .resizeAspect private var supportsPictureInPicture = false + private var contextualActions: [UIAction] = [] public var body: some View { ZStack { if supportsPictureInPicture { - PictureInPictureSupportingSystemVideoView(player: player, gravity: gravity) + PictureInPictureSupportingSystemVideoView(player: player, gravity: gravity, contextualActions: contextualActions) } else { - BasicSystemVideoView(player: player, gravity: gravity) + 
BasicSystemVideoView(player: player, gravity: gravity, contextualActions: contextualActions) } } .onAppear { @@ -61,4 +62,19 @@ public extension SystemVideoView { view.supportsPictureInPicture = supportsPictureInPicture return view } + + /// Actions to present contextually during playback. + /// + /// - Parameter contextualActions: An array of action controls to present contextually during playback. + @available(iOS, unavailable) + @available(tvOS 16, *) + func contextualActions(_ contextualActions: [ContextualAction]) -> SystemVideoView { + var view = self + view.contextualActions = contextualActions.map { action in + UIAction(title: action.title, image: action.image, identifier: .init(rawValue: action.title)) { _ in + action.handler() + } + } + return view + } }