1
0
mirror of git://jb55.com/damus synced 2024-10-06 11:43:21 +00:00

video-player: hide mute button when we have no audio

This commit is contained in:
William Casarin 2023-05-30 17:58:16 -07:00
parent 9e359650bf
commit 554c091d57
2 changed files with 44 additions and 11 deletions

View File

@@ -13,13 +13,22 @@ struct DamusVideoPlayer: View {
@Binding var video_size: CGSize? @Binding var video_size: CGSize?
/// SF Symbol name for the mute button: a slashed speaker when the player is
/// muted or when the video has no audio track at all (has_audio == false).
/// NOTE(review): `has_audio` is Optional — while it is still nil (unknown),
/// only `muted` decides the icon.
var mute_icon: String {
    if model.muted || model.has_audio == false {
        return "speaker.slash"
    }
    return "speaker"
}
/// Tint for the mute icon.
///
/// White while we don't yet know whether the video has audio (`has_audio`
/// is nil) and when audio is present; red only once we know for sure the
/// video has no audio track.
var mute_icon_color: Color {
    guard let has_audio = model.has_audio else {
        return .white
    }
    return has_audio ? .white : .red
}
var MuteIcon: some View { var MuteIcon: some View {
ZStack { ZStack {
Circle() Circle()
@@ -29,7 +38,7 @@ struct DamusVideoPlayer: View {
Image(systemName: mute_icon) Image(systemName: mute_icon)
.padding() .padding()
.foregroundColor(.white) .foregroundColor(mute_icon_color)
} }
} }
@@ -43,10 +52,13 @@ struct DamusVideoPlayer: View {
model.stop() model.stop()
} }
MuteIcon if model.has_audio == true {
} MuteIcon
.onTapGesture { .zIndex(11.0)
self.model.muted = !self.model.muted .onTapGesture {
self.model.muted = !self.model.muted
}
}
} }
.onChange(of: model.size) { size in .onChange(of: model.size) { size in
guard let size else { guard let size else {

View File

@@ -44,7 +44,9 @@ public class VideoPlayerModel: ObservableObject {
@Published var muted: Bool = true @Published var muted: Bool = true
@Published var play: Bool = true @Published var play: Bool = true
@Published var size: CGSize? = nil @Published var size: CGSize? = nil
@Published var has_audio: Bool? = nil
@Published var contentMode: UIView.ContentMode = .scaleAspectFill @Published var contentMode: UIView.ContentMode = .scaleAspectFill
var time: CMTime = CMTime() var time: CMTime = CMTime()
var handlers: [VideoHandler] = [] var handlers: [VideoHandler] = []
@@ -168,6 +170,16 @@ public extension VideoPlayer {
} }
/// The natural size of the player's current video frame, or nil when no
/// frame is available yet.
/// TODO: make this async?
func get_video_size(player: AVPlayer) -> CGSize? {
    guard let frame = player.currentImage else {
        return nil
    }
    return frame.size
}
/// Whether the player's current item contains at least one audio track.
///
/// Loads the asset's tracks asynchronously and checks for any track whose
/// media type is `.audio`. Returns `false` when the tracks cannot be loaded
/// (load throws, or there is no current item), preserving the original
/// best-effort behavior of treating "unknown" as "no audio".
func video_has_audio(player: AVPlayer) async -> Bool {
    let tracks = try? await player.currentItem?.asset.load(.tracks)
    // contains(where:) short-circuits; filter(...).first != nil built the
    // whole filtered array just to test emptiness.
    return tracks?.contains(where: { $0.mediaType == .audio }) ?? false
}
@available(iOS 13, *) @available(iOS 13, *)
extension VideoPlayer: UIViewRepresentable { extension VideoPlayer: UIViewRepresentable {
@@ -204,14 +216,24 @@ extension VideoPlayer: UIViewRepresentable {
if case .playing = state { if case .playing = state {
context.coordinator.startObserver(uiView: uiView) context.coordinator.startObserver(uiView: uiView)
if let player = uiView.player {
Task {
let has_audio = await video_has_audio(player: player)
let size = get_video_size(player: player)
Task { @MainActor in
if let size {
self.model.size = size
}
self.model.has_audio = has_audio
}
}
}
} else { } else {
context.coordinator.stopObserver(uiView: uiView) context.coordinator.stopObserver(uiView: uiView)
} }
if model.size == nil, let size = uiView.player?.currentImage?.size {
model.size = size
}
DispatchQueue.main.async { DispatchQueue.main.async {
for handler in model.handlers { for handler in model.handlers {
if case .onStateChanged(let cb) = handler { if case .onStateChanged(let cb) = handler {
@@ -240,7 +262,6 @@ extension VideoPlayer: UIViewRepresentable {
uiView.pause(reason: .userInteraction) uiView.pause(reason: .userInteraction)
} }
print("intrinsic video size \(uiView.intrinsicContentSize)")
uiView.isMuted = model.muted uiView.isMuted = model.muted
uiView.isAutoReplay = model.autoReplay uiView.isAutoReplay = model.autoReplay