Mirror of https://github.com/element-hq/element-x-ios.git, synced 2025-03-10 21:39:12 +00:00
Add support for showing media playback controls on the lock screen
commit a4166de502 (parent d0d24e2d09)
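For orientation before the diff: lock-screen (and Control Center) playback controls on iOS are driven by two MediaPlayer framework singletons — MPRemoteCommandCenter for the buttons and MPNowPlayingInfoCenter for the metadata shown next to them. Below is a minimal, self-contained sketch of that mechanism; the wrapper type and method names are hypothetical, and only the framework calls mirror what this commit adds to AudioPlayerState.

import MediaPlayer
import UIKit

// Minimal sketch: how an app surfaces lock-screen playback controls.
// `LockScreenAudioSketch` is a hypothetical wrapper, not a type from this repo.
final class LockScreenAudioSketch {
    func showControls(title: String, duration: Double, elapsed: Double) {
        // The app must opt in to receiving remote control events first.
        UIApplication.shared.beginReceivingRemoteControlEvents()

        // Lock-screen buttons fire through MPRemoteCommandCenter targets.
        let commandCenter = MPRemoteCommandCenter.shared()
        commandCenter.playCommand.isEnabled = true
        commandCenter.playCommand.addTarget { _ in
            // Resume playback here; return .commandFailed if no player exists.
            .success
        }

        // The title and progress shown on the lock screen come from the now-playing info.
        MPNowPlayingInfoCenter.default().nowPlayingInfo = [
            MPMediaItemPropertyTitle: title,
            MPMediaItemPropertyPlaybackDuration: duration,
            MPNowPlayingInfoPropertyElapsedPlaybackTime: elapsed
        ]
    }

    func hideControls() {
        // Mirror image of the setup: clear metadata and stop receiving events.
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
        UIApplication.shared.endReceivingRemoteControlEvents()
    }
}

The diff below threads a title through AudioPlayerState so the player has something to publish as MPMediaItemPropertyTitle.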
@@ -273,9 +273,7 @@ class OnboardingFlowCoordinator: FlowCoordinatorProtocol {
         let coordinator = SessionVerificationScreenCoordinator(parameters: parameters)
         
         coordinator.actions
-            .sink { [weak self] action in
-                guard let self else { return }
-                
+            .sink { action in
                 switch action {
                 case .done:
                     break // Moving to next state is handled by the global session verification listener
@@ -55,7 +55,9 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
         mentionBuilder = MentionBuilder()
         attributedStringBuilder = AttributedStringBuilder(cacheKey: "Composer", mentionBuilder: mentionBuilder)
         
-        super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview, duration: 0),
+        super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview,
+                                                                                      title: L10n.commonVoiceMessage,
+                                                                                      duration: 0),
                                                               audioRecorderState: .init(),
                                                               bindings: .init()),
                    mediaProvider: mediaProvider)
@@ -392,7 +392,11 @@ extension ComposerToolbar {
                                                    mentionDisplayHelper: ComposerMentionDisplayHelper.mock,
                                                    analyticsService: ServiceLocator.shared.analytics,
                                                    composerDraftService: ComposerDraftServiceMock())
-        model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: uploading)
+        model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview,
+                                                                                title: L10n.commonVoiceMessage,
+                                                                                duration: 10.0),
+                                                        waveform: .data(waveformData),
+                                                        isUploading: uploading)
        return model
    }
    return ComposerToolbar(context: composerViewModel.context,
@@ -97,6 +97,7 @@ private extension DateFormatter {
 
 struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
     static let playerState = AudioPlayerState(id: .recorderPreview,
+                                              title: L10n.commonVoiceMessage,
                                               duration: 10.0,
                                               waveform: EstimatedWaveform.mockWaveform,
                                               progress: 0.4)
@@ -479,6 +479,7 @@ class TimelineInteractionHandler {
         }
         
         let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
+                                           title: L10n.commonVoiceMessage,
                                            duration: voiceMessageRoomTimelineItem.content.duration,
                                            waveform: voiceMessageRoomTimelineItem.content.waveform)
         mediaPlayerProvider.register(audioPlayerState: playerState)
@@ -425,7 +425,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
                                                           replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
                                                                                 eventID: "123",
                                                                                 eventContent: .message(.text(.init(body: "Short"))))),
-                                       playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
+                                       playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
+                                                                     title: L10n.commonVoiceMessage,
+                                                                     duration: 10,
+                                                                     waveform: EstimatedWaveform.mockWaveform))
         }
         .environmentObject(viewModel.context)
     }
@@ -543,7 +546,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
                                                                       source: nil,
                                                                       contentType: nil),
                                                  properties: RoomTimelineItemProperties(encryptionAuthenticity: .notGuaranteed(color: .gray))),
-                                       playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
+                                       playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
+                                                                     title: L10n.commonVoiceMessage,
+                                                                     duration: 10,
+                                                                     waveform: EstimatedWaveform.mockWaveform))
         }
         .environmentObject(viewModel.context)
     }
@@ -148,8 +148,8 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
         releaseAudioSessionTask = Task { [weak self] in
             try? await Task.sleep(for: .seconds(timeInterval))
             guard !Task.isCancelled else { return }
-            guard let self else { return }
-            self.releaseAudioSession()
+            
+            self?.releaseAudioSession()
         }
     }
 
@@ -180,10 +180,10 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
 
             switch playerItem.status {
             case .failed:
-                self.setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
+                setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
             case .readyToPlay:
                 guard state == .loading else { return }
-                self.setInternalState(.readyToPlay)
+                setInternalState(.readyToPlay)
             default:
                 break
             }
@@ -193,20 +193,20 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
                 guard let self else { return }
                 
                 if internalAudioPlayer.rate == 0 {
-                    if self.isStopped {
-                        self.setInternalState(.stopped)
+                    if isStopped {
+                        setInternalState(.stopped)
                     } else {
-                        self.setInternalState(.paused)
+                        setInternalState(.paused)
                     }
                 } else {
-                    self.setInternalState(.playing)
+                    setInternalState(.playing)
                 }
             }
 
         NotificationCenter.default.publisher(for: Notification.Name.AVPlayerItemDidPlayToEndTime)
             .sink { [weak self] _ in
                 guard let self else { return }
-                self.setInternalState(.finishedPlaying)
+                setInternalState(.finishedPlaying)
             }
             .store(in: &cancellables)
     }
@@ -7,6 +7,7 @@
 
 import Combine
 import Foundation
+import MediaPlayer
 import UIKit
 
 enum AudioPlayerPlaybackState {
@@ -25,16 +26,15 @@ enum AudioPlayerStateIdentifier {
 @MainActor
 class AudioPlayerState: ObservableObject, Identifiable {
     let id: AudioPlayerStateIdentifier
+    let title: String
     private(set) var duration: Double
     let waveform: EstimatedWaveform
+    @Published private(set) var progress: Double
+    
     @Published private(set) var playbackState: AudioPlayerPlaybackState
     /// It's similar to `playbackState`, with the a difference: `.loading`
     /// updates are delayed by a fixed amount of time
     @Published private(set) var playerButtonPlaybackState: AudioPlayerPlaybackState
-    @Published private(set) var progress: Double
-    var showProgressIndicator: Bool {
-        progress > 0
-    }
-    
     private weak var audioPlayer: AudioPlayerProtocol?
     private var audioPlayerSubscription: AnyCancellable?
@@ -44,6 +44,10 @@ class AudioPlayerState: ObservableObject, Identifiable {
     /// The file url that the last player attached to this object has loaded.
     /// The file url persists even if the AudioPlayer will be detached later.
     private(set) var fileURL: URL?
+    
+    var showProgressIndicator: Bool {
+        progress > 0
+    }
     
     var isAttached: Bool {
         audioPlayer != nil
@@ -53,8 +57,9 @@ class AudioPlayerState: ObservableObject, Identifiable {
         displayLink != nil
     }
     
-    init(id: AudioPlayerStateIdentifier, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
+    init(id: AudioPlayerStateIdentifier, title: String, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
         self.id = id
+        self.title = title
         self.duration = duration
         self.waveform = waveform ?? EstimatedWaveform(data: [])
         self.progress = progress
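Every construction of AudioPlayerState must now supply the new title: argument. In app code this commit passes L10n.commonVoiceMessage (the localized "Voice message" string); the tests further down pass an empty string. A representative call site, matching the shape used throughout the diff:

let state = AudioPlayerState(id: .recorderPreview,
                             title: L10n.commonVoiceMessage,
                             duration: 0)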
@@ -137,12 +142,19 @@ class AudioPlayerState: ObservableObject, Identifiable {
             }
             startPublishProgress()
             playbackState = .playing
-        case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
+            setUpRemoteCommandCenter()
+        case .didPausePlaying:
             stopPublishProgress()
             playbackState = .stopped
-            if case .didFinishPlaying = action {
-                progress = 0.0
-            }
+        case .didStopPlaying:
+            playbackState = .stopped
+            stopPublishProgress()
+            tearDownRemoteCommandCenter()
+        case .didFinishPlaying:
+            playbackState = .stopped
+            progress = 0.0
+            stopPublishProgress()
+            tearDownRemoteCommandCenter()
         case .didFailWithError:
             stopPublishProgress()
             playbackState = .error
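Note the design choice in this hunk: the previously combined case .didPausePlaying, .didStopPlaying, .didFinishPlaying: is split three ways. Pausing only stops progress publishing, so the lock-screen controls stay up and can resume playback, while stopping and finishing additionally call tearDownRemoteCommandCenter() to remove the controls and clear the now-playing metadata.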
@@ -163,6 +175,8 @@ class AudioPlayerState: ObservableObject, Identifiable {
         if let currentTime = audioPlayer?.currentTime, duration > 0 {
             progress = currentTime / duration
         }
+        
+        updateNowPlayingInfoCenter()
     }
     
     private func stopPublishProgress() {
@@ -191,6 +205,93 @@ class AudioPlayerState: ObservableObject, Identifiable {
             .removeDuplicates()
             .weakAssign(to: \.playerButtonPlaybackState, on: self)
     }
+    
+    private func setUpRemoteCommandCenter() {
+        UIApplication.shared.beginReceivingRemoteControlEvents()
+        
+        let commandCenter = MPRemoteCommandCenter.shared()
+        
+        commandCenter.playCommand.isEnabled = true
+        commandCenter.playCommand.removeTarget(nil)
+        commandCenter.playCommand.addTarget { [weak self] _ in
+            guard let audioPlayer = self?.audioPlayer else {
+                return MPRemoteCommandHandlerStatus.commandFailed
+            }
+            
+            audioPlayer.play()
+            
+            return MPRemoteCommandHandlerStatus.success
+        }
+        
+        commandCenter.pauseCommand.isEnabled = true
+        commandCenter.pauseCommand.removeTarget(nil)
+        commandCenter.pauseCommand.addTarget { [weak self] _ in
+            guard let audioPlayer = self?.audioPlayer else {
+                return MPRemoteCommandHandlerStatus.commandFailed
+            }
+            
+            audioPlayer.pause()
+            
+            return MPRemoteCommandHandlerStatus.success
+        }
+        
+        commandCenter.skipForwardCommand.isEnabled = true
+        commandCenter.skipForwardCommand.removeTarget(nil)
+        commandCenter.skipForwardCommand.addTarget { [weak self] event in
+            guard let audioPlayer = self?.audioPlayer, let skipEvent = event as? MPSkipIntervalCommandEvent else {
+                return MPRemoteCommandHandlerStatus.commandFailed
+            }
+            
+            Task {
+                await audioPlayer.seek(to: audioPlayer.currentTime + skipEvent.interval)
+            }
+            
+            return MPRemoteCommandHandlerStatus.success
+        }
+        
+        commandCenter.skipBackwardCommand.isEnabled = true
+        commandCenter.skipBackwardCommand.removeTarget(nil)
+        commandCenter.skipBackwardCommand.addTarget { [weak self] event in
+            guard let audioPlayer = self?.audioPlayer, let skipEvent = event as? MPSkipIntervalCommandEvent else {
+                return MPRemoteCommandHandlerStatus.commandFailed
+            }
+            
+            Task {
+                await audioPlayer.seek(to: audioPlayer.currentTime - skipEvent.interval)
+            }
+            
+            return MPRemoteCommandHandlerStatus.success
+        }
+    }
+    
+    private func tearDownRemoteCommandCenter() {
+        UIApplication.shared.endReceivingRemoteControlEvents()
+        
+        let nowPlayingInfoCenter = MPNowPlayingInfoCenter.default()
+        nowPlayingInfoCenter.nowPlayingInfo = nil
+        nowPlayingInfoCenter.playbackState = .stopped
+        
+        let commandCenter = MPRemoteCommandCenter.shared()
+        commandCenter.playCommand.isEnabled = false
+        commandCenter.playCommand.removeTarget(nil)
+        commandCenter.pauseCommand.isEnabled = false
+        commandCenter.pauseCommand.removeTarget(nil)
+        commandCenter.skipForwardCommand.isEnabled = false
+        commandCenter.skipForwardCommand.removeTarget(nil)
+        commandCenter.skipBackwardCommand.isEnabled = false
+        commandCenter.skipBackwardCommand.removeTarget(nil)
+    }
+    
+    private func updateNowPlayingInfoCenter() {
+        guard let audioPlayer else {
+            return
+        }
+        
+        let nowPlayingInfoCenter = MPNowPlayingInfoCenter.default()
+        nowPlayingInfoCenter.nowPlayingInfo = [MPMediaItemPropertyTitle: title,
+                                               MPMediaItemPropertyPlaybackDuration: audioPlayer.duration as Any,
+                                               MPNowPlayingInfoPropertyElapsedPlaybackTime: audioPlayer.currentTime as Any]
+    }
 }
 
 extension AudioPlayerState: Equatable {
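Two details worth noting in the block above: each command does removeTarget(nil) before addTarget, so repeated didStartPlaying actions never stack duplicate handlers on the shared command center; and updateNowPlayingInfoCenter() is called from the progress-publishing path (see the @@ -163,6 +175,8 @@ hunk), which keeps the lock screen's elapsed-time display in sync while audio plays.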
@@ -98,6 +98,7 @@ struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
                                                           0, 0, 0, 0, 0, 3])
     
     static var playerState = AudioPlayerState(id: .timelineItemIdentifier(.random),
+                                              title: L10n.commonVoiceMessage,
                                               duration: 10.0,
                                               waveform: waveform,
                                               progress: 0.3)
@@ -70,6 +70,7 @@ struct VoiceMessageRoomTimelineView_Previews: PreviewProvider, TestablePreview {
                                              contentType: nil))
     
     static let playerState = AudioPlayerState(id: .timelineItemIdentifier(timelineItemIdentifier),
+                                              title: L10n.commonVoiceMessage,
                                               duration: 10.0,
                                               waveform: EstimatedWaveform.mockWaveform,
                                               progress: 0.4)
@@ -64,7 +64,9 @@ struct RoomTimelineItemView: View {
         case .poll(let item):
             PollRoomTimelineView(timelineItem: item)
         case .voice(let item):
-            VoiceMessageRoomTimelineView(timelineItem: item, playerState: context?.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(id: .timelineItemIdentifier(item.id), duration: 0))
+            VoiceMessageRoomTimelineView(timelineItem: item, playerState: context?.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(id: .timelineItemIdentifier(item.id),
+                                                                                                                                                    title: L10n.commonVoiceMessage,
+                                                                                                                                                    duration: 0))
         case .callInvite(let item):
             CallInviteRoomTimelineView(timelineItem: item)
         case .callNotification(let item):
@@ -239,7 +239,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
         }
         
         // Build the preview audio player state
-        previewAudioPlayerState = await AudioPlayerState(id: .recorderPreview, duration: recordingDuration, waveform: EstimatedWaveform(data: []))
+        previewAudioPlayerState = await AudioPlayerState(id: .recorderPreview, title: L10n.commonVoiceMessage, duration: recordingDuration, waveform: EstimatedWaveform(data: []))
        
         // Build the preview audio player
         let mediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
@@ -28,6 +28,7 @@ class AudioPlayerStateTests: XCTestCase {
         audioPlayerMock.underlyingActions = audioPlayerActions
         audioPlayerMock.state = .stopped
         audioPlayerMock.currentTime = 0.0
+        audioPlayerMock.duration = 0.0
         audioPlayerMock.seekToClosure = { [audioPlayerSeekCallsSubject] progress in
             audioPlayerSeekCallsSubject?.send(progress)
         }
@@ -37,7 +38,7 @@ class AudioPlayerStateTests: XCTestCase {
     override func setUp() async throws {
         audioPlayerActionsSubject = .init()
         audioPlayerSeekCallsSubject = .init()
-        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: Self.audioDuration)
+        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: Self.audioDuration)
         audioPlayerMock = buildAudioPlayerMock()
         audioPlayerMock.seekToClosure = { [weak self] progress in
             self?.audioPlayerMock.currentTime = Self.audioDuration * progress
@@ -161,7 +162,7 @@ class AudioPlayerStateTests: XCTestCase {
     func testHandlingAudioPlayerActionDidFinishLoading() async throws {
         audioPlayerMock.duration = 10.0
         
-        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0)
+        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: 0)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
         
         let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
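The unit tests here and below pass title: "" rather than a localized string: the title only feeds the lock-screen metadata via updateNowPlayingInfoCenter(), and none of these assertions inspect it.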
@@ -323,7 +323,9 @@ class ComposerToolbarViewModelTests: XCTestCase {
         viewModel.context.composerFormattingEnabled = false
         let waveformData: [Float] = Array(repeating: 1.0, count: 1000)
         viewModel.context.plainComposerText = .init(string: "Hello world!")
-        viewModel.process(timelineAction: .setMode(mode: .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: false)))
+        viewModel.process(timelineAction: .setMode(mode: .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, title: "", duration: 10.0),
+                                                                              waveform: .data(waveformData),
+                                                                              isUploading: false)))
         viewModel.saveDraft()
         
         await fulfillment(of: [expectation], timeout: 10)
@@ -64,7 +64,7 @@ class MediaPlayerProviderTests: XCTestCase {
         // By default, there should be no player state
         XCTAssertNil(mediaPlayerProvider.playerState(for: audioPlayerStateId))
         
-        let audioPlayerState = AudioPlayerState(id: audioPlayerStateId, duration: 10.0)
+        let audioPlayerState = AudioPlayerState(id: audioPlayerStateId, title: "", duration: 10.0)
         mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
         XCTAssertEqual(audioPlayerState, mediaPlayerProvider.playerState(for: audioPlayerStateId))
 
@@ -76,7 +76,7 @@ class MediaPlayerProviderTests: XCTestCase {
         let audioPlayer = AudioPlayerMock()
         audioPlayer.actions = PassthroughSubject<AudioPlayerAction, Never>().eraseToAnyPublisher()
         
-        let audioPlayerStates = Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0), count: 10)
+        let audioPlayerStates = Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: 0), count: 10)
         for audioPlayerState in audioPlayerStates {
             mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
             audioPlayerState.attachAudioPlayer(audioPlayer)
@@ -95,7 +95,7 @@ class MediaPlayerProviderTests: XCTestCase {
         let audioPlayer = AudioPlayerMock()
         audioPlayer.actions = PassthroughSubject<AudioPlayerAction, Never>().eraseToAnyPublisher()
         
-        let audioPlayerStates = Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0), count: 10)
+        let audioPlayerStates = Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: 0), count: 10)
         for audioPlayerState in audioPlayerStates {
             mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
             audioPlayerState.attachAudioPlayer(audioPlayer)