Fix: Keep the progress indicator visible after pausing or scrubbing a voice message. (#1969)

Nicolas Mauri 2023-10-27 17:24:22 +02:00 committed by GitHub
parent 0b31446817
commit a32a7c8f2c
12 changed files with 110 additions and 23 deletions
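
At a high level, the hunks below add a published showProgressIndicator flag to AudioPlayerState (raised on play and on seek, lowered only when playback finishes or the player is detached) and thread a new scrubbing callback from the waveform composer up to RoomScreenViewModel, which pauses playback while the user drags and resumes it afterwards. The following is a minimal sketch of the indicator's intended lifecycle, using a simplified stand-in type rather than the app's real AudioPlayerState:

import Combine

// Simplified stand-in (not the app's real AudioPlayerState): the indicator becomes
// visible when playback starts or the position changes, and is hidden again only
// when playback finishes or the player is detached -- not when playback is paused.
final class IndicatorStateSketch: ObservableObject {
    @Published private(set) var progress: Double = 0
    @Published private(set) var showProgressIndicator = false

    func didStartPlaying() { showProgressIndicator = true }

    func didSeek(to newProgress: Double) {
        progress = max(0, min(newProgress, 1))
        showProgressIndicator = true
    }

    func didPause() { /* the indicator intentionally stays visible */ }

    func didFinishPlaying() {
        progress = 0
        showProgressIndicator = false
    }

    func didDetach() { showProgressIndicator = false }
}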

View File

@@ -35,11 +35,6 @@ private struct WaveformInteractionModifier: ViewModifier {
func body(content: Content) -> some View {
GeometryReader { geometry in
content
.gesture(SpatialTapGesture()
.onEnded { tapGesture in
let progress = tapGesture.location.x / geometry.size.width
onSeek(max(0, min(progress, 1.0)))
})
.progressCursor(progress: progress) {
WaveformCursorView(color: .compound.iconAccentTertiary)
.frame(width: cursorVisibleWidth, height: cursorVisibleHeight)
@@ -56,6 +51,11 @@ private struct WaveformInteractionModifier: ViewModifier {
)
.offset(x: -cursorInteractiveSize / 2, y: 0)
}
.gesture(SpatialTapGesture()
.onEnded { tapGesture in
let progress = tapGesture.location.x / geometry.size.width
onSeek(max(0, min(progress, 1.0)))
})
}
.coordinateSpace(name: Self.namespaceName)
.animation(nil, value: progress)

View File

@@ -37,6 +37,7 @@ enum ComposerToolbarViewModelAction {
case deleteVoiceMessageRecording
case startVoiceMessagePlayback
case pauseVoiceMessagePlayback
case scrubVoiceMessagePlayback(scrubbing: Bool)
case seekVoiceMessagePlayback(progress: Double)
case sendVoiceMessage
}
@@ -61,6 +62,7 @@ enum ComposerToolbarViewAction {
case deleteVoiceMessageRecording
case startVoiceMessagePlayback
case pauseVoiceMessagePlayback
case scrubVoiceMessagePlayback(scrubbing: Bool)
case seekVoiceMessagePlayback(progress: Double)
}

View File

@@ -155,6 +155,8 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
actionsSubject.send(.pauseVoiceMessagePlayback)
case .seekVoiceMessagePlayback(let progress):
actionsSubject.send(.seekVoiceMessagePlayback(progress: progress))
case .scrubVoiceMessagePlayback(let scrubbing):
actionsSubject.send(.scrubVoiceMessagePlayback(scrubbing: scrubbing))
}
}

View File

@@ -325,6 +325,8 @@ struct ComposerToolbar: View {
context.send(viewAction: .pauseVoiceMessagePlayback)
} onSeek: { progress in
context.send(viewAction: .seekVoiceMessagePlayback(progress: progress))
} onScrubbing: { isScrubbing in
context.send(viewAction: .scrubVoiceMessagePlayback(scrubbing: isScrubbing))
}
}
}

View File

@@ -30,6 +30,7 @@ struct VoiceMessagePreviewComposer: View {
let onPlay: () -> Void
let onPause: () -> Void
let onSeek: (Double) -> Void
let onScrubbing: (Bool) -> Void
var timeLabelContent: String {
// Display the duration if progress is 0.0
@@ -39,10 +40,6 @@ struct VoiceMessagePreviewComposer: View {
return DateFormatter.elapsedTimeFormatter.string(from: elapsed)
}
var showWaveformCursor: Bool {
playerState.playbackState == .playing || isDragging
}
var body: some View {
HStack {
HStack {
@@ -60,9 +57,12 @@ struct VoiceMessagePreviewComposer: View {
waveformView
.waveformInteraction(isDragging: $isDragging,
progress: playerState.progress,
showCursor: showWaveformCursor,
showCursor: playerState.showProgressIndicator,
onSeek: onSeek)
}
.onChange(of: isDragging) { isDragging in
onScrubbing(isDragging)
}
.padding(.vertical, 4.0)
.padding(.horizontal, 6.0)
.background {
@@ -133,7 +133,7 @@ struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
static var previews: some View {
VStack {
VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in })
VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in }, onScrubbing: { _ in })
.fixedSize(horizontal: false, vertical: true)
}
}

View File

@@ -41,6 +41,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
private let actionsSubject: PassthroughSubject<RoomScreenViewModelAction, Never> = .init()
private var canCurrentUserRedact = false
private var paginateBackwardsTask: Task<Void, Never>?
private var resumeVoiceMessagePlaybackAfterScrubbing = false
init(timelineController: RoomTimelineControllerProtocol,
mediaProvider: MediaProviderProtocol,
@@ -208,14 +209,13 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
case .sendVoiceMessage:
Task { await sendCurrentVoiceMessage() }
case .startVoiceMessagePlayback:
Task {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
await startPlayingRecordedVoiceMessage()
}
Task { await startPlayingRecordedVoiceMessage() }
case .pauseVoiceMessagePlayback:
pausePlayingRecordedVoiceMessage()
case .seekVoiceMessagePlayback(let progress):
Task { await seekRecordedVoiceMessage(to: progress) }
case .scrubVoiceMessagePlayback(let scrubbing):
Task { await scrubVoiceMessagePlayback(scrubbing: scrubbing) }
}
}
@@ -349,7 +349,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
switch await timelineController.sendReadReceipt(for: eventItemID) {
case .success():
case .success:
break
case let .failure(error):
MXLog.error("[TimelineViewController] Failed to send read receipt: \(error)")
@@ -1015,6 +1015,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
private func startPlayingRecordedVoiceMessage() async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
MXLog.error("failed to play recorded voice message. \(error)")
}
@@ -1025,9 +1026,27 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
private func seekRecordedVoiceMessage(to progress: Double) async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
await voiceMessageRecorder.seekPlayback(to: progress)
}
private func scrubVoiceMessagePlayback(scrubbing: Bool) async {
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
return
}
if scrubbing {
if audioPlayerState.playbackState == .playing {
resumeVoiceMessagePlaybackAfterScrubbing = true
pausePlayingRecordedVoiceMessage()
}
} else {
if resumeVoiceMessagePlaybackAfterScrubbing {
resumeVoiceMessagePlaybackAfterScrubbing = false
await startPlayingRecordedVoiceMessage()
}
}
}
private func openSystemSettings() {
guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
application.open(url)
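
The scrubVoiceMessagePlayback handling above pauses playback when a drag starts and resumes it only if the message was playing when the drag began. Below is a self-contained sketch of that pause/resume-around-scrubbing logic, using a hypothetical type rather than the real RoomScreenViewModel:

// Hypothetical sketch: call setScrubbing(true) when the waveform drag starts and
// setScrubbing(false) when it ends; playback resumes only if it was running before.
final class ScrubHandlerSketch {
    private(set) var isPlaying = false
    private var resumeAfterScrubbing = false

    func startPlayback() { isPlaying = true }
    func pausePlayback() { isPlaying = false }

    func setScrubbing(_ scrubbing: Bool) {
        if scrubbing {
            guard isPlaying else { return }
            resumeAfterScrubbing = true
            pausePlayback()
        } else if resumeAfterScrubbing {
            resumeAfterScrubbing = false
            startPlayback()
        }
    }
}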

View File

@@ -38,6 +38,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
let waveform: EstimatedWaveform
@Published private(set) var playbackState: AudioPlayerPlaybackState
@Published private(set) var progress: Double
@Published private(set) var showProgressIndicator: Bool
private weak var audioPlayer: AudioPlayerProtocol?
private var cancellables: Set<AnyCancellable> = []
@@ -60,6 +61,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
self.duration = duration
self.waveform = waveform ?? EstimatedWaveform(data: [])
self.progress = progress
showProgressIndicator = false
playbackState = .stopped
}
@@ -71,8 +73,17 @@ class AudioPlayerState: ObservableObject, Identifiable {
func updateState(progress: Double) async {
let progress = max(0.0, min(progress, 1.0))
self.progress = progress
showProgressIndicator = true
if let audioPlayer {
var shouldResumeProgressPublishing = false
if audioPlayer.state == .playing {
shouldResumeProgressPublishing = true
stopPublishProgress()
}
await audioPlayer.seek(to: progress)
if shouldResumeProgressPublishing, audioPlayer.state == .playing {
startPublishProgress()
}
}
}
@@ -86,12 +97,12 @@ class AudioPlayerState: ObservableObject, Identifiable {
}
func detachAudioPlayer() {
guard audioPlayer != nil else { return }
audioPlayer?.stop()
stopPublishProgress()
cancellables = []
audioPlayer = nil
playbackState = .stopped
showProgressIndicator = false
}
func reportError(_ error: Error) {
@@ -127,14 +138,17 @@ class AudioPlayerState: ObservableObject, Identifiable {
}
startPublishProgress()
playbackState = .playing
showProgressIndicator = true
case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
stopPublishProgress()
playbackState = .stopped
if case .didFinishPlaying = action {
progress = 0.0
showProgressIndicator = false
}
case .didFailWithError:
stopPublishProgress()
playbackState = .error
}
}
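
In updateState(progress:) above, the periodic progress publisher is stopped before awaiting the seek and restarted only if the player is still playing, presumably so the timer-driven updates cannot overwrite the position the user just picked. A small sketch of that pattern with a Combine timer follows (hypothetical names; the 0.2 s tick is an assumption, not the app's actual interval):

import Combine
import Foundation

// Hypothetical sketch of suspending periodic progress updates around an async seek.
final class ProgressTickerSketch {
    @Published private(set) var progress: Double = 0
    private var timerCancellable: AnyCancellable?
    private let readProgress: () -> Double

    init(readProgress: @escaping () -> Double) {
        self.readProgress = readProgress
    }

    func startPublishing() {
        timerCancellable = Timer.publish(every: 0.2, on: .main, in: .common)
            .autoconnect()
            .sink { [weak self] _ in
                guard let self else { return }
                self.progress = self.readProgress()
            }
    }

    func stopPublishing() {
        timerCancellable = nil
    }

    // Seek without letting the timer race the newly chosen position.
    func seek(to target: Double, isPlaying: Bool, perform: (Double) async -> Void) async {
        if isPlaying { stopPublishing() }
        await perform(target)
        progress = target
        if isPlaying { startPublishing() }   // resume only while playback continues
    }
}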

View File

@@ -322,6 +322,7 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
return
}
await mediaPlayerProvider.detachAllStates(except: playerState)
await playerState.updateState(progress: progress)
}

View File

@@ -40,10 +40,6 @@ struct VoiceMessageRoomPlaybackView: View {
}
}
var showWaveformCursor: Bool {
playerState.playbackState == .playing || isDragging
}
var body: some View {
HStack {
HStack {
@@ -61,7 +57,7 @@ struct VoiceMessageRoomPlaybackView: View {
waveformView
.waveformInteraction(isDragging: $isDragging,
progress: playerState.progress,
showCursor: showWaveformCursor,
showCursor: playerState.showProgressIndicator,
onSeek: onSeek)
}
.padding(.leading, 2)

View File

@@ -91,12 +91,15 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
return .failure(.previewNotAvailable)
}
if await !previewAudioPlayerState.isAttached {
await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
}
if audioPlayer.url == url {
audioPlayer.play()
return .success(())
}
await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
let pendingMediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
audioPlayer.load(mediaSource: pendingMediaSource, using: url, autoplay: true)
return .success(())

View File

@@ -37,6 +37,7 @@ class AudioPlayerStateTests: XCTestCase {
private func buildAudioPlayerMock() -> AudioPlayerMock {
let audioPlayerMock = AudioPlayerMock()
audioPlayerMock.underlyingActions = audioPlayerActions
audioPlayerMock.state = .stopped
audioPlayerMock.currentTime = 0.0
audioPlayerMock.seekToClosure = { [audioPlayerSeekCallsSubject] progress in
audioPlayerSeekCallsSubject?.send(progress)
@@ -65,6 +66,7 @@ class AudioPlayerStateTests: XCTestCase {
XCTAssert(audioPlayerMock.stopCalled)
XCTAssertFalse(audioPlayerState.isAttached)
XCTAssertEqual(audioPlayerState.playbackState, .stopped)
XCTAssertFalse(audioPlayerState.showProgressIndicator)
}
func testReportError() async throws {
@@ -91,9 +93,19 @@ class AudioPlayerStateTests: XCTestCase {
}
do {
audioPlayerMock.state = .stopped
await audioPlayerState.updateState(progress: 0.4)
XCTAssertEqual(audioPlayerState.progress, 0.4)
XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
XCTAssertFalse(audioPlayerState.isPublishingProgress)
}
do {
audioPlayerMock.state = .playing
await audioPlayerState.updateState(progress: 0.4)
XCTAssertEqual(audioPlayerState.progress, 0.4)
XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
XCTAssert(audioPlayerState.isPublishingProgress)
}
}
@@ -153,6 +165,7 @@ class AudioPlayerStateTests: XCTestCase {
XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
XCTAssertEqual(audioPlayerState.playbackState, .playing)
XCTAssert(audioPlayerState.isPublishingProgress)
XCTAssert(audioPlayerState.showProgressIndicator)
}
func testHandlingAudioPlayerActionDidPausePlaying() async throws {
@@ -173,6 +186,7 @@ class AudioPlayerStateTests: XCTestCase {
XCTAssertEqual(audioPlayerState.playbackState, .stopped)
XCTAssertEqual(audioPlayerState.progress, 0.4)
XCTAssertFalse(audioPlayerState.isPublishingProgress)
XCTAssert(audioPlayerState.showProgressIndicator)
}
func testHandlingAudioPlayerActionsidStopPlaying() async throws {
@@ -193,6 +207,7 @@ class AudioPlayerStateTests: XCTestCase {
XCTAssertEqual(audioPlayerState.playbackState, .stopped)
XCTAssertEqual(audioPlayerState.progress, 0.4)
XCTAssertFalse(audioPlayerState.isPublishingProgress)
XCTAssert(audioPlayerState.showProgressIndicator)
}
func testAudioPlayerActionsDidFinishPlaying() async throws {
@@ -214,5 +229,37 @@ class AudioPlayerStateTests: XCTestCase {
// Progress should be reset to 0
XCTAssertEqual(audioPlayerState.progress, 0.0)
XCTAssertFalse(audioPlayerState.isPublishingProgress)
XCTAssertFalse(audioPlayerState.showProgressIndicator)
}
func testAudioPlayerActionsDidFailed() async throws {
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferredPlayingState = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .playing:
return true
default:
return false
}
}
audioPlayerActionsSubject.send(.didStartPlaying)
try await deferredPlayingState.fulfill()
XCTAssertTrue(audioPlayerState.showProgressIndicator)
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .error:
return true
default:
return false
}
}
audioPlayerActionsSubject.send(.didFailWithError(error: AudioPlayerError.genericError))
try await deferred.fulfill()
XCTAssertEqual(audioPlayerState.playbackState, .error)
XCTAssertFalse(audioPlayerState.isPublishingProgress)
XCTAssertTrue(audioPlayerState.showProgressIndicator)
}
}

View File

@@ -43,6 +43,7 @@ class VoiceMessageRecorderTests: XCTestCase {
audioRecorder.averagePowerForChannelNumberReturnValue = 0
audioPlayer = AudioPlayerMock()
audioPlayer.actions = audioPlayerActions
audioPlayer.state = .stopped
mediaPlayerProvider = MediaPlayerProviderMock()
mediaPlayerProvider.playerForClosure = { _ in