Mirror of https://github.com/element-hq/element-x-ios.git
Fix: Keep the progress indicator visible after pausing or scrubbing a voice message. (#1969)
Commit a32a7c8f2c (parent 0b31446817)
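In brief, the fix stops deriving the waveform cursor's visibility from the live playback state and instead threads it through an explicit published flag on `AudioPlayerState`. The sketch below is a simplified reading of the changes in the diff that follows, with the player plumbing and unrelated members omitted; the class name `AudioPlayerStateSketch` and the `handleDidFinishPlaying()` helper are illustrative stand-ins, not the project's real API.

import Combine

// Sketch only: the idea behind this commit, not the real class.
// A published showProgressIndicator flag stays true across pause and
// scrubbing, and only resets when playback finishes or the player detaches.
final class AudioPlayerStateSketch: ObservableObject {
    @Published private(set) var progress = 0.0
    @Published private(set) var showProgressIndicator = false

    // Seeking (including scrubbing the waveform) reveals the indicator.
    func updateState(progress: Double) {
        self.progress = max(0.0, min(progress, 1.0))
        showProgressIndicator = true
    }

    // Hypothetical stand-in for handling the .didFinishPlaying action:
    // finishing playback is one of the only places the indicator is hidden.
    func handleDidFinishPlaying() {
        progress = 0.0
        showProgressIndicator = false
    }

    // Detaching the player is the other place the indicator is hidden.
    func detachAudioPlayer() {
        showProgressIndicator = false
    }
}

The waveform views then pass `playerState.showProgressIndicator` as `showCursor:` instead of checking `playbackState == .playing`, which is why the cursor no longer disappears when the user pauses or scrubs.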
@@ -35,11 +35,6 @@ private struct WaveformInteractionModifier: ViewModifier {
     func body(content: Content) -> some View {
         GeometryReader { geometry in
             content
-                .gesture(SpatialTapGesture()
-                    .onEnded { tapGesture in
-                        let progress = tapGesture.location.x / geometry.size.width
-                        onSeek(max(0, min(progress, 1.0)))
-                    })
                 .progressCursor(progress: progress) {
                     WaveformCursorView(color: .compound.iconAccentTertiary)
                         .frame(width: cursorVisibleWidth, height: cursorVisibleHeight)
@@ -56,6 +51,11 @@ private struct WaveformInteractionModifier: ViewModifier {
                         )
                         .offset(x: -cursorInteractiveSize / 2, y: 0)
                 }
+                .gesture(SpatialTapGesture()
+                    .onEnded { tapGesture in
+                        let progress = tapGesture.location.x / geometry.size.width
+                        onSeek(max(0, min(progress, 1.0)))
+                    })
         }
         .coordinateSpace(name: Self.namespaceName)
         .animation(nil, value: progress)
@@ -37,6 +37,7 @@ enum ComposerToolbarViewModelAction {
     case deleteVoiceMessageRecording
     case startVoiceMessagePlayback
     case pauseVoiceMessagePlayback
+    case scrubVoiceMessagePlayback(scrubbing: Bool)
     case seekVoiceMessagePlayback(progress: Double)
     case sendVoiceMessage
 }
@@ -61,6 +62,7 @@ enum ComposerToolbarViewAction {
     case deleteVoiceMessageRecording
     case startVoiceMessagePlayback
     case pauseVoiceMessagePlayback
+    case scrubVoiceMessagePlayback(scrubbing: Bool)
     case seekVoiceMessagePlayback(progress: Double)
 }
 
@@ -155,6 +155,8 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
             actionsSubject.send(.pauseVoiceMessagePlayback)
         case .seekVoiceMessagePlayback(let progress):
             actionsSubject.send(.seekVoiceMessagePlayback(progress: progress))
+        case .scrubVoiceMessagePlayback(let scrubbing):
+            actionsSubject.send(.scrubVoiceMessagePlayback(scrubbing: scrubbing))
         }
     }
 
@@ -325,6 +325,8 @@ struct ComposerToolbar: View {
             context.send(viewAction: .pauseVoiceMessagePlayback)
         } onSeek: { progress in
             context.send(viewAction: .seekVoiceMessagePlayback(progress: progress))
+        } onScrubbing: { isScrubbing in
+            context.send(viewAction: .scrubVoiceMessagePlayback(scrubbing: isScrubbing))
         }
     }
 }
@@ -30,6 +30,7 @@ struct VoiceMessagePreviewComposer: View {
     let onPlay: () -> Void
     let onPause: () -> Void
     let onSeek: (Double) -> Void
+    let onScrubbing: (Bool) -> Void
 
     var timeLabelContent: String {
         // Display the duration if progress is 0.0
@@ -39,10 +40,6 @@ struct VoiceMessagePreviewComposer: View {
         return DateFormatter.elapsedTimeFormatter.string(from: elapsed)
     }
 
-    var showWaveformCursor: Bool {
-        playerState.playbackState == .playing || isDragging
-    }
-
     var body: some View {
         HStack {
             HStack {
@@ -60,9 +57,12 @@ struct VoiceMessagePreviewComposer: View {
                 waveformView
                     .waveformInteraction(isDragging: $isDragging,
                                          progress: playerState.progress,
-                                         showCursor: showWaveformCursor,
+                                         showCursor: playerState.showProgressIndicator,
                                          onSeek: onSeek)
             }
+            .onChange(of: isDragging) { isDragging in
+                onScrubbing(isDragging)
+            }
             .padding(.vertical, 4.0)
             .padding(.horizontal, 6.0)
             .background {
@@ -133,7 +133,7 @@ struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
 
     static var previews: some View {
         VStack {
-            VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in })
+            VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in }, onScrubbing: { _ in })
                 .fixedSize(horizontal: false, vertical: true)
         }
     }
@@ -41,6 +41,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     private let actionsSubject: PassthroughSubject<RoomScreenViewModelAction, Never> = .init()
     private var canCurrentUserRedact = false
     private var paginateBackwardsTask: Task<Void, Never>?
+    private var resumeVoiceMessagePlaybackAfterScrubbing = false
 
     init(timelineController: RoomTimelineControllerProtocol,
          mediaProvider: MediaProviderProtocol,
@@ -208,14 +209,13 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
         case .sendVoiceMessage:
             Task { await sendCurrentVoiceMessage() }
         case .startVoiceMessagePlayback:
-            Task {
-                await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
-                await startPlayingRecordedVoiceMessage()
-            }
+            Task { await startPlayingRecordedVoiceMessage() }
         case .pauseVoiceMessagePlayback:
             pausePlayingRecordedVoiceMessage()
         case .seekVoiceMessagePlayback(let progress):
             Task { await seekRecordedVoiceMessage(to: progress) }
+        case .scrubVoiceMessagePlayback(let scrubbing):
+            Task { await scrubVoiceMessagePlayback(scrubbing: scrubbing) }
         }
     }
 
@@ -349,7 +349,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
         }
 
         switch await timelineController.sendReadReceipt(for: eventItemID) {
-        case .success():
+        case .success:
             break
         case let .failure(error):
             MXLog.error("[TimelineViewController] Failed to send read receipt: \(error)")
@@ -1015,6 +1015,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     }
 
     private func startPlayingRecordedVoiceMessage() async {
+        await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
         if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
             MXLog.error("failed to play recorded voice message. \(error)")
         }
@@ -1025,9 +1026,27 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     }
 
     private func seekRecordedVoiceMessage(to progress: Double) async {
+        await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
         await voiceMessageRecorder.seekPlayback(to: progress)
     }
 
+    private func scrubVoiceMessagePlayback(scrubbing: Bool) async {
+        guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
+            return
+        }
+        if scrubbing {
+            if audioPlayerState.playbackState == .playing {
+                resumeVoiceMessagePlaybackAfterScrubbing = true
+                pausePlayingRecordedVoiceMessage()
+            }
+        } else {
+            if resumeVoiceMessagePlaybackAfterScrubbing {
+                resumeVoiceMessagePlaybackAfterScrubbing = false
+                await startPlayingRecordedVoiceMessage()
+            }
+        }
+    }
+
     private func openSystemSettings() {
         guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
         application.open(url)
@@ -38,6 +38,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
     let waveform: EstimatedWaveform
     @Published private(set) var playbackState: AudioPlayerPlaybackState
     @Published private(set) var progress: Double
+    @Published private(set) var showProgressIndicator: Bool
 
     private weak var audioPlayer: AudioPlayerProtocol?
     private var cancellables: Set<AnyCancellable> = []
@@ -60,6 +61,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
         self.duration = duration
         self.waveform = waveform ?? EstimatedWaveform(data: [])
         self.progress = progress
+        showProgressIndicator = false
         playbackState = .stopped
     }
 
@@ -71,8 +73,17 @@ class AudioPlayerState: ObservableObject, Identifiable {
     func updateState(progress: Double) async {
         let progress = max(0.0, min(progress, 1.0))
         self.progress = progress
+        showProgressIndicator = true
         if let audioPlayer {
+            var shouldResumeProgressPublishing = false
+            if audioPlayer.state == .playing {
+                shouldResumeProgressPublishing = true
+                stopPublishProgress()
+            }
             await audioPlayer.seek(to: progress)
+            if shouldResumeProgressPublishing, audioPlayer.state == .playing {
+                startPublishProgress()
+            }
         }
     }
 
@@ -86,12 +97,12 @@ class AudioPlayerState: ObservableObject, Identifiable {
     }
 
     func detachAudioPlayer() {
-        guard audioPlayer != nil else { return }
         audioPlayer?.stop()
         stopPublishProgress()
         cancellables = []
        audioPlayer = nil
         playbackState = .stopped
+        showProgressIndicator = false
     }
 
     func reportError(_ error: Error) {
@@ -127,14 +138,17 @@ class AudioPlayerState: ObservableObject, Identifiable {
             }
             startPublishProgress()
             playbackState = .playing
+            showProgressIndicator = true
         case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
             stopPublishProgress()
             playbackState = .stopped
             if case .didFinishPlaying = action {
                 progress = 0.0
+                showProgressIndicator = false
             }
         case .didFailWithError:
             stopPublishProgress()
+            playbackState = .error
         }
     }
 
@@ -322,6 +322,7 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
         guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
             return
         }
+        await mediaPlayerProvider.detachAllStates(except: playerState)
         await playerState.updateState(progress: progress)
     }
 
@@ -40,10 +40,6 @@ struct VoiceMessageRoomPlaybackView: View {
         }
     }
 
-    var showWaveformCursor: Bool {
-        playerState.playbackState == .playing || isDragging
-    }
-
     var body: some View {
         HStack {
             HStack {
@@ -61,7 +57,7 @@ struct VoiceMessageRoomPlaybackView: View {
                 waveformView
                     .waveformInteraction(isDragging: $isDragging,
                                          progress: playerState.progress,
-                                         showCursor: showWaveformCursor,
+                                         showCursor: playerState.showProgressIndicator,
                                          onSeek: onSeek)
             }
             .padding(.leading, 2)
@@ -91,12 +91,15 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
             return .failure(.previewNotAvailable)
         }
 
+        if await !previewAudioPlayerState.isAttached {
+            await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
+        }
+
         if audioPlayer.url == url {
             audioPlayer.play()
             return .success(())
         }
 
-        await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
         let pendingMediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
         audioPlayer.load(mediaSource: pendingMediaSource, using: url, autoplay: true)
         return .success(())
@@ -37,6 +37,7 @@ class AudioPlayerStateTests: XCTestCase {
     private func buildAudioPlayerMock() -> AudioPlayerMock {
         let audioPlayerMock = AudioPlayerMock()
         audioPlayerMock.underlyingActions = audioPlayerActions
+        audioPlayerMock.state = .stopped
         audioPlayerMock.currentTime = 0.0
         audioPlayerMock.seekToClosure = { [audioPlayerSeekCallsSubject] progress in
             audioPlayerSeekCallsSubject?.send(progress)
@@ -65,6 +66,7 @@ class AudioPlayerStateTests: XCTestCase {
         XCTAssert(audioPlayerMock.stopCalled)
         XCTAssertFalse(audioPlayerState.isAttached)
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
+        XCTAssertFalse(audioPlayerState.showProgressIndicator)
     }
 
     func testReportError() async throws {
@@ -91,9 +93,19 @@ class AudioPlayerStateTests: XCTestCase {
         }
 
         do {
+            audioPlayerMock.state = .stopped
             await audioPlayerState.updateState(progress: 0.4)
             XCTAssertEqual(audioPlayerState.progress, 0.4)
             XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
+            XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        }
+
+        do {
+            audioPlayerMock.state = .playing
+            await audioPlayerState.updateState(progress: 0.4)
+            XCTAssertEqual(audioPlayerState.progress, 0.4)
+            XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
+            XCTAssert(audioPlayerState.isPublishingProgress)
         }
     }
 
@@ -153,6 +165,7 @@ class AudioPlayerStateTests: XCTestCase {
         XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
         XCTAssertEqual(audioPlayerState.playbackState, .playing)
         XCTAssert(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testHandlingAudioPlayerActionDidPausePlaying() async throws {
@@ -173,6 +186,7 @@ class AudioPlayerStateTests: XCTestCase {
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
         XCTAssertEqual(audioPlayerState.progress, 0.4)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testHandlingAudioPlayerActionsidStopPlaying() async throws {
@@ -193,6 +207,7 @@ class AudioPlayerStateTests: XCTestCase {
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
         XCTAssertEqual(audioPlayerState.progress, 0.4)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testAudioPlayerActionsDidFinishPlaying() async throws {
@@ -214,5 +229,37 @@ class AudioPlayerStateTests: XCTestCase {
         // Progress should be reset to 0
         XCTAssertEqual(audioPlayerState.progress, 0.0)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssertFalse(audioPlayerState.showProgressIndicator)
+    }
+
+    func testAudioPlayerActionsDidFailed() async throws {
+        audioPlayerState.attachAudioPlayer(audioPlayerMock)
+
+        let deferredPlayingState = deferFulfillment(audioPlayerState.$playbackState) { action in
+            switch action {
+            case .playing:
+                return true
+            default:
+                return false
+            }
+        }
+        audioPlayerActionsSubject.send(.didStartPlaying)
+        try await deferredPlayingState.fulfill()
+        XCTAssertTrue(audioPlayerState.showProgressIndicator)
+
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
+            switch action {
+            case .error:
+                return true
+            default:
+                return false
+            }
+        }
+
+        audioPlayerActionsSubject.send(.didFailWithError(error: AudioPlayerError.genericError))
+        try await deferred.fulfill()
+        XCTAssertEqual(audioPlayerState.playbackState, .error)
+        XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssertTrue(audioPlayerState.showProgressIndicator)
     }
 }
@@ -43,6 +43,7 @@ class VoiceMessageRecorderTests: XCTestCase {
         audioRecorder.averagePowerForChannelNumberReturnValue = 0
         audioPlayer = AudioPlayerMock()
         audioPlayer.actions = audioPlayerActions
+        audioPlayer.state = .stopped
 
         mediaPlayerProvider = MediaPlayerProviderMock()
         mediaPlayerProvider.playerForClosure = { _ in