Mirror of https://github.com/element-hq/element-x-ios.git
Fixed some issues with voice messages when sent from a bridge. (#2190)
This commit is contained in:
parent 9d9cf1c5a1
commit edbac5fe85
@@ -438,6 +438,11 @@ class AudioPlayerMock: AudioPlayerProtocol {
     }
     var underlyingActions: AnyPublisher<AudioPlayerAction, Never>!
     var mediaSource: MediaSourceProxy?
+    var duration: TimeInterval {
+        get { return underlyingDuration }
+        set(value) { underlyingDuration = value }
+    }
+    var underlyingDuration: TimeInterval!
     var currentTime: TimeInterval {
         get { return underlyingCurrentTime }
         set(value) { underlyingCurrentTime = value }
@@ -1082,6 +1087,11 @@ class KeychainControllerMock: KeychainControllerProtocol {
 }
 class MediaPlayerMock: MediaPlayerProtocol {
     var mediaSource: MediaSourceProxy?
+    var duration: TimeInterval {
+        get { return underlyingDuration }
+        set(value) { underlyingDuration = value }
+    }
+    var underlyingDuration: TimeInterval!
     var currentTime: TimeInterval {
         get { return underlyingCurrentTime }
         set(value) { underlyingCurrentTime = value }
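Note: both mocks follow the repository's generated-mock pattern, where a settable computed property is backed by an implicitly unwrapped `underlying` variable so tests can stub it. A minimal usage sketch (the `10.0` value mirrors the updated AudioPlayerStateTests further down; the rest reuses names from the diff above):

    let audioPlayerMock = AudioPlayerMock()
    audioPlayerMock.duration = 10.0                 // stored in underlyingDuration
    XCTAssertEqual(audioPlayerMock.duration, 10.0)  // read back through the getter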
@@ -34,7 +34,7 @@ enum AudioPlayerStateIdentifier {
 @MainActor
 class AudioPlayerState: ObservableObject, Identifiable {
     let id: AudioPlayerStateIdentifier
-    let duration: Double
+    private(set) var duration: Double
     let waveform: EstimatedWaveform
     @Published private(set) var playbackState: AudioPlayerPlaybackState
     /// It's similar to `playbackState`, with the a difference: `.loading`
@@ -134,8 +134,12 @@ class AudioPlayerState: ObservableObject, Identifiable {
         case .didStartLoading:
             playbackState = .loading
         case .didFinishLoading:
-            playbackState = .readyToPlay
+            if let audioPlayerDuration = audioPlayer?.duration, audioPlayerDuration != duration {
+                MXLog.info("updating duration: \(duration) -> \(audioPlayerDuration)")
+                duration = audioPlayerDuration
+            }
             fileURL = audioPlayer?.url
+            playbackState = .readyToPlay
         case .didStartPlaying:
             if let audioPlayer {
                 await restoreAudioPlayerState(audioPlayer: audioPlayer)
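Note: with `duration` relaxed from `let` to `private(set) var`, the state can correct an initially unknown duration once the file has actually loaded, which is the case for voice messages whose event metadata (e.g. when sent through a bridge) carries no usable duration. A rough sketch of the flow, reusing identifiers from the diff and the tests below (`audioPlayer` stands for any attached player instance):

    let state = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0)
    state.attachAudioPlayer(audioPlayer)
    // When the player emits .didFinishLoading, state.duration is refreshed from
    // audioPlayer.duration before playbackState becomes .readyToPlay, so the
    // waveform/progress UI gets the real length instead of 0.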
@@ -26,7 +26,7 @@ enum MediaPlayerState {
 
 protocol MediaPlayerProtocol: AnyObject {
     var mediaSource: MediaSourceProxy? { get }
+    var duration: TimeInterval { get }
     var currentTime: TimeInterval { get }
     var url: URL? { get }
     var state: MediaPlayerState { get }
@@ -56,7 +56,7 @@ class VoiceMessageMediaManager: VoiceMessageMediaManagerProtocol {
         let loadFileBgTask = await backgroundTaskService?.startBackgroundTask(withName: "LoadFile: \(source.url.hashValue)")
         defer { loadFileBgTask?.stop() }
 
-        guard let mimeType = source.mimeType, mimeType == supportedVoiceMessageMimeType else {
+        guard let mimeType = source.mimeType, mimeType.starts(with: supportedVoiceMessageMimeType) else {
             throw VoiceMessageMediaManagerError.unsupportedMimeTye
         }
 
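Note: this guard change is the core of the bridge fix. Bridged voice messages often advertise a parameterised mime type such as "audio/ogg; codecs=opus" (the exact value used in the new test below), which strict equality against the supported base type rejects. A small sketch of the two comparisons, assuming `supportedVoiceMessageMimeType` is the bare base type:

    let supportedVoiceMessageMimeType = "audio/ogg"                  // assumed base type
    let bridgedMimeType = "audio/ogg; codecs=opus"
    _ = bridgedMimeType == supportedVoiceMessageMimeType             // false: old guard threw unsupportedMimeTye
    _ = bridgedMimeType.starts(with: supportedVoiceMessageMimeType)  // true: new guard accepts it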
@@ -171,8 +171,9 @@ class AudioPlayerStateTests: XCTestCase {
     }
 
     func testHandlingAudioPlayerActionDidFinishLoading() async throws {
-        let originalStateProgress = 0.4
-        await audioPlayerState.updateState(progress: originalStateProgress)
+        audioPlayerMock.duration = 10.0
+
+        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
         let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
@@ -189,6 +190,8 @@ class AudioPlayerStateTests: XCTestCase {
 
         // The state is expected to be .readyToPlay
         XCTAssertEqual(audioPlayerState.playbackState, .readyToPlay)
+        // The duration should have been updated with the player's duration
+        XCTAssertEqual(audioPlayerState.duration, audioPlayerMock.duration)
     }
 
     func testHandlingAudioPlayerActionDidStartPlaying() async throws {
@@ -52,6 +52,29 @@ class VoiceMessageMediaManagerTests: XCTestCase {
         }
     }
 
+    func testLoadVoiceMessageFromSourceMimeTypeWithParameters() async throws {
+        // URL representing the file loaded by the media provider
+        let loadedFile = URL("/some/url/loaded_file.ogg")
+        // URL representing the final cached file
+        let cachedConvertedFileURL = URL("/some/url/cached_converted_file.m4a")
+
+        voiceMessageCache.fileURLForReturnValue = nil
+        let mediaSource = MediaSourceProxy(url: someURL, mimeType: "audio/ogg; codecs=opus")
+        mediaProvider.loadFileFromSourceReturnValue = MediaFileHandleProxy.unmanaged(url: loadedFile)
+        voiceMessageCache.cacheMediaSourceUsingMoveReturnValue = .success(cachedConvertedFileURL)
+
+        voiceMessageMediaManager = VoiceMessageMediaManager(mediaProvider: mediaProvider,
+                                                            voiceMessageCache: voiceMessageCache,
+                                                            audioConverter: AudioConverterMock(),
+                                                            backgroundTaskService: MockBackgroundTaskService())
+
+        do {
+            _ = try await voiceMessageMediaManager.loadVoiceMessageFromSource(mediaSource, body: nil)
+        } catch {
+            XCTFail("An unexpected error has occured: \(error)")
+        }
+    }
+
     func testLoadVoiceMessageFromSourceAlreadyCached() async throws {
         // Check if the file is already present in cache
         voiceMessageCache.fileURLForReturnValue = URL("/converted_file/url")
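Note: the new test above drives the full pipeline (media provider load, conversion via AudioConverterMock, caching) with a parameterised mime type. A hedged usage sketch of the call it exercises, reusing the test's fixtures; the return value is assumed to be the cached, converted file URL, matching the `.success(cachedConvertedFileURL)` stub:

    let source = MediaSourceProxy(url: someURL, mimeType: "audio/ogg; codecs=opus")
    let fileURL = try await voiceMessageMediaManager.loadVoiceMessageFromSource(source, body: nil)
    // With the relaxed guard, this no longer throws unsupportedMimeTye for
    // bridge-sent Opus files; fileURL should point at the converted .m4a in the cache.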
changelog.d/2006.bugfix (new file)
@@ -0,0 +1 @@
+Fixed some issues with voice messages when sent from a bridge.