Voice message recording enhancements (#2042)

* Use Combine to report AudioRecorder state changes

* AudioRecorder is now using AVAudioEngine instead of AVAudioRecorder

* Stop recording when audio configuration changes

* AudioRecorder error handling

* UnitTests

* Use Accelerate to calculate RMS
This commit is contained in:
Nicolas Mauri 2023-11-08 10:19:28 +01:00 committed by GitHub
parent 14e0eba914
commit e04a55eaef
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 817 additions and 269 deletions

View File

@ -93,6 +93,7 @@
17BC15DA08A52587466698C5 /* RoomMessageEventStringBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 80E815FF3CC5E5A355E3A25E /* RoomMessageEventStringBuilder.swift */; };
1830E5431DB426E2F3660D58 /* NotificationSettingsEditScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F52419AEEDA2C006CB7181 /* NotificationSettingsEditScreenUITests.swift */; };
18867F4F1C8991EEC56EA932 /* UTType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 897DF5E9A70CE05A632FC8AF /* UTType.swift */; };
192A3CDCD0174AD1E4A128E4 /* AudioRecorderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2441E2424E78A40FC95DBA76 /* AudioRecorderTests.swift */; };
1950A80CD198BED283DFC2CE /* ClientProxy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18F2958E6D247AE2516BEEE8 /* ClientProxy.swift */; };
19DED23340D0855B59693ED2 /* VoiceMessageRecorderProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = D45C9EAA86423D7D3126DE4F /* VoiceMessageRecorderProtocol.swift */; };
19FE025AE9BA2959B6589B0D /* RoomMemberDetailsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1CC575D1895FA62591451A93 /* RoomMemberDetailsScreen.swift */; };
@ -545,6 +546,7 @@
92133B170A1F917685E9FF78 /* OnboardingScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8D168471461717AF5689F64B /* OnboardingScreenUITests.swift */; };
9219640F4D980CFC5FE855AD /* target.yml in Resources */ = {isa = PBXBuildFile; fileRef = 536E72DCBEEC4A1FE66CFDCE /* target.yml */; };
92720AB0DA9AB5EEF1DAF56B /* SecureBackupLogoutConfirmationScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7DC017C3CB6B0F7C63F460F2 /* SecureBackupLogoutConfirmationScreenViewModel.swift */; };
9278EC51D24E57445B290521 /* AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = BB284643AF7AB131E307DCE0 /* AudioSessionProtocol.swift */; };
92D9088B901CEBB1A99ECA4E /* RoomMemberProxyMock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 36FD673E24FBFCFDF398716A /* RoomMemberProxyMock.swift */; };
93875ADD456142D20823ED24 /* ServerSelectionViewModelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = EDAA4472821985BF868CC21C /* ServerSelectionViewModelTests.swift */; };
93A549135E6C027A0D823BFE /* DTCoreText in Frameworks */ = {isa = PBXBuildFile; productRef = 593FBBF394712F2963E98A0B /* DTCoreText */; };
@ -1159,6 +1161,7 @@
23AA3F4B285570805CB0CCDD /* MapTiler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MapTiler.swift; sourceTree = "<group>"; };
24227FF9A2797F6EA7F69CDD /* HomeScreenInvitesButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomeScreenInvitesButton.swift; sourceTree = "<group>"; };
2429224EB0EEA34D35CE9249 /* UserIndicatorControllerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserIndicatorControllerTests.swift; sourceTree = "<group>"; };
2441E2424E78A40FC95DBA76 /* AudioRecorderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioRecorderTests.swift; sourceTree = "<group>"; };
248649EBA5BC33DB93698734 /* SessionVerificationControllerProxyMock.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionVerificationControllerProxyMock.swift; sourceTree = "<group>"; };
24DEE0682C95F897B6C7CB0D /* ServerConfirmationScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ServerConfirmationScreenViewModel.swift; sourceTree = "<group>"; };
24F5530B2212862FA4BEFF2D /* HomeScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomeScreenViewModelProtocol.swift; sourceTree = "<group>"; };
@ -1671,6 +1674,7 @@
BA40B98B098B6F0371B750B3 /* TemplateScreenModels.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TemplateScreenModels.swift; sourceTree = "<group>"; };
BA919F521E9F0EE3638AFC85 /* BugReportScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BugReportScreen.swift; sourceTree = "<group>"; };
BB23BEAF8831DC6A57E39F52 /* CreatePollScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreatePollScreenCoordinator.swift; sourceTree = "<group>"; };
BB284643AF7AB131E307DCE0 /* AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionProtocol.swift; sourceTree = "<group>"; };
BB8BC4C791D0E88CFCF4E5DF /* ServerSelectionScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ServerSelectionScreenCoordinator.swift; sourceTree = "<group>"; };
BBEC57C204D77908E355EF42 /* AudioRecorderProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioRecorderProtocol.swift; sourceTree = "<group>"; };
BC8AA23D4F37CC26564F63C5 /* LayoutMocks.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LayoutMocks.swift; sourceTree = "<group>"; };
@ -3198,6 +3202,7 @@
37CA26F55123E36B50DB0B3A /* AttributedStringTests.swift */,
89233612A8632AD7E2803620 /* AudioPlayerStateTests.swift */,
C55CC239AE12339C565F6C9A /* AudioRecorderStateTests.swift */,
2441E2424E78A40FC95DBA76 /* AudioRecorderTests.swift */,
6DFCAA239095A116976E32C4 /* BackgroundTaskTests.swift */,
EFFD3200F9960D4996159F10 /* BugReportServiceTests.swift */,
7AB7ED3A898B07976F3AA90F /* BugReportViewModelTests.swift */,
@ -4632,6 +4637,7 @@
children = (
E44E35AA87F49503E7B3BF6E /* AudioConverter.swift */,
2757B1BE23DF8AA239937243 /* AudioConverterProtocol.swift */,
BB284643AF7AB131E307DCE0 /* AudioSessionProtocol.swift */,
3A2CAA4ABF5E66C3C8BBA3E9 /* Player */,
0371482D36C95ABAF9D4C651 /* Recorder */,
);
@ -5142,6 +5148,7 @@
5100F53E6884A15F9BA07CC3 /* AttributedStringTests.swift in Sources */,
C1429699A6A5BB09A25775C1 /* AudioPlayerStateTests.swift in Sources */,
3042527CB344A9EF1157FC26 /* AudioRecorderStateTests.swift in Sources */,
192A3CDCD0174AD1E4A128E4 /* AudioRecorderTests.swift in Sources */,
0F9E38A75337D0146652ACAB /* BackgroundTaskTests.swift in Sources */,
7F61F9ACD5EC9E845EF3EFBF /* BugReportServiceTests.swift in Sources */,
C7CFDB4929DDD9A3B5BA085D /* BugReportViewModelTests.swift in Sources */,
@ -5328,6 +5335,7 @@
F8E725D42023ECA091349245 /* AudioRoomTimelineItem.swift in Sources */,
88F348E2CB14FF71CBBB665D /* AudioRoomTimelineItemContent.swift in Sources */,
E62EC30B39354A391E32A126 /* AudioRoomTimelineView.swift in Sources */,
9278EC51D24E57445B290521 /* AudioSessionProtocol.swift in Sources */,
EA65360A0EC026DD83AC0CF5 /* AuthenticationCoordinator.swift in Sources */,
7F08F4BC1312075E2B5EAEFA /* AuthenticationServiceProxy.swift in Sources */,
64FF5CB4E35971255872E1BB /* AuthenticationServiceProxyProtocol.swift in Sources */,

View File

@ -3,6 +3,7 @@
// swiftlint:disable all
import AnalyticsEvents
import AVFoundation
import Combine
import Foundation
import LocalAuthentication
@ -534,7 +535,7 @@ class AudioRecorderMock: AudioRecorderProtocol {
set(value) { underlyingIsRecording = value }
}
var underlyingIsRecording: Bool!
var url: URL?
var audioFileUrl: URL?
//MARK: - record
@ -544,18 +545,13 @@ class AudioRecorderMock: AudioRecorderProtocol {
}
var recordWithReceivedRecordID: AudioRecordingIdentifier?
var recordWithReceivedInvocations: [AudioRecordingIdentifier] = []
var recordWithReturnValue: Result<Void, AudioRecorderError>!
var recordWithClosure: ((AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError>)?
var recordWithClosure: ((AudioRecordingIdentifier) async -> Void)?
func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
func record(with recordID: AudioRecordingIdentifier) async {
recordWithCallsCount += 1
recordWithReceivedRecordID = recordID
recordWithReceivedInvocations.append(recordID)
if let recordWithClosure = recordWithClosure {
return await recordWithClosure(recordID)
} else {
return recordWithReturnValue
}
await recordWithClosure?(recordID)
}
//MARK: - stopRecording
@ -581,28 +577,103 @@ class AudioRecorderMock: AudioRecorderProtocol {
deleteRecordingCallsCount += 1
await deleteRecordingClosure?()
}
//MARK: - averagePowerForChannelNumber
//MARK: - averagePower
var averagePowerForChannelNumberCallsCount = 0
var averagePowerForChannelNumberCalled: Bool {
return averagePowerForChannelNumberCallsCount > 0
var averagePowerCallsCount = 0
var averagePowerCalled: Bool {
return averagePowerCallsCount > 0
}
var averagePowerForChannelNumberReceivedChannelNumber: Int?
var averagePowerForChannelNumberReceivedInvocations: [Int] = []
var averagePowerForChannelNumberReturnValue: Float!
var averagePowerForChannelNumberClosure: ((Int) -> Float)?
var averagePowerReturnValue: Float!
var averagePowerClosure: (() -> Float)?
func averagePowerForChannelNumber(_ channelNumber: Int) -> Float {
averagePowerForChannelNumberCallsCount += 1
averagePowerForChannelNumberReceivedChannelNumber = channelNumber
averagePowerForChannelNumberReceivedInvocations.append(channelNumber)
if let averagePowerForChannelNumberClosure = averagePowerForChannelNumberClosure {
return averagePowerForChannelNumberClosure(channelNumber)
func averagePower() -> Float {
averagePowerCallsCount += 1
if let averagePowerClosure = averagePowerClosure {
return averagePowerClosure()
} else {
return averagePowerForChannelNumberReturnValue
return averagePowerReturnValue
}
}
}
class AudioSessionMock: AudioSessionProtocol {
//MARK: - requestRecordPermission
var requestRecordPermissionCallsCount = 0
var requestRecordPermissionCalled: Bool {
return requestRecordPermissionCallsCount > 0
}
var requestRecordPermissionReceivedResponse: ((Bool) -> Void)?
var requestRecordPermissionReceivedInvocations: [((Bool) -> Void)] = []
var requestRecordPermissionClosure: ((@escaping (Bool) -> Void) -> Void)?
func requestRecordPermission(_ response: @escaping (Bool) -> Void) {
requestRecordPermissionCallsCount += 1
requestRecordPermissionReceivedResponse = response
requestRecordPermissionReceivedInvocations.append(response)
requestRecordPermissionClosure?(response)
}
//MARK: - setAllowHapticsAndSystemSoundsDuringRecording
var setAllowHapticsAndSystemSoundsDuringRecordingThrowableError: Error?
var setAllowHapticsAndSystemSoundsDuringRecordingCallsCount = 0
var setAllowHapticsAndSystemSoundsDuringRecordingCalled: Bool {
return setAllowHapticsAndSystemSoundsDuringRecordingCallsCount > 0
}
var setAllowHapticsAndSystemSoundsDuringRecordingReceivedInValue: Bool?
var setAllowHapticsAndSystemSoundsDuringRecordingReceivedInvocations: [Bool] = []
var setAllowHapticsAndSystemSoundsDuringRecordingClosure: ((Bool) throws -> Void)?
func setAllowHapticsAndSystemSoundsDuringRecording(_ inValue: Bool) throws {
if let error = setAllowHapticsAndSystemSoundsDuringRecordingThrowableError {
throw error
}
setAllowHapticsAndSystemSoundsDuringRecordingCallsCount += 1
setAllowHapticsAndSystemSoundsDuringRecordingReceivedInValue = inValue
setAllowHapticsAndSystemSoundsDuringRecordingReceivedInvocations.append(inValue)
try setAllowHapticsAndSystemSoundsDuringRecordingClosure?(inValue)
}
//MARK: - setCategory
var setCategoryModeOptionsThrowableError: Error?
var setCategoryModeOptionsCallsCount = 0
var setCategoryModeOptionsCalled: Bool {
return setCategoryModeOptionsCallsCount > 0
}
var setCategoryModeOptionsReceivedArguments: (category: AVAudioSession.Category, mode: AVAudioSession.Mode, options: AVAudioSession.CategoryOptions)?
var setCategoryModeOptionsReceivedInvocations: [(category: AVAudioSession.Category, mode: AVAudioSession.Mode, options: AVAudioSession.CategoryOptions)] = []
var setCategoryModeOptionsClosure: ((AVAudioSession.Category, AVAudioSession.Mode, AVAudioSession.CategoryOptions) throws -> Void)?
func setCategory(_ category: AVAudioSession.Category, mode: AVAudioSession.Mode, options: AVAudioSession.CategoryOptions) throws {
if let error = setCategoryModeOptionsThrowableError {
throw error
}
setCategoryModeOptionsCallsCount += 1
setCategoryModeOptionsReceivedArguments = (category: category, mode: mode, options: options)
setCategoryModeOptionsReceivedInvocations.append((category: category, mode: mode, options: options))
try setCategoryModeOptionsClosure?(category, mode, options)
}
//MARK: - setActive
var setActiveOptionsThrowableError: Error?
var setActiveOptionsCallsCount = 0
var setActiveOptionsCalled: Bool {
return setActiveOptionsCallsCount > 0
}
var setActiveOptionsReceivedArguments: (active: Bool, options: AVAudioSession.SetActiveOptions)?
var setActiveOptionsReceivedInvocations: [(active: Bool, options: AVAudioSession.SetActiveOptions)] = []
var setActiveOptionsClosure: ((Bool, AVAudioSession.SetActiveOptions) throws -> Void)?
func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions) throws {
if let error = setActiveOptionsThrowableError {
throw error
}
setActiveOptionsCallsCount += 1
setActiveOptionsReceivedArguments = (active: active, options: options)
setActiveOptionsReceivedInvocations.append((active: active, options: options))
try setActiveOptionsClosure?(active, options)
}
}
class BugReportServiceMock: BugReportServiceProtocol {
var isRunning: Bool {
get { return underlyingIsRunning }
@ -3168,6 +3239,11 @@ class VoiceMessageRecorderMock: VoiceMessageRecorderProtocol {
set(value) { underlyingRecordingDuration = value }
}
var underlyingRecordingDuration: TimeInterval!
var actions: AnyPublisher<VoiceMessageRecorderAction, Never> {
get { return underlyingActions }
set(value) { underlyingActions = value }
}
var underlyingActions: AnyPublisher<VoiceMessageRecorderAction, Never>!
//MARK: - startRecording
@ -3175,16 +3251,11 @@ class VoiceMessageRecorderMock: VoiceMessageRecorderProtocol {
var startRecordingCalled: Bool {
return startRecordingCallsCount > 0
}
var startRecordingReturnValue: Result<Void, VoiceMessageRecorderError>!
var startRecordingClosure: (() async -> Result<Void, VoiceMessageRecorderError>)?
var startRecordingClosure: (() async -> Void)?
func startRecording() async -> Result<Void, VoiceMessageRecorderError> {
func startRecording() async {
startRecordingCallsCount += 1
if let startRecordingClosure = startRecordingClosure {
return await startRecordingClosure()
} else {
return startRecordingReturnValue
}
await startRecordingClosure?()
}
//MARK: - stopRecording
@ -3192,16 +3263,11 @@ class VoiceMessageRecorderMock: VoiceMessageRecorderProtocol {
var stopRecordingCalled: Bool {
return stopRecordingCallsCount > 0
}
var stopRecordingReturnValue: Result<Void, VoiceMessageRecorderError>!
var stopRecordingClosure: (() async -> Result<Void, VoiceMessageRecorderError>)?
var stopRecordingClosure: (() async -> Void)?
func stopRecording() async -> Result<Void, VoiceMessageRecorderError> {
func stopRecording() async {
stopRecordingCallsCount += 1
if let stopRecordingClosure = stopRecordingClosure {
return await stopRecordingClosure()
} else {
return stopRecordingReturnValue
}
await stopRecordingClosure?()
}
//MARK: - cancelRecording

View File

@ -42,6 +42,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
private var canCurrentUserRedact = false
private var paginateBackwardsTask: Task<Void, Never>?
private var resumeVoiceMessagePlaybackAfterScrubbing = false
private var voiceMessageRecorderObserver: AnyCancellable?
init(timelineController: RoomTimelineControllerProtocol,
mediaProvider: MediaProviderProtocol,
@ -935,19 +936,16 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
// MARK: - Voice message
private func stopVoiceMessageRecorder() async {
_ = await voiceMessageRecorder.stopRecording()
await voiceMessageRecorder.stopPlayback()
}
private func startRecordingVoiceMessage() async {
let audioRecordState = AudioRecorderState()
audioRecordState.attachAudioRecorder(voiceMessageRecorder.audioRecorder)
switch await voiceMessageRecorder.startRecording() {
case .success:
private func handleVoiceMessageRecorderAction(_ action: VoiceMessageRecorderAction) {
MXLog.debug("handling voice recorder action: \(action) - (audio)")
switch action {
case .didStartRecording(let audioRecorder):
let audioRecordState = AudioRecorderState()
audioRecordState.attachAudioRecorder(audioRecorder)
actionsSubject.send(.composer(action: .setMode(mode: .recordVoiceMessage(state: audioRecordState))))
case .failure(let error):
case .didStopRecording(let previewAudioPlayerState, let url):
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: previewAudioPlayerState, waveform: .url(url), isUploading: false))))
case .didFailWithError(let error):
switch error {
case .audioRecorderError(.recordPermissionNotGranted):
MXLog.info("permission to record audio has not been granted.")
@ -958,37 +956,34 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
secondaryButton: .init(title: L10n.actionNotNow, role: .cancel, action: nil))
default:
MXLog.error("failed to start voice message recording. \(error)")
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
}
}
/// Subscribes to the voice message recorder's action publisher (delivering on the
/// main queue) and then starts recording. State/UI updates happen asynchronously
/// via `handleVoiceMessageRecorderAction`, not from the return of `startRecording()`.
private func startRecordingVoiceMessage() async {
// Replacing the observer cancels any previous subscription.
voiceMessageRecorderObserver = voiceMessageRecorder.actions
.receive(on: DispatchQueue.main)
.sink { [weak self] action in
self?.handleVoiceMessageRecorderAction(action)
}
await voiceMessageRecorder.startRecording()
}
private func stopRecordingVoiceMessage() async {
if case .failure(let error) = await voiceMessageRecorder.stopRecording() {
MXLog.error("failed to stop the recording. \(error)")
return
}
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
MXLog.error("the recorder preview is missing after the recording has been stopped")
return
}
guard let recordingURL = voiceMessageRecorder.recordingURL else {
MXLog.error("the recording URL is missing after the recording has been stopped")
return
}
mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL), isUploading: false))))
await voiceMessageRecorder.stopRecording()
}
/// Cancels the in-progress recording, tears down the recorder observation and
/// resets the composer back to its default mode.
private func cancelRecordingVoiceMessage() async {
await voiceMessageRecorder.cancelRecording()
// Setting the cancellable to nil cancels the subscription.
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
/// Deletes the recorded voice message, tears down the recorder observation and
/// resets the composer back to its default mode.
private func deleteCurrentVoiceMessage() async {
await voiceMessageRecorder.deleteRecording()
// Setting the cancellable to nil cancels the subscription.
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}

View File

@ -0,0 +1,39 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import AVFoundation
/// Abstraction over the subset of `AVAudioSession` used by the audio recorder,
/// allowing the session to be mocked in unit tests.
protocol AudioSessionProtocol: AnyObject {
/// Requests permission to record audio; `response` receives whether it was granted.
func requestRecordPermission(_ response: @escaping (Bool) -> Void)
/// Mirrors `AVAudioSession.setAllowHapticsAndSystemSoundsDuringRecording(_:)`.
func setAllowHapticsAndSystemSoundsDuringRecording(_ inValue: Bool) throws
/// Mirrors `AVAudioSession.setCategory(_:mode:options:)`.
func setCategory(_ category: AVAudioSession.Category, mode: AVAudioSession.Mode, options: AVAudioSession.CategoryOptions) throws
/// Mirrors `AVAudioSession.setActive(_:options:)`.
func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions) throws
}
/// Convenience overloads forwarding to the full-signature requirements with empty options.
extension AudioSessionProtocol {
/// Sets the category and mode with no category options.
func setCategory(_ category: AVAudioSession.Category, mode: AVAudioSession.Mode) throws {
try setCategory(category, mode: mode, options: [])
}
/// Activates or deactivates the session with no activation options.
func setActive(_ active: Bool) throws {
try setActive(active, options: [])
}
}
// The annotation below makes Sourcery generate `AudioSessionMock` for tests.
// sourcery: AutoMockable
extension AudioSessionProtocol { }
// `AVAudioSession` already provides all the protocol requirements, so the
// conformance needs no implementation.
extension AVAudioSession: AudioSessionProtocol { }

View File

@ -14,15 +14,25 @@
// limitations under the License.
//
import Accelerate
import AVFoundation
import Combine
import Foundation
import UIKit
class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
private let silenceThreshold: Float = -50.0
private var audioRecorder: AVAudioRecorder?
/// Internal lifecycle state of the recorder; transitions are funneled through
/// `setInternalState(_:)`, which publishes the matching `AudioRecorderAction`.
/// `Equatable` so duplicate transitions can be ignored.
private enum InternalAudioRecorderState: Equatable {
case recording
// Recording paused by an audio-session interruption; may resume.
case suspended
case stopped
case error(AudioRecorderError)
}
class AudioRecorder: AudioRecorderProtocol {
private let audioSession: AudioSessionProtocol
private var audioEngine: AVAudioEngine?
private var mixer: AVAudioMixerNode?
private var audioFile: AVAudioFile?
private var internalState = InternalAudioRecorderState.stopped
private var cancellables = Set<AnyCancellable>()
private let actionsSubject: PassthroughSubject<AudioRecorderAction, Never> = .init()
@ -30,34 +40,35 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
actionsSubject.eraseToAnyPublisher()
}
var url: URL? {
audioRecorder?.url
}
var currentTime: TimeInterval {
audioRecorder?.currentTime ?? 0
}
private let silenceThreshold: Float = -50.0
private var meterLevel: Float = 0
private(set) var audioFileUrl: URL?
var currentTime: TimeInterval = .zero
var isRecording: Bool {
audioRecorder?.isRecording ?? false
audioEngine?.isRunning ?? false
}
private let dispatchQueue = DispatchQueue(label: "io.element.elementx.audio_recorder", qos: .userInitiated)
private var stopped = false
func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
init(audioSession: AudioSessionProtocol = AVAudioSession.sharedInstance()) {
self.audioSession = audioSession
}
func record(with recordID: AudioRecordingIdentifier) async {
stopped = false
guard await requestRecordPermission() else {
return .failure(.recordPermissionNotGranted)
setInternalState(.error(.recordPermissionNotGranted))
return
}
let result = await startRecording(with: recordID)
switch result {
case .success:
actionsSubject.send(.didStartRecording)
setInternalState(.recording)
case .failure(let error):
actionsSubject.send(.didFailWithError(error: error))
setInternalState(.error(error))
}
return result
}
func stopRecording() async {
@ -68,6 +79,11 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
}
}
/// Cancels the current recording: stops the engine, then deletes the partially
/// recorded audio file.
func cancelRecording() async {
await stopRecording()
await deleteRecording()
}
func deleteRecording() async {
await withCheckedContinuation { continuation in
deleteRecording {
@ -76,49 +92,37 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
}
}
func peakPowerForChannelNumber(_ channelNumber: Int) -> Float {
guard isRecording, let audioRecorder else {
return 0.0
}
audioRecorder.updateMeters()
return normalizedPowerLevelFromDecibels(audioRecorder.peakPower(forChannel: channelNumber))
}
func averagePowerForChannelNumber(_ channelNumber: Int) -> Float {
guard isRecording, let audioRecorder else {
return 0.0
}
audioRecorder.updateMeters()
return normalizedPowerLevelFromDecibels(audioRecorder.averagePower(forChannel: channelNumber))
func averagePower() -> Float {
meterLevel
}
// MARK: - Private
private func addObservers() {
// Stop recording upon UIApplication.didEnterBackgroundNotification notification
NotificationCenter.default.publisher(for: UIApplication.didEnterBackgroundNotification)
.sink { [weak self] _ in
guard let self else { return }
Task { await self.stopRecording() }
}
.store(in: &cancellables)
}
private func removeObservers() {
cancellables.removeAll()
}
private func requestRecordPermission() async -> Bool {
await withCheckedContinuation { continuation in
AVAudioSession.sharedInstance().requestRecordPermission { granted in
audioSession.requestRecordPermission { granted in
continuation.resume(returning: granted)
}
}
}
// MARK: - Private
/// Configures and activates the audio session for recording (play-and-record
/// with Bluetooth allowed, haptics/system sounds permitted) and installs the
/// notification observers. Failures are logged but not propagated.
private func setupAudioSession() {
MXLog.info("setup audio session")
do {
try audioSession.setAllowHapticsAndSystemSoundsDuringRecording(true)
try audioSession.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth])
try audioSession.setActive(true)
} catch {
// NOTE(review): this message looks stale — the code above configures the
// session for recording, it does not redirect playback to the speakers.
MXLog.error("Could not redirect audio playback to speakers.")
}
addObservers()
}
/// Deactivates the audio session (best effort) and removes the notification
/// observers installed by `setupAudioSession()`.
private func releaseAudioSession() {
MXLog.info("releasing audio session")
try? audioSession.setActive(false)
removeObservers()
}
private func startRecording(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
await withCheckedContinuation { continuation in
@ -128,37 +132,64 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
}
}
/// Creates an AAC (MPEG-4) mono audio file in the temporary directory for the
/// given recording identifier.
/// - Parameters:
///   - recordID: identifier used to build the file name.
///   - sampleRate: sample rate for the file; must match the hardware rate the
///     engine records at.
/// - Throws: any error from `AVAudioFile(forWriting:settings:)`.
private func createAudioFile(with recordID: AudioRecordingIdentifier, sampleRate: Int) throws -> AVAudioFile {
let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: sampleRate,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue]
MXLog.info("creating audio file with format: \(settings)")
let outputURL = URL.temporaryDirectory.appendingPathComponent("voice-message-\(recordID.identifier).m4a")
return try AVAudioFile(forWriting: outputURL, settings: settings)
}
private func startRecording(with recordID: AudioRecordingIdentifier, completion: @escaping (Result<Void, AudioRecorderError>) -> Void) {
dispatchQueue.async { [weak self] in
guard let self, !self.stopped else {
completion(.failure(.recordingCancelled))
return
}
let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 48000,
AVEncoderBitRateKey: 128_000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue]
setupAudioSession()
let audioEngine = AVAudioEngine()
self.audioEngine = audioEngine
// The sample rate must match the hardware sample rate for the audio engine to work.
let sampleRate = audioEngine.inputNode.inputFormat(forBus: 0).sampleRate
let recordingFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
sampleRate: sampleRate,
channels: 1,
interleaved: false)
// Make sure we have 1 channel at the end by using a mixer.
let mixer = AVAudioMixerNode()
self.mixer = mixer
audioEngine.attach(mixer)
audioEngine.connect(audioEngine.inputNode, to: mixer, format: recordingFormat)
// Reset the recording duration
currentTime = 0
let audioFile: AVAudioFile
do {
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setAllowHapticsAndSystemSoundsDuringRecording(true)
try audioSession.setCategory(.playAndRecord, mode: .default)
try audioSession.setActive(true)
let url = URL.temporaryDirectory.appendingPathComponent("voice-message-\(recordID.identifier).m4a")
let audioRecorder = try AVAudioRecorder(url: url, settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
if audioRecorder.record() {
self.audioRecorder = audioRecorder
completion(.success(()))
} else {
MXLog.error("audio recording failed to start")
completion(.failure(.recordingFailed))
}
audioFile = try createAudioFile(with: recordID, sampleRate: Int(sampleRate))
self.audioFile = audioFile
audioFileUrl = audioFile.url
} catch {
MXLog.error("failed to create an audio file. \(error)")
completion(.failure(.audioFileCreationFailure))
releaseAudioSession()
return
}
mixer.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, _ in
self?.processAudioBuffer(buffer)
}
do {
try audioEngine.start()
completion(.success(()))
} catch {
MXLog.error("audio recording failed to start. \(error)")
completion(.failure(.internalError(error: error)))
completion(.failure(.audioEngineFailure))
}
}
}
@ -170,42 +201,186 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
}
guard let self else { return }
stopped = true
guard let audioRecorder, audioRecorder.isRecording else {
return
}
audioRecorder.stop()
cleanupAudioEngine()
MXLog.info("audio recorder stopped")
setInternalState(.stopped)
}
}
/// Stops and tears down the audio engine: removes the mixer tap, detaches the
/// mixer, closes the audio file (by releasing it) and releases the audio session.
private func cleanupAudioEngine() {
if let audioEngine {
audioEngine.stop()
if let mixer {
mixer.removeTap(onBus: 0)
audioEngine.detach(mixer)
}
}
audioFile = nil // this will close the file
audioEngine = nil
releaseAudioSession()
}
/// Deletes the recorded file (if any) on the recorder's serial queue and resets
/// the file URL and recording duration. `completion` is always called, even if
/// `self` has been deallocated — `defer` guarantees it runs on every exit path.
private func deleteRecording(completion: @escaping () -> Void) {
dispatchQueue.async { [weak self] in
defer {
completion()
}
guard let self else { return }
if let audioFileUrl {
// Best effort: a failure to remove the file is intentionally ignored.
try? FileManager.default.removeItem(at: audioFileUrl)
}
audioFileUrl = nil
currentTime = 0
}
}
// MARK: - AVAudioRecorderDelegate
// MARK: Audio Processing
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully success: Bool) {
try? AVAudioSession.sharedInstance().setActive(false)
if success {
actionsSubject.send(.didStopRecording)
/// Handles one PCM buffer from the mixer tap: appends it to the audio file,
/// refreshes the meter level for the waveform, and advances `currentTime`.
/// NOTE(review): presumably called on the tap's render thread — confirm before
/// touching shared state here.
private func processAudioBuffer(_ buffer: AVAudioPCMBuffer) {
// Write the buffer into the audio file
do {
try audioFile?.write(from: buffer)
// Compute the sample value for the waveform
updateMeterLevel(buffer)
// Update the recording duration only if we succeed to write the buffer
currentTime += Double(buffer.frameLength) / buffer.format.sampleRate
} catch {
MXLog.error("failed to write sample. \(error)")
}
}
// MARK: Observers
/// Installs the notification observers the recorder needs while active.
/// Existing observers are removed first so subscriptions are never duplicated.
private func addObservers() {
removeObservers()
// Stop recording upon UIApplication.willResignActiveNotification notification
NotificationCenter.default.publisher(for: UIApplication.willResignActiveNotification)
.sink { [weak self] _ in
guard let self else { return }
MXLog.warning("Application will resign active while recording.")
Task { await self.stopRecording() }
}
.store(in: &cancellables)
// React to audio engine configuration changes (e.g. route/hardware changes).
NotificationCenter.default.publisher(for: Notification.Name.AVAudioEngineConfigurationChange)
.sink { [weak self] notification in
guard let self else { return }
self.handleConfigurationChange(notification: notification)
}
.store(in: &cancellables)
// React to audio session interruptions (e.g. incoming call).
NotificationCenter.default.publisher(for: AVAudioSession.interruptionNotification)
.sink { [weak self] notification in
guard let self else { return }
self.handleInterruption(notification: notification)
}
.store(in: &cancellables)
}
/// Cancels every Combine subscription installed by `addObservers()`.
private func removeObservers() {
cancellables.removeAll()
}
/// Handles `AVAudioSession.interruptionNotification`:
/// - `.began`: marks the recorder as suspended.
/// - `.ended` with `.shouldResume`: restarts the engine and returns to `.recording`;
///   on failure, or when resuming is not allowed, transitions to `.error(.interrupted)`.
/// Malformed notifications (missing/unknown type) are ignored.
func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
MXLog.info("Interruption started: \(notification)")
setInternalState(.suspended)
case .ended:
MXLog.info("Interruption ended: \(notification)")
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
do {
try audioEngine?.start()
setInternalState(.recording)
} catch {
MXLog.debug("Error restarting audio: \(error)")
setInternalState(.error(.interrupted))
}
} else {
MXLog.warning("AudioSession was interrupted: \(notification)")
setInternalState(.error(.interrupted))
}
// Future, unknown interruption types are deliberately ignored.
@unknown default:
break
}
}
/// Handles `AVAudioEngineConfigurationChange`: stops the recording, because the
/// engine's input format may no longer match the format the tap/file were set
/// up with. A change received while suspended (interrupted) is left for the
/// interruption handler to resolve.
func handleConfigurationChange(notification: Notification) {
guard let audioEngine else { return }
MXLog.warning("Configuration changed: \(audioEngine.inputNode.inputFormat(forBus: 0))")
if internalState != .suspended {
Task { await stopRecording() }
}
}
// MARK: Internal State
/// Transitions the recorder's internal state machine and publishes the
/// corresponding `AudioRecorderAction` to subscribers.
///
/// Serialized on `dispatchQueue` so state reads and writes never race.
/// Repeated identical states are ignored to avoid duplicate actions.
private func setInternalState(_ state: InternalAudioRecorderState) {
dispatchQueue.async { [weak self] in
guard let self else { return }
// No-op when the state doesn't actually change.
guard internalState != state else { return }
MXLog.debug("internal state: \(internalState) -> \(state)")
internalState = state
switch internalState {
case .recording:
actionsSubject.send(.didStartRecording)
case .suspended:
// Transient interruption state; nothing is published.
break
case .stopped:
actionsSubject.send(.didStopRecording)
case .error(let error):
// Tear the engine down before reporting so callers observe a clean recorder.
cleanupAudioEngine()
actionsSubject.send(.didFailWithError(error: error))
}
}
}
// MARK: Audio Metering
/// Maps a decibel power reading to a normalized meter level in [0, 1].
///
/// - Parameter power: average power in decibels (typically negative; silence
///   sits well below 0).
/// - Returns: 0 for non-finite or below-threshold readings, 1 for readings at
///   or above full scale, otherwise a linear scale between `silenceThreshold`
///   and full scale.
private func scaledPower(power: Float) -> Float {
guard power.isFinite else {
return 0.0
}
let minDb: Float = silenceThreshold
if power < minDb {
return 0.0
} else if power >= 1.0 {
return 1.0
} else {
// Linear interpolation between the silence threshold and full scale.
// Note: the previous body also logged an error and published
// `.didFailWithError` here on every mid-range reading — stray lines
// that referenced the removed `genericError` case; a pure scaling
// helper must not emit actions.
return (abs(minDb) - abs(power)) / abs(minDb)
}
}
/// `AVAudioRecorderDelegate` callback invoked when an encode error occurs
/// while recording. Deactivates the shared audio session (best effort) and
/// publishes the failure to subscribers.
/// NOTE(review): this looks like a leftover from the AVAudioRecorder-based
/// implementation — confirm it is still reachable now that recording uses
/// AVAudioEngine, and that `AudioRecorderError.genericError` still exists.
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
try? AVAudioSession.sharedInstance().setActive(false)
MXLog.error("audio recorder encode error did occur. \(error?.localizedDescription ?? "")")
actionsSubject.send(.didFailWithError(error: error ?? AudioRecorderError.genericError))
}
/// Normalizes a decibel value against the silence threshold.
/// NOTE(review): yields values outside [0, 1] for inputs outside
/// [silenceThreshold, 0] — confirm callers clamp if required.
private func normalizedPowerLevelFromDecibels(_ decibels: Float) -> Float {
    decibels / silenceThreshold
}

/// Computes the current meter level from a captured PCM buffer.
///
/// Uses Accelerate's `vDSP_rmsqv` to compute the RMS of the first channel,
/// converts it to decibels, then scales it to [0, 1] via `scaledPower(power:)`.
private func updateMeterLevel(_ buffer: AVAudioPCMBuffer) {
    // Get an array of pointer to each sample's data
    guard let channelData = buffer.floatChannelData else {
        return
    }
    // Compute RMS. Seeded with .nan so a zero-length buffer propagates a
    // non-finite value, which scaledPower maps to 0.
    var rms: Float = .nan
    vDSP_rmsqv(channelData.pointee, buffer.stride, &rms, vDSP_Length(buffer.frameLength))
    // Convert to decibels
    let avgPower = 20 * log10(rms)
    meterLevel = scaledPower(power: avgPower)
}
}

View File

@ -30,30 +30,31 @@ extension AudioRecordingIdentifier {
}
}
enum AudioRecorderError: Error {
case genericError
case internalError(error: Error)
case recordPermissionNotGranted
case recordingFailed
enum AudioRecorderError: Error, Equatable {
case audioEngineFailure
case audioFileCreationFailure
case interrupted
case recordingCancelled
case recordingFailed
case recordPermissionNotGranted
}
enum AudioRecorderAction {
case didStartRecording
case didStopRecording
case didFailWithError(error: Error)
case didFailWithError(error: AudioRecorderError)
}
protocol AudioRecorderProtocol: AnyObject {
var actions: AnyPublisher<AudioRecorderAction, Never> { get }
var currentTime: TimeInterval { get }
var isRecording: Bool { get }
var url: URL? { get }
var audioFileUrl: URL? { get }
func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError>
func record(with recordID: AudioRecordingIdentifier) async
func stopRecording() async
func deleteRecording() async
func averagePowerForChannelNumber(_ channelNumber: Int) -> Float
func averagePower() -> Float
}
// sourcery: AutoMockable

View File

@ -101,8 +101,8 @@ class AudioRecorderState: ObservableObject, Identifiable {
if let currentTime = audioRecorder?.currentTime {
duration = currentTime
}
if let sample = audioRecorder?.averagePowerForChannelNumber(0) {
waveformSamples.append(sample)
if let averagePower = audioRecorder?.averagePower() {
waveformSamples.append(1.0 - averagePower)
}
}

View File

@ -14,6 +14,7 @@
// limitations under the License.
//
import Combine
import DSWaveformImage
import Foundation
import MatrixRustSDK
@ -24,15 +25,28 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
private let voiceMessageCache: VoiceMessageCacheProtocol
private let mediaPlayerProvider: MediaPlayerProviderProtocol
private let actionsSubject: PassthroughSubject<VoiceMessageRecorderAction, Never> = .init()
var actions: AnyPublisher<VoiceMessageRecorderAction, Never> {
actionsSubject.eraseToAnyPublisher()
}
private let mp4accMimeType = "audio/m4a"
private let waveformSamplesCount = 100
private(set) var recordingURL: URL?
private(set) var recordingDuration: TimeInterval = 0.0
var recordingURL: URL? {
audioRecorder.audioFileUrl
}
var recordingDuration: TimeInterval {
audioRecorder.currentTime
}
private var recordingCancelled = false
private(set) var previewAudioPlayerState: AudioPlayerState?
private(set) var previewAudioPlayer: AudioPlayerProtocol?
private var cancellables = Set<AnyCancellable>()
init(audioRecorder: AudioRecorderProtocol = AudioRecorder(),
mediaPlayerProvider: MediaPlayerProviderProtocol,
audioConverter: AudioConverterProtocol = AudioConverter(),
@ -41,35 +55,32 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
self.mediaPlayerProvider = mediaPlayerProvider
self.audioConverter = audioConverter
self.voiceMessageCache = voiceMessageCache
addObservers()
}
deinit {
removeObservers()
}
// MARK: - Recording
func startRecording() async -> Result<Void, VoiceMessageRecorderError> {
func startRecording() async {
await stopPlayback()
recordingURL = nil
switch await audioRecorder.record(with: .uuid(UUID())) {
case .failure(let error):
return .failure(.audioRecorderError(error))
case .success:
recordingURL = audioRecorder.url
return .success(())
}
recordingCancelled = false
await audioRecorder.record(with: .uuid(UUID()))
}
func stopRecording() async -> Result<Void, VoiceMessageRecorderError> {
recordingDuration = audioRecorder.currentTime
func stopRecording() async {
recordingCancelled = false
await audioRecorder.stopRecording()
guard case .success = await finalizeRecording() else {
return .failure(.previewNotAvailable)
}
return .success(())
}
func cancelRecording() async {
recordingCancelled = true
await audioRecorder.stopRecording()
await audioRecorder.deleteRecording()
recordingURL = nil
previewAudioPlayerState = nil
}
@ -77,13 +88,12 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
await stopPlayback()
await audioRecorder.deleteRecording()
previewAudioPlayerState = nil
recordingURL = nil
}
// MARK: - Preview
func startPlayback() async -> Result<Void, VoiceMessageRecorderError> {
guard let previewAudioPlayerState, let url = recordingURL else {
guard let previewAudioPlayerState, let url = audioRecorder.audioFileUrl else {
return .failure(.previewNotAvailable)
}
@ -122,7 +132,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
}
func buildRecordingWaveform() async -> Result<[UInt16], VoiceMessageRecorderError> {
guard let url = recordingURL else {
guard let url = audioRecorder.audioFileUrl else {
return .failure(.missingRecordingFile)
}
// build the waveform
@ -134,12 +144,13 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
waveformData = samples.map { UInt16(max(0, (1 - $0) * 1024)) }
} catch {
MXLog.error("Waveform analysis failed. \(error)")
return .failure(.waveformAnalysisError)
}
return .success(waveformData)
}
func sendVoiceMessage(inRoom roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError> {
guard let url = recordingURL else {
guard let url = audioRecorder.audioFileUrl else {
return .failure(VoiceMessageRecorderError.missingRecordingFile)
}
@ -182,8 +193,49 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
// MARK: - Private
private func addObservers() {
audioRecorder.actions
.sink { [weak self] action in
guard let self else { return }
self.handleAudioRecorderAction(action)
}
.store(in: &cancellables)
}
private func removeObservers() {
cancellables.removeAll()
}
private func handleAudioRecorderAction(_ action: AudioRecorderAction) {
switch action {
case .didStartRecording:
MXLog.info("audio recorder did start recording")
actionsSubject.send(.didStartRecording(audioRecorder: audioRecorder))
case .didStopRecording, .didFailWithError(error: .interrupted):
MXLog.info("audio recorder did stop recording")
if !recordingCancelled {
Task {
guard case .success = await finalizeRecording() else {
actionsSubject.send(.didFailWithError(error: VoiceMessageRecorderError.previewNotAvailable))
return
}
guard let recordingURL = audioRecorder.audioFileUrl, let previewAudioPlayerState else {
actionsSubject.send(.didFailWithError(error: VoiceMessageRecorderError.previewNotAvailable))
return
}
await mediaPlayerProvider.register(audioPlayerState: previewAudioPlayerState)
actionsSubject.send(.didStopRecording(previewState: previewAudioPlayerState, url: recordingURL))
}
}
case .didFailWithError(let error):
MXLog.info("audio recorder did failed with error: \(error)")
actionsSubject.send(.didFailWithError(error: .audioRecorderError(error)))
}
}
private func finalizeRecording() async -> Result<Void, VoiceMessageRecorderError> {
guard let url = recordingURL else {
MXLog.info("finalize audio recording")
guard let url = audioRecorder.audioFileUrl, audioRecorder.currentTime > 0 else {
return .failure(.previewNotAvailable)
}
@ -196,6 +248,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
return .failure(.previewNotAvailable)
}
previewAudioPlayer = audioPlayer
return .success(())
}
}

View File

@ -14,6 +14,7 @@
// limitations under the License.
//
import Combine
import Foundation
enum VoiceMessageRecorderError: Error {
@ -21,17 +22,26 @@ enum VoiceMessageRecorderError: Error {
case missingRecordingFile
case previewNotAvailable
case audioRecorderError(AudioRecorderError)
case waveformAnalysisError
case failedSendingVoiceMessage
}
enum VoiceMessageRecorderAction {
case didStartRecording(audioRecorder: AudioRecorderProtocol)
case didStopRecording(previewState: AudioPlayerState, url: URL)
case didFailWithError(error: VoiceMessageRecorderError)
}
protocol VoiceMessageRecorderProtocol {
var audioRecorder: AudioRecorderProtocol { get }
var previewAudioPlayerState: AudioPlayerState? { get }
var recordingURL: URL? { get }
var recordingDuration: TimeInterval { get }
func startRecording() async -> Result<Void, VoiceMessageRecorderError>
func stopRecording() async -> Result<Void, VoiceMessageRecorderError>
var actions: AnyPublisher<VoiceMessageRecorderAction, Never> { get }
func startRecording() async
func stopRecording() async
func cancelRecording() async
func startPlayback() async -> Result<Void, VoiceMessageRecorderError>
func pausePlayback()

View File

@ -9,4 +9,4 @@ output:
../../ElementX/Sources/Mocks/Generated/GeneratedMocks.swift
args:
automMockableTestableImports: []
autoMockableImports: [AnalyticsEvents, Combine, Foundation, LocalAuthentication, MatrixRustSDK, SwiftUI]
autoMockableImports: [AnalyticsEvents, AVFoundation, Combine, Foundation, LocalAuthentication, MatrixRustSDK, SwiftUI]

View File

@ -34,7 +34,7 @@ class AudioRecorderStateTests: XCTestCase {
audioRecorderMock.isRecording = false
audioRecorderMock.underlyingActions = audioRecorderActions
audioRecorderMock.currentTime = 0.0
audioRecorderMock.averagePowerForChannelNumberReturnValue = 0
audioRecorderMock.averagePowerReturnValue = 0
return audioRecorderMock
}
@ -59,7 +59,7 @@ class AudioRecorderStateTests: XCTestCase {
func testReportError() async throws {
XCTAssertEqual(audioRecorderState.recordingState, .stopped)
audioRecorderState.reportError(AudioRecorderError.genericError)
audioRecorderState.reportError(AudioRecorderError.audioEngineFailure)
XCTAssertEqual(audioRecorderState.recordingState, .error)
}

View File

@ -0,0 +1,56 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import Combine
@testable import ElementX
import Foundation
import XCTest
/// Unit tests for `AudioRecorder`, driven through a mocked audio session.
@MainActor
class AudioRecorderTests: XCTestCase {
private var audioRecorder: AudioRecorder!
private var audioSessionMock: AudioSessionMock!
override func setUp() async throws {
audioSessionMock = AudioSessionMock()
// Grant the record permission by default; individual tests may override.
audioSessionMock.requestRecordPermissionClosure = { completion in
completion(true)
}
audioRecorder = AudioRecorder(audioSession: audioSessionMock)
}
override func tearDown() async throws {
// Cancel any in-flight recording so tests don't leak audio resources.
await audioRecorder?.cancelRecording()
}
/// Recording without the record permission must publish
/// `.didFailWithError(.recordPermissionNotGranted)` and leave the recorder stopped.
func testRecordWithoutPermission() async throws {
audioSessionMock.requestRecordPermissionClosure = { completion in
completion(false)
}
let deferred = deferFulfillment(audioRecorder.actions) { action in
switch action {
case .didFailWithError(.recordPermissionNotGranted):
return true
default:
return false
}
}
await audioRecorder.record(with: .uuid(UUID()))
try await deferred.fulfill()
XCTAssertFalse(audioRecorder.isRecording)
}
}

View File

@ -24,6 +24,11 @@ class VoiceMessageRecorderTests: XCTestCase {
private var voiceMessageRecorder: VoiceMessageRecorder!
private var audioRecorder: AudioRecorderMock!
private var audioRecorderActionsSubject: PassthroughSubject<AudioRecorderAction, Never> = .init()
private var audioRecorderActions: AnyPublisher<AudioRecorderAction, Never> {
audioRecorderActionsSubject.eraseToAnyPublisher()
}
private var mediaPlayerProvider: MediaPlayerProviderMock!
private var audioConverter: AudioConverterMock!
private var voiceMessageCache: VoiceMessageCacheMock!
@ -38,9 +43,10 @@ class VoiceMessageRecorderTests: XCTestCase {
override func setUp() async throws {
audioRecorder = AudioRecorderMock()
audioRecorder.recordWithReturnValue = .success(())
audioRecorder.underlyingCurrentTime = 0
audioRecorder.averagePowerForChannelNumberReturnValue = 0
audioRecorder.averagePowerReturnValue = 0
audioRecorder.actions = audioRecorderActions
audioPlayer = AudioPlayerMock()
audioPlayer.actions = audioPlayerActions
audioPlayer.state = .stopped
@ -57,56 +63,73 @@ class VoiceMessageRecorderTests: XCTestCase {
audioConverter: audioConverter,
voiceMessageCache: voiceMessageCache)
}
/// Simulates a completed recording: gives the mocked recorder a file URL and a
/// non-zero duration, emits `.didStopRecording`, then waits until the voice
/// message recorder publishes its own `.didStopRecording` with the expected URL.
private func setRecordingComplete() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.currentTime = 5
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
switch action {
case .didStopRecording(_, let url) where url == self.recordingURL:
return true
default:
return false
}
}
audioRecorderActionsSubject.send(.didStopRecording)
try await deferred.fulfill()
}
/// `recordingURL` must be forwarded from the underlying audio recorder.
func testRecordingURL() async throws {
audioRecorder.audioFileUrl = recordingURL
XCTAssertEqual(voiceMessageRecorder.recordingURL, recordingURL)
}
/// `recordingDuration` must be forwarded from the recorder's current time.
func testRecordingDuration() async throws {
audioRecorder.currentTime = 10.3
XCTAssertEqual(voiceMessageRecorder.recordingDuration, 10.3)
}
func testStartRecording() async throws {
audioRecorder.url = recordingURL
_ = await voiceMessageRecorder.startRecording()
XCTAssert(audioRecorder.recordWithCalled)
XCTAssertEqual(voiceMessageRecorder.recordingURL, audioRecorder.url)
}
func testStopRecording() async throws {
audioRecorder.isRecording = true
audioRecorder.currentTime = 14.0
audioRecorder.url = recordingURL
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
// Internal audio recorder must have been stopped
XCTAssert(audioRecorder.stopRecordingCalled)
// A preview player state must be available
let previewPlayerState = voiceMessageRecorder.previewAudioPlayerState
XCTAssertNotNil(previewPlayerState)
XCTAssertEqual(previewPlayerState?.duration, audioRecorder.currentTime)
}
/// Cancelling must stop the underlying recorder and delete the recording file.
func testCancelRecording() async throws {
audioRecorder.isRecording = true
await voiceMessageRecorder.cancelRecording()
// Internal audio recorder must have been stopped
XCTAssert(audioRecorder.stopRecordingCalled)
// The recording audio file must have been deleted
XCTAssert(audioRecorder.deleteRecordingCalled)
}
/// Deleting must forward to the underlying recorder's delete.
func testDeleteRecording() async throws {
await voiceMessageRecorder.deleteRecording()
// The recording audio file must have been deleted
XCTAssert(audioRecorder.deleteRecordingCalled)
}
/// Starting playback without a finished recording must fail with
/// `.previewNotAvailable`.
func testStartPlaybackNoPreview() async throws {
guard case .failure(.previewNotAvailable) = await voiceMessageRecorder.startPlayback() else {
XCTFail("An error is expected")
return
}
}
func testStartPlayback() async throws {
audioRecorder.url = recordingURL
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
// if the player url doesn't match the recording url
try await setRecordingComplete()
guard case .success = await voiceMessageRecorder.startPlayback() else {
XCTFail("Playback should start")
return
}
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, true)
XCTAssert(audioPlayer.loadMediaSourceUsingAutoplayCalled)
XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.url, recordingURL)
XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.mediaSource.mimeType, "audio/m4a")
@ -115,40 +138,35 @@ class VoiceMessageRecorderTests: XCTestCase {
XCTAssertFalse(audioPlayer.playCalled)
}
func testResumePlayback() async throws {
audioRecorder.url = recordingURL
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
// if the player url matches the recording url
audioPlayer.url = recordingURL
guard case .success = await voiceMessageRecorder.startPlayback() else {
XCTFail("Playback should start")
return
}
XCTAssertFalse(audioPlayer.loadMediaSourceUsingAutoplayCalled)
XCTAssert(audioPlayer.playCalled)
}
func testPausePlayback() async throws {
audioRecorder.url = recordingURL
switch await voiceMessageRecorder.startRecording() {
case .failure(let error):
XCTFail("Recording should start. \(error)")
case .success:
break
}
_ = await voiceMessageRecorder.stopRecording()
try await setRecordingComplete()
_ = await voiceMessageRecorder.startPlayback()
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, true)
voiceMessageRecorder.pausePlayback()
XCTAssert(audioPlayer.pauseCalled)
}
func testResumePlayback() async throws {
try await setRecordingComplete()
audioPlayer.url = recordingURL
guard case .success = await voiceMessageRecorder.startPlayback() else {
XCTFail("Playback should start")
return
}
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, true)
// The media must not have been reloaded
XCTAssertFalse(audioPlayer.loadMediaSourceUsingAutoplayCalled)
XCTAssertTrue(audioPlayer.playCalled)
}
func testStopPlayback() async throws {
audioRecorder.url = recordingURL
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
try await setRecordingComplete()
_ = await voiceMessageRecorder.startPlayback()
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, true)
await voiceMessageRecorder.stopPlayback()
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, false)
@ -156,26 +174,28 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSeekPlayback() async throws {
audioRecorder.url = recordingURL
// Calling stop will generate the preview player state needed to have an audio player
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
voiceMessageRecorder.previewAudioPlayerState?.attachAudioPlayer(audioPlayer)
try await setRecordingComplete()
_ = await voiceMessageRecorder.startPlayback()
XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, true)
await voiceMessageRecorder.seekPlayback(to: 0.4)
XCTAssert(audioPlayer.seekToCalled)
XCTAssertEqual(audioPlayer.seekToReceivedProgress, 0.4)
}
func testBuildRecordedWaveform() async throws {
// If there is no recording file, an error is expected
audioRecorder.audioFileUrl = nil
guard case .failure(.missingRecordingFile) = await voiceMessageRecorder.buildRecordingWaveform() else {
XCTFail("An error is expected")
return
}
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_audio", withExtension: "mp3") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.url = audioFileUrl
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
audioRecorder.audioFileUrl = audioFileUrl
guard case .success(let data) = await voiceMessageRecorder.buildRecordingWaveform() else {
XCTFail("A waveform is expected")
return
@ -183,18 +203,99 @@ class VoiceMessageRecorderTests: XCTestCase {
XCTAssert(!data.isEmpty)
}
/// Sending without a recording file must fail with `.missingRecordingFile`.
func testSendVoiceMessage_NoRecordingFile() async throws {
let roomProxy = RoomProxyMock()
// If there is no recording file, an error is expected
audioRecorder.audioFileUrl = nil
guard case .failure(.missingRecordingFile) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
}
}
/// A throwing Opus/Ogg conversion must surface as `.failedSendingVoiceMessage`.
func testSendVoiceMessage_ConversionError() async throws {
audioRecorder.audioFileUrl = recordingURL
// If the converter returns an error
audioConverter.convertToOpusOggSourceURLDestinationURLThrowableError = AudioConverterError.conversionFailed(nil)
let roomProxy = RoomProxyMock()
guard case .failure(.failedSendingVoiceMessage) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
}
}
/// A conversion that produces no output file (the mock deletes the
/// destination) must surface as `.failedSendingVoiceMessage`.
func testSendVoiceMessage_InvalidFile() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = audioFileUrl
// Simulate a converter leaving no file behind.
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { _, destination in
try? FileManager.default.removeItem(at: destination)
}
let roomProxy = RoomProxyMock()
roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue = .failure(.failedSendingMedia)
guard case .failure(.failedSendingVoiceMessage) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
}
}
/// Waveform analysis on a non-audio file (a PNG copied in place of the
/// converted audio) must surface as `.failedSendingVoiceMessage`.
/// NOTE(review): method name has a typo ("Anlyse" -> "Analysis") — candidate
/// for a rename in a follow-up.
func testSendVoiceMessage_WaveformAnlyseFailed() async throws {
guard let imageFileUrl = Bundle(for: Self.self).url(forResource: "test_image", withExtension: "png") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = imageFileUrl
// Replace the converted output with the image so waveform analysis fails.
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { _, destination in
try? FileManager.default.removeItem(at: destination)
try? FileManager.default.copyItem(at: imageFileUrl, to: destination)
}
let roomProxy = RoomProxyMock()
roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue = .failure(.failedSendingMedia)
guard case .failure(.failedSendingVoiceMessage) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
}
}
/// A failing media upload after a successful conversion must surface as
/// `.failedSendingVoiceMessage`.
func testSendVoiceMessage_SendError() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = audioFileUrl
// Perform a real conversion so the file itself is valid.
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { source, destination in
try? FileManager.default.removeItem(at: destination)
let internalConverter = AudioConverter()
try internalConverter.convertToOpusOgg(sourceURL: source, destinationURL: destination)
}
// If the media upload fails
let roomProxy = RoomProxyMock()
roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue = .failure(.failedSendingMedia)
guard case .failure(.failedSendingVoiceMessage) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
}
}
func testSendVoiceMessage() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
let roomProxy = RoomProxyMock()
audioRecorder.currentTime = 42
audioRecorder.url = audioFileUrl
audioRecorder.audioFileUrl = audioFileUrl
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
let roomProxy = RoomProxyMock()
let audioConverter = AudioConverterMock()
var convertedFileUrl: URL?
var convertedFileSize: UInt64?
@ -235,4 +336,48 @@ class VoiceMessageRecorderTests: XCTestCase {
XCTFail("converted file URL is missing")
}
}
/// An `AudioRecorderAction.didStartRecording` must be forwarded as a
/// `VoiceMessageRecorderAction.didStartRecording`.
func testAudioRecorderActionHandling_didStartRecording() async throws {
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
switch action {
case .didStartRecording:
return true
default:
return false
}
}
audioRecorderActionsSubject.send(.didStartRecording)
try await deferred.fulfill()
}
/// An `AudioRecorderAction.didStopRecording` (with a valid file and duration)
/// must be forwarded as `.didStopRecording` carrying the recording URL.
func testAudioRecorderActionHandling_didStopRecording() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.currentTime = 5
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
switch action {
case .didStopRecording(_, let url) where url == self.recordingURL:
return true
default:
return false
}
}
audioRecorderActionsSubject.send(.didStopRecording)
try await deferred.fulfill()
}
/// An `AudioRecorderAction.didFailWithError` must be forwarded as a
/// `VoiceMessageRecorderAction.didFailWithError`.
func testAudioRecorderActionHandling_didFailed() async throws {
audioRecorder.audioFileUrl = recordingURL
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
switch action {
case .didFailWithError:
return true
default:
return false
}
}
audioRecorderActionsSubject.send(.didFailWithError(error: .audioEngineFailure))
try await deferred.fulfill()
}
}