New voice message recording mode (#2051)

* Lock voice message recording

* Use the VoiceMessageCache to store the recording file

* Rework some of the composer toolbar buttons

* Update accessibility labels for voice message recording button

* PreviewTests
Nicolas Mauri 2023-11-10 16:32:22 +01:00 committed by GitHub
parent 5a0dab1ce8
commit da831f6725
34 changed files with 283 additions and 296 deletions
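
In rough terms, the change turns the composer button into a tap-to-start / tap-to-stop toggle and has the recorder write straight to a well-known URL handed out by the VoiceMessageCache, instead of a per-recording UUID file. A minimal sketch, assuming only the APIs introduced in the diffs below (the toggle function itself is hypothetical):

// Sketch only — AudioRecorderProtocol and VoiceMessageCacheProtocol are the real types changed below.
func toggleRecording(recorder: AudioRecorderProtocol, cache: VoiceMessageCacheProtocol) async {
    if recorder.isRecording {
        await recorder.stopRecording()
    } else {
        // The cache hands out a stable recording URL; the recorder overwrites any stale file there.
        await recorder.record(audioFileURL: cache.urlForRecording)
    }
}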

View File

@@ -527,7 +527,6 @@
8BC8EF6705A78946C1F22891 /* SoftLogoutScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 71A7D4DDEEE5D2CA0C8D63CD /* SoftLogoutScreen.swift */; };
8C050A8012E6078BEAEF5BC8 /* PillTextAttachmentData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 913C8E13B8B602C7B6C0C4AE /* PillTextAttachmentData.swift */; };
8C1A5ECAF895D4CAF8C4D461 /* AppActivityView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8F21ED7205048668BEB44A38 /* AppActivityView.swift */; };
8C27BEB00B903D953F31F962 /* VoiceMessageRecordingButtonTooltipView.swift in Sources */ = {isa = PBXBuildFile; fileRef = FF449205DF1E9817115245C4 /* VoiceMessageRecordingButtonTooltipView.swift */; };
8C42B5B1642D189C362A5EDF /* SecureBackupScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 91831D7042EADD0CC2B5EC36 /* SecureBackupScreenUITests.swift */; };
8C454500B8073E1201F801A9 /* MXLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34A814CBD56230BC74FFCF4 /* MXLogger.swift */; };
8C706DA7EAC0974CA2F8F1CD /* MentionBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 15748C254911E3654C93B0ED /* MentionBuilder.swift */; };
@@ -1918,7 +1917,6 @@
FE87C931165F5E201CACBB87 /* MediaPlayerProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaPlayerProtocol.swift; sourceTree = "<group>"; };
FEC2E8E1B20BB2EA07B0B61E /* WelcomeScreenScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WelcomeScreenScreenViewModel.swift; sourceTree = "<group>"; };
FEFEEE93B82937B2E86F92EB /* AnalyticsScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnalyticsScreen.swift; sourceTree = "<group>"; };
FF449205DF1E9817115245C4 /* VoiceMessageRecordingButtonTooltipView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageRecordingButtonTooltipView.swift; sourceTree = "<group>"; };
FFECCE59967018204876D0A5 /* LocationMarkerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationMarkerView.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
@@ -2827,7 +2825,6 @@
3E6A9B9DFEE964962C179DE3 /* RoomAttachmentPicker.swift */,
BFC9F57320EC80C7CE34FE4A /* VoiceMessagePreviewComposer.swift */,
D2E61DDB42C0DE429C0955D8 /* VoiceMessageRecordingButton.swift */,
FF449205DF1E9817115245C4 /* VoiceMessageRecordingButtonTooltipView.swift */,
CCB6F36CCE44A29A06FCAF1C /* VoiceMessageRecordingComposer.swift */,
0A634D8DD1E10D858CF7995D /* VoiceMessageRecordingView.swift */,
);
@@ -5909,7 +5906,6 @@
C2879369106A419A5071F1F8 /* VoiceMessageRecorder.swift in Sources */,
19DED23340D0855B59693ED2 /* VoiceMessageRecorderProtocol.swift in Sources */,
09EF4222EEBBA1A7B8F4071E /* VoiceMessageRecordingButton.swift in Sources */,
8C27BEB00B903D953F31F962 /* VoiceMessageRecordingButtonTooltipView.swift in Sources */,
CA5BFF0C2EF5A8EF40CA2D69 /* VoiceMessageRecordingComposer.swift in Sources */,
EBDB339A7C127F068B6E52E5 /* VoiceMessageRecordingView.swift in Sources */,
A9482B967FC85DA611514D35 /* VoiceMessageRoomPlaybackView.swift in Sources */,

View File

@@ -0,0 +1,16 @@
{
"images" : [
{
"filename" : "stop-recording.svg",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
},
"properties" : {
"preserves-vector-representation" : true,
"template-rendering-intent" : "template"
}
}

View File

@@ -0,0 +1,5 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="stop_FILL1_wght400_GRAD0_opsz24 1">
<path id="Vector" d="M6 16V8C6 7.45 6.19583 6.97917 6.5875 6.5875C6.97917 6.19583 7.45 6 8 6H16C16.55 6 17.0208 6.19583 17.4125 6.5875C17.8042 6.97917 18 7.45 18 8V16C18 16.55 17.8042 17.0208 17.4125 17.4125C17.0208 17.8042 16.55 18 16 18H8C7.45 18 6.97917 17.8042 6.5875 17.4125C6.19583 17.0208 6 16.55 6 16Z" fill="white"/>
</g>
</svg>


View File

@@ -12,7 +12,8 @@
"a11y_show_password" = "Show password";
"a11y_start_call" = "Start a call";
"a11y_user_menu" = "User menu";
"a11y_voice_message_record" = "Record voice message. Double tap and hold to record. Release to end recording.";
"a11y_voice_message_record" = "Record voice message.";
"a11y_voice_message_stop_recording" = "Stop recording";
"action_accept" = "Accept";
"action_add_to_timeline" = "Add to timeline";
"action_back" = "Back";

View File

@@ -52,6 +52,7 @@ internal enum Asset {
internal static let numberedList = ImageAsset(name: "images/numbered-list")
internal static let quote = ImageAsset(name: "images/quote")
internal static let sendMessage = ImageAsset(name: "images/send-message")
internal static let stopRecording = ImageAsset(name: "images/stop-recording")
internal static let strikethrough = ImageAsset(name: "images/strikethrough")
internal static let textFormat = ImageAsset(name: "images/text-format")
internal static let underline = ImageAsset(name: "images/underline")

View File

@@ -40,8 +40,10 @@ public enum L10n {
public static var a11yStartCall: String { return L10n.tr("Localizable", "a11y_start_call") }
/// User menu
public static var a11yUserMenu: String { return L10n.tr("Localizable", "a11y_user_menu") }
/// Record voice message. Double tap and hold to record. Release to end recording.
/// Record voice message.
public static var a11yVoiceMessageRecord: String { return L10n.tr("Localizable", "a11y_voice_message_record") }
/// Stop recording
public static var a11yVoiceMessageStopRecording: String { return L10n.tr("Localizable", "a11y_voice_message_stop_recording") }
/// Accept
public static var actionAccept: String { return L10n.tr("Localizable", "action_accept") }
/// Add to timeline

View File

@@ -466,6 +466,18 @@ class AudioPlayerMock: AudioPlayerProtocol {
loadMediaSourceUsingAutoplayReceivedInvocations.append((mediaSource: mediaSource, url: url, autoplay: autoplay))
loadMediaSourceUsingAutoplayClosure?(mediaSource, url, autoplay)
}
//MARK: - reset
var resetCallsCount = 0
var resetCalled: Bool {
return resetCallsCount > 0
}
var resetClosure: (() -> Void)?
func reset() {
resetCallsCount += 1
resetClosure?()
}
//MARK: - play
var playCallsCount = 0
@@ -535,23 +547,23 @@ class AudioRecorderMock: AudioRecorderProtocol {
set(value) { underlyingIsRecording = value }
}
var underlyingIsRecording: Bool!
var audioFileUrl: URL?
var audioFileURL: URL?
//MARK: - record
var recordWithCallsCount = 0
var recordWithCalled: Bool {
return recordWithCallsCount > 0
var recordAudioFileURLCallsCount = 0
var recordAudioFileURLCalled: Bool {
return recordAudioFileURLCallsCount > 0
}
var recordWithReceivedRecordID: AudioRecordingIdentifier?
var recordWithReceivedInvocations: [AudioRecordingIdentifier] = []
var recordWithClosure: ((AudioRecordingIdentifier) async -> Void)?
var recordAudioFileURLReceivedAudioFileURL: URL?
var recordAudioFileURLReceivedInvocations: [URL] = []
var recordAudioFileURLClosure: ((URL) async -> Void)?
func record(with recordID: AudioRecordingIdentifier) async {
recordWithCallsCount += 1
recordWithReceivedRecordID = recordID
recordWithReceivedInvocations.append(recordID)
await recordWithClosure?(recordID)
func record(audioFileURL: URL) async {
recordAudioFileURLCallsCount += 1
recordAudioFileURLReceivedAudioFileURL = audioFileURL
recordAudioFileURLReceivedInvocations.append(audioFileURL)
await recordAudioFileURLClosure?(audioFileURL)
}
//MARK: - stopRecording
@@ -1098,6 +1110,18 @@ class MediaPlayerMock: MediaPlayerProtocol {
loadMediaSourceUsingAutoplayReceivedInvocations.append((mediaSource: mediaSource, url: url, autoplay: autoplay))
loadMediaSourceUsingAutoplayClosure?(mediaSource, url, autoplay)
}
//MARK: - reset
var resetCallsCount = 0
var resetCalled: Bool {
return resetCallsCount > 0
}
var resetClosure: (() -> Void)?
func reset() {
resetCallsCount += 1
resetClosure?()
}
//MARK: - play
var playCallsCount = 0
@@ -3143,6 +3167,11 @@ class UserNotificationCenterMock: UserNotificationCenterProtocol {
}
}
class VoiceMessageCacheMock: VoiceMessageCacheProtocol {
var urlForRecording: URL {
get { return underlyingUrlForRecording }
set(value) { underlyingUrlForRecording = value }
}
var underlyingUrlForRecording: URL!
//MARK: - fileURL
@@ -3234,6 +3263,11 @@ class VoiceMessageRecorderMock: VoiceMessageRecorderProtocol {
}
var underlyingAudioRecorder: AudioRecorderProtocol!
var previewAudioPlayerState: AudioPlayerState?
var isRecording: Bool {
get { return underlyingIsRecording }
set(value) { underlyingIsRecording = value }
}
var underlyingIsRecording: Bool!
var recordingURL: URL?
var recordingDuration: TimeInterval {
get { return underlyingRecordingDuration }

View File

@@ -24,9 +24,13 @@ struct ComposerToolbar: View {
let keyCommandHandler: KeyCommandHandler
@FocusState private var composerFocused: Bool
@ScaledMetric private var sendButtonIconSize = 16
@ScaledMetric(relativeTo: .title) private var sendButtonIconSize = 16
@ScaledMetric(relativeTo: .title) private var sendButtonIconPadding = 10
@ScaledMetric(relativeTo: .title) private var sendButtonIconOffsetX = 1
@ScaledMetric(relativeTo: .title) private var spinnerSize = 44
@ScaledMetric(relativeTo: .title) private var closeRTEButtonSize = 30
@ScaledMetric(relativeTo: .title) private var deleteRecordingButtonSize = 30
@State private var frame: CGRect = .zero
@Environment(\.verticalSizeClass) private var verticalSizeClass
@@ -77,8 +81,8 @@ struct ComposerToolbar: View {
sendButton
.padding(.leading, 3)
} else {
voiceMessageRecordingButton
.padding(.leading, 4)
voiceMessageRecordingButton(mode: context.viewState.isVoiceMessageModeActivated ? .recording : .idle)
.padding(.leading, 3)
}
}
}
@@ -122,6 +126,7 @@ struct ComposerToolbar: View {
if context.viewState.isVoiceMessageModeActivated {
voiceMessageContent
.fixedSize(horizontal: false, vertical: true)
}
}
}
@@ -214,8 +219,9 @@ struct ComposerToolbar: View {
.accessibilityHidden(!context.viewState.composerMode.isEdit)
Image(asset: Asset.Images.sendMessage)
.resizable()
.offset(x: sendButtonIconOffsetX)
.frame(width: sendButtonIconSize, height: sendButtonIconSize)
.padding(EdgeInsets(top: 10, leading: 11, bottom: 10, trailing: 9))
.padding(sendButtonIconPadding)
.opacity(context.viewState.composerMode.isEdit ? 0 : 1)
.accessibilityLabel(L10n.actionSend)
.accessibilityHidden(context.viewState.composerMode.isEdit)
@@ -235,8 +241,10 @@ struct ComposerToolbar: View {
// Display the voice message composer above to keep the focus and keep the keyboard open if it's already open.
switch context.viewState.composerMode {
case .recordVoiceMessage(let state):
VoiceMessageRecordingComposer(recorderState: state)
.padding(.leading, 12)
topBarLayout {
voiceMessageTrashButton
VoiceMessageRecordingComposer(recorderState: state)
}
case .previewVoiceMessage(let state, let waveform, let isUploading):
topBarLayout {
voiceMessageTrashButton
@@ -248,17 +256,12 @@ struct ComposerToolbar: View {
}
}
private var voiceMessageRecordingButton: some View {
VoiceMessageRecordingButton {
private func voiceMessageRecordingButton(mode: VoiceMessageRecordingButtonMode) -> some View {
VoiceMessageRecordingButton(mode: mode) {
context.send(viewAction: .voiceMessage(.startRecording))
} stopRecording: { minimumRecordTimeReached in
if minimumRecordTimeReached {
context.send(viewAction: .voiceMessage(.stopRecording))
} else {
context.send(viewAction: .voiceMessage(.cancelRecording))
}
} stopRecording: {
context.send(viewAction: .voiceMessage(.stopRecording))
}
.padding(4)
}
private var voiceMessageTrashButton: some View {
@@ -266,7 +269,9 @@ struct ComposerToolbar: View {
context.send(viewAction: .voiceMessage(.deleteRecording))
} label: {
CompoundIcon(\.delete)
.padding(EdgeInsets(top: 10, leading: 11, bottom: 10, trailing: 11))
.scaledToFit()
.frame(width: deleteRecordingButtonSize, height: deleteRecordingButtonSize)
.padding(7)
}
.buttonStyle(.compound(.plain))
.accessibilityLabel(L10n.a11yDelete)
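
One note on the @ScaledMetric(relativeTo:) migration above: anchoring a metric to a text style makes it follow that style's Dynamic Type scaling curve rather than the default body curve, so the hard-coded sizes now grow with the user's text size. A minimal self-contained illustration (the view is hypothetical; the property wrapper is standard SwiftUI):

import SwiftUI

struct ScaledMicIcon: View {
    // 16pt at the default content size; scales in step with the .title text style.
    @ScaledMetric(relativeTo: .title) private var iconSize = 16.0

    var body: some View {
        Image(systemName: "mic")
            .resizable()
            .scaledToFit()
            .frame(width: iconSize, height: iconSize)
    }
}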

View File

@@ -72,8 +72,6 @@ struct VoiceMessagePreviewComposer: View {
.fill(Color.compound.bgSubtleSecondary)
}
}
.frame(minHeight: 42)
.fixedSize(horizontal: false, vertical: true)
}
@ViewBuilder

View File

@@ -17,79 +17,71 @@
import Compound
import SwiftUI
enum VoiceMessageRecordingButtonMode {
case idle
case recording
}
struct VoiceMessageRecordingButton: View {
let mode: VoiceMessageRecordingButtonMode
var startRecording: (() -> Void)?
var stopRecording: ((_ minimumRecordTimeReached: Bool) -> Void)?
var stopRecording: (() -> Void)?
@ScaledMetric private var tooltipPointerHeight = 6
@State private var buttonPressed = false
@State private var recordingStartTime: Date?
@State private var showTooltip = false
@State private var frame: CGRect = .zero
private let minimumRecordingDuration = 1.0
private let tooltipDuration = 1.0
private let impactFeedbackGenerator = UIImpactFeedbackGenerator()
private let notificationFeedbackGenerator = UINotificationFeedbackGenerator()
@ScaledMetric(relativeTo: .title) private var idleImageSize = 16
@ScaledMetric(relativeTo: .title) private var idleImagePadding = 10
@ScaledMetric(relativeTo: .title) private var recordingImageSize = 24
@ScaledMetric(relativeTo: .title) private var recordingImagePadding = 6
var body: some View {
Button { } label: {
CompoundIcon(buttonPressed ? \.micOnSolid : \.micOnOutline)
.foregroundColor(.compound.iconSecondary)
.padding(EdgeInsets(top: 6, leading: 6, bottom: 6, trailing: 6))
}
.readFrame($frame, in: .global)
.accessibilityLabel(L10n.a11yVoiceMessageRecord)
.onLongPressGesture { } onPressingChanged: { isPressing in
buttonPressed = isPressing
if isPressing {
showTooltip = false
recordingStartTime = Date.now
impactFeedbackGenerator.impactOccurred()
Button {
impactFeedbackGenerator.impactOccurred()
switch mode {
case .idle:
startRecording?()
} else {
if let recordingStartTime, Date.now.timeIntervalSince(recordingStartTime) < minimumRecordingDuration {
withElementAnimation {
showTooltip = true
}
notificationFeedbackGenerator.notificationOccurred(.error)
stopRecording?(false)
} else {
impactFeedbackGenerator.impactOccurred()
stopRecording?(true)
}
}
}
.overlay(alignment: .bottomTrailing) {
if showTooltip {
tooltipView
.offset(y: -frame.height - tooltipPointerHeight)
case .recording:
stopRecording?()
}
} label: {
switch mode {
case .idle:
CompoundIcon(\.micOnOutline, size: .medium, relativeTo: .title)
.foregroundColor(.compound.iconSecondary)
.frame(width: idleImageSize, height: idleImageSize)
.padding(idleImagePadding)
.padding(4)
case .recording:
Asset.Images.stopRecording.swiftUIImage
.resizable()
.foregroundColor(.compound.iconOnSolidPrimary)
.frame(width: recordingImageSize, height: recordingImageSize)
.padding(recordingImagePadding)
.background(
Circle()
.foregroundColor(.compound.bgActionPrimaryRest)
)
.padding(4)
}
}
.buttonStyle(VoiceMessageRecordingButtonStyle())
.accessibilityLabel(mode == .idle ? L10n.a11yVoiceMessageRecord : L10n.a11yVoiceMessageStopRecording)
}
private var tooltipView: some View {
VoiceMessageRecordingButtonTooltipView(text: L10n.screenRoomVoiceMessageTooltip,
pointerHeight: tooltipPointerHeight,
pointerLocation: frame.midX,
pointerLocationCoordinateSpace: .global)
.allowsHitTesting(false)
.fixedSize()
.onAppear {
DispatchQueue.main.asyncAfter(deadline: .now() + tooltipDuration) {
withElementAnimation {
showTooltip = false
}
}
}
}
private struct VoiceMessageRecordingButtonStyle: ButtonStyle {
func makeBody(configuration: Configuration) -> some View {
configuration.label
.opacity(configuration.isPressed ? 0.6 : 1)
}
}
struct VoiceMessageRecordingButton_Previews: PreviewProvider, TestablePreview {
static var previews: some View {
VoiceMessageRecordingButton()
.fixedSize(horizontal: true, vertical: true)
HStack {
VoiceMessageRecordingButton(mode: .idle)
VoiceMessageRecordingButton(mode: .recording)
}
}
}
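
Call sites choose the mode and supply the two tap handlers using labeled trailing closures, as the ComposerToolbar change earlier in this diff does. A hedged sketch (the wrapper view and its state are hypothetical stand-ins):

struct RecorderToggle: View {
    @State private var isRecording = false

    var body: some View {
        VoiceMessageRecordingButton(mode: isRecording ? .recording : .idle) {
            isRecording = true   // first tap, idle → recording
        } stopRecording: {
            isRecording = false  // second tap, recording → idle
        }
    }
}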

View File

@@ -1,110 +0,0 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import Compound
import Foundation
import SwiftUI
struct VoiceMessageRecordingButtonTooltipView: View {
var text: String
var radius: CGFloat = 4
var corners: UIRectCorner = .allCorners
@ScaledMetric var pointerHeight: CGFloat = 6
@ScaledMetric var pointerWidth: CGFloat = 10
var pointerLocation: CGFloat = 10
var pointerLocationCoordinateSpace: CoordinateSpace = .local
var body: some View {
Text(text)
.font(.compound.bodySMSemibold)
.foregroundColor(.compound.textOnSolidPrimary)
.padding(6)
.background(
GeometryReader { geometry in
TooltipShape(radius: radius,
corners: corners,
pointerHeight: pointerHeight,
pointerWidth: pointerWidth,
pointerLocation: localPointerLocation(using: geometry))
.fill(.compound.bgActionPrimaryRest)
}
)
}
private func localPointerLocation(using geometry: GeometryProxy) -> CGFloat {
let frame = geometry.frame(in: pointerLocationCoordinateSpace)
let minX = radius + pointerWidth / 2
let maxX = geometry.size.width - radius - pointerWidth / 2
return min(max(minX, pointerLocation - frame.minX), maxX)
}
}
private struct TooltipShape: Shape {
var radius: CGFloat
var corners: UIRectCorner
var pointerHeight: CGFloat
var pointerWidth: CGFloat
var pointerLocation: CGFloat
func path(in rect: CGRect) -> Path {
var path = Path()
let width = rect.size.width
let height = rect.size.height
var topLeft: CGFloat = corners.contains(.topLeft) ? radius : 0.0
var topRight: CGFloat = corners.contains(.topRight) ? radius : 0.0
var bottomLeft: CGFloat = corners.contains(.bottomLeft) ? radius : 0.0
var bottomRight: CGFloat = corners.contains(.bottomRight) ? radius : 0.0
// Make sure we do not exceed the size of the rectangle
topRight = min(min(topRight, height / 2), width / 2)
topLeft = min(min(topLeft, height / 2), width / 2)
bottomLeft = min(min(bottomLeft, height / 2), width / 2)
bottomRight = min(min(bottomRight, height / 2), width / 2)
path.move(to: CGPoint(x: width / 2.0, y: 0))
path.addLine(to: CGPoint(x: width - topRight, y: 0))
path.addArc(center: CGPoint(x: width - topRight, y: topRight), radius: topRight,
startAngle: Angle(degrees: -90), endAngle: Angle(degrees: 0), clockwise: false)
path.addLine(to: CGPoint(x: width, y: height - bottomRight))
path.addArc(center: CGPoint(x: width - bottomRight, y: height - bottomRight), radius: bottomRight,
startAngle: Angle(degrees: 0), endAngle: Angle(degrees: 90), clockwise: false)
path.addLine(to: CGPoint(x: pointerLocation + (pointerWidth / 2.0), y: height))
path.addLine(to: CGPoint(x: pointerLocation, y: height + pointerHeight))
path.addLine(to: CGPoint(x: pointerLocation - (pointerWidth / 2.0), y: height))
path.addLine(to: CGPoint(x: bottomLeft, y: height))
path.addArc(center: CGPoint(x: bottomLeft, y: height - bottomLeft), radius: bottomLeft,
startAngle: Angle(degrees: 90), endAngle: Angle(degrees: 180), clockwise: false)
path.addLine(to: CGPoint(x: 0, y: topLeft))
path.addArc(center: CGPoint(x: topLeft, y: topLeft), radius: topLeft,
startAngle: Angle(degrees: 180), endAngle: Angle(degrees: 270), clockwise: false)
path.closeSubpath()
return path
}
}
struct VoiceMessageRecordingButtonTooltipView_Previews: PreviewProvider, TestablePreview {
static var previews: some View {
VoiceMessageRecordingButtonTooltipView(text: "Hold to record")
.fixedSize()
}
}

View File

@@ -32,8 +32,6 @@ struct VoiceMessageRecordingComposer: View {
.fill(Color.compound.bgSubtleSecondary)
}
}
.frame(minHeight: 42)
.fixedSize(horizontal: false, vertical: true)
}
private func onPlaybackPlayPause() { }

View File

@@ -390,7 +390,12 @@ class RoomScreenInteractionHandler {
}
func deleteCurrentVoiceMessage() async {
await voiceMessageRecorder.deleteRecording()
if voiceMessageRecorder.isRecording {
await voiceMessageRecorder.cancelRecording()
} else {
await voiceMessageRecorder.deleteRecording()
}
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
@@ -473,7 +478,12 @@ class RoomScreenInteractionHandler {
MXLog.error("Cannot play a voice message without an audio player")
return
}
// Stop any recording in progress
if voiceMessageRecorder.isRecording {
await voiceMessageRecorder.stopRecording()
}
let audioPlayerState = audioPlayerState(for: itemID)
// Ensure this one is attached

View File

@@ -105,6 +105,11 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
addObservers()
}
func reset() {
stop()
unloadContent()
}
func play() {
isStopped = false
setupAudioSession()

View File

@@ -40,10 +40,11 @@ class AudioRecorder: AudioRecorderProtocol {
actionsSubject.eraseToAnyPublisher()
}
private let maximumRecordingTime: TimeInterval = 1800 // 30 minutes
private let silenceThreshold: Float = -50.0
private var meterLevel: Float = 0
private(set) var audioFileUrl: URL?
private(set) var audioFileURL: URL?
var currentTime: TimeInterval = .zero
var isRecording: Bool {
audioEngine?.isRunning ?? false
@@ -56,13 +57,21 @@ class AudioRecorder: AudioRecorderProtocol {
self.audioSession = audioSession
}
func record(with recordID: AudioRecordingIdentifier) async {
deinit {
if isRecording {
// Cleanup
cleanupAudioEngine()
deleteRecordingFile()
}
}
func record(audioFileURL: URL) async {
stopped = false
guard await requestRecordPermission() else {
setInternalState(.error(.recordPermissionNotGranted))
return
}
let result = await startRecording(with: recordID)
let result = await startRecording(audioFileURL: audioFileURL)
switch result {
case .success:
setInternalState(.recording)
@@ -124,25 +133,25 @@ class AudioRecorder: AudioRecorderProtocol {
removeObservers()
}
private func startRecording(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
private func startRecording(audioFileURL: URL) async -> Result<Void, AudioRecorderError> {
await withCheckedContinuation { continuation in
startRecording(with: recordID) { result in
startRecording(audioFileURL: audioFileURL) { result in
continuation.resume(returning: result)
}
}
}
private func createAudioFile(with recordID: AudioRecordingIdentifier, sampleRate: Int) throws -> AVAudioFile {
private func createAudioFile(at recordingURL: URL, sampleRate: Int) throws -> AVAudioFile {
let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: sampleRate,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue]
MXLog.info("creating audio file with format: \(settings)")
let outputURL = URL.temporaryDirectory.appendingPathComponent("voice-message-\(recordID.identifier).m4a")
return try AVAudioFile(forWriting: outputURL, settings: settings)
try? FileManager.default.removeItem(at: recordingURL)
return try AVAudioFile(forWriting: recordingURL, settings: settings)
}
private func startRecording(with recordID: AudioRecordingIdentifier, completion: @escaping (Result<Void, AudioRecorderError>) -> Void) {
private func startRecording(audioFileURL: URL, completion: @escaping (Result<Void, AudioRecorderError>) -> Void) {
dispatchQueue.async { [weak self] in
guard let self, !self.stopped else {
completion(.failure(.recordingCancelled))
@@ -170,9 +179,9 @@ class AudioRecorder: AudioRecorderProtocol {
currentTime = 0
let audioFile: AVAudioFile
do {
audioFile = try createAudioFile(with: recordID, sampleRate: Int(sampleRate))
audioFile = try createAudioFile(at: audioFileURL, sampleRate: Int(sampleRate))
self.audioFile = audioFile
audioFileUrl = audioFile.url
self.audioFileURL = audioFile.url
} catch {
MXLog.error("failed to create an audio file. \(error)")
completion(.failure(.audioFileCreationFailure))
@@ -208,6 +217,7 @@
}
private func cleanupAudioEngine() {
MXLog.info("cleaning up the audio engine")
if let audioEngine {
audioEngine.stop()
if let mixer {
@@ -226,14 +236,22 @@ class AudioRecorder: AudioRecorderProtocol {
completion()
}
guard let self else { return }
if let audioFileUrl {
try? FileManager.default.removeItem(at: audioFileUrl)
}
audioFileUrl = nil
deleteRecordingFile()
audioFileURL = nil
currentTime = 0
}
}
private func deleteRecordingFile() {
guard let audioFileURL else { return }
do {
try FileManager.default.removeItem(at: audioFileURL)
MXLog.info("recording file deleted.")
} catch {
MXLog.error("failed to delete recording file. \(error)")
}
}
// MARK: Audio Processing
private func processAudioBuffer(_ buffer: AVAudioPCMBuffer) {
@@ -246,6 +264,12 @@ class AudioRecorder: AudioRecorderProtocol {
// Update the recording duration only if we succeed to write the buffer
currentTime += Double(buffer.frameLength) / buffer.format.sampleRate
// Limit the recording time
if currentTime >= maximumRecordingTime {
MXLog.info("Maximum recording time reach (\(maximumRecordingTime))")
Task { await stopRecording() }
}
} catch {
MXLog.error("failed to write sample. \(error)")
}
@@ -331,7 +355,6 @@ class AudioRecorder: AudioRecorderProtocol {
private func setInternalState(_ state: InternalAudioRecorderState) {
dispatchQueue.async { [weak self] in
guard let self else { return }
guard internalState != state else { return }
MXLog.debug("internal state: \(internalState) -> \(state)")
internalState = state

View File

@@ -17,19 +17,6 @@
import Combine
import Foundation
enum AudioRecordingIdentifier {
case uuid(UUID)
}
extension AudioRecordingIdentifier {
var identifier: String {
switch self {
case .uuid(let uuid):
return uuid.uuidString
}
}
}
enum AudioRecorderError: Error, Equatable {
case audioEngineFailure
case audioFileCreationFailure
@@ -49,9 +36,9 @@ protocol AudioRecorderProtocol: AnyObject {
var actions: AnyPublisher<AudioRecorderAction, Never> { get }
var currentTime: TimeInterval { get }
var isRecording: Bool { get }
var audioFileUrl: URL? { get }
var audioFileURL: URL? { get }
func record(with recordID: AudioRecordingIdentifier) async
func record(audioFileURL: URL) async
func stopRecording() async
func deleteRecording() async
func averagePower() -> Float
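
A hedged call-site example for the reworked protocol: the caller now owns the destination URL (in the app it comes from VoiceMessageCache.urlForRecording, shown later in this diff); the temporary path and helper function here are illustrative only:

import Foundation

func makeShortRecording(with recorder: AudioRecorderProtocol) async {
    let fileURL = FileManager.default.temporaryDirectory
        .appendingPathComponent("voice-message-recording")
        .appendingPathExtension("m4a")

    await recorder.record(audioFileURL: fileURL)
    try? await Task.sleep(nanoseconds: 2_000_000_000) // record for ~2 seconds
    await recorder.stopRecording()

    // The recorder exposes the URL it actually wrote to.
    print(recorder.audioFileURL?.lastPathComponent ?? "no recording")
}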

View File

@@ -41,6 +41,7 @@ class AudioRecorderState: ObservableObject, Identifiable {
self.audioRecorder = audioRecorder
subscribeToAudioRecorder(audioRecorder)
if audioRecorder.isRecording {
recordingState = .recording
startPublishUpdates()
}
}

View File

@@ -32,6 +32,7 @@ protocol MediaPlayerProtocol: AnyObject {
var state: MediaPlayerState { get }
func load(mediaSource: MediaSourceProxy, using url: URL, autoplay: Bool)
func reset()
func play()
func pause()
func stop()

View File

@@ -22,6 +22,12 @@ class VoiceMessageCache: VoiceMessageCacheProtocol {
FileManager.default.temporaryDirectory.appendingPathComponent("media/voice-message")
}
var urlForRecording: URL {
// Make sure the directory exists
setupTemporaryFilesFolder()
return temporaryFilesFolderURL.appendingPathComponent("voice-message-recording").appendingPathExtension(preferredFileExtension)
}
func fileURL(for mediaSource: MediaSourceProxy) -> URL? {
let url = cacheURL(for: mediaSource)
return FileManager.default.fileExists(atPath: url.path()) ? url : nil
@@ -69,6 +75,7 @@ class VoiceMessageCache: VoiceMessageCacheProtocol {
} else {
try FileManager.default.copyItem(at: source, to: destination)
}
try (destination as NSURL).setResourceValue(URLFileProtection.complete, forKey: .fileProtectionKey)
}
private func cacheURL(for mediaSource: MediaSourceProxy) -> URL {
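
The copy into the cache above now also opts the file into complete data protection. A standalone sketch of that same NSURL resource-value API (the file path is hypothetical):

import Foundation

let fileURL = FileManager.default.temporaryDirectory.appendingPathComponent("example.m4a")
_ = FileManager.default.createFile(atPath: fileURL.path, contents: Data())

do {
    // Encrypt the file at rest; it is only readable while the device is unlocked.
    try (fileURL as NSURL).setResourceValue(URLFileProtection.complete, forKey: .fileProtectionKey)
} catch {
    print("failed to set file protection: \(error)")
}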

View File

@@ -22,6 +22,9 @@ enum VoiceMessageCacheError: Error {
}
protocol VoiceMessageCacheProtocol {
/// URL to use for recording
var urlForRecording: URL { get }
/// Returns the URL of the cached audio file for a given media source
/// - Parameter mediaSource: the media source
/// - Returns: the URL of the cached audio file or nil if the file doesn't exist

View File

@@ -33,8 +33,12 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
private let mp4accMimeType = "audio/m4a"
private let waveformSamplesCount = 100
var isRecording: Bool {
audioRecorder.isRecording
}
var recordingURL: URL? {
audioRecorder.audioFileUrl
audioRecorder.audioFileURL
}
var recordingDuration: TimeInterval {
@@ -67,9 +71,10 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
func startRecording() async {
await stopPlayback()
previewAudioPlayer?.reset()
recordingCancelled = false
await audioRecorder.record(with: .uuid(UUID()))
await audioRecorder.record(audioFileURL: voiceMessageCache.urlForRecording)
}
func stopRecording() async {
@@ -78,22 +83,26 @@
}
func cancelRecording() async {
MXLog.info("Cancel recording.")
recordingCancelled = true
await audioRecorder.stopRecording()
await audioRecorder.deleteRecording()
previewAudioPlayerState = nil
previewAudioPlayer?.reset()
}
func deleteRecording() async {
MXLog.info("Delete recording.")
await stopPlayback()
await audioRecorder.deleteRecording()
previewAudioPlayer?.reset()
previewAudioPlayerState = nil
}
// MARK: - Preview
func startPlayback() async -> Result<Void, VoiceMessageRecorderError> {
guard let previewAudioPlayerState, let url = audioRecorder.audioFileUrl else {
guard let previewAudioPlayerState, let url = audioRecorder.audioFileURL else {
return .failure(.previewNotAvailable)
}
@@ -132,7 +141,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
}
func buildRecordingWaveform() async -> Result<[UInt16], VoiceMessageRecorderError> {
guard let url = audioRecorder.audioFileUrl else {
guard let url = audioRecorder.audioFileURL else {
return .failure(.missingRecordingFile)
}
// build the waveform
@@ -150,13 +159,18 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
}
func sendVoiceMessage(inRoom roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError> {
guard let url = audioRecorder.audioFileUrl else {
guard let url = audioRecorder.audioFileURL else {
return .failure(VoiceMessageRecorderError.missingRecordingFile)
}
// convert the file
let sourceFilename = url.deletingPathExtension().lastPathComponent
let oggFile = URL.temporaryDirectory.appendingPathComponent(sourceFilename).appendingPathExtension("ogg")
defer {
// delete the temporary file
try? FileManager.default.removeItem(at: oggFile)
}
do {
try audioConverter.convertToOpusOgg(sourceURL: url, destinationURL: oggFile)
} catch {
@@ -180,8 +194,6 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
audioInfo: audioInfo,
waveform: waveform,
progressSubject: nil) { _ in }
// delete the temporary file
try? FileManager.default.removeItem(at: oggFile)
if case .failure(let error) = result {
MXLog.error("Failed to send the voice message. \(error)")
@@ -219,7 +231,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
actionsSubject.send(.didFailWithError(error: VoiceMessageRecorderError.previewNotAvailable))
return
}
guard let recordingURL = audioRecorder.audioFileUrl, let previewAudioPlayerState else {
guard let recordingURL = audioRecorder.audioFileURL, let previewAudioPlayerState else {
actionsSubject.send(.didFailWithError(error: VoiceMessageRecorderError.previewNotAvailable))
return
}
@@ -235,7 +247,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
private func finalizeRecording() async -> Result<Void, VoiceMessageRecorderError> {
MXLog.info("finalize audio recording")
guard let url = audioRecorder.audioFileUrl, audioRecorder.currentTime > 0 else {
guard let url = audioRecorder.audioFileURL, audioRecorder.currentTime > 0 else {
return .failure(.previewNotAvailable)
}
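
Note the sendVoiceMessage change above: the temporary ogg file is now removed in a defer block rather than only after the send, so it is cleaned up on the error paths too. A minimal illustration of the pattern (convert and send are hypothetical stand-ins, not the app's real helpers):

import Foundation

func convert(_ source: URL, to destination: URL) throws { /* hypothetical conversion step */ }
func send(_ file: URL) async throws { /* hypothetical upload step */ }

func convertAndSend(_ source: URL) async throws {
    let oggFile = URL.temporaryDirectory
        .appendingPathComponent(source.deletingPathExtension().lastPathComponent)
        .appendingPathExtension("ogg")
    defer {
        // Runs on every exit path: success, early return, or a thrown error.
        try? FileManager.default.removeItem(at: oggFile)
    }
    try convert(source, to: oggFile)
    try await send(oggFile)
}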

View File

@@ -35,6 +35,7 @@ enum VoiceMessageRecorderAction {
protocol VoiceMessageRecorderProtocol {
var audioRecorder: AudioRecorderProtocol { get }
var previewAudioPlayerState: AudioPlayerState? { get }
var isRecording: Bool { get }
var recordingURL: URL? { get }
var recordingDuration: TimeInterval { get }

View File

@@ -49,7 +49,8 @@ class AudioRecorderTests: XCTestCase {
return false
}
}
await audioRecorder.record(with: .uuid(UUID()))
let url = URL.temporaryDirectory.appendingPathComponent("test-voice-message").appendingPathExtension("m4a")
await audioRecorder.record(audioFileURL: url)
try await deferred.fulfill()
XCTAssertFalse(audioRecorder.isRecording)
}

View File

@@ -57,6 +57,7 @@ class VoiceMessageRecorderTests: XCTestCase {
}
audioConverter = AudioConverterMock()
voiceMessageCache = VoiceMessageCacheMock()
voiceMessageCache.urlForRecording = FileManager.default.temporaryDirectory.appendingPathComponent("test-voice-message").appendingPathExtension("m4a")
voiceMessageRecorder = VoiceMessageRecorder(audioRecorder: audioRecorder,
mediaPlayerProvider: mediaPlayerProvider,
@@ -65,7 +66,7 @@
}
private func setRecordingComplete() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.audioFileURL = recordingURL
audioRecorder.currentTime = 5
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
@@ -81,7 +82,7 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testRecordingURL() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.audioFileURL = recordingURL
XCTAssertEqual(voiceMessageRecorder.recordingURL, recordingURL)
}
@@ -92,7 +93,7 @@ class VoiceMessageRecorderTests: XCTestCase {
func testStartRecording() async throws {
_ = await voiceMessageRecorder.startRecording()
XCTAssert(audioRecorder.recordWithCalled)
XCTAssert(audioRecorder.recordAudioFileURLCalled)
}
func testStopRecording() async throws {
@@ -185,17 +186,17 @@ class VoiceMessageRecorderTests: XCTestCase {
func testBuildRecordedWaveform() async throws {
// If there is no recording file, an error is expected
audioRecorder.audioFileUrl = nil
audioRecorder.audioFileURL = nil
guard case .failure(.missingRecordingFile) = await voiceMessageRecorder.buildRecordingWaveform() else {
XCTFail("An error is expected")
return
}
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_audio", withExtension: "mp3") else {
guard let audioFileURL = Bundle(for: Self.self).url(forResource: "test_audio", withExtension: "mp3") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = audioFileUrl
audioRecorder.audioFileURL = audioFileURL
guard case .success(let data) = await voiceMessageRecorder.buildRecordingWaveform() else {
XCTFail("A waveform is expected")
return
@@ -207,7 +208,7 @@ class VoiceMessageRecorderTests: XCTestCase {
let roomProxy = RoomProxyMock()
// If there is no recording file, an error is expected
audioRecorder.audioFileUrl = nil
audioRecorder.audioFileURL = nil
guard case .failure(.missingRecordingFile) = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
XCTFail("An error is expected")
return
@@ -215,7 +216,7 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSendVoiceMessage_ConversionError() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.audioFileURL = recordingURL
// If the converter returns an error
audioConverter.convertToOpusOggSourceURLDestinationURLThrowableError = AudioConverterError.conversionFailed(nil)
@@ -227,11 +228,11 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSendVoiceMessage_InvalidFile() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
guard let audioFileURL = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = audioFileUrl
audioRecorder.audioFileURL = audioFileURL
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { _, destination in
try? FileManager.default.removeItem(at: destination)
}
@@ -245,14 +246,14 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSendVoiceMessage_WaveformAnalyseFailed() async throws {
guard let imageFileUrl = Bundle(for: Self.self).url(forResource: "test_image", withExtension: "png") else {
guard let imageFileURL = Bundle(for: Self.self).url(forResource: "test_image", withExtension: "png") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = imageFileUrl
audioRecorder.audioFileURL = imageFileURL
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { _, destination in
try? FileManager.default.removeItem(at: destination)
try? FileManager.default.copyItem(at: imageFileUrl, to: destination)
try? FileManager.default.copyItem(at: imageFileURL, to: destination)
}
let roomProxy = RoomProxyMock()
@@ -264,11 +265,11 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSendVoiceMessage_SendError() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
guard let audioFileURL = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
audioRecorder.audioFileUrl = audioFileUrl
audioRecorder.audioFileURL = audioFileURL
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { source, destination in
try? FileManager.default.removeItem(at: destination)
let internalConverter = AudioConverter()
@@ -285,34 +286,34 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testSendVoiceMessage() async throws {
guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
guard let audioFileURL = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
XCTFail("Test audio file is missing")
return
}
let roomProxy = RoomProxyMock()
audioRecorder.currentTime = 42
audioRecorder.audioFileUrl = audioFileUrl
audioRecorder.audioFileURL = audioFileURL
_ = await voiceMessageRecorder.startRecording()
_ = await voiceMessageRecorder.stopRecording()
var convertedFileUrl: URL?
var convertedFileURL: URL?
var convertedFileSize: UInt64?
audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { source, destination in
convertedFileUrl = destination
convertedFileURL = destination
try? FileManager.default.removeItem(at: destination)
let internalConverter = AudioConverter()
try internalConverter.convertToOpusOgg(sourceURL: source, destinationURL: destination)
convertedFileSize = try? UInt64(FileManager.default.sizeForItem(at: destination))
// the source URL must be the recorded file
XCTAssertEqual(source, audioFileUrl)
XCTAssertEqual(source, audioFileURL)
// check the converted file extension
XCTAssertEqual(destination.pathExtension, "ogg")
}
roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure = { url, audioInfo, waveform, _, _ in
XCTAssertEqual(url, convertedFileUrl)
XCTAssertEqual(url, convertedFileURL)
XCTAssertEqual(audioInfo.duration, self.audioRecorder.currentTime)
XCTAssertEqual(audioInfo.size, convertedFileSize)
XCTAssertEqual(audioInfo.mimetype, "audio/ogg")
@@ -330,8 +331,8 @@ class VoiceMessageRecorderTests: XCTestCase {
XCTAssert(roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCalled)
// the converted file must have been deleted
if let convertedFileUrl {
XCTAssertFalse(FileManager.default.fileExists(atPath: convertedFileUrl.path()))
if let convertedFileURL {
XCTAssertFalse(FileManager.default.fileExists(atPath: convertedFileURL.path()))
} else {
XCTFail("converted file URL is missing")
}
@@ -351,7 +352,7 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testAudioRecorderActionHandling_didStopRecording() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.audioFileURL = recordingURL
audioRecorder.currentTime = 5
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
@@ -367,7 +368,7 @@ class VoiceMessageRecorderTests: XCTestCase {
}
func testAudioRecorderActionHandling_didFailed() async throws {
audioRecorder.audioFileUrl = recordingURL
audioRecorder.audioFileURL = recordingURL
let deferred = deferFulfillment(voiceMessageRecorder.actions) { action in
switch action {

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.