Mirror of https://github.com/element-hq/element-x-ios.git (synced 2025-03-10 21:39:12 +00:00)
Allow to record a voice message (#1926)
parent 65b7c1df20
commit 2f57fbc77d
.swiftlint.yml
@@ -25,7 +25,7 @@ line_length:

 file_length:
   warning: 1000
-  error: 1000
+  error: 1200

 type_name:
   min_length: 3
ElementX.xcodeproj/project.pbxproj
@@ -95,6 +95,7 @@
 	1830E5431DB426E2F3660D58 /* NotificationSettingsEditScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F52419AEEDA2C006CB7181 /* NotificationSettingsEditScreenUITests.swift */; };
 	18867F4F1C8991EEC56EA932 /* UTType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 897DF5E9A70CE05A632FC8AF /* UTType.swift */; };
 	1950A80CD198BED283DFC2CE /* ClientProxy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18F2958E6D247AE2516BEEE8 /* ClientProxy.swift */; };
+	19DED23340D0855B59693ED2 /* VoiceMessageRecorderProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = D45C9EAA86423D7D3126DE4F /* VoiceMessageRecorderProtocol.swift */; };
 	19FE025AE9BA2959B6589B0D /* RoomMemberDetailsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1CC575D1895FA62591451A93 /* RoomMemberDetailsScreen.swift */; };
 	1A70A2199394B5EC660934A5 /* MatrixRustSDK in Frameworks */ = {isa = PBXBuildFile; productRef = A678E40E917620059695F067 /* MatrixRustSDK */; };
 	1A83DD22F3E6F76B13B6E2F9 /* VideoRoomTimelineItemContent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8C8616254EE40CA8BA5E9BC2 /* VideoRoomTimelineItemContent.swift */; };
@@ -129,6 +130,7 @@
 	2352C541AF857241489756FF /* MockRoomSummaryProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8F7D42E66E939B709C1EC390 /* MockRoomSummaryProvider.swift */; };
 	2355289BB0146231DD8AFFC0 /* AnalyticsMessageType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2133A5FF0C14986E60326115 /* AnalyticsMessageType.swift */; };
 	23701DE32ACD6FD40AA992C3 /* MediaUploadingPreprocessorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = AE203026B9AD3DB412439866 /* MediaUploadingPreprocessorTests.swift */; };
+	2379B526D29F3AB2A369EB10 /* ProgressMaskModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = B1CDAF97F99E79CC432FB908 /* ProgressMaskModifier.swift */; };
 	237FC70AA257B935F53316BA /* SessionVerificationControllerProxy.swift in Sources */ = {isa = PBXBuildFile; fileRef = C55D7E514F9DE4E3D72FDCAD /* SessionVerificationControllerProxy.swift */; };
 	245F7FE5961BD10C145A26E0 /* UITimelineView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA689E792E679F5E3956F21 /* UITimelineView.swift */; };
 	24A75F72EEB7561B82D726FD /* Date.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2141693488CE5446BB391964 /* Date.swift */; };
@@ -193,6 +195,7 @@
 	34F1261CEF6D6A00D559B520 /* SettingsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CFD5EB0B0EEA4549FB49784 /* SettingsScreen.swift */; };
 	352C439BE0F75E101EF11FB1 /* RoomScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = C2886615BEBAE33A0AA4D5F8 /* RoomScreenModels.swift */; };
 	355B11D08CE0CEF97A813236 /* AppRoutes.swift in Sources */ = {isa = PBXBuildFile; fileRef = 27A9E3FBE8A66B5A17AD7F74 /* AppRoutes.swift */; };
+	3582056513A384F110EC8274 /* MediaPlayerProviderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2D7A2C4A3A74F0D2FFE9356A /* MediaPlayerProviderTests.swift */; };
 	35E975CFDA60E05362A7CF79 /* target.yml in Resources */ = {isa = PBXBuildFile; fileRef = 1222DB76B917EB8A55365BA5 /* target.yml */; };
 	3627DFEE96824E0E2EA69B88 /* AppLockSettingsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6615CBDE154455007F456DBB /* AppLockSettingsScreen.swift */; };
 	366D5BFE52CB79E804C7D095 /* CallScreenViewModelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CAD9547E47C58930E2CE8306 /* CallScreenViewModelTests.swift */; };
@@ -291,6 +294,7 @@
 	4FFDC274824F7CC0BBDF581E /* BugReportScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = 51C2BCE0BC1FC69C1B36E688 /* BugReportScreenModels.swift */; };
 	500CB65ED116B81DA52FDAEE /* TimelineView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 874A1842477895F199567BD7 /* TimelineView.swift */; };
 	50381244BA280451771BE3ED /* PINTextFieldTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = EF13BFD415CA84B1272E94F8 /* PINTextFieldTests.swift */; };
+	50539366B408780B232C1910 /* EstimatedWaveformView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AD0FF64B0E6470F66F42E182 /* EstimatedWaveformView.swift */; };
 	50C90117FE25390BFBD40173 /* RustTracing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 542D4F49FABA056DEEEB3400 /* RustTracing.swift */; };
 	5100F53E6884A15F9BA07CC3 /* AttributedStringTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 37CA26F55123E36B50DB0B3A /* AttributedStringTests.swift */; };
 	516534FC5C893D57F169D5A8 /* MapTilerGeocoding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33720F7AD25E85E4A84669E8 /* MapTilerGeocoding.swift */; };
@@ -389,7 +393,6 @@
 	6B4BF4A6450F55939B49FAEF /* PollOptionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67779D9A1B797285A09B7720 /* PollOptionView.swift */; };
 	6BAD956B909A6E29F6CC6E7C /* ButtonStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8CC23C63849452BC86EA2852 /* ButtonStyle.swift */; };
 	6BB6944443C421C722ED1E7D /* portrait_test_video.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = F2D513D2477B57F90E98EEC0 /* portrait_test_video.mp4 */; };
-	6BDD969EFFAF18120429084A /* EstimatedWaveformView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */; };
 	6C34237AFB808E38FC8776B9 /* RoomStateEventStringBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8D55702474F279D910D2D162 /* RoomStateEventStringBuilder.swift */; };
 	6C5A2C454E6C198AB39ED760 /* SharedUserDefaultsKeys.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBA8DC95C079805B0B56E8A9 /* SharedUserDefaultsKeys.swift */; };
 	6CD61FAF03E8986523C2ABB8 /* StartChatScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3005886F00029F058DB62BE /* StartChatScreenCoordinator.swift */; };
@@ -476,7 +479,6 @@
 	8317E1314C00DCCC99D30DA8 /* TextBasedRoomTimelineItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B9227F7495DA43324050A863 /* TextBasedRoomTimelineItem.swift */; };
 	83A4DAB181C56987C3E804FF /* MapTilerStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = F0B9F5BC4C80543DE7228B9D /* MapTilerStyle.swift */; };
 	84215E902C62E9B8E8AB79F0 /* AppLockSettingsScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 267C0279BB8D907E2C40DDCA /* AppLockSettingsScreenCoordinator.swift */; };
-	8421FFCD5360A15D170922A8 /* ProgressMaskModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */; };
 	84226AD2E1F1FBC965F3B09E /* UnitTestsAppCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6A8E19C4645D3F5F9FB02355 /* UnitTestsAppCoordinator.swift */; };
 	8478992479B296C45150208F /* AppLockScreenViewModelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = AC0275CEE9CA078B34028BDF /* AppLockScreenViewModelTests.swift */; };
 	847DE3A7EB9FCA2C429C6E85 /* PINTextField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3D1D4A6D451F43A03CACD01D /* PINTextField.swift */; };
@@ -582,6 +584,7 @@
 	9D2E03DB175A6AB14589076D /* AnalyticsEvents in Frameworks */ = {isa = PBXBuildFile; productRef = 2A3F7BCCB18C15B30CCA39A9 /* AnalyticsEvents */; };
 	9D79B94493FB32249F7E472F /* PlaceholderAvatarImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = C705E605EF57C19DBE86FFA1 /* PlaceholderAvatarImage.swift */; };
 	9D9690D2FD4CD26FF670620F /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = C75EF87651B00A176AB08E97 /* AppDelegate.swift */; };
+	9D9EF9DD484E58A2E8877187 /* WaveformViewDragGestureModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03BA7958A4BB9C22CA8884EF /* WaveformViewDragGestureModifier.swift */; };
 	9DC5FB22B8F86C3B51E907C1 /* HomeScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4D6E4C37E9F0E53D3DF951AC /* HomeScreenUITests.swift */; };
 	9DD5AA10E85137140FEA86A3 /* MediaProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = F17EFA1D3D09FC2F9C5E1CB2 /* MediaProvider.swift */; };
 	9DD84E014ADFB2DD813022D5 /* RoomDetailsEditScreenViewModelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00E5B2CBEF8F96424F095508 /* RoomDetailsEditScreenViewModelTests.swift */; };
@@ -611,6 +614,7 @@
 	A33784831AD880A670CAA9F9 /* FileManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04DF593C3F7AF4B2FBAEB05D /* FileManager.swift */; };
 	A37EED79941AD3B7140B3822 /* UIDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 287FC98AF2664EAD79C0D902 /* UIDevice.swift */; };
 	A3A7A05E8F9B7EB0E1A09A2A /* SoftLogoutScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 05596E4A11A8C9346E9E54AE /* SoftLogoutScreenCoordinator.swift */; };
+	A3D7110C1E75E7B4A73BE71C /* VoiceMessageRecorderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D93C94C30E3135BC9290DE13 /* VoiceMessageRecorderTests.swift */; };
 	A3E390675E9730C176B59E1B /* ImageProviderProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = F7E8A8047B50E3607ACD354E /* ImageProviderProtocol.swift */; };
 	A439B456D0761D6541745CC3 /* NSRegularExpresion.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95BAC0F6C9644336E9567EE6 /* NSRegularExpresion.swift */; };
 	A440D4BC02088482EC633A88 /* KeychainControllerProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = E5E94DCFEE803E5ABAE8ACCE /* KeychainControllerProtocol.swift */; };
@@ -693,6 +697,7 @@
 	B6EC2148FA5443C9289BEEBA /* MediaProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = F17EFA1D3D09FC2F9C5E1CB2 /* MediaProvider.swift */; };
 	B717A820BE02C6FE2CB53F6E /* WaitlistScreenViewModelProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = B697816AF93DA06EC58C5D70 /* WaitlistScreenViewModelProtocol.swift */; };
 	B721125D17A0BA86794F29FB /* MockServerSelectionScreenState.swift in Sources */ = {isa = PBXBuildFile; fileRef = D8E057FB1F07A5C201C89061 /* MockServerSelectionScreenState.swift */; };
+	B773ACD8881DB18E876D950C /* WaveformSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94028A227645FA880B966211 /* WaveformSource.swift */; };
 	B7888FC1E1DEF816D175C8D6 /* SecureBackupKeyBackupScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = AD72A9B720D75DBE60AC299F /* SecureBackupKeyBackupScreenModels.swift */; };
 	B796A25F282C0A340D1B9C12 /* ImageRoomTimelineItemContent.swift in Sources */ = {isa = PBXBuildFile; fileRef = B2B5EDCD05D50BA9B815C66C /* ImageRoomTimelineItemContent.swift */; };
 	B7C9E07F4F9CCC8DD7156A20 /* CallScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = 28146817C61423CACCF942F5 /* CallScreenModels.swift */; };
@@ -734,6 +739,7 @@
 	C1D0AB8222D7BAFC9AF9C8C0 /* MapLibreMapView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 622D09D4ECE759189009AEAF /* MapLibreMapView.swift */; };
 	C1F863E16BDBC87255D23B57 /* DeveloperOptionsScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3F684BDD23ECEADB3053BA5A /* DeveloperOptionsScreenUITests.swift */; };
 	C26DB49C06C00B5DF1A991A5 /* InviteUsersScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1454CF3AABD242F55C8A2615 /* InviteUsersScreenModels.swift */; };
+	C2879369106A419A5071F1F8 /* VoiceMessageRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 26B0A96B8FE4849227945067 /* VoiceMessageRecorder.swift */; };
 	C287BE1802AD432F3D848D8E /* InvitesScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6DF81D7F2A6BA9DE3F6F8D9D /* InvitesScreenViewModel.swift */; };
 	C32765D740C81AD4C42E8F50 /* CreateRoomFlowParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = 935C2FB18EFB8EEE96B26330 /* CreateRoomFlowParameters.swift */; };
 	C3522917C0C367C403429EEC /* CoordinatorProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = B251F5B4511D1CA0BA8361FE /* CoordinatorProtocol.swift */; };
@@ -767,6 +773,7 @@
 	CB6BCBF28E4B76EA08C2926D /* StateRoomTimelineItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B16048D30F0438731C41F775 /* StateRoomTimelineItem.swift */; };
 	CB99B0FA38A4AC596F38CC13 /* KeychainControllerProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = E5E94DCFEE803E5ABAE8ACCE /* KeychainControllerProtocol.swift */; };
 	CBA9EDF305036039166E76FF /* StartChatScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = DA2AEC1AB349A341FE13DEC1 /* StartChatScreenUITests.swift */; };
+	CBB4F39A1309F7281AE7AA8E /* test_voice_message.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 0392E3FDE372C9B56FEEED8B /* test_voice_message.m4a */; };
 	CBD2ABE4C1A47ECD99E1488E /* NotificationSettingsScreenViewModelProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 421FA93BCC2840E66E4F306F /* NotificationSettingsScreenViewModelProtocol.swift */; };
 	CC0D088F505F33A20DC5590F /* RoomStateEventStringBuilderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEEAFB646E583655652C3D04 /* RoomStateEventStringBuilderTests.swift */; };
 	CC961529F9F1854BEC3272C9 /* LayoutMocks.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC8AA23D4F37CC26564F63C5 /* LayoutMocks.swift */; };
@@ -1013,6 +1020,8 @@
 	033DB41C51865A2E83174E87 /* target.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = target.yml; sourceTree = "<group>"; };
 	035177BCD8E8308B098AC3C2 /* WindowManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WindowManager.swift; sourceTree = "<group>"; };
 	0376C429FAB1687C3D905F3E /* MockCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockCoder.swift; sourceTree = "<group>"; };
+	0392E3FDE372C9B56FEEED8B /* test_voice_message.m4a */ = {isa = PBXFileReference; path = test_voice_message.m4a; sourceTree = "<group>"; };
+	03BA7958A4BB9C22CA8884EF /* WaveformViewDragGestureModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WaveformViewDragGestureModifier.swift; sourceTree = "<group>"; };
 	03DD998E523D4EC93C7ED703 /* RoomNotificationSettingsScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomNotificationSettingsScreenViewModelProtocol.swift; sourceTree = "<group>"; };
 	03FABD73FD8086EFAB699F42 /* MediaUploadPreviewScreenViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaUploadPreviewScreenViewModelTests.swift; sourceTree = "<group>"; };
 	045253F9967A535EE5B16691 /* Label.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Label.swift; sourceTree = "<group>"; };
@@ -1139,6 +1148,7 @@
 	25F7FE40EF7490A7E09D7BE6 /* NotificationItemProxy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NotificationItemProxy.swift; sourceTree = "<group>"; };
 	260004737C573A56FA01E86E /* Encodable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Encodable.swift; sourceTree = "<group>"; };
 	267C0279BB8D907E2C40DDCA /* AppLockSettingsScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppLockSettingsScreenCoordinator.swift; sourceTree = "<group>"; };
+	26B0A96B8FE4849227945067 /* VoiceMessageRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageRecorder.swift; sourceTree = "<group>"; };
 	26EAAB54C6CE91D64B69A9F8 /* AppLockServiceProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppLockServiceProtocol.swift; sourceTree = "<group>"; };
 	2757B1BE23DF8AA239937243 /* AudioConverterProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioConverterProtocol.swift; sourceTree = "<group>"; };
 	277C20CDD5B64510401B6D0D /* ServerConfigurationScreenViewStateTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ServerConfigurationScreenViewStateTests.swift; sourceTree = "<group>"; };
@@ -1163,6 +1173,7 @@
 	2CEBCB9676FCD1D0F13188DD /* StringTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StringTests.swift; sourceTree = "<group>"; };
 	2D0946F77B696176E062D037 /* RoomMembersListScreenModels.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomMembersListScreenModels.swift; sourceTree = "<group>"; };
 	2D505843AB66822EB91F0DF0 /* TimelineItemProxy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimelineItemProxy.swift; sourceTree = "<group>"; };
+	2D7A2C4A3A74F0D2FFE9356A /* MediaPlayerProviderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaPlayerProviderTests.swift; sourceTree = "<group>"; };
 	2E88534A39781D76487D59DF /* SecureBackupKeyBackupScreenViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureBackupKeyBackupScreenViewModelTests.swift; sourceTree = "<group>"; };
 	2EFE1922F39398ABFB36DF3F /* RoomDetailsViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomDetailsViewModelTests.swift; sourceTree = "<group>"; };
 	2F36C5D9B37E50915ECBD3EE /* RoomMemberProxy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomMemberProxy.swift; sourceTree = "<group>"; };
@@ -1329,7 +1340,6 @@
 	5D26A086A8278D39B5756D6F /* project.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = project.yml; sourceTree = "<group>"; };
 	5D2D0A6F1ABC99D29462FB84 /* AuthenticationCoordinatorUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthenticationCoordinatorUITests.swift; sourceTree = "<group>"; };
 	5D99730313BEBF08CDE81EE3 /* EmojiDetection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiDetection.swift; sourceTree = "<group>"; };
-	5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EstimatedWaveformView.swift; sourceTree = "<group>"; };
 	5DE8D25D6A91030175D52A20 /* RoomTimelineItemProperties.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomTimelineItemProperties.swift; sourceTree = "<group>"; };
 	5EB2CAA266B921D128C35710 /* LegalInformationScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LegalInformationScreenCoordinator.swift; sourceTree = "<group>"; };
 	5F088B61525099A48909743B /* AppLockSettingsScreenUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppLockSettingsScreenUITests.swift; sourceTree = "<group>"; };
@@ -1413,7 +1423,6 @@
 	78910787F967CBC6042A101E /* StartChatScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StartChatScreenViewModelProtocol.swift; sourceTree = "<group>"; };
 	78913D6E120D46138E97C107 /* NavigationSplitCoordinatorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NavigationSplitCoordinatorTests.swift; sourceTree = "<group>"; };
 	7893780A1FD6E3F38B3E9049 /* UserIndicatorControllerMock.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserIndicatorControllerMock.swift; sourceTree = "<group>"; };
-	79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressMaskModifier.swift; sourceTree = "<group>"; };
 	7A5D2323D7B6BF4913EB7EED /* landscape_test_image.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = landscape_test_image.jpg; sourceTree = "<group>"; };
 	7AB7ED3A898B07976F3AA90F /* BugReportViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BugReportViewModelTests.swift; sourceTree = "<group>"; };
 	7AE094FCB6387D268C436161 /* SecureBackupScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureBackupScreenViewModel.swift; sourceTree = "<group>"; };
@@ -1504,6 +1513,7 @@
 	935C2FB18EFB8EEE96B26330 /* CreateRoomFlowParameters.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreateRoomFlowParameters.swift; sourceTree = "<group>"; };
 	93CF7B19FFCF8EFBE0A8696A /* RoomScreenViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomScreenViewModelTests.swift; sourceTree = "<group>"; };
 	93E7304F5ECB4CB11CB10E60 /* SecureBackupRecoveryKeyScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureBackupRecoveryKeyScreenViewModelProtocol.swift; sourceTree = "<group>"; };
+	94028A227645FA880B966211 /* WaveformSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WaveformSource.swift; sourceTree = "<group>"; };
 	94BCC8A9C73C1F838122C645 /* TimelineItemPlainStylerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimelineItemPlainStylerView.swift; sourceTree = "<group>"; };
 	94D670124FC3E84F23A62CCF /* APNSPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APNSPayload.swift; sourceTree = "<group>"; };
 	9501D11B4258DFA33BA3B40F /* ServerSelectionScreenModels.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ServerSelectionScreenModels.swift; sourceTree = "<group>"; };
@@ -1576,6 +1586,7 @@
 	AC5F5209279A752D98AAC4B2 /* CollapsibleFlowLayoutTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CollapsibleFlowLayoutTests.swift; sourceTree = "<group>"; };
 	AC9104846487244648D32C6D /* AudioPlayerProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayerProtocol.swift; sourceTree = "<group>"; };
 	ACCC1874C122E2BBE648B8F5 /* LegalInformationScreenUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LegalInformationScreenUITests.swift; sourceTree = "<group>"; };
+	AD0FF64B0E6470F66F42E182 /* EstimatedWaveformView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EstimatedWaveformView.swift; sourceTree = "<group>"; };
 	AD378D580A41E42560C60E9C /* sk */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = sk; path = sk.lproj/Localizable.strings; sourceTree = "<group>"; };
 	AD558A898847C179E4B7A237 /* SecureBackupKeyBackupScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureBackupKeyBackupScreen.swift; sourceTree = "<group>"; };
 	AD6B522BD637845AB9570B10 /* RoomNotificationSettingsProxy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomNotificationSettingsProxy.swift; sourceTree = "<group>"; };
@@ -1598,6 +1609,7 @@
 	B0BA67B3E4EF9D29D14A78CE /* AppLockSettingsScreenViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppLockSettingsScreenViewModelTests.swift; sourceTree = "<group>"; };
 	B16048D30F0438731C41F775 /* StateRoomTimelineItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StateRoomTimelineItem.swift; sourceTree = "<group>"; };
 	B16CAF20C9AC874A210E2DCF /* SessionVerificationScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionVerificationScreenViewModelProtocol.swift; sourceTree = "<group>"; };
+	B1CDAF97F99E79CC432FB908 /* ProgressMaskModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressMaskModifier.swift; sourceTree = "<group>"; };
 	B1E227F34BE43B08E098796E /* TestablePreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestablePreview.swift; sourceTree = "<group>"; };
 	B251F5B4511D1CA0BA8361FE /* CoordinatorProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoordinatorProtocol.swift; sourceTree = "<group>"; };
 	B2B5EDCD05D50BA9B815C66C /* ImageRoomTimelineItemContent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageRoomTimelineItemContent.swift; sourceTree = "<group>"; };
@@ -1736,6 +1748,7 @@
 	D3D455BC2423D911A62ACFB2 /* NSELogger.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NSELogger.swift; sourceTree = "<group>"; };
 	D3F219838588C62198E726E3 /* LABiometryType.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LABiometryType.swift; sourceTree = "<group>"; };
 	D3F275432954C8C6B1B7D966 /* AppLockSetupPINScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppLockSetupPINScreen.swift; sourceTree = "<group>"; };
+	D45C9EAA86423D7D3126DE4F /* VoiceMessageRecorderProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageRecorderProtocol.swift; sourceTree = "<group>"; };
 	D49B9785E3AD7D1C15A29F2F /* MediaSourceProxy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaSourceProxy.swift; sourceTree = "<group>"; };
 	D4DA544B2520BFA65D6DB4BB /* target.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = target.yml; sourceTree = "<group>"; };
 	D529B976F8B2AA654D923422 /* VoiceMessageRoomTimelineItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageRoomTimelineItem.swift; sourceTree = "<group>"; };
@@ -1755,6 +1768,7 @@
 	D8E057FB1F07A5C201C89061 /* MockServerSelectionScreenState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockServerSelectionScreenState.swift; sourceTree = "<group>"; };
 	D8E60332509665C00179ACF6 /* MessageForwardingScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageForwardingScreenViewModel.swift; sourceTree = "<group>"; };
 	D8F5F9E02B1AB5350B1815E7 /* TimelineStartRoomTimelineItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimelineStartRoomTimelineItem.swift; sourceTree = "<group>"; };
+	D93C94C30E3135BC9290DE13 /* VoiceMessageRecorderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageRecorderTests.swift; sourceTree = "<group>"; };
 	DA14564EE143F73F7E4D1F79 /* RoomNotificationSettingsScreenModels.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomNotificationSettingsScreenModels.swift; sourceTree = "<group>"; };
 	DA2AEC1AB349A341FE13DEC1 /* StartChatScreenUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StartChatScreenUITests.swift; sourceTree = "<group>"; };
 	DAB8D7926A5684E18196B538 /* VoiceMessageCache.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageCache.swift; sourceTree = "<group>"; };
@@ -1982,6 +1996,8 @@
 	43A84EE187D0C772E18A4E39 /* VoiceMessageCacheProtocol.swift */,
 	40076C770A5FB83325252973 /* VoiceMessageMediaManager.swift */,
 	889DEDD63C68ABDA8AD29812 /* VoiceMessageMediaManagerProtocol.swift */,
+	26B0A96B8FE4849227945067 /* VoiceMessageRecorder.swift */,
+	D45C9EAA86423D7D3126DE4F /* VoiceMessageRecorderProtocol.swift */,
 	);
 	path = VoiceMessage;
 	sourceTree = "<group>";
@@ -2533,8 +2549,6 @@
 	3A542DF1C3BB67D829DFDC40 /* VoiceMessages */ = {
 	isa = PBXGroup;
 	children = (
-	5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */,
-	79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */,
 	3CCD41CD67DB5DA0D436BFE9 /* VoiceMessageRoomPlaybackView.swift */,
 	B70A50C41C5871B4DB905E7E /* VoiceMessageRoomTimelineView.swift */,
 	);
@@ -3142,6 +3156,7 @@
 	A05707BF550D770168A406DB /* LoginViewModelTests.swift */,
 	376D941BF8BB294389C0DE24 /* MapTilerURLBuildersTests.swift */,
 	F31F59030205A6F65B057E1A /* MatrixEntityRegexTests.swift */,
+	2D7A2C4A3A74F0D2FFE9356A /* MediaPlayerProviderTests.swift */,
 	AE203026B9AD3DB412439866 /* MediaUploadingPreprocessorTests.swift */,
 	03FABD73FD8086EFAB699F42 /* MediaUploadPreviewScreenViewModelTests.swift */,
 	6F6E6EDC4BBF962B2ED595A4 /* MessageForwardingScreenViewModelTests.swift */,
@@ -3184,6 +3199,7 @@
 	BA241DEEF7C8A7181C0AEDC9 /* UserPreferenceTests.swift */,
 	283974987DA7EC61D2AB57D9 /* VoiceMessageCacheTests.swift */,
 	AC4F10BDD56FA77FEC742333 /* VoiceMessageMediaManagerTests.swift */,
+	D93C94C30E3135BC9290DE13 /* VoiceMessageRecorderTests.swift */,
 	C796FC1DFDBCDD5573D0360F /* WaitlistScreenViewModelTests.swift */,
 	851EF6258DF8B7EF129DC3AC /* WelcomeScreenScreenViewModelTests.swift */,
 	53280D2292E6C9C7821773FD /* UserSession */,
@@ -3777,6 +3793,17 @@
 	path = Style;
 	sourceTree = "<group>";
 	};
+	A33CE1B72A29E3931CBEC2A5 /* VoiceMessage */ = {
+	isa = PBXGroup;
+	children = (
+	AD0FF64B0E6470F66F42E182 /* EstimatedWaveformView.swift */,
+	B1CDAF97F99E79CC432FB908 /* ProgressMaskModifier.swift */,
+	94028A227645FA880B966211 /* WaveformSource.swift */,
+	03BA7958A4BB9C22CA8884EF /* WaveformViewDragGestureModifier.swift */,
+	);
+	path = VoiceMessage;
+	sourceTree = "<group>";
+	};
 	A448A3A8F764174C60CD0CA1 /* Other */ = {
 	isa = PBXGroup;
 	children = (
@@ -3881,6 +3908,7 @@
 	D5E26C54362206BBDD096D83 /* test_audio.mp3 */,
 	C733D11B421CFE3A657EF230 /* test_image.png */,
 	3FFDA99C98BE05F43A92343B /* test_pdf.pdf */,
+	0392E3FDE372C9B56FEEED8B /* test_voice_message.m4a */,
 	);
 	path = Media;
 	sourceTree = "<group>";
@@ -4090,6 +4118,7 @@
 	9C4193C4524B35FD6B94B5A9 /* Pills */,
 	052CC920F473C10B509F9FC1 /* SwiftUI */,
 	B687E3E8C23415A06A3D5C65 /* UserIndicator */,
+	A33CE1B72A29E3931CBEC2A5 /* VoiceMessage */,
 	);
 	path = Other;
 	sourceTree = "<group>";
@@ -4870,6 +4899,7 @@
 	87CEDB8A0696F0D5AE2ABB28 /* test_audio.mp3 in Resources */,
 	21BF2B7CEDFE3CA67C5355AD /* test_image.png in Resources */,
 	E77469C5CD7F7F58C0AC9752 /* test_pdf.pdf in Resources */,
+	CBB4F39A1309F7281AE7AA8E /* test_voice_message.m4a in Resources */,
 	);
 	runOnlyForDeploymentPostprocessing = 0;
 	};
@@ -5075,6 +5105,7 @@
 	77C1A2F49CD90D3EFDF376E5 /* MapTilerURLBuildersTests.swift in Sources */,
 	2E43A3D221BE9587BC19C3F1 /* MatrixEntityRegexTests.swift in Sources */,
 	4B978C09567387EF4366BD7A /* MediaLoaderTests.swift in Sources */,
+	3582056513A384F110EC8274 /* MediaPlayerProviderTests.swift in Sources */,
 	167D00CAA13FAFB822298021 /* MediaProviderTests.swift in Sources */,
 	B9A8C34A00D03094C0CF56F3 /* MediaUploadPreviewScreenViewModelTests.swift in Sources */,
 	23701DE32ACD6FD40AA992C3 /* MediaUploadingPreprocessorTests.swift in Sources */,
@@ -5130,6 +5161,7 @@
 	81A7C020CB5F6232242A8414 /* UserSessionTests.swift in Sources */,
 	21AFEFB8CEFE56A3811A1F5B /* VoiceMessageCacheTests.swift in Sources */,
 	44BDD670FF9095ACE240A3A2 /* VoiceMessageMediaManagerTests.swift in Sources */,
+	A3D7110C1E75E7B4A73BE71C /* VoiceMessageRecorderTests.swift in Sources */,
 	FB9A1DD83EF641A75ABBCE69 /* WaitlistScreenViewModelTests.swift in Sources */,
 	7F02063FB3D1C3E5601471A1 /* WelcomeScreenScreenViewModelTests.swift in Sources */,
 	3116693C5EB476E028990416 /* XCTestCase.swift in Sources */,
@@ -5327,7 +5359,7 @@
 	9965CB800CE6BC74ACA969FC /* EncryptedHistoryRoomTimelineView.swift in Sources */,
 	4C5A638DAA8AF64565BA4866 /* EncryptedRoomTimelineItem.swift in Sources */,
 	B5903E48CF43259836BF2DBF /* EncryptedRoomTimelineView.swift in Sources */,
-	6BDD969EFFAF18120429084A /* EstimatedWaveformView.swift in Sources */,
+	50539366B408780B232C1910 /* EstimatedWaveformView.swift in Sources */,
 	F78BAD28482A467287A9A5A3 /* EventBasedMessageTimelineItemProtocol.swift in Sources */,
 	02D8DF8EB7537EB4E9019DDB /* EventBasedTimelineItemProtocol.swift in Sources */,
 	63E46D18B91D08E15FC04125 /* ExpiringTaskRunner.swift in Sources */,
@@ -5526,7 +5558,7 @@
 	153E22E8227F46545E5D681C /* PollRoomTimelineView.swift in Sources */,
 	DF504B10A4918F971A57BEF2 /* PostHogAnalyticsClient.swift in Sources */,
 	FD4DEC88210F35C35B2FB386 /* ProcessInfo.swift in Sources */,
-	8421FFCD5360A15D170922A8 /* ProgressMaskModifier.swift in Sources */,
+	2379B526D29F3AB2A369EB10 /* ProgressMaskModifier.swift in Sources */,
 	9B356742E035D90A8BB5CABE /* ProposedViewSize.swift in Sources */,
 	2835FD52F3F618D07F799B3D /* Publisher.swift in Sources */,
 	9095B9E40DB5CF8BA26CE0D8 /* ReactionsSummaryView.swift in Sources */,
@@ -5779,6 +5811,8 @@
 	386720B603F87D156DB01FB2 /* VoiceMessageMediaManager.swift in Sources */,
 	9DE801D278AC34737467F937 /* VoiceMessageMediaManagerProtocol.swift in Sources */,
 	33CA777C9DF263582D77A67F /* VoiceMessagePreviewComposer.swift in Sources */,
+	C2879369106A419A5071F1F8 /* VoiceMessageRecorder.swift in Sources */,
+	19DED23340D0855B59693ED2 /* VoiceMessageRecorderProtocol.swift in Sources */,
 	09EF4222EEBBA1A7B8F4071E /* VoiceMessageRecordingButton.swift in Sources */,
 	8C27BEB00B903D953F31F962 /* VoiceMessageRecordingButtonTooltipView.swift in Sources */,
 	CA5BFF0C2EF5A8EF40CA2D69 /* VoiceMessageRecordingComposer.swift in Sources */,
@@ -5792,6 +5826,8 @@
 	2F66701B15657A87B4AC3A0A /* WaitlistScreenModels.swift in Sources */,
 	CF3827071B0BC9638BD44F5D /* WaitlistScreenViewModel.swift in Sources */,
 	B717A820BE02C6FE2CB53F6E /* WaitlistScreenViewModelProtocol.swift in Sources */,
+	B773ACD8881DB18E876D950C /* WaveformSource.swift in Sources */,
+	9D9EF9DD484E58A2E8877187 /* WaveformViewDragGestureModifier.swift in Sources */,
 	D871C8CF46950F959C9A62C3 /* WelcomeScreen.swift in Sources */,
 	383055C6ABE5BE058CEE1DDB /* WelcomeScreenScreenCoordinator.swift in Sources */,
 	BD2BF1EC73FFB0C01552ECDA /* WelcomeScreenScreenModels.swift in Sources */,
RoomFlowCoordinator.swift
@@ -334,7 +334,7 @@ class RoomFlowCoordinator: FlowCoordinatorProtocol {

         let userID = userSession.clientProxy.userID

-        let mediaPlayerProvider = MediaPlayerProvider(mediaProvider: userSession.mediaProvider)
+        let mediaPlayerProvider = MediaPlayerProvider()

         let timelineItemFactory = RoomTimelineItemFactory(userID: userID,
                                                           mediaProvider: userSession.mediaProvider,
@@ -357,6 +357,7 @@ class RoomFlowCoordinator: FlowCoordinatorProtocol {
         let parameters = RoomScreenCoordinatorParameters(roomProxy: roomProxy,
                                                          timelineController: timelineController,
                                                          mediaProvider: userSession.mediaProvider,
+                                                         mediaPlayerProvider: mediaPlayerProvider,
                                                          emojiProvider: emojiProvider,
                                                          completionSuggestionService: completionSuggestionService,
                                                          appSettings: appSettings)
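For context, a minimal sketch of how this new provider seam can be exercised from a test, using only the mocks regenerated later in this commit; the wiring below is hypothetical and not part of the diff:

    // Hypothetical test wiring; MediaPlayerProviderMock and MediaPlayerMock
    // are the Sourcery-generated mocks shown further down in GeneratedMocks.swift.
    let mediaPlayerProvider = MediaPlayerProviderMock()
    mediaPlayerProvider.playerForReturnValue = .success(MediaPlayerMock())
    // Anything that accepts the provider protocol can now run against a stubbed player.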
GeneratedMocks.swift
@@ -323,19 +323,19 @@ class AudioPlayerMock: AudioPlayerProtocol {

     //MARK: - load

-    var loadMediaSourceUsingCallsCount = 0
-    var loadMediaSourceUsingCalled: Bool {
-        return loadMediaSourceUsingCallsCount > 0
+    var loadMediaSourceUsingAutoplayCallsCount = 0
+    var loadMediaSourceUsingAutoplayCalled: Bool {
+        return loadMediaSourceUsingAutoplayCallsCount > 0
     }
-    var loadMediaSourceUsingReceivedArguments: (mediaSource: MediaSourceProxy, url: URL)?
-    var loadMediaSourceUsingReceivedInvocations: [(mediaSource: MediaSourceProxy, url: URL)] = []
-    var loadMediaSourceUsingClosure: ((MediaSourceProxy, URL) -> Void)?
+    var loadMediaSourceUsingAutoplayReceivedArguments: (mediaSource: MediaSourceProxy, url: URL, autoplay: Bool)?
+    var loadMediaSourceUsingAutoplayReceivedInvocations: [(mediaSource: MediaSourceProxy, url: URL, autoplay: Bool)] = []
+    var loadMediaSourceUsingAutoplayClosure: ((MediaSourceProxy, URL, Bool) -> Void)?

-    func load(mediaSource: MediaSourceProxy, using url: URL) {
-        loadMediaSourceUsingCallsCount += 1
-        loadMediaSourceUsingReceivedArguments = (mediaSource: mediaSource, url: url)
-        loadMediaSourceUsingReceivedInvocations.append((mediaSource: mediaSource, url: url))
-        loadMediaSourceUsingClosure?(mediaSource, url)
+    func load(mediaSource: MediaSourceProxy, using url: URL, autoplay: Bool) {
+        loadMediaSourceUsingAutoplayCallsCount += 1
+        loadMediaSourceUsingAutoplayReceivedArguments = (mediaSource: mediaSource, url: url, autoplay: autoplay)
+        loadMediaSourceUsingAutoplayReceivedInvocations.append((mediaSource: mediaSource, url: url, autoplay: autoplay))
+        loadMediaSourceUsingAutoplayClosure?(mediaSource, url, autoplay)
     }
     //MARK: - play
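The regenerated mock now captures the extra autoplay flag. A minimal usage sketch (hypothetical test code; `mediaSource` and `fileURL` stand in for real fixtures):

    let audioPlayer = AudioPlayerMock()
    audioPlayer.loadMediaSourceUsingAutoplayClosure = { _, _, autoplay in
        // e.g. assert that playback of a just-recorded message starts automatically
        assert(autoplay)
    }
    audioPlayer.load(mediaSource: mediaSource, using: fileURL, autoplay: true)
    // audioPlayer.loadMediaSourceUsingAutoplayCalled is now true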
@@ -408,21 +408,26 @@ class AudioRecorderMock: AudioRecorderProtocol {
     var underlyingIsRecording: Bool!
     var url: URL?

-    //MARK: - recordWithOutputURL
+    //MARK: - record

-    var recordWithOutputURLCallsCount = 0
-    var recordWithOutputURLCalled: Bool {
-        return recordWithOutputURLCallsCount > 0
+    var recordWithCallsCount = 0
+    var recordWithCalled: Bool {
+        return recordWithCallsCount > 0
     }
-    var recordWithOutputURLReceivedUrl: URL?
-    var recordWithOutputURLReceivedInvocations: [URL] = []
-    var recordWithOutputURLClosure: ((URL) -> Void)?
+    var recordWithReceivedRecordID: AudioRecordingIdentifier?
+    var recordWithReceivedInvocations: [AudioRecordingIdentifier] = []
+    var recordWithReturnValue: Result<Void, AudioRecorderError>!
+    var recordWithClosure: ((AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError>)?

-    func recordWithOutputURL(_ url: URL) {
-        recordWithOutputURLCallsCount += 1
-        recordWithOutputURLReceivedUrl = url
-        recordWithOutputURLReceivedInvocations.append(url)
-        recordWithOutputURLClosure?(url)
+    func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
+        recordWithCallsCount += 1
+        recordWithReceivedRecordID = recordID
+        recordWithReceivedInvocations.append(recordID)
+        if let recordWithClosure = recordWithClosure {
+            return await recordWithClosure(recordID)
+        } else {
+            return recordWithReturnValue
+        }
     }
     //MARK: - stopRecording

@@ -430,11 +435,23 @@ class AudioRecorderMock: AudioRecorderProtocol {
     var stopRecordingCalled: Bool {
         return stopRecordingCallsCount > 0
     }
-    var stopRecordingClosure: (() -> Void)?
+    var stopRecordingClosure: (() async -> Void)?

-    func stopRecording() {
+    func stopRecording() async {
         stopRecordingCallsCount += 1
-        stopRecordingClosure?()
+        await stopRecordingClosure?()
     }
+    //MARK: - deleteRecording
+
+    var deleteRecordingCallsCount = 0
+    var deleteRecordingCalled: Bool {
+        return deleteRecordingCallsCount > 0
+    }
+    var deleteRecordingClosure: (() -> Void)?
+
+    func deleteRecording() {
+        deleteRecordingCallsCount += 1
+        deleteRecordingClosure?()
+    }
     //MARK: - averagePowerForChannelNumber
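The recorder API is now async and Result-returning, keyed by an AudioRecordingIdentifier instead of a raw output URL. A sketch of stubbing it (hypothetical test code; `recordID` is a placeholder identifier):

    let audioRecorder = AudioRecorderMock()
    audioRecorder.recordWithReturnValue = .success(())
    // let result = await audioRecorder.record(with: recordID)
    // audioRecorder.recordWithReceivedRecordID == recordID, result == .success(())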
@@ -807,19 +824,19 @@ class MediaPlayerMock: MediaPlayerProtocol {

     //MARK: - load

-    var loadMediaSourceUsingCallsCount = 0
-    var loadMediaSourceUsingCalled: Bool {
-        return loadMediaSourceUsingCallsCount > 0
+    var loadMediaSourceUsingAutoplayCallsCount = 0
+    var loadMediaSourceUsingAutoplayCalled: Bool {
+        return loadMediaSourceUsingAutoplayCallsCount > 0
     }
-    var loadMediaSourceUsingReceivedArguments: (mediaSource: MediaSourceProxy, url: URL)?
-    var loadMediaSourceUsingReceivedInvocations: [(mediaSource: MediaSourceProxy, url: URL)] = []
-    var loadMediaSourceUsingClosure: ((MediaSourceProxy, URL) -> Void)?
+    var loadMediaSourceUsingAutoplayReceivedArguments: (mediaSource: MediaSourceProxy, url: URL, autoplay: Bool)?
+    var loadMediaSourceUsingAutoplayReceivedInvocations: [(mediaSource: MediaSourceProxy, url: URL, autoplay: Bool)] = []
+    var loadMediaSourceUsingAutoplayClosure: ((MediaSourceProxy, URL, Bool) -> Void)?

-    func load(mediaSource: MediaSourceProxy, using url: URL) {
-        loadMediaSourceUsingCallsCount += 1
-        loadMediaSourceUsingReceivedArguments = (mediaSource: mediaSource, url: url)
-        loadMediaSourceUsingReceivedInvocations.append((mediaSource: mediaSource, url: url))
-        loadMediaSourceUsingClosure?(mediaSource, url)
+    func load(mediaSource: MediaSourceProxy, using url: URL, autoplay: Bool) {
+        loadMediaSourceUsingAutoplayCallsCount += 1
+        loadMediaSourceUsingAutoplayReceivedArguments = (mediaSource: mediaSource, url: url, autoplay: autoplay)
+        loadMediaSourceUsingAutoplayReceivedInvocations.append((mediaSource: mediaSource, url: url, autoplay: autoplay))
+        loadMediaSourceUsingAutoplayClosure?(mediaSource, url, autoplay)
     }
     //MARK: - play
@@ -874,6 +891,99 @@ class MediaPlayerMock: MediaPlayerProtocol {
         await seekToClosure?(progress)
     }
 }
+class MediaPlayerProviderMock: MediaPlayerProviderProtocol {
+
+    //MARK: - player
+
+    var playerForCallsCount = 0
+    var playerForCalled: Bool {
+        return playerForCallsCount > 0
+    }
+    var playerForReceivedMediaSource: MediaSourceProxy?
+    var playerForReceivedInvocations: [MediaSourceProxy] = []
+    var playerForReturnValue: Result<MediaPlayerProtocol, MediaPlayerProviderError>!
+    var playerForClosure: ((MediaSourceProxy) -> Result<MediaPlayerProtocol, MediaPlayerProviderError>)?
+
+    func player(for mediaSource: MediaSourceProxy) -> Result<MediaPlayerProtocol, MediaPlayerProviderError> {
+        playerForCallsCount += 1
+        playerForReceivedMediaSource = mediaSource
+        playerForReceivedInvocations.append(mediaSource)
+        if let playerForClosure = playerForClosure {
+            return playerForClosure(mediaSource)
+        } else {
+            return playerForReturnValue
+        }
+    }
+    //MARK: - playerState
+
+    var playerStateForCallsCount = 0
+    var playerStateForCalled: Bool {
+        return playerStateForCallsCount > 0
+    }
+    var playerStateForReceivedId: AudioPlayerStateIdentifier?
+    var playerStateForReceivedInvocations: [AudioPlayerStateIdentifier] = []
+    var playerStateForReturnValue: AudioPlayerState?
+    var playerStateForClosure: ((AudioPlayerStateIdentifier) -> AudioPlayerState?)?
+
+    func playerState(for id: AudioPlayerStateIdentifier) -> AudioPlayerState? {
+        playerStateForCallsCount += 1
+        playerStateForReceivedId = id
+        playerStateForReceivedInvocations.append(id)
+        if let playerStateForClosure = playerStateForClosure {
+            return playerStateForClosure(id)
+        } else {
+            return playerStateForReturnValue
+        }
+    }
+    //MARK: - register
+
+    var registerAudioPlayerStateCallsCount = 0
+    var registerAudioPlayerStateCalled: Bool {
+        return registerAudioPlayerStateCallsCount > 0
+    }
+    var registerAudioPlayerStateReceivedAudioPlayerState: AudioPlayerState?
+    var registerAudioPlayerStateReceivedInvocations: [AudioPlayerState] = []
+    var registerAudioPlayerStateClosure: ((AudioPlayerState) -> Void)?
+
+    func register(audioPlayerState: AudioPlayerState) {
+        registerAudioPlayerStateCallsCount += 1
+        registerAudioPlayerStateReceivedAudioPlayerState = audioPlayerState
+        registerAudioPlayerStateReceivedInvocations.append(audioPlayerState)
+        registerAudioPlayerStateClosure?(audioPlayerState)
+    }
+    //MARK: - unregister
+
+    var unregisterAudioPlayerStateCallsCount = 0
+    var unregisterAudioPlayerStateCalled: Bool {
+        return unregisterAudioPlayerStateCallsCount > 0
+    }
+    var unregisterAudioPlayerStateReceivedAudioPlayerState: AudioPlayerState?
+    var unregisterAudioPlayerStateReceivedInvocations: [AudioPlayerState] = []
+    var unregisterAudioPlayerStateClosure: ((AudioPlayerState) -> Void)?
+
+    func unregister(audioPlayerState: AudioPlayerState) {
+        unregisterAudioPlayerStateCallsCount += 1
+        unregisterAudioPlayerStateReceivedAudioPlayerState = audioPlayerState
+        unregisterAudioPlayerStateReceivedInvocations.append(audioPlayerState)
+        unregisterAudioPlayerStateClosure?(audioPlayerState)
+    }
+    //MARK: - detachAllStates
+
+    var detachAllStatesExceptCallsCount = 0
+    var detachAllStatesExceptCalled: Bool {
+        return detachAllStatesExceptCallsCount > 0
+    }
+    var detachAllStatesExceptReceivedException: AudioPlayerState?
+    var detachAllStatesExceptReceivedInvocations: [AudioPlayerState?] = []
+    var detachAllStatesExceptClosure: ((AudioPlayerState?) async -> Void)?
+
+    func detachAllStates(except exception: AudioPlayerState?) async {
+        detachAllStatesExceptCallsCount += 1
+        detachAllStatesExceptReceivedException = exception
+        detachAllStatesExceptReceivedInvocations.append(exception)
+        await detachAllStatesExceptClosure?(exception)
+    }
+}
 class NotificationCenterMock: NotificationCenterProtocol {

     //MARK: - post
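The new MediaPlayerProviderMock lets tests control both player creation and per-item playback state. A hypothetical stub:

    let provider = MediaPlayerProviderMock()
    provider.playerForReturnValue = .success(MediaPlayerMock())
    provider.playerStateForClosure = { _ in nil } // no state tracked yet for any identifier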
@@ -1775,6 +1885,23 @@ class RoomProxyMock: RoomProxyProtocol {
             return sendLocationBodyGeoURIDescriptionZoomLevelAssetTypeReturnValue
         }
     }
+    //MARK: - sendVoiceMessage
+
+    var sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCallsCount = 0
+    var sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCalled: Bool {
+        return sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCallsCount > 0
+    }
+    var sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue: Result<Void, RoomProxyError>!
+    var sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure: ((URL, AudioInfo, [UInt16], CurrentValueSubject<Double, Never>?, @MainActor (SendAttachmentJoinHandleProtocol) -> Void) async -> Result<Void, RoomProxyError>)?
+
+    func sendVoiceMessage(url: URL, audioInfo: AudioInfo, waveform: [UInt16], progressSubject: CurrentValueSubject<Double, Never>?, requestHandle: @MainActor (SendAttachmentJoinHandleProtocol) -> Void) async -> Result<Void, RoomProxyError> {
+        sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCallsCount += 1
+        if let sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure = sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure {
+            return await sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure(url, audioInfo, waveform, progressSubject, requestHandle)
+        } else {
+            return sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue
+        }
+    }
     //MARK: - retrySend

     var retrySendTransactionIDCallsCount = 0
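Stubbing the new sendVoiceMessage entry point is a one-liner thanks to the generated ReturnValue property (hypothetical test code):

    let roomProxy = RoomProxyMock()
    roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleReturnValue = .success(())
    // A recorder sending into this proxy will then observe a successful upload.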
@@ -2715,25 +2842,21 @@ class VoiceMessageCacheMock: VoiceMessageCacheProtocol {
     }
     //MARK: - cache

-    var cacheMediaSourceUsingMoveThrowableError: Error?
     var cacheMediaSourceUsingMoveCallsCount = 0
     var cacheMediaSourceUsingMoveCalled: Bool {
         return cacheMediaSourceUsingMoveCallsCount > 0
     }
     var cacheMediaSourceUsingMoveReceivedArguments: (mediaSource: MediaSourceProxy, fileURL: URL, move: Bool)?
     var cacheMediaSourceUsingMoveReceivedInvocations: [(mediaSource: MediaSourceProxy, fileURL: URL, move: Bool)] = []
-    var cacheMediaSourceUsingMoveReturnValue: URL!
-    var cacheMediaSourceUsingMoveClosure: ((MediaSourceProxy, URL, Bool) throws -> URL)?
+    var cacheMediaSourceUsingMoveReturnValue: Result<URL, VoiceMessageCacheError>!
+    var cacheMediaSourceUsingMoveClosure: ((MediaSourceProxy, URL, Bool) -> Result<URL, VoiceMessageCacheError>)?

-    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool) throws -> URL {
-        if let error = cacheMediaSourceUsingMoveThrowableError {
-            throw error
-        }
+    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool) -> Result<URL, VoiceMessageCacheError> {
         cacheMediaSourceUsingMoveCallsCount += 1
         cacheMediaSourceUsingMoveReceivedArguments = (mediaSource: mediaSource, fileURL: fileURL, move: move)
         cacheMediaSourceUsingMoveReceivedInvocations.append((mediaSource: mediaSource, fileURL: fileURL, move: move))
         if let cacheMediaSourceUsingMoveClosure = cacheMediaSourceUsingMoveClosure {
-            return try cacheMediaSourceUsingMoveClosure(mediaSource, fileURL, move)
+            return cacheMediaSourceUsingMoveClosure(mediaSource, fileURL, move)
         } else {
             return cacheMediaSourceUsingMoveReturnValue
         }
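The cache API moved from throws to Result, so stubs set a Result instead of a throwable error (hypothetical test code; `cachedURL` is a placeholder):

    let cache = VoiceMessageCacheMock()
    cache.cacheMediaSourceUsingMoveReturnValue = .success(cachedURL)
    // Failures are stubbed symmetrically with .failure(...), with no do/catch at the call site.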
@@ -2779,4 +2902,172 @@ class VoiceMessageMediaManagerMock: VoiceMessageMediaManagerProtocol {
         }
     }
 }
+class VoiceMessageRecorderMock: VoiceMessageRecorderProtocol {
+    var audioRecorder: AudioRecorderProtocol {
+        get { return underlyingAudioRecorder }
+        set(value) { underlyingAudioRecorder = value }
+    }
+    var underlyingAudioRecorder: AudioRecorderProtocol!
+    var previewAudioPlayerState: AudioPlayerState?
+    var recordingURL: URL?
+    var recordingDuration: TimeInterval {
+        get { return underlyingRecordingDuration }
+        set(value) { underlyingRecordingDuration = value }
+    }
+    var underlyingRecordingDuration: TimeInterval!
+
+    //MARK: - startRecording
+
+    var startRecordingCallsCount = 0
+    var startRecordingCalled: Bool {
+        return startRecordingCallsCount > 0
+    }
+    var startRecordingReturnValue: Result<Void, VoiceMessageRecorderError>!
+    var startRecordingClosure: (() async -> Result<Void, VoiceMessageRecorderError>)?
+
+    func startRecording() async -> Result<Void, VoiceMessageRecorderError> {
+        startRecordingCallsCount += 1
+        if let startRecordingClosure = startRecordingClosure {
+            return await startRecordingClosure()
+        } else {
+            return startRecordingReturnValue
+        }
+    }
+    //MARK: - stopRecording
+
+    var stopRecordingCallsCount = 0
+    var stopRecordingCalled: Bool {
+        return stopRecordingCallsCount > 0
+    }
+    var stopRecordingReturnValue: Result<Void, VoiceMessageRecorderError>!
+    var stopRecordingClosure: (() async -> Result<Void, VoiceMessageRecorderError>)?
+
+    func stopRecording() async -> Result<Void, VoiceMessageRecorderError> {
+        stopRecordingCallsCount += 1
+        if let stopRecordingClosure = stopRecordingClosure {
+            return await stopRecordingClosure()
+        } else {
+            return stopRecordingReturnValue
+        }
+    }
+    //MARK: - cancelRecording
+
+    var cancelRecordingCallsCount = 0
+    var cancelRecordingCalled: Bool {
+        return cancelRecordingCallsCount > 0
+    }
+    var cancelRecordingClosure: (() async -> Void)?
+
+    func cancelRecording() async {
+        cancelRecordingCallsCount += 1
+        await cancelRecordingClosure?()
+    }
+    //MARK: - startPlayback
+
+    var startPlaybackCallsCount = 0
+    var startPlaybackCalled: Bool {
+        return startPlaybackCallsCount > 0
+    }
+    var startPlaybackReturnValue: Result<Void, VoiceMessageRecorderError>!
+    var startPlaybackClosure: (() async -> Result<Void, VoiceMessageRecorderError>)?
+
+    func startPlayback() async -> Result<Void, VoiceMessageRecorderError> {
+        startPlaybackCallsCount += 1
+        if let startPlaybackClosure = startPlaybackClosure {
+            return await startPlaybackClosure()
+        } else {
+            return startPlaybackReturnValue
+        }
+    }
+    //MARK: - pausePlayback
+
+    var pausePlaybackCallsCount = 0
+    var pausePlaybackCalled: Bool {
+        return pausePlaybackCallsCount > 0
+    }
+    var pausePlaybackClosure: (() -> Void)?
+
+    func pausePlayback() {
+        pausePlaybackCallsCount += 1
+        pausePlaybackClosure?()
+    }
+    //MARK: - stopPlayback
+
+    var stopPlaybackCallsCount = 0
+    var stopPlaybackCalled: Bool {
+        return stopPlaybackCallsCount > 0
+    }
+    var stopPlaybackClosure: (() async -> Void)?
+
+    func stopPlayback() async {
+        stopPlaybackCallsCount += 1
+        await stopPlaybackClosure?()
+    }
+    //MARK: - seekPlayback
+
+    var seekPlaybackToCallsCount = 0
+    var seekPlaybackToCalled: Bool {
+        return seekPlaybackToCallsCount > 0
+    }
+    var seekPlaybackToReceivedProgress: Double?
+    var seekPlaybackToReceivedInvocations: [Double] = []
+    var seekPlaybackToClosure: ((Double) async -> Void)?
+
+    func seekPlayback(to progress: Double) async {
+        seekPlaybackToCallsCount += 1
+        seekPlaybackToReceivedProgress = progress
+        seekPlaybackToReceivedInvocations.append(progress)
+        await seekPlaybackToClosure?(progress)
+    }
+    //MARK: - deleteRecording
+
+    var deleteRecordingCallsCount = 0
+    var deleteRecordingCalled: Bool {
+        return deleteRecordingCallsCount > 0
+    }
+    var deleteRecordingClosure: (() async -> Void)?
+
+    func deleteRecording() async {
+        deleteRecordingCallsCount += 1
+        await deleteRecordingClosure?()
+    }
+    //MARK: - buildRecordingWaveform
+
+    var buildRecordingWaveformCallsCount = 0
+    var buildRecordingWaveformCalled: Bool {
+        return buildRecordingWaveformCallsCount > 0
+    }
+    var buildRecordingWaveformReturnValue: Result<[UInt16], VoiceMessageRecorderError>!
+    var buildRecordingWaveformClosure: (() async -> Result<[UInt16], VoiceMessageRecorderError>)?
+
+    func buildRecordingWaveform() async -> Result<[UInt16], VoiceMessageRecorderError> {
+        buildRecordingWaveformCallsCount += 1
+        if let buildRecordingWaveformClosure = buildRecordingWaveformClosure {
+            return await buildRecordingWaveformClosure()
+        } else {
+            return buildRecordingWaveformReturnValue
+        }
+    }
+    //MARK: - sendVoiceMessage
+
+    var sendVoiceMessageInRoomAudioConverterCallsCount = 0
+    var sendVoiceMessageInRoomAudioConverterCalled: Bool {
+        return sendVoiceMessageInRoomAudioConverterCallsCount > 0
+    }
+    var sendVoiceMessageInRoomAudioConverterReceivedArguments: (roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol)?
+    var sendVoiceMessageInRoomAudioConverterReceivedInvocations: [(roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol)] = []
+    var sendVoiceMessageInRoomAudioConverterReturnValue: Result<Void, VoiceMessageRecorderError>!
+    var sendVoiceMessageInRoomAudioConverterClosure: ((RoomProxyProtocol, AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError>)?
+
+    func sendVoiceMessage(inRoom roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError> {
+        sendVoiceMessageInRoomAudioConverterCallsCount += 1
+        sendVoiceMessageInRoomAudioConverterReceivedArguments = (roomProxy: roomProxy, audioConverter: audioConverter)
+        sendVoiceMessageInRoomAudioConverterReceivedInvocations.append((roomProxy: roomProxy, audioConverter: audioConverter))
+        if let sendVoiceMessageInRoomAudioConverterClosure = sendVoiceMessageInRoomAudioConverterClosure {
+            return await sendVoiceMessageInRoomAudioConverterClosure(roomProxy, audioConverter)
+        } else {
+            return sendVoiceMessageInRoomAudioConverterReturnValue
+        }
+    }
+}
 // swiftlint:enable all
|
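These Sourcery-generated mocks all follow the same pattern: a `CallsCount` counter, captured arguments, and an optional closure or stubbed return value per protocol method. A minimal sketch of how a unit test might drive the mock above, assuming the generated class is named `VoiceMessageRecorderMock` and the test target imports the app module:

import XCTest

class VoiceMessageRecorderMockSketchTests: XCTestCase {
    func testSeekForwardsProgressToTheMock() async {
        let recorder = VoiceMessageRecorderMock()
        // Stub a return value so calls that need one don't crash.
        recorder.buildRecordingWaveformReturnValue = .success([0, 128, 255])

        await recorder.seekPlayback(to: 0.5)

        // The generated mock records every invocation for later assertions.
        XCTAssertTrue(recorder.seekPlaybackToCalled)
        XCTAssertEqual(recorder.seekPlaybackToReceivedInvocations, [0.5])
    }
}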
ElementX/Sources/Other/VoiceMessage/WaveformSource.swift (new file, 24 lines)
@ -0,0 +1,24 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

import Foundation

enum WaveformSource: Equatable {
    /// The file URL of the source audio file.
    case url(URL)
    /// A small array of pre-computed samples.
    case data([Float])
}
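A quick illustration of the two cases (the URL and the sample values here are hypothetical): a finished recording renders from its file on disk, while a live preview can be fed the samples captured so far.

let fromFile: WaveformSource = .url(URL(filePath: "/tmp/voice-message.m4a"))
let fromSamples: WaveformSource = .data([0.1, 0.4, 0.9, 0.3])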
@ -0,0 +1,98 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

import DSWaveformImageViews
import Foundation
import SwiftUI

struct WaveformViewDragGestureModifier: ViewModifier {
    @GestureState private var dragGestureState = WaveformViewDragState.inactive
    @Binding var dragState: WaveformViewDragState

    let minimumDragDistance: Double

    func body(content: Content) -> some View {
        GeometryReader { geometry in
            content
                .gesture(SpatialTapGesture()
                    .simultaneously(with: LongPressGesture())
                    .sequenced(before: DragGesture(minimumDistance: minimumDragDistance, coordinateSpace: .local))
                    .updating($dragGestureState) { value, state, _ in
                        switch value {
                        // (SpatialTap, LongPress) begins.
                        case .first(let spatialLongPress):
                            // Compute the progress with the spatialTap location
                            let progress = (spatialLongPress.first?.location ?? .zero).x / geometry.size.width
                            state = .pressing(progress: progress)
                        // Long press confirmed, dragging may begin.
                        case .second(let spatialLongPress, let drag) where spatialLongPress.second ?? false:
                            var progress: Double = dragState.progress
                            // Compute the progress with drag location
                            if let location = drag?.location {
                                progress = location.x / geometry.size.width
                            }
                            state = .dragging(progress: progress)
                        // Dragging ended or the long press cancelled.
                        default:
                            state = .inactive
                        }
                    })
        }
        .onChange(of: dragGestureState) { value in
            dragState = value
        }
    }
}

extension View {
    func waveformDragGesture(_ dragState: Binding<WaveformViewDragState>, minimumDragDistance: Double = 0) -> some View {
        modifier(WaveformViewDragGestureModifier(dragState: dragState,
                                                 minimumDragDistance: minimumDragDistance))
    }
}

enum WaveformViewDragState: Equatable {
    case inactive
    case pressing(progress: Double)
    case dragging(progress: Double)

    var progress: Double {
        switch self {
        case .inactive:
            return .zero
        case .pressing(let progress), .dragging(let progress):
            return progress
        }
    }

    var isActive: Bool {
        switch self {
        case .inactive:
            return false
        case .pressing, .dragging:
            return true
        }
    }

    var isDragging: Bool {
        switch self {
        case .inactive, .pressing:
            return false
        case .dragging:
            return true
        }
    }
}
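A minimal sketch of a caller, assuming any view standing in for the rendered waveform; the modifier normalises the touch location to a 0...1 progress, so the only remaining work is clamping and forwarding it:

import SwiftUI

struct ScrubbableWaveform: View {
    @State private var dragState: WaveformViewDragState = .inactive
    let onSeek: (Double) -> Void

    var body: some View {
        Rectangle() // stand-in for the actual waveform rendering
            .waveformDragGesture($dragState, minimumDragDistance: 2)
            .onChange(of: dragState) { state in
                // Seek while pressing or dragging; .inactive ends the scrub.
                if state.isActive {
                    onSeek(min(max(state.progress, 0), 1))
                }
            }
    }
}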
@ -31,9 +31,13 @@ enum ComposerToolbarViewModelAction {
    case composerModeChanged(mode: RoomScreenComposerMode)
    case composerFocusedChanged(isFocused: Bool)

    case startRecordingVoiceMessage
    case stopRecordingVoiceMessage
    case deleteRecordedVoiceMessage
    case startVoiceMessageRecording
    case stopVoiceMessageRecording
    case cancelVoiceMessageRecording
    case deleteVoiceMessageRecording
    case startVoiceMessagePlayback
    case pauseVoiceMessagePlayback
    case seekVoiceMessagePlayback(progress: Double)
    case sendVoiceMessage
}

@ -51,9 +55,13 @@ enum ComposerToolbarViewAction {
    case enableTextFormatting
    case composerAction(action: ComposerAction)
    case selectedSuggestion(_ suggestion: SuggestionItem)
    case startRecordingVoiceMessage
    case stopRecordingVoiceMessage
    case deleteRecordedVoiceMessage
    case startVoiceMessageRecording
    case stopVoiceMessageRecording
    case cancelVoiceMessageRecording
    case deleteVoiceMessageRecording
    case startVoiceMessagePlayback
    case pauseVoiceMessagePlayback
    case seekVoiceMessagePlayback(progress: Double)
}

struct ComposerToolbarViewState: BindableState {
@ -47,7 +47,7 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool

        super.init(initialViewState: ComposerToolbarViewState(areSuggestionsEnabled: completionSuggestionService.areSuggestionsEnabled,
                                                              enableVoiceMessageComposer: appSettings.voiceMessageEnabled,
                                                              audioPlayerState: .init(duration: 0),
                                                              audioPlayerState: .init(id: .recorderPreview, duration: 0),
                                                              audioRecorderState: .init(),
                                                              bindings: .init()),
                   imageProvider: mediaProvider)
@ -144,13 +144,21 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
            }
        case .selectedSuggestion(let suggestion):
            handleSuggestion(suggestion)
        case .startRecordingVoiceMessage:
        case .startVoiceMessageRecording:
            state.bindings.composerActionsEnabled = false
            actionsSubject.send(.startRecordingVoiceMessage)
        case .stopRecordingVoiceMessage:
            actionsSubject.send(.stopRecordingVoiceMessage)
        case .deleteRecordedVoiceMessage:
            actionsSubject.send(.deleteRecordedVoiceMessage)
            actionsSubject.send(.startVoiceMessageRecording)
        case .stopVoiceMessageRecording:
            actionsSubject.send(.stopVoiceMessageRecording)
        case .cancelVoiceMessageRecording:
            actionsSubject.send(.cancelVoiceMessageRecording)
        case .deleteVoiceMessageRecording:
            actionsSubject.send(.deleteVoiceMessageRecording)
        case .startVoiceMessagePlayback:
            actionsSubject.send(.startVoiceMessagePlayback)
        case .pauseVoiceMessagePlayback:
            actionsSubject.send(.pauseVoiceMessagePlayback)
        case .seekVoiceMessagePlayback(let progress):
            actionsSubject.send(.seekVoiceMessagePlayback(progress: progress))
        }
    }

@ -224,7 +232,7 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
        case .recordVoiceMessage(let audioRecorderState):
            state.bindings.composerFocused = false
            state.audioRecorderState = audioRecorderState
        case .previewVoiceMessage(let audioPlayerState):
        case .previewVoiceMessage(let audioPlayerState, _):
            state.audioPlayerState = audioPlayerState
        case .edit, .reply:
            // Focus composer when switching to reply/edit
@ -28,9 +28,13 @@ struct ComposerToolbar: View {
    @ScaledMetric private var trashButtonIconSize = 24
    @ScaledMetric(relativeTo: .title) private var closeRTEButtonSize = 30

    @State private var voiceMessageRecordingStartTime: Date?
    @State private var showVoiceMessageRecordingTooltip = false
    @ScaledMetric private var voiceMessageTooltipPointerHeight = 6

    private let voiceMessageMinimumRecordingDuration = 1.0
    private let voiceMessageTooltipDuration = 1.0

    @State private var frame: CGRect = .zero

    var body: some View {
@ -51,8 +55,10 @@ struct ComposerToolbar: View {
            }
        }
        .overlay(alignment: .bottomTrailing) {
            voiceMessageRecordingButtonTooltipView
                .offset(y: -frame.height - voiceMessageTooltipPointerHeight)
            if showVoiceMessageRecordingTooltip {
                voiceMessageRecordingButtonTooltipView
                    .offset(y: -frame.height - voiceMessageTooltipPointerHeight)
            }
        }
        .alert(item: $context.alertInfo)
    }
@ -69,9 +75,9 @@ struct ComposerToolbar: View {
        case .recordVoiceMessage(let state) where context.viewState.enableVoiceMessageComposer:
            VoiceMessageRecordingComposer(recorderState: state)
                .padding(.leading, 12)
        case .previewVoiceMessage(let state) where context.viewState.enableVoiceMessageComposer:
        case .previewVoiceMessage(let state, let waveform) where context.viewState.enableVoiceMessageComposer:
            voiceMessageTrashButton
            VoiceMessagePreviewComposer(playerState: state)
            voiceMessagePreviewComposer(audioPlayerState: state, waveform: waveform)
        default:
            if !context.composerActionsEnabled {
                RoomAttachmentPicker(context: context)
@ -96,6 +102,7 @@ struct ComposerToolbar: View {
                }
            }
        }
        .animation(.elementDefault, value: context.viewState.composerMode)
    }

    private var bottomBar: some View {
@ -216,17 +223,26 @@ struct ComposerToolbar: View {
    // MARK: - Voice message

    private var voiceMessageRecordingButton: some View {
        VoiceMessageRecordingButton(showRecordTooltip: $showVoiceMessageRecordingTooltip, startRecording: {
            context.send(viewAction: .startRecordingVoiceMessage)
        }, stopRecording: {
            context.send(viewAction: .stopRecordingVoiceMessage)
        })
        VoiceMessageRecordingButton {
            showVoiceMessageRecordingTooltip = false
            voiceMessageRecordingStartTime = Date.now
            context.send(viewAction: .startVoiceMessageRecording)
        } stopRecording: {
            if let voiceMessageRecordingStartTime, Date.now.timeIntervalSince(voiceMessageRecordingStartTime) < voiceMessageMinimumRecordingDuration {
                context.send(viewAction: .cancelVoiceMessageRecording)
                withAnimation {
                    showVoiceMessageRecordingTooltip = true
                }
            } else {
                context.send(viewAction: .stopVoiceMessageRecording)
            }
        }
        .padding(4)
    }

    private var voiceMessageTrashButton: some View {
        Button {
            context.send(viewAction: .deleteRecordedVoiceMessage)
            context.send(viewAction: .deleteVoiceMessageRecording)
        } label: {
            CompoundIcon(\.delete)
                .font(.compound.bodyLG)
@ -241,8 +257,23 @@ struct ComposerToolbar: View {
    private var voiceMessageRecordingButtonTooltipView: some View {
        VoiceMessageRecordingButtonTooltipView(text: L10n.screenRoomVoiceMessageTooltip, pointerHeight: voiceMessageTooltipPointerHeight)
            .allowsHitTesting(false)
            .opacity(showVoiceMessageRecordingTooltip ? 1.0 : 0.0)
            .animation(.elementDefault, value: showVoiceMessageRecordingTooltip)
            .onAppear {
                DispatchQueue.main.asyncAfter(deadline: .now() + voiceMessageTooltipDuration) {
                    withAnimation {
                        showVoiceMessageRecordingTooltip = false
                    }
                }
            }
    }

    private func voiceMessagePreviewComposer(audioPlayerState: AudioPlayerState, waveform: WaveformSource) -> some View {
        VoiceMessagePreviewComposer(playerState: audioPlayerState, waveform: waveform) {
            context.send(viewAction: .startVoiceMessagePlayback)
        } onPause: {
            context.send(viewAction: .pauseVoiceMessagePlayback)
        } onSeek: { progress in
            context.send(viewAction: .seekVoiceMessagePlayback(progress: progress))
        }
    }
}

@ -333,13 +364,14 @@ extension ComposerToolbar {

    static func voiceMessagePreviewMock(recording: Bool) -> ComposerToolbar {
        let wysiwygViewModel = WysiwygComposerViewModel()
        let waveformData: [Float] = Array(repeating: 1.0, count: 1000)
        var composerViewModel: ComposerToolbarViewModel {
            let model = ComposerToolbarViewModel(wysiwygViewModel: wysiwygViewModel,
                                                 completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                 mediaProvider: MockMediaProvider(),
                                                 appSettings: ServiceLocator.shared.settings,
                                                 mentionDisplayHelper: ComposerMentionDisplayHelper.mock)
            model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(duration: 10.0))
            model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData))
            model.state.enableVoiceMessageComposer = true
            return model
        }
@ -14,49 +14,161 @@
// limitations under the License.
//

import DSWaveformImage
import DSWaveformImageViews
import Foundation
import SwiftUI

struct VoiceMessagePreviewComposer: View {
    @ObservedObject var playerState: AudioPlayerState

    let waveform: WaveformSource
    @ScaledMetric private var waveformLineWidth = 2.0
    @ScaledMetric private var waveformLinePadding = 2.0
    @State private var resumePlaybackAfterScrubbing = false

    let onPlay: () -> Void
    let onPause: () -> Void
    let onSeek: (Double) -> Void

    var body: some View {
        VoiceMessageRoomPlaybackView(playerState: playerState,
                                     onPlayPause: onPlaybackPlayPause,
                                     onSeek: { onPlaybackSeek($0) },
                                     onScrubbing: { onPlaybackScrubbing($0) })
            .padding(.vertical, 4.0)
            .padding(.horizontal, 6.0)
            .background {
                let roundedRectangle = RoundedRectangle(cornerRadius: 12)
                ZStack {
                    roundedRectangle
                        .fill(Color.compound.bgSubtleSecondary)
                    roundedRectangle
                        .stroke(Color.compound._borderTextFieldFocused, lineWidth: 0.5)
                }
            }
            .frame(minHeight: 42)
            .fixedSize(horizontal: false, vertical: true)
    @ScaledMetric private var playPauseButtonSize = 32
    @ScaledMetric private var playPauseImagePadding = 8
    @State var dragState: WaveformViewDragState = .inactive

    private static let elapsedTimeFormatter: DateFormatter = {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "mm:ss"
        return dateFormatter
    }()

    var timeLabelContent: String {
        // Display the duration if progress is 0.0
        let percent = playerState.progress > 0.0 ? playerState.progress : 1.0
        // If the duration is greater than or equal to 10 minutes, use the long format
        let elapsed = Date(timeIntervalSinceReferenceDate: playerState.duration * percent)
        return Self.elapsedTimeFormatter.string(from: elapsed)
    }

    private func onPlaybackPlayPause() { }
    var showWaveformCursor: Bool {
        playerState.playbackState == .playing || dragState.isDragging
    }

    var body: some View {
        HStack {
            HStack {
                playPauseButton
                Text(timeLabelContent)
                    .lineLimit(1)
                    .font(.compound.bodySMSemibold)
                    .foregroundColor(.compound.textSecondary)
                    .monospacedDigit()
                    .fixedSize(horizontal: true, vertical: true)
            }
            waveformView
                .waveformDragGesture($dragState)
                .onChange(of: dragState) { dragState in
                    switch dragState {
                    case .inactive:
                        onScrubbing(false)
                    case .pressing(let progress):
                        onScrubbing(true)
                        onSeek(max(0, min(progress, 1.0)))
                    case .dragging(let progress):
                        onSeek(max(0, min(progress, 1.0)))
                    }
                    self.dragState = dragState
                }
        }
        .padding(.vertical, 4.0)
        .padding(.horizontal, 6.0)
        .background {
            let roundedRectangle = RoundedRectangle(cornerRadius: 12)
            ZStack {
                roundedRectangle
                    .fill(Color.compound.bgSubtleSecondary)
                roundedRectangle
                    .stroke(Color.compound._borderTextFieldFocused, lineWidth: 0.5)
            }
        }
        .frame(minHeight: 42)
        .fixedSize(horizontal: false, vertical: true)
    }

    private func onPlaybackSeek(_ progress: Double) { }
    @ViewBuilder
    private var waveformView: some View {
        let configuration: Waveform.Configuration = .init(style: .striped(.init(color: .black, width: waveformLineWidth, spacing: waveformLinePadding)),
                                                          verticalScalingFactor: 1.0)
        switch waveform {
        case .url(let url):
            WaveformView(audioURL: url,
                         configuration: configuration)
                .progressMask(progress: playerState.progress)
        case .data(let array):
            WaveformLiveCanvas(samples: array,
                               configuration: configuration)
                .progressMask(progress: playerState.progress)
        }
    }

    @ViewBuilder
    private var playPauseButton: some View {
        Button {
            onPlayPause()
        } label: {
            ZStack {
                Circle()
                    .foregroundColor(.compound.bgCanvasDefault)
                if playerState.playbackState == .loading {
                    ProgressView()
                } else {
                    Image(asset: playerState.playbackState == .playing ? Asset.Images.mediaPause : Asset.Images.mediaPlay)
                        .resizable()
                        .padding(playPauseImagePadding)
                        .offset(x: playerState.playbackState == .playing ? 0 : 2)
                        .aspectRatio(contentMode: .fit)
                        .foregroundColor(.compound.iconSecondary)
                        .accessibilityLabel(playerState.playbackState == .playing ? L10n.a11yPause : L10n.a11yPlay)
                }
            }
        }
        .disabled(playerState.playbackState == .loading)
        .frame(width: playPauseButtonSize,
               height: playPauseButtonSize)
    }

    private func onPlaybackScrubbing(_ dragging: Bool) { }
    private func onPlayPause() {
        if playerState.playbackState == .playing {
            onPause()
        } else {
            onPlay()
        }
    }

    private func onScrubbing(_ scrubbing: Bool) {
        if scrubbing {
            if playerState.playbackState == .playing {
                resumePlaybackAfterScrubbing = true
                onPause()
            }
        } else {
            if resumePlaybackAfterScrubbing {
                onPlay()
                resumePlaybackAfterScrubbing = false
            }
        }
    }
}

struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
    static let playerState = AudioPlayerState(duration: 10.0,
    static let playerState = AudioPlayerState(id: .recorderPreview,
                                              duration: 10.0,
                                              waveform: EstimatedWaveform.mockWaveform,
                                              progress: 0.4)

    static let waveformData: [Float] = Array(repeating: 1.0, count: 1000)

    static var previews: some View {
        VStack {
            VoiceMessagePreviewComposer(playerState: playerState)
            VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in })
                .fixedSize(horizontal: false, vertical: true)
        }
    }
@ -19,14 +19,8 @@ import SwiftUI

struct VoiceMessageRecordingButton: View {
    @ScaledMetric private var buttonIconSize = 24
    @State private var longPressConfirmed = false
    @State private var buttonPressed = false
    @State private var longPressTask = VoiceMessageButtonTask()

    private let feedbackGenerator = UIImpactFeedbackGenerator(style: .heavy)
    private let delayBeforeRecording = 500

    @Binding var showRecordTooltip: Bool
    var startRecording: (() -> Void)?
    var stopRecording: (() -> Void)?

@ -34,29 +28,16 @@ struct VoiceMessageRecordingButton: View {
        Button { } label: {
            voiceMessageButtonImage
        }
        .onLongPressGesture(perform: { }, onPressingChanged: { pressing in
            buttonPressed = pressing
            if pressing {
                showRecordTooltip = true
                feedbackGenerator.prepare()
                longPressTask.task = Task {
                    try? await Task.sleep(for: .milliseconds(delayBeforeRecording))
                    guard !Task.isCancelled else {
                        return
                    }
                    feedbackGenerator.impactOccurred()
                    showRecordTooltip = false
                    longPressConfirmed = true
                    startRecording?()
                }
        .onLongPressGesture { } onPressingChanged: { isPressing in
            buttonPressed = isPressing
            if isPressing {
                // Start recording
                startRecording?()
            } else {
                longPressTask.task?.cancel()
                showRecordTooltip = false
                guard longPressConfirmed else { return }
                longPressConfirmed = false
                // Stop recording
                stopRecording?()
            }
        })
        }
        .fixedSize()
    }

@ -71,13 +52,9 @@ struct VoiceMessageRecordingButton: View {
    }
}

private class VoiceMessageButtonTask {
    @CancellableTask var task: Task<Void, Never>?
}

struct VoiceMessageRecordingButton_Previews: PreviewProvider, TestablePreview {
    static var previews: some View {
        VoiceMessageRecordingButton(showRecordTooltip: .constant(false))
        VoiceMessageRecordingButton()
            .fixedSize(horizontal: true, vertical: true)
    }
}
@ -15,6 +15,8 @@
//

import Compound
import DSWaveformImage
import DSWaveformImageViews
import Foundation
import SwiftUI

@ -30,10 +32,15 @@ struct VoiceMessageRecordingView: View {
        return dateFormatter
    }()

    var timeLabelContent: String {
    private var timeLabelContent: String {
        Self.elapsedTimeFormatter.string(from: Date(timeIntervalSinceReferenceDate: recorderState.duration))
    }

    private var configuration: Waveform.Configuration {
        .init(style: .striped(.init(color: .compound.iconSecondary, width: waveformLineWidth, spacing: waveformLinePadding)),
              verticalScalingFactor: 1.0)
    }

    var body: some View {
        HStack {
            Circle()
@ -45,7 +52,9 @@ struct VoiceMessageRecordingView: View {
                .foregroundColor(.compound.textSecondary)
                .monospacedDigit()
                .fixedSize()
            EstimatedWaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: recorderState.waveform, progress: 0)

            WaveformLiveCanvas(samples: recorderState.waveformSamples,
                               configuration: configuration)
        }
        .padding(.leading, 2)
        .padding(.trailing, 8)
@ -53,11 +62,6 @@ struct VoiceMessageRecordingView: View {
}

struct VoiceMessageRecordingView_Previews: PreviewProvider, TestablePreview {
    static let waveform = EstimatedWaveform(data: [3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
                                                   334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
                                                   294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
                                                   0, 0, 0, 0, 0, 3])

    static let recorderState = AudioRecorderState()

    static var previews: some View {
@ -23,6 +23,7 @@ struct RoomScreenCoordinatorParameters {
    let roomProxy: RoomProxyProtocol
    let timelineController: RoomTimelineControllerProtocol
    let mediaProvider: MediaProviderProtocol
    let mediaPlayerProvider: MediaPlayerProviderProtocol
    let emojiProvider: EmojiProviderProtocol
    let completionSuggestionService: CompletionSuggestionServiceProtocol
    let appSettings: AppSettings
@ -60,6 +61,7 @@ final class RoomScreenCoordinator: CoordinatorProtocol {

        viewModel = RoomScreenViewModel(timelineController: parameters.timelineController,
                                        mediaProvider: parameters.mediaProvider,
                                        mediaPlayerProvider: parameters.mediaPlayerProvider,
                                        roomProxy: parameters.roomProxy,
                                        appSettings: parameters.appSettings,
                                        analytics: ServiceLocator.shared.analytics,
@ -42,7 +42,7 @@ enum RoomScreenComposerMode: Equatable {
    case reply(itemID: TimelineItemIdentifier, replyDetails: TimelineItemReplyDetails, isThread: Bool)
    case edit(originalItemId: TimelineItemIdentifier)
    case recordVoiceMessage(state: AudioRecorderState)
    case previewVoiceMessage(state: AudioPlayerState)
    case previewVoiceMessage(state: AudioPlayerState, waveform: WaveformSource)

    var isEdit: Bool {
        switch self {
@ -34,16 +34,16 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
    private let analytics: AnalyticsService
    private unowned let userIndicatorController: UserIndicatorControllerProtocol
    private let notificationCenterProtocol: NotificationCenterProtocol
    private let voiceMessageRecorder: VoiceMessageRecorderProtocol
    private let composerFocusedSubject = PassthroughSubject<Bool, Never>()

    private let mediaPlayerProvider: MediaPlayerProviderProtocol
    private let actionsSubject: PassthroughSubject<RoomScreenViewModelAction, Never> = .init()

    private var canCurrentUserRedact = false

    private var paginateBackwardsTask: Task<Void, Never>?

    init(timelineController: RoomTimelineControllerProtocol,
         mediaProvider: MediaProviderProtocol,
         mediaPlayerProvider: MediaPlayerProviderProtocol,
         roomProxy: RoomProxyProtocol,
         appSettings: AppSettings,
         analytics: AnalyticsService,
@ -55,6 +55,8 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
        self.analytics = analytics
        self.userIndicatorController = userIndicatorController
        self.notificationCenterProtocol = notificationCenterProtocol
        self.mediaPlayerProvider = mediaPlayerProvider
        voiceMessageRecorder = VoiceMessageRecorder(audioRecorder: AudioRecorder(), mediaPlayerProvider: mediaPlayerProvider)

        super.init(initialViewState: RoomScreenViewState(roomID: timelineController.roomID,
                                                         roomTitle: roomProxy.roomTitle,
@ -194,15 +196,28 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
            trackComposerMode(mode)
        case .composerFocusedChanged(isFocused: let isFocused):
            composerFocusedSubject.send(isFocused)
        case .startRecordingVoiceMessage:
            timelineController.pauseAudio()
            startRecordingVoiceMessage()
        case .stopRecordingVoiceMessage:
            stopRecordingVoiceMessage()
        case .deleteRecordedVoiceMessage:
            deleteCurrentVoiceMessage()
        case .startVoiceMessageRecording:
            Task {
                await mediaPlayerProvider.detachAllStates(except: nil)
                await startRecordingVoiceMessage()
            }
        case .stopVoiceMessageRecording:
            Task { await stopRecordingVoiceMessage() }
        case .cancelVoiceMessageRecording:
            Task { await cancelRecordingVoiceMessage() }
        case .deleteVoiceMessageRecording:
            Task { await deleteCurrentVoiceMessage() }
        case .sendVoiceMessage:
            Task { await sendCurrentVoiceMessage() }
        case .startVoiceMessagePlayback:
            Task {
                await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
                await startPlayingRecordedVoiceMessage()
            }
        case .pauseVoiceMessagePlayback:
            pausePlayingRecordedVoiceMessage()
        case .seekVoiceMessagePlayback(let progress):
            Task { await seekRecordedVoiceMessage(to: progress) }
        }
    }

@ -922,30 +937,89 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol

    // MARK: - Voice message

    private func startRecordingVoiceMessage() {
        // Partially implemented

    private func stopVoiceMessageRecorder() async {
        _ = await voiceMessageRecorder.stopRecording()
        await voiceMessageRecorder.stopPlayback()
    }

    private func startRecordingVoiceMessage() async {
        let audioRecordState = AudioRecorderState()
        actionsSubject.send(.composer(action: .setMode(mode: .recordVoiceMessage(state: audioRecordState))))
        audioRecordState.attachAudioRecorder(voiceMessageRecorder.audioRecorder)

        switch await voiceMessageRecorder.startRecording() {
        case .success:
            actionsSubject.send(.composer(action: .setMode(mode: .recordVoiceMessage(state: audioRecordState))))
        case .failure(let error):
            switch error {
            case .audioRecorderError(.recordPermissionNotGranted):
                state.bindings.confirmationAlertInfo = .init(id: .init(),
                                                             title: "",
                                                             message: L10n.dialogPermissionMicrophone,
                                                             primaryButton: .init(title: L10n.actionOpenSettings, action: { [weak self] in self?.openSystemSettings() }),
                                                             secondaryButton: .init(title: L10n.actionNotNow, role: .cancel, action: nil))
            default:
                MXLog.error("failed to start voice message recording: \(error)")
            }
        }
    }

    private func stopRecordingVoiceMessage() {
        // Partially implemented
    private func stopRecordingVoiceMessage() async {
        if case .failure(let error) = await voiceMessageRecorder.stopRecording() {
            MXLog.error("failed to stop the recording", context: error)
            return
        }

        let audioPlayerState = AudioPlayerState(duration: 0)
        actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState))))
        guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
            MXLog.error("the recorder preview is missing after the recording has been stopped")
            return
        }

        guard let recordingURL = voiceMessageRecorder.recordingURL else {
            MXLog.error("the recording URL is missing after the recording has been stopped")
            return
        }

        mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
        actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL)))))
    }

    private func deleteCurrentVoiceMessage() {
        // Partially implemented

    private func cancelRecordingVoiceMessage() async {
        await voiceMessageRecorder.cancelRecording()
        actionsSubject.send(.composer(action: .setMode(mode: .default)))
    }

    private func deleteCurrentVoiceMessage() async {
        await voiceMessageRecorder.deleteRecording()
        actionsSubject.send(.composer(action: .setMode(mode: .default)))
    }

    private func sendCurrentVoiceMessage() async {
        // Partially implemented

        actionsSubject.send(.composer(action: .setMode(mode: .default)))
        await voiceMessageRecorder.stopPlayback()
        switch await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: AudioConverter()) {
        case .success:
            await deleteCurrentVoiceMessage()
        case .failure(let error):
            MXLog.error("failed to send the voice message", context: error)
        }
    }

    private func startPlayingRecordedVoiceMessage() async {
        if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
            MXLog.error("failed to play recorded voice message", context: error)
        }
    }

    private func pausePlayingRecordedVoiceMessage() {
        voiceMessageRecorder.pausePlayback()
    }

    private func seekRecordedVoiceMessage(to progress: Double) async {
        await voiceMessageRecorder.seekPlayback(to: progress)
    }

    private func openSystemSettings() {
        guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
        UIApplication.shared.open(url)
    }
}
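Read together, these handlers give the recorder a simple lifecycle; a condensed sketch of the happy path, written as if from within the view model (the calls are the private helpers above):

Task {
    await startRecordingVoiceMessage()       // .startVoiceMessageRecording
    await stopRecordingVoiceMessage()        // .stopVoiceMessageRecording → preview mode
    await startPlayingRecordedVoiceMessage() // .startVoiceMessagePlayback
    await sendCurrentVoiceMessage()          // .sendVoiceMessage → back to .default
}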
@ -974,6 +1048,7 @@ extension RoomScreenViewModel.Context {
extension RoomScreenViewModel {
    static let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                          mediaProvider: MockMediaProvider(),
                                          mediaPlayerProvider: MediaPlayerProviderMock(),
                                          roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
                                          appSettings: ServiceLocator.shared.settings,
                                          analytics: ServiceLocator.shared.analytics,
@ -57,6 +57,7 @@ struct RoomHeaderView_Previews: PreviewProvider, TestablePreview {
    static var bodyPlain: some View {
        let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                            mediaProvider: MockMediaProvider(),
                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                            roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name", avatarURL: URL.picturesDirectory)),
                                            appSettings: ServiceLocator.shared.settings,
                                            analytics: ServiceLocator.shared.analytics,
@ -71,6 +72,7 @@ struct RoomHeaderView_Previews: PreviewProvider, TestablePreview {
    static var bodyEncrypted: some View {
        let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                            mediaProvider: MockMediaProvider(),
                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                            roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name")),
                                            appSettings: ServiceLocator.shared.settings,
                                            analytics: ServiceLocator.shared.analytics,

@ -193,6 +193,7 @@ struct RoomScreen: View {
struct RoomScreen_Previews: PreviewProvider, TestablePreview {
    static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                               mediaProvider: MockMediaProvider(),
                                               mediaPlayerProvider: MediaPlayerProviderMock(),
                                               roomProxy: RoomProxyMock(with: .init(displayName: "Preview room", isCallOngoing: true)),
                                               appSettings: ServiceLocator.shared.settings,
                                               analytics: ServiceLocator.shared.analytics,

@ -505,7 +505,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
                                                       contentType: nil),
                                           replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
                                                                 contentType: .text(.init(body: "Short")))),
                           playerState: AudioPlayerState(duration: 10, waveform: EstimatedWaveform.mockWaveform))
                           playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
        }
        .environmentObject(viewModel.context)
    }

@ -232,7 +232,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
                                                       contentType: nil),
                                           replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
                                                                 contentType: .text(.init(body: "Short")))),
                           playerState: AudioPlayerState(duration: 10, waveform: EstimatedWaveform.mockWaveform))
                           playerState: AudioPlayerState(id: .timelineItemIdentifier(.init(timelineID: "")), duration: 10, waveform: EstimatedWaveform.mockWaveform))
        }
        .environmentObject(viewModel.context)
    }

@ -60,6 +60,7 @@ struct TimelineReadReceiptsView_Previews: PreviewProvider, TestablePreview {

    static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                               mediaProvider: MockMediaProvider(),
                                               mediaPlayerProvider: MediaPlayerProviderMock(),
                                               roomProxy: RoomProxyMock(with: .init(displayName: "Test",
                                                                                    members: members)),
                                               appSettings: ServiceLocator.shared.settings,

@ -82,6 +82,7 @@ struct UITimelineView: UIViewControllerRepresentable {
struct UITimelineView_Previews: PreviewProvider, TestablePreview {
    static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                               mediaProvider: MockMediaProvider(),
                                               mediaPlayerProvider: MediaPlayerProviderMock(),
                                               roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
                                               appSettings: ServiceLocator.shared.settings,
                                               analytics: ServiceLocator.shared.analytics,

@ -170,6 +170,7 @@ struct TimelineView: View {
struct TimelineView_Previews: PreviewProvider, TestablePreview {
    static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                               mediaProvider: MockMediaProvider(),
                                               mediaPlayerProvider: MediaPlayerProviderMock(),
                                               roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
                                               appSettings: ServiceLocator.shared.settings,
                                               analytics: ServiceLocator.shared.analytics,
@ -30,7 +30,6 @@ enum AudioConverterPreferredFileExtension: String {
struct AudioConverter: AudioConverterProtocol {
    func convertToOpusOgg(sourceURL: URL, destinationURL: URL) throws {
        do {
            MXLog.debug("converting \(sourceURL) to \(destinationURL)")
            try OGGConverter.convertM4aFileToOpusOGG(src: sourceURL, dest: destinationURL)
        } catch {
            MXLog.error("failed to convert to OpusOgg: \(error)")
@ -30,11 +30,6 @@ private enum InternalAudioPlayerState {
    case error(Error)
}

enum AudioPlayerError: Error {
    case genericError
    case loadFileError
}

class AudioPlayer: NSObject, AudioPlayerProtocol {
    var mediaSource: MediaSourceProxy?

@ -53,6 +48,7 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
    private var rateObserver: NSKeyValueObservation?
    private var playToEndObserver: NSObjectProtocol?
    private var appBackgroundObserver: NSObjectProtocol?
    private var autoplay = false

    private(set) var url: URL?

@ -91,11 +87,12 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
        unloadContent()
    }

    func load(mediaSource: MediaSourceProxy, using url: URL) {
    func load(mediaSource: MediaSourceProxy, using url: URL, autoplay: Bool) {
        unloadContent()
        setInternalState(.loading)
        self.mediaSource = mediaSource
        self.url = url
        self.autoplay = autoplay
        playerItem = AVPlayerItem(url: url)
        internalAudioPlayer = AVQueuePlayer(playerItem: playerItem)
        addObservers()
@ -122,6 +119,7 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
        isStopped = true
        internalAudioPlayer?.pause()
        internalAudioPlayer?.seek(to: .zero)
        try? AVAudioSession.sharedInstance().setActive(false)
    }

    func seek(to progress: Double) async {
@ -205,7 +203,10 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
            actionsSubject.send(.didStartLoading)
        case .readyToPlay:
            actionsSubject.send(.didFinishLoading)
            play()
            if autoplay {
                autoplay = false
                play()
            }
        case .playing:
            actionsSubject.send(.didStartPlaying)
        case .paused:
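The interesting change above is that `.readyToPlay` no longer unconditionally starts playback; the caller's intent is captured at load time and honoured exactly once. A reduced, self-contained sketch of that pattern (the class and method names here are illustrative, not part of the diff):

import AVFoundation

final class DeferredAutoplayPlayer {
    private let player = AVQueuePlayer()
    private var autoplay = false

    func load(url: URL, autoplay: Bool) {
        // Remember the caller's intent instead of always playing when ready.
        self.autoplay = autoplay
        player.replaceCurrentItem(with: AVPlayerItem(url: url))
    }

    func itemBecameReadyToPlay() {
        guard autoplay else { return }
        autoplay = false // honour the intent exactly once
        player.play()
    }
}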
@ -17,6 +17,11 @@
import Combine
import Foundation

enum AudioPlayerError: Error {
    case genericError
    case loadFileError
}

enum AudioPlayerAction {
    case didStartLoading
    case didFinishLoading
@ -26,9 +26,14 @@ enum AudioPlayerPlaybackState {
    case error
}

enum AudioPlayerStateIdentifier {
    case timelineItemIdentifier(TimelineItemIdentifier)
    case recorderPreview
}

@MainActor
class AudioPlayerState: ObservableObject, Identifiable {
    let id = UUID()
    let id: AudioPlayerStateIdentifier
    let duration: Double
    let waveform: EstimatedWaveform
    @Published private(set) var playbackState: AudioPlayerPlaybackState
@ -50,7 +55,8 @@ class AudioPlayerState: ObservableObject, Identifiable {
        displayLink != nil
    }

    init(duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
    init(id: AudioPlayerStateIdentifier, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
        self.id = id
        self.duration = duration
        self.waveform = waveform ?? EstimatedWaveform(data: [])
        self.progress = progress
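With the identifier in place, the composer preview and each timeline bubble get distinct, stable keys. A fragment showing both constructions, assuming a main-actor context and a `TimelineItemIdentifier` named `itemID` in scope:

let previewState = AudioPlayerState(id: .recorderPreview, duration: 0)
let bubbleState = AudioPlayerState(id: .timelineItemIdentifier(itemID), // itemID from the timeline item
                                   duration: 12.5,
                                   waveform: nil,
                                   progress: 0.0)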
@ -17,16 +17,14 @@
import AVFoundation
import Combine
import Foundation

enum AudioRecorderError: Error {
    case genericError
}
import UIKit

class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
    private let silenceThreshold: Float = -50.0

    private var audioRecorder: AVAudioRecorder?

    private var cancellables = Set<AnyCancellable>()
    private let actionsSubject: PassthroughSubject<AudioRecorderAction, Never> = .init()
    var actions: AnyPublisher<AudioRecorderAction, Never> {
        actionsSubject.eraseToAnyPublisher()
@ -44,7 +42,11 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
        audioRecorder?.isRecording ?? false
    }

    func recordWithOutputURL(_ url: URL) {
    func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError> {
        guard await requestRecordPermission() else {
            return .failure(.recordPermissionNotGranted)
        }

        let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                        AVSampleRateKey: 48000,
                        AVEncoderBitRateKey: 128_000,
@ -54,6 +56,7 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default)
            try AVAudioSession.sharedInstance().setActive(true)
            let url = URL.temporaryDirectory.appendingPathComponent("voice-message-\(recordID.identifier).m4a")
            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
            audioRecorder?.delegate = self
            audioRecorder?.isMeteringEnabled = true
@ -63,25 +66,26 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
            MXLog.error("audio recording failed: \(error)")
            actionsSubject.send(.didFailWithError(error: error))
        }
    }

    func stopRecording() {
        audioRecorder?.stop()

        do {
            try AVAudioSession.sharedInstance().setActive(false)
        } catch {
            actionsSubject.send(.didFailWithError(error: error))
        }
        return .success(())
    }

    func stopRecording() async {
        guard let audioRecorder, audioRecorder.isRecording else {
            return
        }
        audioRecorder.stop()
    }

    func deleteRecording() {
        audioRecorder?.deleteRecording()
    }

    func peakPowerForChannelNumber(_ channelNumber: Int) -> Float {
        guard isRecording, let audioRecorder else {
            return 0.0
        }

        audioRecorder.updateMeters()

        return normalizedPowerLevelFromDecibels(audioRecorder.peakPower(forChannel: channelNumber))
    }

@ -91,13 +95,37 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
        }

        audioRecorder.updateMeters()

        return normalizedPowerLevelFromDecibels(audioRecorder.averagePower(forChannel: channelNumber))
    }

    // MARK: - Private

    private func addObservers() {
        // Stop recording upon the UIApplication.didEnterBackgroundNotification notification
        NotificationCenter.default.publisher(for: UIApplication.didEnterBackgroundNotification)
            .sink { [weak self] _ in
                guard let self else { return }
                Task { await self.stopRecording() }
            }
            .store(in: &cancellables)
    }

    private func removeObservers() {
        cancellables.removeAll()
    }

    private func requestRecordPermission() async -> Bool {
        await withCheckedContinuation { continuation in
            AVAudioSession.sharedInstance().requestRecordPermission { granted in
                continuation.resume(returning: granted)
            }
        }
    }

    // MARK: - AVAudioRecorderDelegate

    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully success: Bool) {
        try? AVAudioSession.sharedInstance().setActive(false)
        if success {
            actionsSubject.send(.didStopRecording)
        } else {
@ -106,6 +134,7 @@ class AudioRecorder: NSObject, AudioRecorderProtocol, AVAudioRecorderDelegate {
    }

    func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
        try? AVAudioSession.sharedInstance().setActive(false)
        actionsSubject.send(.didFailWithError(error: error ?? AudioRecorderError.genericError))
    }
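A sketch of driving the reworked recorder, showing how the permission failure now surfaces as a typed error rather than a silent no-op (the surrounding `Task` is only there to provide an async context):

Task {
    let recorder = AudioRecorder()
    switch await recorder.record(with: .uuid(UUID())) {
    case .success:
        // ...capture audio for a while...
        await recorder.stopRecording()
    case .failure(.recordPermissionNotGranted):
        // Surface a microphone-permission alert, as RoomScreenViewModel does.
        break
    case .failure(let error):
        MXLog.error("recording failed: \(error)")
    }
}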
@ -17,6 +17,24 @@
import Combine
import Foundation

enum AudioRecordingIdentifier {
    case uuid(UUID)
}

extension AudioRecordingIdentifier {
    var identifier: String {
        switch self {
        case .uuid(let uuid):
            return uuid.uuidString
        }
    }
}

enum AudioRecorderError: Error {
    case genericError
    case recordPermissionNotGranted
}

enum AudioRecorderAction {
    case didStartRecording
    case didStopRecording
@ -29,8 +47,9 @@ protocol AudioRecorderProtocol: AnyObject {
    var isRecording: Bool { get }
    var url: URL? { get }

    func recordWithOutputURL(_ url: URL)
    func stopRecording()
    func record(with recordID: AudioRecordingIdentifier) async -> Result<Void, AudioRecorderError>
    func stopRecording() async
    func deleteRecording()
    func averagePowerForChannelNumber(_ channelNumber: Int) -> Float
}
@ -30,25 +30,27 @@ class AudioRecorderState: ObservableObject, Identifiable {

    @Published private(set) var recordingState: AudioRecorderRecordingState = .stopped
    @Published private(set) var duration = 0.0
    @Published private(set) var waveform = EstimatedWaveform(data: Array(repeating: 0, count: 100))
    @Published private(set) var waveformSamples: [Float] = []

    private weak var audioRecorder: AudioRecorderProtocol?
    private var cancellables: Set<AnyCancellable> = []
    private var displayLink: CADisplayLink?

    func attachAudioRecorder(_ audioRecorder: AudioRecorderProtocol) {
        if self.audioRecorder != nil {
            detachAudioRecorder()
        }
        recordingState = .stopped
        self.audioRecorder = audioRecorder
        subscribeToAudioRecorder(audioRecorder)
        if audioRecorder.isRecording {
            startPublishUpdates()
        }
    }

    func detachAudioRecorder() {
        guard audioRecorder != nil else { return }
        audioRecorder?.stopRecording()
    func detachAudioRecorder() async {
        if let audioRecorder, audioRecorder.isRecording {
            await audioRecorder.stopRecording()
        }
        stopPublishUpdates()
        waveformSamples = []
        cancellables = []
        audioRecorder = nil
        recordingState = .stopped
@ -91,7 +93,7 @@ class AudioRecorderState: ObservableObject, Identifiable {
            stopPublishUpdates()
        }
        displayLink = CADisplayLink(target: self, selector: #selector(publishUpdate))
        displayLink?.preferredFrameRateRange = .init(minimum: 10, maximum: 20)
        displayLink?.preferredFrameRateRange = .init(minimum: 30, maximum: 60)
        displayLink?.add(to: .current, forMode: .common)
    }

@ -99,6 +101,9 @@ class AudioRecorderState: ObservableObject, Identifiable {
        if let currentTime = audioRecorder?.currentTime {
            duration = currentTime
        }
        if let sample = audioRecorder?.averagePowerForChannelNumber(0) {
            waveformSamples.append(sample)
        }
    }

    private func stopPublishUpdates() {
@ -31,7 +31,7 @@ protocol MediaPlayerProtocol: AnyObject {
    var url: URL? { get }
    var state: MediaPlayerState { get }

    func load(mediaSource: MediaSourceProxy, using url: URL)
    func load(mediaSource: MediaSourceProxy, using url: URL, autoplay: Bool)
    func play()
    func pause()
    func stop()
@ -17,31 +17,72 @@
import Foundation

class MediaPlayerProvider: MediaPlayerProviderProtocol {
    private let mediaProvider: MediaProviderProtocol
    private var audioPlayer: AudioPlayerProtocol?

    init(mediaProvider: MediaProviderProtocol) {
        self.mediaProvider = mediaProvider
    }
    private lazy var audioPlayer = AudioPlayer()
    private var audioPlayerStates: [String: AudioPlayerState] = [:]

    deinit {
        audioPlayer = nil
        audioPlayerStates = [:]
    }

    func player(for mediaSource: MediaSourceProxy) -> MediaPlayerProtocol? {
    func player(for mediaSource: MediaSourceProxy) -> Result<MediaPlayerProtocol, MediaPlayerProviderError> {
        guard let mimeType = mediaSource.mimeType else {
            MXLog.error("Unknown mime type")
            return nil
            return .failure(.unsupportedMediaType)
        }

        if mimeType.starts(with: "audio/") {
            if audioPlayer == nil {
                audioPlayer = AudioPlayer()
            }
            return audioPlayer
            return .success(audioPlayer)
        } else {
            MXLog.error("Unsupported media type: \(mediaSource.mimeType ?? "unknown")")
            return .failure(.unsupportedMediaType)
        }
    }

    // MARK: - AudioPlayer

    func playerState(for id: AudioPlayerStateIdentifier) -> AudioPlayerState? {
        guard let audioPlayerStateID = audioPlayerStateID(for: id) else {
            MXLog.error("Failed to build an ID using: \(id)")
            return nil
        }
        return audioPlayerStates[audioPlayerStateID]
    }

    @MainActor
    func register(audioPlayerState: AudioPlayerState) {
        guard let audioPlayerStateID = audioPlayerStateID(for: audioPlayerState.id) else {
            MXLog.error("Failed to build a key to register this audioPlayerState: \(audioPlayerState)")
            return
        }
        audioPlayerStates[audioPlayerStateID] = audioPlayerState
    }

    @MainActor
    func unregister(audioPlayerState: AudioPlayerState) {
        guard let audioPlayerStateID = audioPlayerStateID(for: audioPlayerState.id) else {
            MXLog.error("Failed to build a key to unregister this audioPlayerState: \(audioPlayerState)")
            return
        }
        audioPlayerStates[audioPlayerStateID] = nil
    }

    func detachAllStates(except exception: AudioPlayerState?) async {
        for key in audioPlayerStates.keys {
            if let exception, key == audioPlayerStateID(for: exception.id) {
                continue
            }
            await audioPlayerStates[key]?.detachAudioPlayer()
        }
    }

    // MARK: - Private

    private func audioPlayerStateID(for identifier: AudioPlayerStateIdentifier) -> String? {
        switch identifier {
        case .timelineItemIdentifier(let timelineItemIdentifier):
            return timelineItemIdentifier.eventID
        case .recorderPreview:
            return "recorderPreviewAudioPlayerState"
        }
    }
}
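A sketch of the provider's bookkeeping as used elsewhere in this commit, assuming a main-actor context: states are registered under a string key derived from their identifier, looked up by any interested component, and detached in bulk before a new playback starts.

Task { @MainActor in
    let provider = MediaPlayerProvider()
    let previewState = AudioPlayerState(id: .recorderPreview, duration: 0)
    provider.register(audioPlayerState: previewState)

    // Any component can later look the state up by its identifier...
    let lookedUp = provider.playerState(for: .recorderPreview)

    // ...and detach every other state before starting a new playback.
    await provider.detachAllStates(except: lookedUp)
}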
@ -16,6 +16,18 @@

import Foundation

protocol MediaPlayerProviderProtocol {
    func player(for mediaSource: MediaSourceProxy) async -> MediaPlayerProtocol?
enum MediaPlayerProviderError: Error {
    case unsupportedMediaType
}

protocol MediaPlayerProviderProtocol {
    func player(for mediaSource: MediaSourceProxy) -> Result<MediaPlayerProtocol, MediaPlayerProviderError>

    func playerState(for id: AudioPlayerStateIdentifier) -> AudioPlayerState?
    func register(audioPlayerState: AudioPlayerState)
    func unregister(audioPlayerState: AudioPlayerState)
    func detachAllStates(except exception: AudioPlayerState?) async
}

// sourcery: AutoMockable
extension MediaPlayerProviderProtocol { }
|
||||
return .success(())
|
||||
}
|
||||
|
||||
func sendVoiceMessage(url: URL,
|
||||
audioInfo: AudioInfo,
|
||||
waveform: [UInt16],
|
||||
progressSubject: CurrentValueSubject<Double, Never>?,
|
||||
requestHandle: @MainActor (SendAttachmentJoinHandleProtocol) -> Void) async -> Result<Void, RoomProxyError> {
|
||||
sendMessageBackgroundTask = await backgroundTaskService.startBackgroundTask(withName: backgroundTaskName, isReusable: true)
|
||||
defer {
|
||||
sendMessageBackgroundTask?.stop()
|
||||
}
|
||||
|
||||
let handle = room.sendVoiceMessage(url: url.path(percentEncoded: false), audioInfo: audioInfo, waveform: waveform, progressWatcher: UploadProgressListener { progress in
|
||||
progressSubject?.send(progress)
|
||||
})
|
||||
|
||||
await requestHandle(handle)
|
||||
|
||||
do {
|
||||
try await handle.join()
|
||||
} catch {
|
||||
return .failure(.failedSendingMedia)
|
||||
}
|
||||
|
||||
return .success(())
|
||||
}
|
||||
|
||||
func sendFile(url: URL,
|
||||
fileInfo: FileInfo,
|
||||
progressSubject: CurrentValueSubject<Double, Never>?,
|
||||
|
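A sketch of a call site for the new API, assuming an Opus/OGG file already produced by `AudioConverter` and metadata gathered by the caller; the subject reports upload progress and the handle allows cancellation:

import Combine

func upload(_ oggFileURL: URL,
            audioInfo: AudioInfo,
            waveform: [UInt16],
            via roomProxy: RoomProxyProtocol) async -> Result<Void, RoomProxyError> {
    let progressSubject = CurrentValueSubject<Double, Never>(0.0)
    return await roomProxy.sendVoiceMessage(url: oggFileURL,
                                            audioInfo: audioInfo,
                                            waveform: waveform,
                                            progressSubject: progressSubject) { handle in
        // Keep the handle if the upload needs to be cancellable.
        _ = handle
    }
}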
@ -131,6 +131,12 @@ protocol RoomProxyProtocol {
                      zoomLevel: UInt8?,
                      assetType: AssetType?) async -> Result<Void, RoomProxyError>

    func sendVoiceMessage(url: URL,
                          audioInfo: AudioInfo,
                          waveform: [UInt16],
                          progressSubject: CurrentValueSubject<Double, Never>?,
                          requestHandle: @MainActor (SendAttachmentJoinHandleProtocol) -> Void) async -> Result<Void, RoomProxyError>

    /// Retries sending a failed message given its transaction ID
    func retrySend(transactionID: String) async
@ -89,7 +89,8 @@ class MockRoomTimelineController: RoomTimelineControllerProtocol {
    func retryDecryption(for sessionID: String) async { }

    func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState {
        AudioPlayerState(duration: 10.0,
        AudioPlayerState(id: .timelineItemIdentifier(itemID),
                         duration: 10.0,
                         waveform: nil,
                         progress: 0.0)
    }
@@ -38,7 +38,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
     let callbacks = PassthroughSubject<RoomTimelineControllerCallback, Never>()
 
     private(set) var timelineItems = [RoomTimelineItemProtocol]()
-    private var timelineAudioPlayerStates = [TimelineItemIdentifier: AudioPlayerState]()
 
     var roomID: String {
         roomProxy.id
@@ -249,13 +248,14 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
             fatalError("Invalid TimelineItem type (expecting `VoiceMessageRoomTimelineItem` but found \(type(of: timelineItem)) instead)")
         }
 
-        if let playerState = timelineAudioPlayerStates[itemID] {
+        if let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) {
             return playerState
         }
 
-        let playerState = AudioPlayerState(duration: voiceMessageRoomTimelineItem.content.duration,
+        let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
+                                           duration: voiceMessageRoomTimelineItem.content.duration,
                                            waveform: voiceMessageRoomTimelineItem.content.waveform)
-        timelineAudioPlayerStates[itemID] = playerState
+        mediaPlayerProvider.register(audioPlayerState: playerState)
         return playerState
     }
 
@@ -273,52 +273,48 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
             return
         }
 
-        guard let player = await mediaPlayerProvider.player(for: source) as? AudioPlayerProtocol else {
+        guard case .success(let mediaPlayer) = mediaPlayerProvider.player(for: source), let audioPlayer = mediaPlayer as? AudioPlayerProtocol else {
             MXLog.error("Cannot play a voice message without an audio player")
             return
         }
 
-        let playerState = audioPlayerState(for: itemID)
+        let audioPlayerState = audioPlayerState(for: itemID)
 
-        guard player.mediaSource == source, player.state != .error else {
-            timelineAudioPlayerStates.forEach { itemID, playerState in
-                if itemID != timelineItem.id {
-                    playerState.detachAudioPlayer()
-                }
-            }
-            playerState.attachAudioPlayer(player)
+        guard audioPlayer.mediaSource == source, audioPlayer.state != .error else {
+            audioPlayer.stop()
+
+            await mediaPlayerProvider.detachAllStates(except: audioPlayerState)
+
+            audioPlayerState.attachAudioPlayer(audioPlayer)
 
             // Load content
             do {
                 let url = try await voiceMessageMediaManager.loadVoiceMessageFromSource(source, body: nil)
 
                 // Make sure that the player is still attached, as it may have been detached while waiting for the voice message to be loaded.
-                if playerState.isAttached {
-                    player.load(mediaSource: source, using: url)
+                if audioPlayerState.isAttached {
+                    audioPlayer.load(mediaSource: source, using: url, autoplay: true)
                 }
             } catch {
                 MXLog.error("Failed to load voice message: \(error)")
-                playerState.reportError(error)
+                audioPlayerState.reportError(error)
             }
 
             return
         }
 
-        if player.state == .playing {
-            player.pause()
+        if audioPlayer.state == .playing {
+            audioPlayer.pause()
         } else {
-            player.play()
+            audioPlayer.play()
         }
     }
 
-    func pauseAudio() {
-        timelineAudioPlayerStates.forEach { _, playerState in
-            playerState.detachAudioPlayer()
-        }
-    }
-
     func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async {
-        await timelineAudioPlayerStates[itemID]?.updateState(progress: progress)
+        guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
+            return
+        }
+        await playerState.updateState(progress: progress)
     }
 
     // MARK: - Private
@@ -402,11 +398,11 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
 
             // Stops the audio player when a voice message is redacted.
             if timelineItem is RedactedRoomTimelineItem {
-                guard let audioState = timelineAudioPlayerStates[timelineItem.id] else {
+                guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(timelineItem.id)) else {
                     continue
                 }
-                audioState.detachAudioPlayer()
-                timelineAudioPlayerStates.removeValue(forKey: timelineItem.id)
+                playerState.detachAudioPlayer()
+                mediaPlayerProvider.unregister(audioPlayerState: playerState)
            }
 
             newTimelineItems.append(timelineItem)
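With the per-controller dictionary gone, the shared MediaPlayerProvider now owns the AudioPlayerState registry, which is what lets it guarantee that at most one state stays attached to the shared audio player. The provider's internals are not part of this diff; a plausible sketch of `detachAllStates(except:)`, assuming the provider keeps its states in a dictionary keyed by `AudioPlayerStateIdentifier`, would be:

    // Hypothetical provider-side sketch; illustrative only, not from the commit.
    func detachAllStates(except exception: AudioPlayerState?) async {
        for state in audioPlayerStates.values where state !== exception {
            await state.detachAudioPlayer()
        }
    }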
@@ -74,9 +74,7 @@ protocol RoomTimelineControllerProtocol {
     func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState
 
     func playPauseAudio(for itemID: TimelineItemIdentifier) async
 
-    func pauseAudio()
-
     func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async
 }
 
@@ -45,8 +45,7 @@ struct VoiceMessageRoomPlaybackView: View {
         return dateFormatter
     }()
 
-    @GestureState private var dragState = DragState.inactive
-    @State private var tapProgress: Double = .zero
+    @State var dragState: WaveformViewDragState = .inactive
 
     var timeLabelContent: String {
         // Display the duration if progress is 0.0
@@ -85,38 +84,14 @@ struct VoiceMessageRoomPlaybackView: View {
                         .frame(width: waveformLineWidth, height: geometry.size.height)
                         .opacity(showWaveformCursor ? 1 : 0)
                 }
-                // Add a gesture to drag the waveform
-                .gesture(SpatialTapGesture()
-                    .simultaneously(with: LongPressGesture())
-                    .sequenced(before: DragGesture(minimumDistance: waveformLinePadding, coordinateSpace: .local))
-                    .updating($dragState) { value, state, _ in
-                        switch value {
-                        // (SpatialTap, LongPress) begins.
-                        case .first(let spatialLongPress) where spatialLongPress.second ?? false:
-                            // Compute the progress with the spatialTap location
-                            let progress = (spatialLongPress.first?.location ?? .zero).x / geometry.size.width
-                            state = .pressing(progress: progress)
-                        // Long press confirmed, dragging may begin.
-                        case .second(let spatialLongPress, let drag) where spatialLongPress.second ?? false:
-                            var progress: Double = tapProgress
-                            // Compute the progress with the drag location
-                            if let loc = drag?.location {
-                                progress = loc.x / geometry.size.width
-                            }
-                            state = .dragging(progress: progress)
-                        // Dragging ended or the long press cancelled.
-                        default:
-                            state = .inactive
-                        }
-                    })
+                .waveformDragGesture($dragState)
             }
         }
         .onChange(of: dragState) { newDragState in
             switch newDragState {
             case .inactive:
                 onScrubbing(false)
-            case .pressing(let progress):
-                tapProgress = progress
+            case .pressing:
                 onScrubbing(true)
                 feedbackGenerator.prepare()
                 sendFeedback = true
@@ -218,7 +193,8 @@ struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
                                                        294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
                                                        0, 0, 0, 0, 0, 3])
 
-    static var playerState = AudioPlayerState(duration: 10.0,
+    static var playerState = AudioPlayerState(id: .timelineItemIdentifier(.random),
+                                              duration: 10.0,
                                               waveform: waveform,
                                               progress: 0.3)
 
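The inline tap/long-press/drag gesture has moved behind a reusable `.waveformDragGesture($dragState)` modifier driven by the new `WaveformViewDragState`. The modifier itself is not shown in this hunk; judging by the cases matched in `onChange` above, the state type presumably looks something like:

    // Hypothetical shape inferred from the switch above; not from the diff.
    enum WaveformViewDragState: Equatable {
        case inactive
        case pressing(progress: Double)
        case dragging(progress: Double)
    }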
@@ -65,8 +65,8 @@ struct VoiceMessageRoomTimelineView: View {
 
 struct VoiceMessageRoomTimelineView_Previews: PreviewProvider, TestablePreview {
     static let viewModel = RoomScreenViewModel.mock
 
-    static let voiceRoomTimelineItem = VoiceMessageRoomTimelineItem(id: .random,
+    static let timelineItemIdentifier = TimelineItemIdentifier.random
+    static let voiceRoomTimelineItem = VoiceMessageRoomTimelineItem(id: timelineItemIdentifier,
                                                                     timestamp: "Now",
                                                                     isOutgoing: false,
                                                                     isEditable: false,
@@ -79,7 +79,8 @@ struct VoiceMessageRoomTimelineView_Previews: PreviewProvider, TestablePreview {
                                                                                              source: nil,
                                                                                              contentType: nil))
 
-    static let playerState = AudioPlayerState(duration: 10.0,
+    static let playerState = AudioPlayerState(id: .timelineItemIdentifier(timelineItemIdentifier),
+                                              duration: 10.0,
                                               waveform: EstimatedWaveform.mockWaveform,
                                               progress: 0.4)
 
@@ -75,7 +75,7 @@ struct RoomTimelineItemView: View {
         case .poll(let item):
             PollRoomTimelineView(timelineItem: item)
         case .voice(let item):
-            VoiceMessageRoomTimelineView(timelineItem: item, playerState: context.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(duration: 0))
+            VoiceMessageRoomTimelineView(timelineItem: item, playerState: context.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(id: .timelineItemIdentifier(item.id), duration: 0))
         }
     }
 
@@ -16,36 +16,31 @@
 
 import Foundation
 
-enum VoiceMessageCacheError: Error {
-    case invalidFileExtension
-}
-
 class VoiceMessageCache: VoiceMessageCacheProtocol {
     private let preferredFileExtension = "m4a"
     private var temporaryFilesFolderURL: URL {
         FileManager.default.temporaryDirectory.appendingPathComponent("media/voice-message")
     }
 
     func fileURL(for mediaSource: MediaSourceProxy) -> URL? {
         let url = cacheURL(for: mediaSource)
         return FileManager.default.fileExists(atPath: url.path()) ? url : nil
     }
 
-    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool = false) throws -> URL {
-        guard fileURL.pathExtension == preferredFileExtension else {
-            throw VoiceMessageCacheError.invalidFileExtension
-        }
-        setupTemporaryFilesFolder()
-        let url = cacheURL(for: mediaSource)
-        try? FileManager.default.removeItem(at: url)
-        if move {
-            try FileManager.default.moveItem(at: fileURL, to: url)
-        } else {
-            try FileManager.default.copyItem(at: fileURL, to: url)
-        }
-        return url
-    }
-
+    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool = false) -> Result<URL, VoiceMessageCacheError> {
+        guard fileURL.pathExtension == preferredFileExtension else {
+            return .failure(.invalidFileExtension)
+        }
+        let url = cacheURL(for: mediaSource)
+        do {
+            try cacheFile(source: fileURL, destination: url, move: move)
+        } catch {
+            MXLog.error("Failed storing file in cache", context: error)
+            return .failure(.failedStoringFileInCache)
+        }
+        return .success(url)
+    }
+
     func clearCache() {
         if FileManager.default.fileExists(atPath: temporaryFilesFolderURL.path) {
             do {
@@ -66,6 +61,16 @@ class VoiceMessageCache: VoiceMessageCacheProtocol {
         }
     }
 
+    private func cacheFile(source: URL, destination: URL, move: Bool) throws {
+        setupTemporaryFilesFolder()
+        try? FileManager.default.removeItem(at: destination)
+        if move {
+            try FileManager.default.moveItem(at: source, to: destination)
+        } else {
+            try FileManager.default.copyItem(at: source, to: destination)
+        }
+    }
+
     private func cacheURL(for mediaSource: MediaSourceProxy) -> URL {
         temporaryFilesFolderURL.appendingPathComponent(mediaSource.url.lastPathComponent).appendingPathExtension(preferredFileExtension)
     }
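With the throwing API replaced by a Result, call sites can switch exhaustively over the two failure cases instead of wrapping everything in do/catch. A minimal call-site sketch, assuming a `cache` instance and a freshly downloaded `fileURL`:

    // Illustrative call-site sketch; not part of the commit.
    switch cache.cache(mediaSource: mediaSource, using: fileURL, move: true) {
    case .success(let cachedURL):
        MXLog.info("Voice message cached at \(cachedURL)")
    case .failure(.invalidFileExtension):
        MXLog.error("Only .m4a files can be cached")
    case .failure(.failedStoringFileInCache):
        MXLog.error("Could not copy or move the file into the cache")
    }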
@@ -16,9 +16,26 @@
 
 import Foundation
 
+enum VoiceMessageCacheError: Error {
+    case invalidFileExtension
+    case failedStoringFileInCache
+}
+
 protocol VoiceMessageCacheProtocol {
     /// Returns the URL of the cached audio file for a given media source
     /// - Parameter mediaSource: the media source
     /// - Returns: the URL of the cached audio file or nil if the file doesn't exist
     func fileURL(for mediaSource: MediaSourceProxy) -> URL?
-    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool) throws -> URL
 
+    /// Adds a file in the cache
+    /// - Parameters:
+    ///   - mediaSource: the media source
+    ///   - fileURL: the source file
+    ///   - move: whether to move or copy the source file
+    /// - Returns: the cached URL
+    func cache(mediaSource: MediaSourceProxy, using fileURL: URL, move: Bool) -> Result<URL, VoiceMessageCacheError>
 
     /// Clears the cache
     func clearCache()
 }
 
@@ -80,7 +80,13 @@ class VoiceMessageMediaManager: VoiceMessageMediaManagerProtocol {
             try audioConverter.convertToMPEG4AAC(sourceURL: fileHandle.url, destinationURL: convertedFileURL)
 
             // Cache the file and return the url
-            return try voiceMessageCache.cache(mediaSource: source, using: convertedFileURL, move: true)
+            let result = voiceMessageCache.cache(mediaSource: source, using: convertedFileURL, move: true)
+            switch result {
+            case .success(let url):
+                return url
+            case .failure(let error):
+                throw error
+            }
         }
     }
 
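Since the enclosing function still throws, the explicit switch is equivalent to Result's built-in `get()`. A more compact form (a suggestion, not the committed code) would be:

    // Rethrows the failure case and unwraps the success case in one call.
    return try voiceMessageCache.cache(mediaSource: source, using: convertedFileURL, move: true).get()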
@@ -0,0 +1,198 @@
+//
+// Copyright 2023 New Vector Ltd
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import DSWaveformImage
+import Foundation
+import MatrixRustSDK
+
+class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
+    let audioRecorder: AudioRecorderProtocol
+    private let audioConverter: AudioConverterProtocol
+    private let voiceMessageCache: VoiceMessageCacheProtocol
+    private let mediaPlayerProvider: MediaPlayerProviderProtocol
+
+    private let mp4accMimeType = "audio/m4a"
+    private let waveformSamplesCount = 100
+
+    private(set) var recordingURL: URL?
+    private(set) var recordingDuration: TimeInterval = 0.0
+
+    private(set) var previewAudioPlayerState: AudioPlayerState?
+    private(set) var previewAudioPlayer: AudioPlayerProtocol?
+
+    init(audioRecorder: AudioRecorderProtocol = AudioRecorder(),
+         mediaPlayerProvider: MediaPlayerProviderProtocol,
+         audioConverter: AudioConverterProtocol = AudioConverter(),
+         voiceMessageCache: VoiceMessageCacheProtocol = VoiceMessageCache()) {
+        self.audioRecorder = audioRecorder
+        self.mediaPlayerProvider = mediaPlayerProvider
+        self.audioConverter = audioConverter
+        self.voiceMessageCache = voiceMessageCache
+    }
+
+    // MARK: - Recording
+
+    func startRecording() async -> Result<Void, VoiceMessageRecorderError> {
+        await stopPlayback()
+        recordingURL = nil
+        switch await audioRecorder.record(with: .uuid(UUID())) {
+        case .failure(let error):
+            return .failure(.audioRecorderError(error))
+        case .success:
+            recordingURL = audioRecorder.url
+            return .success(())
+        }
+    }
+
+    func stopRecording() async -> Result<Void, VoiceMessageRecorderError> {
+        recordingDuration = audioRecorder.currentTime
+        await audioRecorder.stopRecording()
+        guard case .success = await finalizeRecording() else {
+            return .failure(.previewNotAvailable)
+        }
+        return .success(())
+    }
+
+    func cancelRecording() async {
+        await audioRecorder.stopRecording()
+        audioRecorder.deleteRecording()
+        recordingURL = nil
+        previewAudioPlayerState = nil
+    }
+
+    func deleteRecording() async {
+        await stopPlayback()
+        audioRecorder.deleteRecording()
+        previewAudioPlayerState = nil
+        recordingURL = nil
+    }
+
+    // MARK: - Preview
+
+    func startPlayback() async -> Result<Void, VoiceMessageRecorderError> {
+        guard let previewAudioPlayerState, let url = recordingURL else {
+            return .failure(.previewNotAvailable)
+        }
+
+        guard let audioPlayer = previewAudioPlayer else {
+            return .failure(.previewNotAvailable)
+        }
+
+        if audioPlayer.url == url {
+            audioPlayer.play()
+            return .success(())
+        }
+
+        await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
+        let pendingMediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
+        audioPlayer.load(mediaSource: pendingMediaSource, using: url, autoplay: true)
+        return .success(())
+    }
+
+    func pausePlayback() {
+        previewAudioPlayer?.pause()
+    }
+
+    func stopPlayback() async {
+        guard let previewAudioPlayerState else {
+            return
+        }
+        await previewAudioPlayerState.detachAudioPlayer()
+        previewAudioPlayer?.stop()
+    }
+
+    func seekPlayback(to progress: Double) async {
+        await previewAudioPlayerState?.updateState(progress: progress)
+    }
+
+    func buildRecordingWaveform() async -> Result<[UInt16], VoiceMessageRecorderError> {
+        guard let url = recordingURL else {
+            return .failure(.missingRecordingFile)
+        }
+        // build the waveform
+        var waveformData: [UInt16] = []
+        let analyzer = WaveformAnalyzer()
+        do {
+            let samples = try await analyzer.samples(fromAudioAt: url, count: 100)
+            // linearly normalized to [0, 1] (1 -> -50 dB)
+            waveformData = samples.map { UInt16(max(0, (1 - $0) * 1024)) }
+        } catch {
+            MXLog.error("Waveform analysis failed: \(error)")
+        }
+        return .success(waveformData)
+    }
+
+    func sendVoiceMessage(inRoom roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError> {
+        guard let url = recordingURL else {
+            return .failure(VoiceMessageRecorderError.missingRecordingFile)
+        }
+
+        // convert the file
+        let sourceFilename = url.deletingPathExtension().lastPathComponent
+        let oggFile = URL.temporaryDirectory.appendingPathComponent(sourceFilename).appendingPathExtension("ogg")
+        do {
+            try audioConverter.convertToOpusOgg(sourceURL: url, destinationURL: oggFile)
+        } catch {
+            return .failure(.failedSendingVoiceMessage)
+        }
+
+        // send it
+        let size: UInt64
+        do {
+            size = try UInt64(FileManager.default.sizeForItem(at: oggFile))
+        } catch {
+            MXLog.error("Failed to get the recording file size", context: error)
+            return .failure(.failedSendingVoiceMessage)
+        }
+        let audioInfo = AudioInfo(duration: recordingDuration, size: size, mimetype: "audio/ogg")
+        guard case .success(let waveform) = await buildRecordingWaveform() else {
+            return .failure(.failedSendingVoiceMessage)
+        }
+
+        let result = await roomProxy.sendVoiceMessage(url: oggFile,
+                                                      audioInfo: audioInfo,
+                                                      waveform: waveform,
+                                                      progressSubject: nil) { _ in }
+        // delete the temporary file
+        try? FileManager.default.removeItem(at: oggFile)
+
+        if case .failure(let error) = result {
+            MXLog.error("Failed to send the voice message.", context: error)
+            return .failure(.failedSendingVoiceMessage)
+        }
+
+        return .success(())
+    }
+
+    // MARK: - Private
+
+    private func finalizeRecording() async -> Result<Void, VoiceMessageRecorderError> {
+        guard let url = recordingURL else {
+            return .failure(.previewNotAvailable)
+        }
+
+        // Build the preview audio player state
+        previewAudioPlayerState = await AudioPlayerState(id: .recorderPreview, duration: recordingDuration, waveform: EstimatedWaveform(data: []))
+
+        // Build the preview audio player
+        let mediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
+        guard case .success(let mediaPlayer) = mediaPlayerProvider.player(for: mediaSource), let audioPlayer = mediaPlayer as? AudioPlayerProtocol else {
+            return .failure(.previewNotAvailable)
+        }
+        previewAudioPlayer = audioPlayer
+        return .success(())
+    }
+}
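Taken together, the recorder behaves as a small state machine: record, stop (which finalizes the preview player), optionally preview, then convert and send. A minimal happy-path sketch of driving it, illustrative only, assuming a `recorder: VoiceMessageRecorderProtocol` and a `roomProxy` from the surrounding feature:

    // Hypothetical usage; error handling trimmed for brevity.
    guard case .success = await recorder.startRecording() else { return }
    // ... the user speaks ...
    guard case .success = await recorder.stopRecording() else { return }

    // Optional preview before sending.
    _ = await recorder.startPlayback()
    await recorder.stopPlayback()

    // Convert to Opus/Ogg and upload.
    switch await recorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: AudioConverter()) {
    case .success:
        await recorder.deleteRecording()
    case .failure(let error):
        MXLog.error("Could not send the voice message", context: error)
    }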
@@ -0,0 +1,47 @@
+//
+// Copyright 2023 New Vector Ltd
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import Foundation
+
+enum VoiceMessageRecorderError: Error {
+    case genericError
+    case missingRecordingFile
+    case previewNotAvailable
+    case audioRecorderError(AudioRecorderError)
+    case failedSendingVoiceMessage
+}
+
+protocol VoiceMessageRecorderProtocol {
+    var audioRecorder: AudioRecorderProtocol { get }
+    var previewAudioPlayerState: AudioPlayerState? { get }
+    var recordingURL: URL? { get }
+    var recordingDuration: TimeInterval { get }
+
+    func startRecording() async -> Result<Void, VoiceMessageRecorderError>
+    func stopRecording() async -> Result<Void, VoiceMessageRecorderError>
+    func cancelRecording() async
+    func startPlayback() async -> Result<Void, VoiceMessageRecorderError>
+    func pausePlayback()
+    func stopPlayback() async
+    func seekPlayback(to progress: Double) async
+    func deleteRecording() async
+
+    func buildRecordingWaveform() async -> Result<[UInt16], VoiceMessageRecorderError>
+    func sendVoiceMessage(inRoom roomProxy: RoomProxyProtocol, audioConverter: AudioConverterProtocol) async -> Result<Void, VoiceMessageRecorderError>
+}
+
+// sourcery: AutoMockable
+extension VoiceMessageRecorderProtocol { }
@@ -260,6 +260,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Some room name", avatarURL: nil)),
                                                              timelineController: MockRoomTimelineController(),
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -271,6 +272,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Some room name", avatarURL: URL.picturesDirectory)),
                                                              timelineController: MockRoomTimelineController(),
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -284,6 +286,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "New room", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -297,6 +300,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "New room", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -311,6 +315,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "New room", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -327,6 +332,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Small timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -343,6 +349,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Small timeline, paginating", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -359,6 +366,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Large timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -376,6 +384,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Large timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -392,6 +401,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Large timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -408,6 +418,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Polls timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -424,6 +435,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Polls timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
@@ -440,6 +452,7 @@ class MockScreen: Identifiable {
             let parameters = RoomScreenCoordinatorParameters(roomProxy: RoomProxyMock(with: .init(displayName: "Polls timeline", avatarURL: URL.picturesDirectory)),
                                                              timelineController: timelineController,
                                                              mediaProvider: MockMediaProvider(),
+                                                             mediaPlayerProvider: MediaPlayerProviderMock(),
                                                              emojiProvider: EmojiProvider(),
                                                              completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
                                                              appSettings: ServiceLocator.shared.settings)
BIN  UnitTests/Resources/Media/test_voice_message.m4a  (Stored with Git LFS; new file, binary not shown)
@@ -47,7 +47,7 @@ class AudioPlayerStateTests: XCTestCase {
     override func setUp() async throws {
         audioPlayerActionsSubject = .init()
         audioPlayerSeekCallsSubject = .init()
-        audioPlayerState = AudioPlayerState(duration: 10.0)
+        audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10.0)
         audioPlayerMock = buildAudioPlayerMock()
     }
 
@@ -31,8 +31,10 @@ class AudioRecorderStateTests: XCTestCase {
 
     private func buildAudioRecorderMock() -> AudioRecorderMock {
         let audioRecorderMock = AudioRecorderMock()
+        audioRecorderMock.isRecording = false
         audioRecorderMock.underlyingActions = audioRecorderActions
         audioRecorderMock.currentTime = 0.0
+        audioRecorderMock.averagePowerForChannelNumberReturnValue = 0
         return audioRecorderMock
     }
 
@@ -44,14 +46,13 @@ class AudioRecorderStateTests: XCTestCase {
 
     func testAttach() async throws {
         audioRecorderState.attachAudioRecorder(audioRecorderMock)
 
         XCTAssertEqual(audioRecorderState.recordingState, .stopped)
     }
 
     func testDetach() async throws {
         audioRecorderState.attachAudioRecorder(audioRecorderMock)
 
-        audioRecorderState.detachAudioRecorder()
+        audioRecorderMock.isRecording = true
+        await audioRecorderState.detachAudioRecorder()
+        XCTAssert(audioRecorderMock.stopRecordingCalled)
         XCTAssertEqual(audioRecorderState.recordingState, .stopped)
     }
 
125  UnitTests/Sources/MediaPlayerProviderTests.swift  Normal file
@@ -0,0 +1,125 @@
+//
+// Copyright 2023 New Vector Ltd
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import Combine
+@testable import ElementX
+import Foundation
+import XCTest
+
+class MediaPlayerProviderTests: XCTestCase {
+    private var mediaPlayerProvider: MediaPlayerProvider!
+
+    private let oggMimeType = "audio/ogg"
+    private let someURL = URL("/some/url")
+    private let someOtherURL = URL("/some/other/url")
+
+    override func setUp() async throws {
+        mediaPlayerProvider = MediaPlayerProvider()
+    }
+
+    func testPlayerForWrongMediaType() async throws {
+        let mediaSourceWithoutMimeType = MediaSourceProxy(url: someURL, mimeType: nil)
+        switch mediaPlayerProvider.player(for: mediaSourceWithoutMimeType) {
+        case .failure(.unsupportedMediaType):
+            // Ok
+            break
+        default:
+            XCTFail("An error is expected")
+        }
+
+        let mediaSourceVideo = MediaSourceProxy(url: someURL, mimeType: "video/mp4")
+        switch mediaPlayerProvider.player(for: mediaSourceVideo) {
+        case .failure(.unsupportedMediaType):
+            // Ok
+            break
+        default:
+            XCTFail("An error is expected")
+        }
+    }
+
+    func testPlayerFor() async throws {
+        let mediaSource = MediaSourceProxy(url: someURL, mimeType: oggMimeType)
+        guard case .success(let playerA) = mediaPlayerProvider.player(for: mediaSource) else {
+            XCTFail("A valid player is expected")
+            return
+        }
+
+        // Calling it again with another media source must return the same player
+        let otherMediaSource = MediaSourceProxy(url: someOtherURL, mimeType: oggMimeType)
+        guard case .success(let playerB) = mediaPlayerProvider.player(for: otherMediaSource) else {
+            XCTFail("A valid player is expected")
+            return
+        }
+
+        XCTAssert(playerA === playerB)
+    }
+
+    func testPlayerStates() async throws {
+        let audioPlayerStateId = AudioPlayerStateIdentifier.timelineItemIdentifier(.random)
+        // By default, there should be no player state
+        XCTAssertNil(mediaPlayerProvider.playerState(for: audioPlayerStateId))
+
+        let audioPlayerState = await AudioPlayerState(id: audioPlayerStateId, duration: 10.0)
+        await mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
+        XCTAssertEqual(audioPlayerState, mediaPlayerProvider.playerState(for: audioPlayerStateId))
+
+        await mediaPlayerProvider.unregister(audioPlayerState: audioPlayerState)
+        XCTAssertNil(mediaPlayerProvider.playerState(for: audioPlayerStateId))
+    }
+
+    func testDetachAllStates() async throws {
+        let audioPlayer = AudioPlayerMock()
+        audioPlayer.actions = PassthroughSubject<AudioPlayerAction, Never>().eraseToAnyPublisher()
+
+        let audioPlayerStates = await Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0), count: 10)
+        for audioPlayerState in audioPlayerStates {
+            await mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
+            await audioPlayerState.attachAudioPlayer(audioPlayer)
+            let isAttached = await audioPlayerState.isAttached
+            XCTAssertTrue(isAttached)
+        }
+
+        await mediaPlayerProvider.detachAllStates(except: nil)
+        for audioPlayerState in audioPlayerStates {
+            let isAttached = await audioPlayerState.isAttached
+            XCTAssertFalse(isAttached)
+        }
+    }
+
+    func testDetachAllStatesWithException() async throws {
+        let audioPlayer = AudioPlayerMock()
+        audioPlayer.actions = PassthroughSubject<AudioPlayerAction, Never>().eraseToAnyPublisher()
+
+        let audioPlayerStates = await Array(repeating: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0), count: 10)
+        for audioPlayerState in audioPlayerStates {
+            await mediaPlayerProvider.register(audioPlayerState: audioPlayerState)
+            await audioPlayerState.attachAudioPlayer(audioPlayer)
+            let isAttached = await audioPlayerState.isAttached
+            XCTAssertTrue(isAttached)
+        }
+
+        let exception = audioPlayerStates[1]
+        await mediaPlayerProvider.detachAllStates(except: exception)
+        for audioPlayerState in audioPlayerStates {
+            let isAttached = await audioPlayerState.isAttached
+            if audioPlayerState == exception {
+                XCTAssertTrue(isAttached)
+            } else {
+                XCTAssertFalse(isAttached)
+            }
+        }
+    }
+}
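The identity assertion in testPlayerFor passes because the provider hands out a single shared audio player regardless of the media source. The provider's implementation is not part of this diff, but its player(for:) presumably follows this shape, with `MediaPlayerProviderError` standing in for whatever the error type is actually named:

    // Hypothetical sketch of the provider side; illustrative only.
    func player(for mediaSource: MediaSourceProxy) -> Result<MediaPlayerProtocol, MediaPlayerProviderError> {
        guard let mimeType = mediaSource.mimeType, mimeType.starts(with: "audio/") else {
            return .failure(.unsupportedMediaType)
        }
        return .success(sharedAudioPlayer) // one instance reused for every audio source
    }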
@@ -28,6 +28,7 @@ class PillContextTests: XCTestCase {
         proxyMock.members = subject.asCurrentValuePublisher()
         let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                        mediaProvider: MockMediaProvider(),
+                                       mediaPlayerProvider: MediaPlayerProviderMock(),
                                        roomProxy: proxyMock,
                                        appSettings: ServiceLocator.shared.settings,
                                        analytics: ServiceLocator.shared.analytics,
@@ -59,6 +60,7 @@ class PillContextTests: XCTestCase {
         proxyMock.members = subject.asCurrentValuePublisher()
         let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
                                        mediaProvider: MockMediaProvider(),
+                                       mediaPlayerProvider: MediaPlayerProviderMock(),
                                        roomProxy: proxyMock,
                                        appSettings: ServiceLocator.shared.settings,
                                        analytics: ServiceLocator.shared.analytics,
@@ -77,6 +79,7 @@ class PillContextTests: XCTestCase {
         mockController.roomProxy = proxyMock
         let mock = RoomScreenViewModel(timelineController: mockController,
                                        mediaProvider: MockMediaProvider(),
+                                       mediaPlayerProvider: MediaPlayerProviderMock(),
                                        roomProxy: proxyMock,
                                        appSettings: ServiceLocator.shared.settings,
                                        analytics: ServiceLocator.shared.analytics,
@@ -51,6 +51,7 @@ class RoomScreenViewModelTests: XCTestCase {
         timelineController.timelineItems = items
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: RoomProxyMock(with: .init(displayName: "")),
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -84,6 +85,7 @@ class RoomScreenViewModelTests: XCTestCase {
         timelineController.timelineItems = items
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: RoomProxyMock(with: .init(displayName: "")),
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -115,6 +117,7 @@ class RoomScreenViewModelTests: XCTestCase {
         timelineController.timelineItems = items
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: RoomProxyMock(with: .init(displayName: "")),
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -143,6 +146,7 @@ class RoomScreenViewModelTests: XCTestCase {
         timelineController.timelineItems = items
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: RoomProxyMock(with: .init(displayName: "")),
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -171,6 +175,7 @@ class RoomScreenViewModelTests: XCTestCase {
         timelineController.timelineItems = items
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: RoomProxyMock(with: .init(displayName: "")),
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -195,6 +200,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -233,6 +239,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -272,6 +279,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -303,6 +311,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -322,6 +331,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -340,6 +350,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -359,6 +370,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxyMock,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -495,6 +507,7 @@ class RoomScreenViewModelTests: XCTestCase {
 
         let viewModel = RoomScreenViewModel(timelineController: timelineController,
                                             mediaProvider: MockMediaProvider(),
+                                            mediaPlayerProvider: MediaPlayerProviderMock(),
                                             roomProxy: roomProxy,
                                             appSettings: ServiceLocator.shared.settings,
                                             analytics: ServiceLocator.shared.analytics,
@@ -56,7 +56,10 @@ class VoiceMessageCacheTests: XCTestCase {
 
         // If the file is present in the cache, its URL must be returned
         let temporaryFileURL = try createTemporaryFile(named: testFilename, withExtension: mpeg4aacFileExtension)
-        let cachedURL = try voiceMessageCache.cache(mediaSource: mediaSource, using: temporaryFileURL, move: true)
+        guard case .success(let cachedURL) = voiceMessageCache.cache(mediaSource: mediaSource, using: temporaryFileURL, move: true) else {
+            XCTFail("A success is expected")
+            return
+        }
 
         XCTAssertEqual(cachedURL, voiceMessageCache.fileURL(for: mediaSource))
     }
@@ -64,22 +67,20 @@ class VoiceMessageCacheTests: XCTestCase {
     func testCacheInvalidFileExtension() async throws {
         // An error should be raised if the file extension is not "m4a"
         let mpegFileURL = try createTemporaryFile(named: testFilename, withExtension: "mpg")
-        do {
-            _ = try voiceMessageCache.cache(mediaSource: mediaSource, using: mpegFileURL, move: true)
+        guard case .failure(let error) = voiceMessageCache.cache(mediaSource: mediaSource, using: mpegFileURL, move: true) else {
             XCTFail("An error is expected")
-        } catch {
-            switch error as? VoiceMessageCacheError {
-            case .invalidFileExtension:
-                break
-            default:
-                XCTFail("A VoiceMessageCacheError.invalidFileExtension is expected")
-            }
+            return
         }
+
+        XCTAssertEqual(error, .invalidFileExtension)
     }
 
     func testCacheCopy() async throws {
         let fileURL = try createTemporaryFile(named: testFilename, withExtension: mpeg4aacFileExtension)
-        let cacheURL = try voiceMessageCache.cache(mediaSource: mediaSource, using: fileURL, move: false)
+        guard case .success(let cacheURL) = voiceMessageCache.cache(mediaSource: mediaSource, using: fileURL, move: false) else {
+            XCTFail("A success is expected")
+            return
+        }
 
         // The source file must remain in its original location
         XCTAssertTrue(fileManager.fileExists(atPath: fileURL.path()))
@@ -89,7 +90,10 @@ class VoiceMessageCacheTests: XCTestCase {
 
     func testCacheMove() async throws {
         let fileURL = try createTemporaryFile(named: testFilename, withExtension: mpeg4aacFileExtension)
-        let cacheURL = try voiceMessageCache.cache(mediaSource: mediaSource, using: fileURL, move: true)
+        guard case .success(let cacheURL) = voiceMessageCache.cache(mediaSource: mediaSource, using: fileURL, move: true) else {
+            XCTFail("A success is expected")
+            return
+        }
 
         // The file must have been moved
         XCTAssertFalse(fileManager.fileExists(atPath: fileURL.path()))
@@ -93,7 +93,7 @@ class VoiceMessageMediaManagerTests: XCTestCase {
         let mediaSource = MediaSourceProxy(url: someURL, mimeType: audioOGGMimeType)
         mediaProvider.loadFileFromSourceReturnValue = MediaFileHandleProxy.unmanaged(url: loadedFile)
         let audioConverter = AudioConverterMock()
-        voiceMessageCache.cacheMediaSourceUsingMoveReturnValue = cachedConvertedFileURL
+        voiceMessageCache.cacheMediaSourceUsingMoveReturnValue = .success(cachedConvertedFileURL)
         voiceMessageMediaManager = VoiceMessageMediaManager(mediaProvider: mediaProvider,
                                                             voiceMessageCache: voiceMessageCache,
                                                             audioConverter: audioConverter,
@@ -124,7 +124,7 @@ class VoiceMessageMediaManagerTests: XCTestCase {
         }
         voiceMessageCache.cacheMediaSourceUsingMoveClosure = { _, _, _ in
             cachedURL = cachedConvertedFileURL
-            return cachedConvertedFileURL
+            return .success(cachedConvertedFileURL)
         }
 
         let audioConverter = AudioConverterMock()
237  UnitTests/Sources/VoiceMessageRecorderTests.swift  Normal file
@@ -0,0 +1,237 @@
+//
+// Copyright 2023 New Vector Ltd
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import Combine
+@testable import ElementX
+import Foundation
+import XCTest
+
+@MainActor
+class VoiceMessageRecorderTests: XCTestCase {
+    private var voiceMessageRecorder: VoiceMessageRecorder!
+
+    private var audioRecorder: AudioRecorderMock!
+    private var mediaPlayerProvider: MediaPlayerProviderMock!
+    private var audioConverter: AudioConverterMock!
+    private var voiceMessageCache: VoiceMessageCacheMock!
+
+    private var audioPlayer: AudioPlayerMock!
+    private var audioPlayerActionsSubject: PassthroughSubject<AudioPlayerAction, Never> = .init()
+    private var audioPlayerActions: AnyPublisher<AudioPlayerAction, Never> {
+        audioPlayerActionsSubject.eraseToAnyPublisher()
+    }
+
+    private let recordingURL = URL("/some/url")
+
+    override func setUp() async throws {
+        audioRecorder = AudioRecorderMock()
+        audioRecorder.recordWithReturnValue = .success(())
+        audioRecorder.underlyingCurrentTime = 0
+        audioRecorder.averagePowerForChannelNumberReturnValue = 0
+        audioPlayer = AudioPlayerMock()
+        audioPlayer.actions = audioPlayerActions
+
+        mediaPlayerProvider = MediaPlayerProviderMock()
+        mediaPlayerProvider.playerForClosure = { _ in
+            .success(self.audioPlayer)
+        }
+        audioConverter = AudioConverterMock()
+        voiceMessageCache = VoiceMessageCacheMock()
+
+        voiceMessageRecorder = VoiceMessageRecorder(audioRecorder: audioRecorder,
+                                                    mediaPlayerProvider: mediaPlayerProvider,
+                                                    audioConverter: audioConverter,
+                                                    voiceMessageCache: voiceMessageCache)
+    }
+
+    func testStartRecording() async throws {
+        audioRecorder.url = recordingURL
+        _ = await voiceMessageRecorder.startRecording()
+        XCTAssert(audioRecorder.recordWithCalled)
+        XCTAssertEqual(voiceMessageRecorder.recordingURL, audioRecorder.url)
+    }
+
+    func testStopRecording() async throws {
+        audioRecorder.isRecording = true
+        audioRecorder.currentTime = 14.0
+        audioRecorder.url = recordingURL
+
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        // Internal audio recorder must have been stopped
+        XCTAssert(audioRecorder.stopRecordingCalled)
+
+        // A preview player state must be available
+        let previewPlayerState = voiceMessageRecorder.previewAudioPlayerState
+        XCTAssertNotNil(previewPlayerState)
+        XCTAssertEqual(previewPlayerState?.duration, audioRecorder.currentTime)
+    }
+
+    func testCancelRecording() async throws {
+        audioRecorder.isRecording = true
+
+        await voiceMessageRecorder.cancelRecording()
+
+        // The recording audio file must have been deleted
+        XCTAssert(audioRecorder.deleteRecordingCalled)
+    }
+
+    func testDeleteRecording() async throws {
+        await voiceMessageRecorder.deleteRecording()
+        XCTAssert(audioRecorder.deleteRecordingCalled)
+    }
+
+    func testStartPlayback() async throws {
+        audioRecorder.url = recordingURL
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        // if the player url doesn't match the recording url
+        guard case .success = await voiceMessageRecorder.startPlayback() else {
+            XCTFail("Playback should start")
+            return
+        }
+
+        XCTAssert(audioPlayer.loadMediaSourceUsingAutoplayCalled)
+        XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.url, recordingURL)
+        XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.mediaSource.mimeType, "audio/m4a")
+        XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.mediaSource.url, recordingURL)
+        XCTAssertEqual(audioPlayer.loadMediaSourceUsingAutoplayReceivedArguments?.autoplay, true)
+        XCTAssertFalse(audioPlayer.playCalled)
+    }
+
+    func testResumePlayback() async throws {
+        audioRecorder.url = recordingURL
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        // if the player url matches the recording url
+        audioPlayer.url = recordingURL
+        guard case .success = await voiceMessageRecorder.startPlayback() else {
+            XCTFail("Playback should start")
+            return
+        }
+
+        XCTAssertFalse(audioPlayer.loadMediaSourceUsingAutoplayCalled)
+        XCTAssert(audioPlayer.playCalled)
+    }
+
+    func testPausePlayback() async throws {
+        audioRecorder.url = recordingURL
+        switch await voiceMessageRecorder.startRecording() {
+        case .failure(let error):
+            XCTFail("Recording should start. \(error)")
+        case .success:
+            break
+        }
+        _ = await voiceMessageRecorder.stopRecording()
+
+        voiceMessageRecorder.pausePlayback()
+        XCTAssert(audioPlayer.pauseCalled)
+    }
+
+    func testStopPlayback() async throws {
+        audioRecorder.url = recordingURL
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        await voiceMessageRecorder.stopPlayback()
+        XCTAssertEqual(voiceMessageRecorder.previewAudioPlayerState?.isAttached, false)
+        XCTAssert(audioPlayer.stopCalled)
+    }
+
+    func testSeekPlayback() async throws {
+        audioRecorder.url = recordingURL
+        // Calling stop will generate the preview player state needed to have an audio player
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+        voiceMessageRecorder.previewAudioPlayerState?.attachAudioPlayer(audioPlayer)
+
+        await voiceMessageRecorder.seekPlayback(to: 0.4)
+        XCTAssert(audioPlayer.seekToCalled)
+        XCTAssertEqual(audioPlayer.seekToReceivedProgress, 0.4)
+    }
+
+    func testBuildRecordedWaveform() async throws {
+        guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_audio", withExtension: "mp3") else {
+            XCTFail("Test audio file is missing")
+            return
+        }
+        audioRecorder.url = audioFileUrl
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        guard case .success(let data) = await voiceMessageRecorder.buildRecordingWaveform() else {
+            XCTFail("A waveform is expected")
+            return
+        }
+        XCTAssert(!data.isEmpty)
+    }
+
+    func testSendVoiceMessage() async throws {
+        guard let audioFileUrl = Bundle(for: Self.self).url(forResource: "test_voice_message", withExtension: "m4a") else {
+            XCTFail("Test audio file is missing")
+            return
+        }
+        audioRecorder.currentTime = 42
+        audioRecorder.url = audioFileUrl
+        _ = await voiceMessageRecorder.startRecording()
+        _ = await voiceMessageRecorder.stopRecording()
+
+        let roomProxy = RoomProxyMock()
+        let audioConverter = AudioConverterMock()
+        var convertedFileUrl: URL?
+        var convertedFileSize: UInt64?
+
+        audioConverter.convertToOpusOggSourceURLDestinationURLClosure = { source, destination in
+            convertedFileUrl = destination
+            try? FileManager.default.removeItem(at: destination)
+            let internalConverter = AudioConverter()
+            try internalConverter.convertToOpusOgg(sourceURL: source, destinationURL: destination)
+            convertedFileSize = try? UInt64(FileManager.default.sizeForItem(at: destination))
+            // the source URL must be the recorded file
+            XCTAssertEqual(source, audioFileUrl)
+            // check the converted file extension
+            XCTAssertEqual(destination.pathExtension, "ogg")
+        }
+
+        roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleClosure = { url, audioInfo, waveform, _, _ in
+            XCTAssertEqual(url, convertedFileUrl)
+            XCTAssertEqual(audioInfo.duration, self.audioRecorder.currentTime)
+            XCTAssertEqual(audioInfo.size, convertedFileSize)
+            XCTAssertEqual(audioInfo.mimetype, "audio/ogg")
+            XCTAssertFalse(waveform.isEmpty)
+
+            return .success(())
+        }
+
+        guard case .success = await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: audioConverter) else {
+            XCTFail("A success is expected")
+            return
+        }
+
+        XCTAssert(audioConverter.convertToOpusOggSourceURLDestinationURLCalled)
+        XCTAssert(roomProxy.sendVoiceMessageUrlAudioInfoWaveformProgressSubjectRequestHandleCalled)
+
+        // the converted file must have been deleted
+        if let convertedFileUrl {
+            XCTAssertFalse(FileManager.default.fileExists(atPath: convertedFileUrl.path()))
+        } else {
+            XCTFail("converted file URL is missing")
+        }
+    }
+}
BIN  UnitTests/__Snapshots__/PreviewTests/test_composerToolbar.Voice-Message.png  (Stored with Git LFS; binary file not shown)
BIN  UnitTests/__Snapshots__/PreviewTests/test_voiceMessagePreviewComposer.1.png  (Stored with Git LFS; binary file not shown)
BIN  UnitTests/__Snapshots__/PreviewTests/test_voiceMessageRecordingComposer.1.png  (Stored with Git LFS; binary file not shown)
BIN  UnitTests/__Snapshots__/PreviewTests/test_voiceMessageRecordingView.1.png  (Stored with Git LFS; binary file not shown)
BIN  UnitTests/__Snapshots__/PreviewTests/test_waveformView.1.png  (Stored with Git LFS; binary file not shown)