Voice message waveform from audio files (#1919)

* Add DSWaveformImage
* Add estimated waveform
* Add progress mask modifier
* Add real waveform
* Fix cursor
* Fix scale factor
* Refine ProgressMaskModifier
* Refine waveform logic
* Add docs
* Add waveform placeholder
Alfonso Grillo 2023-10-19 11:01:45 +02:00 committed by GitHub
parent 75da7ba3ec
commit fbcf037240
16 changed files with 175 additions and 61 deletions
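Taken together, the pieces listed in the commit message compose as follows: once the audio file has been downloaded, the playback view renders the real waveform with DSWaveformImageViews' WaveformView and masks it by playback progress; until then it falls back to the hand-drawn EstimatedWaveformView (the renamed WaveformView). A minimal sketch of that composition, mirroring the waveformView property added in VoiceMessageRoomPlaybackView below — the wrapper view name and metrics here are illustrative, the API calls are the ones introduced in this diff:

import DSWaveformImage
import DSWaveformImageViews
import SwiftUI

// Sketch only: mirrors the `waveformView` property added to VoiceMessageRoomPlaybackView below.
struct VoiceMessageWaveform: View {
    @ObservedObject var playerState: AudioPlayerState
    let lineWidth: CGFloat = 2
    let linePadding: CGFloat = 2

    var body: some View {
        if let url = playerState.fileURL {
            // Real waveform, sampled from the downloaded audio file.
            WaveformView(audioURL: url,
                         configuration: .init(style: .striped(.init(color: .black, width: lineWidth, spacing: linePadding)),
                                              verticalScalingFactor: 1.0),
                         placeholder: { estimated })
                .progressMask(progress: playerState.progress)
        } else {
            estimated
        }
    }

    private var estimated: some View {
        // Placeholder built from the event's estimated waveform data.
        EstimatedWaveformView(lineWidth: lineWidth,
                              linePadding: linePadding,
                              waveform: playerState.waveform,
                              progress: playerState.progress)
    }
}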

View File

@ -369,6 +369,7 @@
6B4BF4A6450F55939B49FAEF /* PollOptionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67779D9A1B797285A09B7720 /* PollOptionView.swift */; };
6BAD956B909A6E29F6CC6E7C /* ButtonStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8CC23C63849452BC86EA2852 /* ButtonStyle.swift */; };
6BB6944443C421C722ED1E7D /* portrait_test_video.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = F2D513D2477B57F90E98EEC0 /* portrait_test_video.mp4 */; };
6BDD969EFFAF18120429084A /* EstimatedWaveformView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */; };
6C34237AFB808E38FC8776B9 /* RoomStateEventStringBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8D55702474F279D910D2D162 /* RoomStateEventStringBuilder.swift */; };
6C5A2C454E6C198AB39ED760 /* SharedUserDefaultsKeys.swift in Sources */ = {isa = PBXBuildFile; fileRef = DBA8DC95C079805B0B56E8A9 /* SharedUserDefaultsKeys.swift */; };
6CD61FAF03E8986523C2ABB8 /* StartChatScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3005886F00029F058DB62BE /* StartChatScreenCoordinator.swift */; };
@ -448,6 +449,7 @@
829062DD3C3F7016FE1A6476 /* RoomDetailsScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BFDAF6918BB096C44788FC9 /* RoomDetailsScreenUITests.swift */; };
8317E1314C00DCCC99D30DA8 /* TextBasedRoomTimelineItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B9227F7495DA43324050A863 /* TextBasedRoomTimelineItem.swift */; };
83A4DAB181C56987C3E804FF /* MapTilerStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = F0B9F5BC4C80543DE7228B9D /* MapTilerStyle.swift */; };
8421FFCD5360A15D170922A8 /* ProgressMaskModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */; };
84226AD2E1F1FBC965F3B09E /* UnitTestsAppCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6A8E19C4645D3F5F9FB02355 /* UnitTestsAppCoordinator.swift */; };
84CAE3E96D93194DA06B9194 /* CallScreenViewModelProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = AD9AD6AE5FC868962F090740 /* CallScreenViewModelProtocol.swift */; };
84EFCB95F9DA2979C8042B26 /* UITestsSignalling.swift in Sources */ = {isa = PBXBuildFile; fileRef = B7F0192CE2F891141A25B49F /* UITestsSignalling.swift */; };
@ -594,6 +596,7 @@
A851635B3255C6DC07034A12 /* RoomScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8108C8F0ACF6A7EB72D0117 /* RoomScreenCoordinator.swift */; };
A8771F5975A82759FA5138AE /* RoomMemberDetailsScreenUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0F19DBE940499D3E3DD405D8 /* RoomMemberDetailsScreenUITests.swift */; };
A896998A6784DB6F16E912F4 /* MockMediaLoader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4AB7D7DAAAF662DED9D02379 /* MockMediaLoader.swift */; };
A93661C962B12942C08864B6 /* DSWaveformImageViews in Frameworks */ = {isa = PBXBuildFile; productRef = 2A4106A0A96DC4C273128AA5 /* DSWaveformImageViews */; };
A9482B967FC85DA611514D35 /* VoiceMessageRoomPlaybackView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CCD41CD67DB5DA0D436BFE9 /* VoiceMessageRoomPlaybackView.swift */; };
A969147E0EEE0E27EE226570 /* MediaUploadPreviewScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47F29139BC2A804CE5E0757E /* MediaUploadPreviewScreenViewModel.swift */; };
A975D60EA49F6AF73308809F /* RoomMembersListScreenMemberCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC03209FDE8CE0810617BFFF /* RoomMembersListScreenMemberCell.swift */; };
@ -743,7 +746,6 @@
D12F440F7973F1489F61389D /* NotificationSettingsScreenModels.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0F64447FF544298A6A3BEF85 /* NotificationSettingsScreenModels.swift */; };
D181AC8FF236B7F91C0A8C28 /* MapTiler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 23AA3F4B285570805CB0CCDD /* MapTiler.swift */; };
D19A748E95E2FAB2940570F0 /* CallScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = E4103AB4340F2974D690A12A /* CallScreen.swift */; };
D1DFECA12FBF5346EAC4EE92 /* WaveformView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A931ECBDC32FC90A6480751F /* WaveformView.swift */; };
D1EEF0CB0F5D9C15E224E670 /* landscape_test_video.mov in Resources */ = {isa = PBXBuildFile; fileRef = 9A2AC7BE17C05CF7D2A22338 /* landscape_test_video.mov */; };
D2A15D03F81342A09340BD56 /* AnalyticsScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEFEEE93B82937B2E86F92EB /* AnalyticsScreen.swift */; };
D2D70B5DB1A5E4AF0CD88330 /* target.yml in Resources */ = {isa = PBXBuildFile; fileRef = 033DB41C51865A2E83174E87 /* target.yml */; };
@ -1250,6 +1252,7 @@
5D26A086A8278D39B5756D6F /* project.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = project.yml; sourceTree = "<group>"; };
5D2D0A6F1ABC99D29462FB84 /* AuthenticationCoordinatorUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthenticationCoordinatorUITests.swift; sourceTree = "<group>"; };
5D99730313BEBF08CDE81EE3 /* EmojiDetection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiDetection.swift; sourceTree = "<group>"; };
5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EstimatedWaveformView.swift; sourceTree = "<group>"; };
5DE8D25D6A91030175D52A20 /* RoomTimelineItemProperties.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomTimelineItemProperties.swift; sourceTree = "<group>"; };
5EB2CAA266B921D128C35710 /* LegalInformationScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LegalInformationScreenCoordinator.swift; sourceTree = "<group>"; };
5F4134FEFE4EB55759017408 /* UserSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSessionProtocol.swift; sourceTree = "<group>"; };
@ -1329,6 +1332,7 @@
78910787F967CBC6042A101E /* StartChatScreenViewModelProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StartChatScreenViewModelProtocol.swift; sourceTree = "<group>"; };
78913D6E120D46138E97C107 /* NavigationSplitCoordinatorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NavigationSplitCoordinatorTests.swift; sourceTree = "<group>"; };
7893780A1FD6E3F38B3E9049 /* UserIndicatorControllerMock.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserIndicatorControllerMock.swift; sourceTree = "<group>"; };
79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressMaskModifier.swift; sourceTree = "<group>"; };
7A5D2323D7B6BF4913EB7EED /* landscape_test_image.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = landscape_test_image.jpg; sourceTree = "<group>"; };
7AB7ED3A898B07976F3AA90F /* BugReportViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BugReportViewModelTests.swift; sourceTree = "<group>"; };
7B04BD3874D736127A8156B8 /* it */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = it; path = it.lproj/Localizable.strings; sourceTree = "<group>"; };
@ -1466,7 +1470,6 @@
A7C4EA55DA62F9D0F984A2AE /* CollapsibleTimelineItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CollapsibleTimelineItem.swift; sourceTree = "<group>"; };
A861DA5932B128FE1DCB5CE2 /* InviteUsersScreenCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InviteUsersScreenCoordinator.swift; sourceTree = "<group>"; };
A8903A9F615BBD0E6D7CD133 /* ApplicationProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ApplicationProtocol.swift; sourceTree = "<group>"; };
A931ECBDC32FC90A6480751F /* WaveformView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WaveformView.swift; sourceTree = "<group>"; };
A9FAFE1C2149E6AC8156ED2B /* Collection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Collection.swift; sourceTree = "<group>"; };
AA19C32BD97F45847724E09A /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Untranslated.strings; sourceTree = "<group>"; };
AAC9344689121887B74877AF /* UnitTests.xctest */ = {isa = PBXFileReference; includeInIndex = 0; lastKnownFileType = wrapper.cfbundle; path = UnitTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
@ -1859,6 +1862,7 @@
36CD6E11B37396E14F032CB6 /* WysiwygComposer in Frameworks */,
A0D7E5BD0298A97DCBDCE40B /* Prefire in Frameworks */,
44F0E1B576C7599DF8022071 /* SwiftOGG in Frameworks */,
A93661C962B12942C08864B6 /* DSWaveformImageViews in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -2374,9 +2378,10 @@
3A542DF1C3BB67D829DFDC40 /* VoiceMessages */ = {
isa = PBXGroup;
children = (
5D9A987EAA44E2E1BDCDAFDC /* EstimatedWaveformView.swift */,
79A1D75C7C52CD14A327CC90 /* ProgressMaskModifier.swift */,
3CCD41CD67DB5DA0D436BFE9 /* VoiceMessageRoomPlaybackView.swift */,
B70A50C41C5871B4DB905E7E /* VoiceMessageRoomTimelineView.swift */,
A931ECBDC32FC90A6480751F /* WaveformView.swift */,
);
path = VoiceMessages;
sourceTree = "<group>";
@ -4363,6 +4368,7 @@
CA07D57389DACE18AEB6A5E2 /* WysiwygComposer */,
2629CF48B33643CD5F69C612 /* Prefire */,
391D11F92DFC91666AA1503F /* SwiftOGG */,
2A4106A0A96DC4C273128AA5 /* DSWaveformImageViews */,
);
productName = ElementX;
productReference = 4CD6AC7546E8D7E5C73CEA48 /* ElementX.app */;
@ -4481,6 +4487,7 @@
AC3475112CA40C2C6E78D1EB /* XCRemoteSwiftPackageReference "matrix-analytics-events" */,
F76A08D0EA29A07A54F4EB4D /* XCRemoteSwiftPackageReference "swift-collections" */,
9754C4B03F6255F67FC15E52 /* XCRemoteSwiftPackageReference "compound-ios" */,
4C34425923978C97409A3EF2 /* XCRemoteSwiftPackageReference "DSWaveformImage" */,
C13F55E4518415CB4C278E73 /* XCRemoteSwiftPackageReference "DTCoreText" */,
D5F7D47BBAAE0CF1DDEB3034 /* XCRemoteSwiftPackageReference "DeviceKit" */,
821C67C9A7F8CC3FD41B28B4 /* XCRemoteSwiftPackageReference "emojibase-bindings" */,
@ -4998,6 +5005,7 @@
9965CB800CE6BC74ACA969FC /* EncryptedHistoryRoomTimelineView.swift in Sources */,
4C5A638DAA8AF64565BA4866 /* EncryptedRoomTimelineItem.swift in Sources */,
B5903E48CF43259836BF2DBF /* EncryptedRoomTimelineView.swift in Sources */,
6BDD969EFFAF18120429084A /* EstimatedWaveformView.swift in Sources */,
F78BAD28482A467287A9A5A3 /* EventBasedMessageTimelineItemProtocol.swift in Sources */,
02D8DF8EB7537EB4E9019DDB /* EventBasedTimelineItemProtocol.swift in Sources */,
63E46D18B91D08E15FC04125 /* ExpiringTaskRunner.swift in Sources */,
@ -5192,6 +5200,7 @@
153E22E8227F46545E5D681C /* PollRoomTimelineView.swift in Sources */,
DF504B10A4918F971A57BEF2 /* PostHogAnalyticsClient.swift in Sources */,
FD4DEC88210F35C35B2FB386 /* ProcessInfo.swift in Sources */,
8421FFCD5360A15D170922A8 /* ProgressMaskModifier.swift in Sources */,
9B356742E035D90A8BB5CABE /* ProposedViewSize.swift in Sources */,
2835FD52F3F618D07F799B3D /* Publisher.swift in Sources */,
9095B9E40DB5CF8BA26CE0D8 /* ReactionsSummaryView.swift in Sources */,
@ -5440,7 +5449,6 @@
2F66701B15657A87B4AC3A0A /* WaitlistScreenModels.swift in Sources */,
CF3827071B0BC9638BD44F5D /* WaitlistScreenViewModel.swift in Sources */,
B717A820BE02C6FE2CB53F6E /* WaitlistScreenViewModelProtocol.swift in Sources */,
D1DFECA12FBF5346EAC4EE92 /* WaveformView.swift in Sources */,
D871C8CF46950F959C9A62C3 /* WelcomeScreen.swift in Sources */,
383055C6ABE5BE058CEE1DDB /* WelcomeScreenScreenCoordinator.swift in Sources */,
BD2BF1EC73FFB0C01552ECDA /* WelcomeScreenScreenModels.swift in Sources */,
@ -6087,6 +6095,14 @@
kind = branch;
};
};
4C34425923978C97409A3EF2 /* XCRemoteSwiftPackageReference "DSWaveformImage" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/dmrschmidt/DSWaveformImage";
requirement = {
kind = upToNextMinorVersion;
minimumVersion = 14.1.0;
};
};
61916C63E3F5BD900F08DA0C /* XCRemoteSwiftPackageReference "KeychainAccess" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/kishikawakatsumi/KeychainAccess";
@ -6310,6 +6326,11 @@
package = AC3475112CA40C2C6E78D1EB /* XCRemoteSwiftPackageReference "matrix-analytics-events" */;
productName = AnalyticsEvents;
};
2A4106A0A96DC4C273128AA5 /* DSWaveformImageViews */ = {
isa = XCSwiftPackageProductDependency;
package = 4C34425923978C97409A3EF2 /* XCRemoteSwiftPackageReference "DSWaveformImage" */;
productName = DSWaveformImageViews;
};
2B43F2AF7456567FE37270A7 /* KeychainAccess */ = {
isa = XCSwiftPackageProductDependency;
package = 61916C63E3F5BD900F08DA0C /* XCRemoteSwiftPackageReference "KeychainAccess" */;

View File

@ -25,6 +25,15 @@
"version" : "5.0.0"
}
},
{
"identity" : "dswaveformimage",
"kind" : "remoteSourceControl",
"location" : "https://github.com/dmrschmidt/DSWaveformImage",
"state" : {
"revision" : "6a4c99a8ab2d2a03f42de21fb8777172ebbcccb1",
"version" : "14.1.0"
}
},
{
"identity" : "dtcoretext",
"kind" : "remoteSourceControl",

View File

@ -51,7 +51,7 @@ struct VoiceMessagePreviewComposer: View {
struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
static let playerState = AudioPlayerState(duration: 10.0,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
progress: 0.4)
static var previews: some View {

View File

@ -45,7 +45,7 @@ struct VoiceMessageRecordingView: View {
.foregroundColor(.compound.textSecondary)
.monospacedDigit()
.fixedSize()
WaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: recorderState.waveform, progress: 0, showCursor: false)
EstimatedWaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: recorderState.waveform, progress: 0)
}
.padding(.leading, 2)
.padding(.trailing, 8)
@ -53,10 +53,10 @@ struct VoiceMessageRecordingView: View {
}
struct VoiceMessageRecordingView_Previews: PreviewProvider, TestablePreview {
static let waveform = Waveform(data: [3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0, 0, 0, 0, 3])
static let waveform = EstimatedWaveform(data: [3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0, 0, 0, 0, 3])
static let recorderState = AudioRecorderState()

View File

@ -437,7 +437,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: ""),
content: .init(body: "audio.ogg",
duration: 100,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
@ -500,12 +500,12 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: ""),
content: .init(body: "audio.ogg",
duration: 100,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))),
playerState: AudioPlayerState(duration: 10, waveform: Waveform.mockWaveform))
playerState: AudioPlayerState(duration: 10, waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}

View File

@ -227,12 +227,12 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
sender: .init(id: ""),
content: .init(body: "audio.ogg",
duration: 100,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))),
playerState: AudioPlayerState(duration: 10, waveform: Waveform.mockWaveform))
playerState: AudioPlayerState(duration: 10, waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}

View File

@ -30,7 +30,7 @@ enum AudioPlayerPlaybackState {
class AudioPlayerState: ObservableObject, Identifiable {
let id = UUID()
let duration: Double
let waveform: Waveform
let waveform: EstimatedWaveform
@Published private(set) var playbackState: AudioPlayerPlaybackState
@Published private(set) var progress: Double
@ -38,6 +38,10 @@ class AudioPlayerState: ObservableObject, Identifiable {
private var cancellables: Set<AnyCancellable> = []
private var displayLink: CADisplayLink?
/// The file URL that the last player attached to this object has loaded.
/// The file URL persists even after the AudioPlayer is detached.
private(set) var fileURL: URL?
var isAttached: Bool {
audioPlayer != nil
}
@ -46,9 +50,9 @@ class AudioPlayerState: ObservableObject, Identifiable {
displayLink != nil
}
init(duration: Double, waveform: Waveform? = nil, progress: Double = 0.0) {
init(duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
self.duration = duration
self.waveform = waveform ?? Waveform(data: [])
self.waveform = waveform ?? EstimatedWaveform(data: [])
self.progress = progress
playbackState = .stopped
}
@ -110,6 +114,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
playbackState = .loading
case .didFinishLoading:
playbackState = .readyToPlay
fileURL = audioPlayer?.url
case .didStartPlaying:
if let audioPlayer {
await restoreAudioPlayerState(audioPlayer: audioPlayer)
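The new fileURL only becomes non-nil after an attached player reports .didFinishLoading, so consumers must treat it as optional and keep the estimated waveform as a fallback. A small usage sketch reusing only the API shown above; the values are illustrative:

// Sketch: the waveform passed at init is the estimate carried by the event;
// fileURL stays nil until an attached AudioPlayer finishes loading the file.
let playerState = AudioPlayerState(duration: 10.0,
                                   waveform: EstimatedWaveform.mockWaveform,
                                   progress: 0.0)

if playerState.fileURL != nil {
    // Render the real waveform from the downloaded file.
} else {
    // Keep showing the EstimatedWaveformView placeholder.
}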

View File

@ -30,7 +30,7 @@ class AudioRecorderState: ObservableObject, Identifiable {
@Published private(set) var recordingState: AudioRecorderRecordingState = .stopped
@Published private(set) var duration = 0.0
@Published private(set) var waveform = Waveform(data: Array(repeating: 0, count: 100))
@Published private(set) var waveform = EstimatedWaveform(data: Array(repeating: 0, count: 100))
private weak var audioRecorder: AudioRecorderProtocol?
private var cancellables: Set<AnyCancellable> = []

View File

@ -20,7 +20,7 @@ import UniformTypeIdentifiers
struct AudioRoomTimelineItemContent: Hashable {
let body: String
let duration: TimeInterval
let waveform: Waveform?
let waveform: EstimatedWaveform?
let source: MediaSourceProxy?
let contentType: UTType?
}
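With the rename, the audio timeline item carries the waveform estimate taken from the event's audio metadata (see RoomTimelineItemFactory below) rather than anything decoded from the file. A construction sketch using the memberwise initialiser; the body and duration are illustrative:

// Sketch: audio timeline content whose waveform comes from the event metadata.
let content = AudioRoomTimelineItemContent(body: "audio.ogg",
                                           duration: 100,
                                           waveform: EstimatedWaveform.mockWaveform,
                                           source: nil,
                                           contentType: nil)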

View File

@ -16,11 +16,11 @@
import SwiftUI
struct Waveform: Equatable, Hashable {
struct EstimatedWaveform: Equatable, Hashable {
let data: [UInt16]
}
extension Waveform {
extension EstimatedWaveform {
func normalisedData(keepSamplesCount: Int) -> [Float] {
guard keepSamplesCount > 0 else {
return []
@ -43,45 +43,30 @@ extension Waveform {
}
}
extension Waveform {
static let mockWaveform = Waveform(data: [0, 0, 0, 3, 3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0])
extension EstimatedWaveform {
static let mockWaveform = EstimatedWaveform(data: [0, 0, 0, 3, 3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0])
}
struct WaveformView: View {
struct EstimatedWaveformView: View {
var lineWidth: CGFloat = 2
var linePadding: CGFloat = 2
var waveform: Waveform
var waveform: EstimatedWaveform
private let minimumGraphAmplitude: CGFloat = 1
var progress: CGFloat = 0.0
var showCursor = false
@State private var normalizedWaveformData: [Float] = []
var body: some View {
GeometryReader { geometry in
ZStack(alignment: .leading) {
Rectangle().fill(Color.compound.iconQuaternary)
.frame(width: geometry.size.width, height: geometry.size.height)
Rectangle().fill(Color.compound.iconSecondary)
.frame(width: max(0.0, geometry.size.width * progress), height: geometry.size.height)
}
.preference(key: ViewSizeKey.self, value: geometry.size)
.mask(alignment: .leading) {
WaveformShape(lineWidth: lineWidth,
linePadding: linePadding,
waveformData: normalizedWaveformData)
.stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
}
// Display a cursor
.overlay(alignment: .leading) {
RoundedRectangle(cornerRadius: 1).fill(Color.compound.iconAccentTertiary)
.offset(CGSize(width: progress * geometry.size.width, height: 0.0))
.frame(width: lineWidth, height: geometry.size.height)
.opacity(showCursor ? 1 : 0)
}
WaveformShape(lineWidth: lineWidth,
linePadding: linePadding,
waveformData: normalizedWaveformData)
.stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
.progressMask(progress: progress)
.preference(key: ViewSizeKey.self, value: geometry.size)
}
.onPreferenceChange(ViewSizeKey.self) { size in
buildNormalizedWaveformData(size: size)
@ -122,7 +107,7 @@ private struct WaveformShape: Shape {
while xOffset <= width {
let sample = CGFloat(index >= waveformData.count ? 0 : waveformData[index])
let drawingAmplitude = max(minimumGraphAmplitude, sample * (height - 2))
path.move(to: CGPoint(x: xOffset, y: centerY - drawingAmplitude / 2))
path.addLine(to: CGPoint(x: xOffset, y: centerY + drawingAmplitude / 2))
xOffset += lineWidth + linePadding
@ -137,7 +122,7 @@ struct WaveformView_Previews: PreviewProvider, TestablePreview {
static var previews: some View {
// Wrap the WaveformView in a VStack otherwise the preview test will fail (because of Prefire / GeometryReader)
VStack {
WaveformView(waveform: Waveform.mockWaveform, progress: 0.5)
EstimatedWaveformView(waveform: EstimatedWaveform.mockWaveform, progress: 0.5)
.frame(width: 140, height: 50)
}
}
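Because WaveformShape advances by lineWidth + linePadding per sample, the number of bars is driven by the available width rather than by the raw sample count; normalisedData(keepSamplesCount:) presumably downsamples the data to roughly that many values. A rough sizing sketch — the shape's starting x-offset isn't visible in this hunk, so treat the count as approximate:

import SwiftUI

// Approximate number of bars EstimatedWaveformView draws for a given width.
func approximateBarCount(width: CGFloat, lineWidth: CGFloat = 2, linePadding: CGFloat = 2) -> Int {
    guard lineWidth + linePadding > 0 else { return 0 }
    return Int(width / (lineWidth + linePadding))
}

// e.g. the 140pt-wide preview above fits roughly 140 / (2 + 2) = 35 bars.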

View File

@ -0,0 +1,56 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import SwiftUI
extension View {
func progressMask(progress: CGFloat,
trackColor: Color = .compound.iconSecondary,
backgroundTrackColor: Color = .compound.iconQuaternary) -> some View {
modifier(ProgressMaskModifier(progress: progress,
trackColor: trackColor,
backgroundTrackColor: backgroundTrackColor))
}
}
private struct ProgressMaskModifier: ViewModifier {
private let progress: CGFloat
private let trackColor: Color
private let backgroundTrackColor: Color
init(progress: CGFloat, trackColor: Color, backgroundTrackColor: Color) {
self.progress = progress
self.trackColor = trackColor
self.backgroundTrackColor = backgroundTrackColor
}
func body(content: Content) -> some View {
GeometryReader { geometry in
ZStack(alignment: .leading) {
Rectangle()
.fill(backgroundTrackColor)
.frame(width: geometry.size.width, height: geometry.size.height)
Rectangle()
.fill(trackColor)
.frame(width: max(0.0, geometry.size.width * progress), height: geometry.size.height)
}
.mask {
content
}
}
}
}
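The modifier masks a solid two-colour progress track with whatever view it is applied to, so the same call works for the estimated waveform stroke and for DSWaveformImage's rendered waveform. A minimal usage sketch with the default track colours; the label view is illustrative:

import SwiftUI

// Sketch: the applied view becomes the mask; the filled share of its width follows `progress`.
struct PlaybackProgressLabel: View {
    let progress: CGFloat // 0.0 ... 1.0

    var body: some View {
        Text("Voice message")
            .progressMask(progress: progress)
    }
}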

View File

@ -14,6 +14,8 @@
// limitations under the License.
//
import DSWaveformImage
import DSWaveformImageViews
import SwiftUI
struct VoiceMessageRoomPlaybackView: View {
@ -74,7 +76,15 @@ struct VoiceMessageRoomPlaybackView: View {
.fixedSize(horizontal: true, vertical: true)
}
GeometryReader { geometry in
WaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: playerState.waveform, progress: playerState.progress, showCursor: showWaveformCursor)
waveformView
.overlay(alignment: .leading) {
// Display a cursor
RoundedRectangle(cornerRadius: 1)
.fill(Color.compound.iconAccentTertiary)
.offset(CGSize(width: playerState.progress * geometry.size.width, height: 0.0))
.frame(width: waveformLineWidth, height: geometry.size.height)
.opacity(showWaveformCursor ? 1 : 0)
}
// Add a gesture to drag the waveform
.gesture(SpatialTapGesture()
.simultaneously(with: LongPressGesture())
@ -147,6 +157,26 @@ struct VoiceMessageRoomPlaybackView: View {
.frame(width: playPauseButtonSize,
height: playPauseButtonSize)
}
@ViewBuilder
private var waveformView: some View {
if let url = playerState.fileURL {
WaveformView(audioURL: url,
configuration: .init(style: .striped(.init(color: .black, width: waveformLineWidth, spacing: waveformLinePadding)),
verticalScalingFactor: 1.0),
placeholder: { estimatedWaveformView })
.progressMask(progress: playerState.progress)
} else {
estimatedWaveformView
}
}
private var estimatedWaveformView: some View {
EstimatedWaveformView(lineWidth: waveformLineWidth,
linePadding: waveformLinePadding,
waveform: playerState.waveform,
progress: playerState.progress)
}
}
private enum DragState: Equatable {
@ -183,10 +213,10 @@ private enum DragState: Equatable {
}
struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
static let waveform = Waveform(data: [3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0, 0, 0, 0, 3])
static let waveform = EstimatedWaveform(data: [3, 127, 400, 266, 126, 122, 373, 251, 45, 112,
334, 205, 99, 138, 397, 354, 125, 361, 199, 51,
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0, 0, 0, 0, 3])
static var playerState = AudioPlayerState(duration: 10.0,
waveform: waveform,

View File

@ -75,12 +75,12 @@ struct VoiceMessageRoomTimelineView_Previews: PreviewProvider, TestablePreview {
sender: .init(id: "Bob"),
content: .init(body: "audio.ogg",
duration: 300,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
source: nil,
contentType: nil))
static let playerState = AudioPlayerState(duration: 10.0,
waveform: Waveform.mockWaveform,
waveform: EstimatedWaveform.mockWaveform,
progress: 0.4)
static var previews: some View {

View File

@ -469,9 +469,9 @@ struct RoomTimelineItemFactory: RoomTimelineItemFactoryProtocol {
}
private func buildAudioTimelineItemContent(_ messageContent: AudioMessageContent) -> AudioRoomTimelineItemContent {
var waveform: Waveform?
var waveform: EstimatedWaveform?
if let audioWaveform = messageContent.audio?.waveform {
waveform = Waveform(data: audioWaveform)
waveform = EstimatedWaveform(data: audioWaveform)
}
return AudioRoomTimelineItemContent(body: messageContent.body,

View File

@ -194,6 +194,8 @@ targets:
- package: WysiwygComposer
- package: Prefire
- package: SwiftOGG
- package: DSWaveformImage
product: DSWaveformImageViews
sources:
- path: ../Sources

View File

@ -117,3 +117,9 @@ packages:
SwiftOGG:
url: https://github.com/vector-im/swift-ogg
branch: 0.0.1
DSWaveformImage:
url: https://github.com/dmrschmidt/DSWaveformImage
minorVersion: 14.1.0