Improved display of voice messages in the timeline (#1873)

Nicolas Mauri authored on 2023-10-10 18:20:31 +02:00, committed by GitHub
parent fc3bc00d92
commit b35bee5d8f
9 changed files with 153 additions and 99 deletions


@@ -33,16 +33,16 @@ class AudioPlayerState: ObservableObject {
     @Published private(set) var playbackState: AudioPlayerPlaybackState
     @Published private(set) var progress: Double
 
-    private var audioPlayer: AudioPlayerProtocol?
+    private weak var audioPlayer: AudioPlayerProtocol?
     private var cancellables: Set<AnyCancellable> = []
-    private var cancellableTimer: AnyCancellable?
+    private var displayLink: CADisplayLink?
 
     var isAttached: Bool {
         audioPlayer != nil
     }
 
     var isPublishingProgress: Bool {
-        cancellableTimer != nil
+        displayLink != nil
     }
 
     init(duration: Double, waveform: Waveform? = nil, progress: Double = 0.0) {
@@ -52,6 +52,11 @@ class AudioPlayerState: ObservableObject {
         playbackState = .stopped
     }
 
+    deinit {
+        displayLink?.invalidate()
+        displayLink = nil
+    }
+
     func updateState(progress: Double) async {
         let progress = max(0.0, min(progress, 1.0))
         self.progress = progress
@@ -91,25 +96,25 @@ class AudioPlayerState: ObservableObject {
                 guard let self else {
                     return
                 }
-                self.handleAudioPlayerAction(action)
+                Task {
+                    await self.handleAudioPlayerAction(action)
+                }
             }
             .store(in: &cancellables)
     }
 
-    private func handleAudioPlayerAction(_ action: AudioPlayerAction) {
+    private func handleAudioPlayerAction(_ action: AudioPlayerAction) async {
         switch action {
         case .didStartLoading:
             playbackState = .loading
         case .didFinishLoading:
-            if let audioPlayer {
-                Task {
-                    await restoreAudioPlayerState(audioPlayer: audioPlayer)
-                }
-            }
             playbackState = .readyToPlay
         case .didStartPlaying:
-            playbackState = .playing
+            if let audioPlayer {
+                await restoreAudioPlayerState(audioPlayer: audioPlayer)
+            }
             startPublishProgress()
+            playbackState = .playing
        case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
            stopPublishProgress()
            playbackState = .stopped
@@ -122,22 +127,23 @@ class AudioPlayerState: ObservableObject {
     }
 
     private func startPublishProgress() {
-        cancellableTimer?.cancel()
-
-        cancellableTimer = Timer.publish(every: 0.2, on: .main, in: .default)
-            .autoconnect()
-            .receive(on: DispatchQueue.main)
-            .sink(receiveValue: { [weak self] _ in
-                guard let self else { return }
-                if let currentTime = self.audioPlayer?.currentTime, self.duration > 0 {
-                    self.progress = currentTime / self.duration
-                }
-            })
+        if displayLink != nil {
+            stopPublishProgress()
+        }
+        displayLink = CADisplayLink(target: self, selector: #selector(updateProgress))
+        displayLink?.preferredFrameRateRange = .init(minimum: 10, maximum: 20)
+        displayLink?.add(to: .current, forMode: .common)
+    }
+
+    @objc private func updateProgress(displayLink: CADisplayLink) {
+        if let currentTime = audioPlayer?.currentTime, duration > 0 {
+            progress = currentTime / duration
+        }
     }
 
     private func stopPublishProgress() {
-        cancellableTimer?.cancel()
-        cancellableTimer = nil
+        displayLink?.invalidate()
+        displayLink = nil
     }
 
     private func restoreAudioPlayerState(audioPlayer: AudioPlayerProtocol) async {
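
A note on the change above: progress updates are now driven by a CADisplayLink throttled to 10-20 fps via preferredFrameRateRange, instead of a 0.2 s Combine timer. Here is the same pattern in isolation as a minimal sketch; ProgressTicker and its currentTimeProvider closure are illustrative names, not the app's API:

import Combine
import Foundation
import QuartzCore

final class ProgressTicker: NSObject, ObservableObject {
    @Published private(set) var progress: Double = 0.0

    private let duration: TimeInterval
    private let currentTimeProvider: () -> TimeInterval
    private var displayLink: CADisplayLink?

    init(duration: TimeInterval, currentTimeProvider: @escaping () -> TimeInterval) {
        self.duration = duration
        self.currentTimeProvider = currentTimeProvider
        super.init()
    }

    func start() {
        stop()
        displayLink = CADisplayLink(target: self, selector: #selector(tick))
        // A progress bar does not need 60/120 Hz updates; 10-20 fps is plenty.
        displayLink?.preferredFrameRateRange = .init(minimum: 10, maximum: 20)
        displayLink?.add(to: .current, forMode: .common)
    }

    func stop() {
        // invalidate() removes the link from its run loop and releases its target.
        displayLink?.invalidate()
        displayLink = nil
    }

    @objc private func tick(_ displayLink: CADisplayLink) {
        guard duration > 0 else { return }
        progress = currentTimeProvider() / duration
    }
}

Because CADisplayLink retains its target while scheduled, stop() (or an equivalent invalidate) must be called explicitly to release the ticker; the .common run loop mode keeps updates flowing while the timeline is being scrolled.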


@@ -24,7 +24,7 @@ enum MediaPlayerState {
     case error
 }
 
-protocol MediaPlayerProtocol {
+protocol MediaPlayerProtocol: AnyObject {
     var mediaSource: MediaSourceProxy? { get }
 
     var currentTime: TimeInterval { get }
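
The AnyObject constraint above is what makes the new weak var audioPlayer in AudioPlayerState possible: weak references only apply to class instances, so a protocol-typed weak property compiles only when the protocol is class-constrained. A tiny illustration with made-up types:

import Foundation

protocol PlayerLike: AnyObject {
    var currentTime: TimeInterval { get }
}

final class SomePlayer: PlayerLike {
    var currentTime: TimeInterval = 0
}

final class Holder {
    // Without the AnyObject constraint on PlayerLike this would not compile,
    // because a value type could otherwise conform and `weak` requires a class.
    weak var player: PlayerLike?
}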


@@ -28,13 +28,20 @@ struct VoiceMessageRoomPlaybackView: View {
     @ScaledMetric private var waveformLineWidth = 2.0
     @ScaledMetric private var waveformLinePadding = 2.0
     private let waveformMaxWidth: CGFloat = 150
-    private let playPauseButtonSize = CGSize(width: 32, height: 32)
+    @ScaledMetric private var playPauseButtonSize = 32
+    @ScaledMetric private var playPauseImagePadding = 8
 
     private static let elapsedTimeFormatter: DateFormatter = {
         let dateFormatter = DateFormatter()
         dateFormatter.dateFormat = "m:ss"
         return dateFormatter
     }()
 
+    private static let longElapsedTimeFormatter: DateFormatter = {
+        let dateFormatter = DateFormatter()
+        dateFormatter.dateFormat = "mm:ss"
+        return dateFormatter
+    }()
+
     @GestureState private var dragState = DragState.inactive
     @State private var tapProgress: Double = .zero
@@ -42,7 +49,13 @@ struct VoiceMessageRoomPlaybackView: View {
     var timeLabelContent: String {
         // Display the duration if progress is 0.0
         let percent = playerState.progress > 0.0 ? playerState.progress : 1.0
-        return Self.elapsedTimeFormatter.string(from: Date(timeIntervalSinceReferenceDate: playerState.duration * percent))
+        // If the duration is greater or equal 10 minutes, use the long format
+        let elapsed = Date(timeIntervalSinceReferenceDate: playerState.duration * percent)
+        if playerState.duration >= 600 {
+            return Self.longElapsedTimeFormatter.string(from: elapsed)
+        } else {
+            return Self.elapsedTimeFormatter.string(from: elapsed)
+        }
     }
 
     var showWaveformCursor: Bool {
@@ -57,6 +70,7 @@ struct VoiceMessageRoomPlaybackView: View {
                     .font(.compound.bodySMSemibold)
                     .foregroundColor(.compound.textSecondary)
                     .monospacedDigit()
+                    .fixedSize(horizontal: true, vertical: true)
             }
             GeometryReader { geometry in
                 WaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: playerState.waveform, progress: playerState.progress, showCursor: showWaveformCursor)
@@ -78,7 +92,7 @@ struct VoiceMessageRoomPlaybackView: View {
                             if let loc = drag?.location {
                                 progress = loc.x / geometry.size.width
                             }
-                            state = .dragging(progress: progress, distance: geometry.size.width)
+                            state = .dragging(progress: progress)
                         // Dragging ended or the long press cancelled.
                         default:
                             state = .inactive
@@ -96,17 +110,12 @@ struct VoiceMessageRoomPlaybackView: View {
                             onScrubbing(true)
                             feedbackGenerator.prepare()
                             sendFeedback = true
-                        case .dragging(let progress, let totalWidth):
+                        case .dragging(let progress):
                             if sendFeedback {
                                 feedbackGenerator.impactOccurred()
                                 sendFeedback = false
                             }
-                            let minimumProgress = waveformLinePadding / totalWidth
-                            let deltaProgress = abs(progress - playerState.progress)
-                            let deltaTime = playerState.duration * deltaProgress
-                            if deltaProgress == 0 || deltaProgress >= minimumProgress || deltaTime >= 1.0 {
-                                onSeek(max(0, min(progress, 1.0)))
-                            }
+                            onSeek(max(0, min(progress, 1.0)))
                         }
                     }
                     .padding(.leading, 2)
@@ -125,6 +134,8 @@ struct VoiceMessageRoomPlaybackView: View {
                 ProgressView()
             } else {
                 Image(asset: playerState.playbackState == .playing ? Asset.Images.mediaPause : Asset.Images.mediaPlay)
+                    .resizable()
+                    .padding(playPauseImagePadding)
                     .offset(x: playerState.playbackState == .playing ? 0 : 2)
                     .aspectRatio(contentMode: .fit)
                     .foregroundColor(.compound.iconSecondary)
@@ -132,21 +143,21 @@ struct VoiceMessageRoomPlaybackView: View {
                 }
             }
             .disabled(playerState.playbackState == .loading)
-            .frame(width: playPauseButtonSize.width,
-                   height: playPauseButtonSize.height)
+            .frame(width: playPauseButtonSize,
+                   height: playPauseButtonSize)
         }
     }
 
     private enum DragState: Equatable {
         case inactive
         case pressing(progress: Double)
-        case dragging(progress: Double, distance: Double)
+        case dragging(progress: Double)
 
         var progress: Double {
            switch self {
            case .inactive, .pressing:
                return .zero
-           case .dragging(let progress, _):
+           case .dragging(let progress):
                return progress
            }
        }
@@ -176,7 +187,7 @@ struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
                                            294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
                                            0, 0, 0, 0, 0, 3])
 
-    static let playerState = AudioPlayerState(duration: 10.0,
+    static var playerState = AudioPlayerState(duration: 10.0,
                                                waveform: waveform,
                                                progress: 0.3)
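
For reference, the new timing-label rule in isolation: show the full duration until playback starts, then switch to a two-digit minute format once the clip lasts ten minutes or more. The sketch below mirrors the view code above with an illustrative function name (formatElapsedTime); the explicit GMT time zone is an addition for the example so the reference date maps to 0:00 regardless of the device's zone:

import Foundation

private let shortElapsedTimeFormatter: DateFormatter = {
    let formatter = DateFormatter()
    formatter.dateFormat = "m:ss"  // e.g. "1:05"
    formatter.timeZone = TimeZone(secondsFromGMT: 0)
    return formatter
}()

private let longElapsedTimeFormatter: DateFormatter = {
    let formatter = DateFormatter()
    formatter.dateFormat = "mm:ss" // e.g. "12:05"
    formatter.timeZone = TimeZone(secondsFromGMT: 0)
    return formatter
}()

func formatElapsedTime(duration: TimeInterval, progress: Double) -> String {
    // Display the full duration while nothing has been played yet.
    let percent = progress > 0.0 ? progress : 1.0
    let elapsed = Date(timeIntervalSinceReferenceDate: duration * percent)
    // Durations of 10 minutes (600 s) or more use the two-digit minute format.
    let formatter = duration >= 600 ? longElapsedTimeFormatter : shortElapsedTimeFormatter
    return formatter.string(from: elapsed)
}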


@@ -21,14 +21,25 @@ struct Waveform: Equatable, Hashable {
 }
 
 extension Waveform {
-    func normalisedData(count: Int) -> [Float] {
-        guard count > 0 else {
+    func normalisedData(keepSamplesCount: Int) -> [Float] {
+        guard keepSamplesCount > 0 else {
             return []
         }
 
-        let stride = max(1, Int(data.count / count))
-        let data = data.striding(by: stride)
-        let max = data.max().flatMap { Float($0) } ?? 0
-        return data.map { Float($0) / max }
+        // Filter the data to keep only the expected number of samples
+        let originalCount = Double(data.count)
+        let expectedCount = Double(keepSamplesCount)
+        var filteredData: [UInt16] = []
+        if expectedCount < originalCount {
+            for index in 0..<keepSamplesCount {
+                let targetIndex = (Double(index) * (originalCount / expectedCount)).rounded()
+                filteredData.append(UInt16(data[Int(targetIndex)]))
+            }
+        } else {
+            filteredData = data
+        }
+        // Normalize the sample
+        let max = max(1.0, filteredData.max().flatMap { Float($0) } ?? 1.0)
+        return filteredData.map { Float($0) / max }
     }
 }
@@ -47,6 +58,8 @@ struct WaveformView: View {
     var progress: CGFloat = 0.0
     var showCursor = false
 
+    @State private var normalizedWaveformData: [Float] = []
+
     var body: some View {
         GeometryReader { geometry in
             ZStack(alignment: .leading) {
@@ -55,50 +68,77 @@ struct WaveformView: View {
                 Rectangle().fill(Color.compound.iconSecondary)
                     .frame(width: max(0.0, geometry.size.width * progress), height: geometry.size.height)
             }
+            .preference(key: ViewSizeKey.self, value: geometry.size)
             .mask(alignment: .leading) {
-                Path { path in
-                    let width = geometry.size.width
-                    let height = geometry.size.height
-                    let centerY = geometry.size.height / 2
-                    let visibleSamplesCount = Int(width / (lineWidth + linePadding))
-                    let normalisedData = waveform.normalisedData(count: visibleSamplesCount)
-                    var xOffset: CGFloat = lineWidth / 2
-                    var index = 0
-                    while xOffset <= width {
-                        let sample = CGFloat(index >= normalisedData.count ? 0 : normalisedData[index])
-                        let drawingAmplitude = max(minimumGraphAmplitude, sample * (height - 2))
-                        path.move(to: CGPoint(x: xOffset, y: centerY - drawingAmplitude / 2))
-                        path.addLine(to: CGPoint(x: xOffset, y: centerY + drawingAmplitude / 2))
-                        xOffset += lineWidth + linePadding
-                        index += 1
-                    }
-                }
-                .stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
+                WaveformShape(lineWidth: lineWidth,
+                              linePadding: linePadding,
+                              waveformData: normalizedWaveformData)
+                    .stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
             }
             // Display a cursor
             .overlay(alignment: .leading) {
                 RoundedRectangle(cornerRadius: 1).fill(Color.compound.iconAccentTertiary)
-                    .offset(CGSize(width: cursorPosition(progress: progress, width: geometry.size.width), height: 0.0))
+                    .offset(CGSize(width: progress * geometry.size.width, height: 0.0))
                     .frame(width: lineWidth, height: geometry.size.height)
                     .opacity(showCursor ? 1 : 0)
             }
         }
+        .onPreferenceChange(ViewSizeKey.self) { size in
+            buildNormalizedWaveformData(size: size)
+        }
     }
 
-    private func cursorPosition(progress: Double, width: Double) -> Double {
-        guard progress > 0 else {
-            return 0
+    private func buildNormalizedWaveformData(size: CGSize) {
+        let count = Int(size.width / (lineWidth + linePadding))
+        // Rebuild the normalized waveform data only if the count has changed
+        if normalizedWaveformData.count == count {
+            return
         }
-        let width = (width * progress)
-        return width - width.truncatingRemainder(dividingBy: lineWidth + linePadding)
+        normalizedWaveformData = waveform.normalisedData(keepSamplesCount: count)
    }
 }
 
+private struct ViewSizeKey: PreferenceKey {
+    static var defaultValue: CGSize = .zero
+
+    static func reduce(value: inout CGSize, nextValue: () -> CGSize) {
+        value = nextValue()
+    }
+}
+
+private struct WaveformShape: Shape {
+    let lineWidth: CGFloat
+    let linePadding: CGFloat
+    let waveformData: [Float]
+    var minimumGraphAmplitude: CGFloat = 1.0
+
+    func path(in rect: CGRect) -> Path {
+        let width = rect.size.width
+        let height = rect.size.height
+        let centerY = rect.size.height / 2
+        var xOffset: CGFloat = lineWidth / 2
+        var index = 0
+        var path = Path()
+        while xOffset <= width {
+            let sample = CGFloat(index >= waveformData.count ? 0 : waveformData[index])
+            let drawingAmplitude = max(minimumGraphAmplitude, sample * (height - 2))
+            path.move(to: CGPoint(x: xOffset, y: centerY - drawingAmplitude / 2))
+            path.addLine(to: CGPoint(x: xOffset, y: centerY + drawingAmplitude / 2))
+            xOffset += lineWidth + linePadding
+            index += 1
+        }
+        return path
+    }
+}
+
 struct WaveformView_Previews: PreviewProvider, TestablePreview {
     static var previews: some View {
-        WaveformView(waveform: Waveform.mockWaveform, progress: 0.5)
-            .frame(width: 140, height: 50)
+        // Wrap the WaveformView in a VStack otherwise the preview test will fail (because of Prefire / GeometryReader)
+        VStack {
+            WaveformView(waveform: Waveform.mockWaveform, progress: 0.5)
+                .frame(width: 140, height: 50)
+        }
     }
 }
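
Two patterns in this file are easier to follow outside the diff. First, the resampling: normalisedData(keepSamplesCount:) now picks evenly spaced samples instead of striding, and normalises against a peak clamped to at least 1 so a silent clip cannot divide by zero. A standalone sketch of the same routine (the free-function form and the defensive index clamp are additions for the example):

func normalisedSamples(from data: [UInt16], keeping keepSamplesCount: Int) -> [Float] {
    guard keepSamplesCount > 0, !data.isEmpty else { return [] }

    // Keep `keepSamplesCount` evenly spaced samples when the source is larger.
    let originalCount = Double(data.count)
    let expectedCount = Double(keepSamplesCount)
    var filtered: [UInt16] = []
    if expectedCount < originalCount {
        for index in 0..<keepSamplesCount {
            let targetIndex = (Double(index) * (originalCount / expectedCount)).rounded()
            filtered.append(data[min(Int(targetIndex), data.count - 1)]) // clamp defensively
        }
    } else {
        filtered = data
    }

    // Scale into 0...1; the lower bound of 1 avoids dividing by zero for silent clips.
    let peak = max(1.0, filtered.max().map(Float.init) ?? 1.0)
    return filtered.map { Float($0) / peak }
}

// Example: 8 samples reduced to 4 bars.
// normalisedSamples(from: [0, 10, 20, 40, 40, 20, 10, 0], keeping: 4) == [0.0, 0.5, 1.0, 0.25]

Second, the measure-then-rebuild pattern: the GeometryReader publishes its size through a PreferenceKey, and the waveform samples are only recomputed when the resulting bar count changes rather than on every render. A reduced sketch of that pattern with illustrative names (WidthKey, MeasuredBars):

import SwiftUI

private struct WidthKey: PreferenceKey {
    static var defaultValue: CGFloat = 0

    static func reduce(value: inout CGFloat, nextValue: () -> CGFloat) {
        value = nextValue()
    }
}

struct MeasuredBars: View {
    @State private var barCount = 0

    var body: some View {
        GeometryReader { geometry in
            Rectangle()
                .fill(Color.gray.opacity(0.3))
                // Publish the measured width up the view hierarchy.
                .preference(key: WidthKey.self, value: geometry.size.width)
        }
        .overlay(Text("\(barCount) bars"))
        .onPreferenceChange(WidthKey.self) { width in
            // 4 points per bar mirrors lineWidth + linePadding in the view above.
            let newCount = Int(width / 4)
            // Recompute only when the count actually changes, as the view above does.
            if newCount != barCount {
                barCount = newCount
            }
        }
    }
}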


@@ -100,9 +100,9 @@ class AudioPlayerStateTests: XCTestCase {
     func testHandlingAudioPlayerActionDidStartLoading() async throws {
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didStartLoading:
+            case .loading:
                 return true
             default:
                 return false
@@ -119,33 +119,29 @@ class AudioPlayerStateTests: XCTestCase {
         await audioPlayerState.updateState(progress: originalStateProgress)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didFinishLoading:
+            case .readyToPlay:
                 return true
             default:
                 return false
             }
         }
 
-        // The progress should be restored
-        let deferedProgress = deferFulfillment(audioPlayerSeekCalls) { progress in
-            progress == originalStateProgress
-        }
-
         audioPlayerActionsSubject.send(.didFinishLoading)
         try await deferred.fulfill()
-        try await deferedProgress.fulfill()
 
         // The state is expected to be .readyToPlay
         XCTAssertEqual(audioPlayerState.playbackState, .readyToPlay)
     }
 
     func testHandlingAudioPlayerActionDidStartPlaying() async throws {
+        await audioPlayerState.updateState(progress: 0.4)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didStartPlaying:
+            case .playing:
                 return true
             default:
                 return false
@@ -154,6 +150,7 @@ class AudioPlayerStateTests: XCTestCase {
         audioPlayerActionsSubject.send(.didStartPlaying)
         try await deferred.fulfill()
 
+        XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
         XCTAssertEqual(audioPlayerState.playbackState, .playing)
         XCTAssert(audioPlayerState.isPublishingProgress)
     }
@@ -162,9 +159,9 @@ class AudioPlayerStateTests: XCTestCase {
         await audioPlayerState.updateState(progress: 0.4)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didPausePlaying:
+            case .stopped:
                 return true
             default:
                 return false
@@ -182,9 +179,9 @@ class AudioPlayerStateTests: XCTestCase {
         await audioPlayerState.updateState(progress: 0.4)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didStopPlaying:
+            case .stopped:
                 return true
             default:
                 return false
@@ -202,9 +199,9 @@ class AudioPlayerStateTests: XCTestCase {
         await audioPlayerState.updateState(progress: 0.4)
         audioPlayerState.attachAudioPlayer(audioPlayerMock)
 
-        let deferred = deferFulfillment(audioPlayerActions) { action in
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
             switch action {
-            case .didFinishPlaying:
+            case .stopped:
                 return true
             default:
                 return false
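
The tests above now observe audioPlayerState.$playbackState instead of the raw player actions. Without the project's deferFulfillment helper, the same kind of assertion on a published state stream can be written with a plain XCTest expectation; the PlaybackState enum and names below are stand-ins for the example:

import Combine
import XCTest

enum PlaybackState { case stopped, loading, readyToPlay, playing }

final class PlaybackStateObservationExampleTests: XCTestCase {
    func testReachesReadyToPlay() {
        let subject = CurrentValueSubject<PlaybackState, Never>(.stopped)
        let readyToPlay = expectation(description: "readyToPlay published")
        var cancellables: Set<AnyCancellable> = []

        subject
            .filter { $0 == .readyToPlay }
            .first()
            .sink { _ in readyToPlay.fulfill() }
            .store(in: &cancellables)

        // Simulate what the mocked audio player would trigger.
        subject.send(.loading)
        subject.send(.readyToPlay)

        wait(for: [readyToPlay], timeout: 1.0)
    }
}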

Binary file not shown.