Improved display of voice messages in the timeline (#1873)

This commit is contained in:
Nicolas Mauri 2023-10-10 18:20:31 +02:00 committed by GitHub
parent fc3bc00d92
commit b35bee5d8f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 153 additions and 99 deletions

View File

@ -33,16 +33,16 @@ class AudioPlayerState: ObservableObject {
@Published private(set) var playbackState: AudioPlayerPlaybackState
@Published private(set) var progress: Double
private var audioPlayer: AudioPlayerProtocol?
private weak var audioPlayer: AudioPlayerProtocol?
private var cancellables: Set<AnyCancellable> = []
private var cancellableTimer: AnyCancellable?
private var displayLink: CADisplayLink?
// Whether an audio player is currently attached to this state object.
// NOTE(review): `audioPlayer` is declared `weak`, so this also flips to
// `false` once the attached player is deallocated elsewhere.
var isAttached: Bool {
audioPlayer != nil
}
var isPublishingProgress: Bool {
cancellableTimer != nil
displayLink != nil
}
init(duration: Double, waveform: Waveform? = nil, progress: Double = 0.0) {
@ -52,6 +52,11 @@ class AudioPlayerState: ObservableObject {
playbackState = .stopped
}
deinit {
// Stop the display link so it no longer fires; per the CADisplayLink API,
// invalidate() removes it from its run loop (otherwise the run loop would
// keep driving the selector).
displayLink?.invalidate()
// NOTE(review): nil-ing the property in deinit is redundant — the storage is
// being torn down anyway — but harmless; kept for symmetry with stopPublishProgress().
displayLink = nil
}
func updateState(progress: Double) async {
let progress = max(0.0, min(progress, 1.0))
self.progress = progress
@ -91,25 +96,25 @@ class AudioPlayerState: ObservableObject {
guard let self else {
return
}
self.handleAudioPlayerAction(action)
Task {
await self.handleAudioPlayerAction(action)
}
}
.store(in: &cancellables)
}
private func handleAudioPlayerAction(_ action: AudioPlayerAction) {
private func handleAudioPlayerAction(_ action: AudioPlayerAction) async {
switch action {
case .didStartLoading:
playbackState = .loading
case .didFinishLoading:
if let audioPlayer {
Task {
await restoreAudioPlayerState(audioPlayer: audioPlayer)
}
}
playbackState = .readyToPlay
case .didStartPlaying:
playbackState = .playing
if let audioPlayer {
await restoreAudioPlayerState(audioPlayer: audioPlayer)
}
startPublishProgress()
playbackState = .playing
case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
stopPublishProgress()
playbackState = .stopped
@ -122,22 +127,23 @@ class AudioPlayerState: ObservableObject {
}
private func startPublishProgress() {
cancellableTimer?.cancel()
cancellableTimer = Timer.publish(every: 0.2, on: .main, in: .default)
.autoconnect()
.receive(on: DispatchQueue.main)
.sink(receiveValue: { [weak self] _ in
guard let self else { return }
if let currentTime = self.audioPlayer?.currentTime, self.duration > 0 {
self.progress = currentTime / self.duration
}
})
if displayLink != nil {
stopPublishProgress()
}
displayLink = CADisplayLink(target: self, selector: #selector(updateProgress))
displayLink?.preferredFrameRateRange = .init(minimum: 10, maximum: 20)
displayLink?.add(to: .current, forMode: .common)
}
/// CADisplayLink callback: mirrors the attached player's playback position
/// into the published `progress` (0...1) so the UI can animate smoothly.
/// Does nothing while no player is attached or the duration is unknown/zero.
@objc private func updateProgress(displayLink: CADisplayLink) {
    guard let currentTime = audioPlayer?.currentTime, duration > 0 else {
        return
    }
    progress = currentTime / duration
}
private func stopPublishProgress() {
cancellableTimer?.cancel()
cancellableTimer = nil
displayLink?.invalidate()
displayLink = nil
}
private func restoreAudioPlayerState(audioPlayer: AudioPlayerProtocol) async {

View File

@ -24,7 +24,7 @@ enum MediaPlayerState {
case error
}
protocol MediaPlayerProtocol {
protocol MediaPlayerProtocol: AnyObject {
var mediaSource: MediaSourceProxy? { get }
var currentTime: TimeInterval { get }

View File

@ -28,13 +28,20 @@ struct VoiceMessageRoomPlaybackView: View {
@ScaledMetric private var waveformLineWidth = 2.0
@ScaledMetric private var waveformLinePadding = 2.0
private let waveformMaxWidth: CGFloat = 150
private let playPauseButtonSize = CGSize(width: 32, height: 32)
@ScaledMetric private var playPauseButtonSize = 32
@ScaledMetric private var playPauseImagePadding = 8
// Formats the elapsed-time label for durations under 10 minutes, e.g. "1:05".
// Static and cached because DateFormatter creation is expensive.
private static let elapsedTimeFormatter: DateFormatter = {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "m:ss"
return dateFormatter
}()
// Formats the elapsed-time label for durations of 10 minutes or more,
// e.g. "12:05" — two minute digits so the label width stays stable.
// Static and cached because DateFormatter creation is expensive.
private static let longElapsedTimeFormatter: DateFormatter = {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "mm:ss"
return dateFormatter
}()
@GestureState private var dragState = DragState.inactive
@State private var tapProgress: Double = .zero
@ -42,7 +49,13 @@ struct VoiceMessageRoomPlaybackView: View {
var timeLabelContent: String {
// Display the duration if progress is 0.0
let percent = playerState.progress > 0.0 ? playerState.progress : 1.0
return Self.elapsedTimeFormatter.string(from: Date(timeIntervalSinceReferenceDate: playerState.duration * percent))
// If the duration is greater or equal 10 minutes, use the long format
let elapsed = Date(timeIntervalSinceReferenceDate: playerState.duration * percent)
if playerState.duration >= 600 {
return Self.longElapsedTimeFormatter.string(from: elapsed)
} else {
return Self.elapsedTimeFormatter.string(from: elapsed)
}
}
var showWaveformCursor: Bool {
@ -57,6 +70,7 @@ struct VoiceMessageRoomPlaybackView: View {
.font(.compound.bodySMSemibold)
.foregroundColor(.compound.textSecondary)
.monospacedDigit()
.fixedSize(horizontal: true, vertical: true)
}
GeometryReader { geometry in
WaveformView(lineWidth: waveformLineWidth, linePadding: waveformLinePadding, waveform: playerState.waveform, progress: playerState.progress, showCursor: showWaveformCursor)
@ -78,7 +92,7 @@ struct VoiceMessageRoomPlaybackView: View {
if let loc = drag?.location {
progress = loc.x / geometry.size.width
}
state = .dragging(progress: progress, distance: geometry.size.width)
state = .dragging(progress: progress)
// Dragging ended or the long press cancelled.
default:
state = .inactive
@ -96,17 +110,12 @@ struct VoiceMessageRoomPlaybackView: View {
onScrubbing(true)
feedbackGenerator.prepare()
sendFeedback = true
case .dragging(let progress, let totalWidth):
case .dragging(let progress):
if sendFeedback {
feedbackGenerator.impactOccurred()
sendFeedback = false
}
let minimumProgress = waveformLinePadding / totalWidth
let deltaProgress = abs(progress - playerState.progress)
let deltaTime = playerState.duration * deltaProgress
if deltaProgress == 0 || deltaProgress >= minimumProgress || deltaTime >= 1.0 {
onSeek(max(0, min(progress, 1.0)))
}
onSeek(max(0, min(progress, 1.0)))
}
}
.padding(.leading, 2)
@ -125,6 +134,8 @@ struct VoiceMessageRoomPlaybackView: View {
ProgressView()
} else {
Image(asset: playerState.playbackState == .playing ? Asset.Images.mediaPause : Asset.Images.mediaPlay)
.resizable()
.padding(playPauseImagePadding)
.offset(x: playerState.playbackState == .playing ? 0 : 2)
.aspectRatio(contentMode: .fit)
.foregroundColor(.compound.iconSecondary)
@ -132,21 +143,21 @@ struct VoiceMessageRoomPlaybackView: View {
}
}
.disabled(playerState.playbackState == .loading)
.frame(width: playPauseButtonSize.width,
height: playPauseButtonSize.height)
.frame(width: playPauseButtonSize,
height: playPauseButtonSize)
}
}
private enum DragState: Equatable {
case inactive
case pressing(progress: Double)
case dragging(progress: Double, distance: Double)
case dragging(progress: Double)
var progress: Double {
switch self {
case .inactive, .pressing:
return .zero
case .dragging(let progress, _):
case .dragging(let progress):
return progress
}
}
@ -176,7 +187,7 @@ struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
294, 131, 19, 2, 3, 3, 1, 2, 0, 0,
0, 0, 0, 0, 0, 3])
static let playerState = AudioPlayerState(duration: 10.0,
static var playerState = AudioPlayerState(duration: 10.0,
waveform: waveform,
progress: 0.3)

View File

@ -21,14 +21,25 @@ struct Waveform: Equatable, Hashable {
}
extension Waveform {
func normalisedData(count: Int) -> [Float] {
guard count > 0 else {
func normalisedData(keepSamplesCount: Int) -> [Float] {
guard keepSamplesCount > 0 else {
return []
}
let stride = max(1, Int(data.count / count))
let data = data.striding(by: stride)
let max = data.max().flatMap { Float($0) } ?? 0
return data.map { Float($0) / max }
// Filter the data to keep only the expected number of samples
let originalCount = Double(data.count)
let expectedCount = Double(keepSamplesCount)
var filteredData: [UInt16] = []
if expectedCount < originalCount {
for index in 0..<keepSamplesCount {
let targetIndex = (Double(index) * (originalCount / expectedCount)).rounded()
filteredData.append(UInt16(data[Int(targetIndex)]))
}
} else {
filteredData = data
}
// Normalize the sample
let max = max(1.0, filteredData.max().flatMap { Float($0) } ?? 1.0)
return filteredData.map { Float($0) / max }
}
}
@ -47,6 +58,8 @@ struct WaveformView: View {
var progress: CGFloat = 0.0
var showCursor = false
@State private var normalizedWaveformData: [Float] = []
var body: some View {
GeometryReader { geometry in
ZStack(alignment: .leading) {
@ -55,50 +68,77 @@ struct WaveformView: View {
Rectangle().fill(Color.compound.iconSecondary)
.frame(width: max(0.0, geometry.size.width * progress), height: geometry.size.height)
}
.preference(key: ViewSizeKey.self, value: geometry.size)
.mask(alignment: .leading) {
Path { path in
let width = geometry.size.width
let height = geometry.size.height
let centerY = geometry.size.height / 2
let visibleSamplesCount = Int(width / (lineWidth + linePadding))
let normalisedData = waveform.normalisedData(count: visibleSamplesCount)
var xOffset: CGFloat = lineWidth / 2
var index = 0
while xOffset <= width {
let sample = CGFloat(index >= normalisedData.count ? 0 : normalisedData[index])
let drawingAmplitude = max(minimumGraphAmplitude, sample * (height - 2))
path.move(to: CGPoint(x: xOffset, y: centerY - drawingAmplitude / 2))
path.addLine(to: CGPoint(x: xOffset, y: centerY + drawingAmplitude / 2))
xOffset += lineWidth + linePadding
index += 1
}
}
.stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
WaveformShape(lineWidth: lineWidth,
linePadding: linePadding,
waveformData: normalizedWaveformData)
.stroke(Color.compound.iconSecondary, style: StrokeStyle(lineWidth: lineWidth, lineCap: .round))
}
// Display a cursor
.overlay(alignment: .leading) {
RoundedRectangle(cornerRadius: 1).fill(Color.compound.iconAccentTertiary)
.offset(CGSize(width: cursorPosition(progress: progress, width: geometry.size.width), height: 0.0))
.offset(CGSize(width: progress * geometry.size.width, height: 0.0))
.frame(width: lineWidth, height: geometry.size.height)
.opacity(showCursor ? 1 : 0)
}
}
.onPreferenceChange(ViewSizeKey.self) { size in
buildNormalizedWaveformData(size: size)
}
}
private func cursorPosition(progress: Double, width: Double) -> Double {
guard progress > 0 else {
return 0
private func buildNormalizedWaveformData(size: CGSize) {
let count = Int(size.width / (lineWidth + linePadding))
// Rebuild the normalized waveform data only if the count has changed
if normalizedWaveformData.count == count {
return
}
let width = (width * progress)
return width - width.truncatingRemainder(dividingBy: lineWidth + linePadding)
normalizedWaveformData = waveform.normalisedData(keepSamplesCount: count)
}
}
// PreferenceKey used to bubble the waveform view's measured size up from the
// GeometryReader, so the normalized sample data can be rebuilt when the
// available width changes (see buildNormalizedWaveformData(size:)).
private struct ViewSizeKey: PreferenceKey {
static var defaultValue: CGSize = .zero
// Keep only the most recently reported size.
static func reduce(value: inout CGSize, nextValue: () -> CGSize) {
value = nextValue()
}
}
/// Draws a voice-message waveform as a series of vertical, rounded bars.
///
/// One bar is drawn per entry in `waveformData` (values expected in 0...1),
/// spaced `lineWidth + linePadding` apart and centered vertically. Positions
/// past the end of the data are drawn at the minimum amplitude so the shape
/// always fills the available width.
private struct WaveformShape: Shape {
    let lineWidth: CGFloat
    let linePadding: CGFloat
    let waveformData: [Float]
    // Floor for a bar's height so silent samples remain visible.
    var minimumGraphAmplitude: CGFloat = 1.0

    func path(in rect: CGRect) -> Path {
        var path = Path()
        let step = lineWidth + linePadding
        let midY = rect.size.height / 2
        // Leave 1pt of headroom at the top and bottom of the rect.
        let usableHeight = rect.size.height - 2
        var x = lineWidth / 2
        var sampleIndex = 0
        while x <= rect.size.width {
            // Out-of-range positions fall back to a zero sample (minimum bar).
            let sample: CGFloat
            if waveformData.indices.contains(sampleIndex) {
                sample = CGFloat(waveformData[sampleIndex])
            } else {
                sample = 0
            }
            let amplitude = max(minimumGraphAmplitude, sample * usableHeight)
            path.move(to: CGPoint(x: x, y: midY - amplitude / 2))
            path.addLine(to: CGPoint(x: x, y: midY + amplitude / 2))
            x += step
            sampleIndex += 1
        }
        return path
    }
}
struct WaveformView_Previews: PreviewProvider, TestablePreview {
static var previews: some View {
WaveformView(waveform: Waveform.mockWaveform, progress: 0.5)
.frame(width: 140, height: 50)
// Wrap the WaveformView in a VStack otherwise the preview test will fail (because of Prefire / GeometryReader)
VStack {
WaveformView(waveform: Waveform.mockWaveform, progress: 0.5)
.frame(width: 140, height: 50)
}
}
}

View File

@ -100,9 +100,9 @@ class AudioPlayerStateTests: XCTestCase {
func testHandlingAudioPlayerActionDidStartLoading() async throws {
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didStartLoading:
case .loading:
return true
default:
return false
@ -119,33 +119,29 @@ class AudioPlayerStateTests: XCTestCase {
await audioPlayerState.updateState(progress: originalStateProgress)
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didFinishLoading:
case .readyToPlay:
return true
default:
return false
}
}
// The progress should be restored
let deferedProgress = deferFulfillment(audioPlayerSeekCalls) { progress in
progress == originalStateProgress
}
audioPlayerActionsSubject.send(.didFinishLoading)
try await deferred.fulfill()
try await deferedProgress.fulfill()
// The state is expected to be .readyToPlay
XCTAssertEqual(audioPlayerState.playbackState, .readyToPlay)
}
func testHandlingAudioPlayerActionDidStartPlaying() async throws {
await audioPlayerState.updateState(progress: 0.4)
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didStartPlaying:
case .playing:
return true
default:
return false
@ -154,6 +150,7 @@ class AudioPlayerStateTests: XCTestCase {
audioPlayerActionsSubject.send(.didStartPlaying)
try await deferred.fulfill()
XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
XCTAssertEqual(audioPlayerState.playbackState, .playing)
XCTAssert(audioPlayerState.isPublishingProgress)
}
@ -162,9 +159,9 @@ class AudioPlayerStateTests: XCTestCase {
await audioPlayerState.updateState(progress: 0.4)
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didPausePlaying:
case .stopped:
return true
default:
return false
@ -182,9 +179,9 @@ class AudioPlayerStateTests: XCTestCase {
await audioPlayerState.updateState(progress: 0.4)
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didStopPlaying:
case .stopped:
return true
default:
return false
@ -202,9 +199,9 @@ class AudioPlayerStateTests: XCTestCase {
await audioPlayerState.updateState(progress: 0.4)
audioPlayerState.attachAudioPlayer(audioPlayerMock)
let deferred = deferFulfillment(audioPlayerActions) { action in
let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
switch action {
case .didFinishPlaying:
case .stopped:
return true
default:
return false

Binary file not shown.