Extract room screen action handling into separate component (#2045)

* Extract room screen action handling into separate component

* Fix tracing configuration unit tests

* Move the custom reactionsCollapsedBinding to the TimelineReactionsView

* Move voice message playback handling from the timelineController to the room view model

* Reorder methods

* Remove the need for a media player provider in the timeline controller

* Move room attachments opening to the view model, remove the need for a media provider in the timeline controller

* Rename RoomScreenActionsHandler -> RoomScreenInteractionHandler

* Move message sending retry and cancellation to the timeline controller

* Move audio playback, attachment loading and user tap handling into the InteractionHandler

* Fix unit tests

* Switch the swiftlint file_length rule error threshold back to 1000 lines
Stefan Ceriu 2023-11-08 18:31:39 +02:00 committed by GitHub
parent 07313926c9
commit abe2080ee2
25 changed files with 1085 additions and 925 deletions
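The bullets above describe the core of the refactor that the diff below implements: the new RoomScreenInteractionHandler owns the interaction logic (action menus, polls, paste/drop, voice messages, audio playback, user taps) and emits RoomScreenInteractionHandlerAction values through a Combine publisher, while RoomScreenViewModel subscribes to that publisher and maps each action onto its own state or RoomScreenViewModelAction. A minimal sketch of that wiring, using simplified hypothetical stand-in types rather than the project's real protocols:

import Combine
import Foundation

// Hypothetical, simplified stand-ins for the real project types.
enum InteractionHandlerAction { case displayError(String) }
enum ViewModelAction { case displayError(String) }

@MainActor
final class InteractionHandler {
    private let actionsSubject = PassthroughSubject<InteractionHandlerAction, Never>()
    var actions: AnyPublisher<InteractionHandlerAction, Never> { actionsSubject.eraseToAnyPublisher() }

    func somethingFailed() {
        // The handler never mutates view state directly; it only emits actions.
        actionsSubject.send(.displayError("Something went wrong"))
    }
}

@MainActor
final class ViewModel {
    private let interactionHandler = InteractionHandler()
    private let actionsSubject = PassthroughSubject<ViewModelAction, Never>()
    private var cancellables = Set<AnyCancellable>()

    var actions: AnyPublisher<ViewModelAction, Never> { actionsSubject.eraseToAnyPublisher() }

    init() {
        // Mirrors the subscription added to RoomScreenViewModel.setupSubscriptions() in this commit.
        interactionHandler.actions
            .receive(on: DispatchQueue.main)
            .sink { [weak self] action in
                switch action {
                case .displayError(let message):
                    // The view model decides how each action surfaces (alert, toast, forwarded action, ...).
                    self?.actionsSubject.send(.displayError(message))
                }
            }
            .store(in: &cancellables)
    }
}

In the real code the handler is built inside RoomScreenViewModel's initialiser, so RoomScreenCoordinator only passes the extra dependencies (media player provider, voice message media manager, and so on) through to the view model.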

View File

@@ -25,7 +25,7 @@ line_length:
file_length:
warning: 1000
error: 1200
error: 1000
type_name:
min_length: 3

View File

@@ -263,6 +263,7 @@
4714991754A08B58B4D7ED85 /* OnboardingScreenViewModelProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = E2F27BAB69EB568369F1F6B3 /* OnboardingScreenViewModelProtocol.swift */; };
47305C0911C9E1AA774A4000 /* TemplateScreenCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = CA90BD288E5AE6BC643AFDDF /* TemplateScreenCoordinator.swift */; };
4799A852132F1744E2825994 /* CreateRoomViewModelProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 340179A0FC1AD4AEDA7FC134 /* CreateRoomViewModelProtocol.swift */; };
47FF70C051A991FB65CDBCF3 /* RoomScreenInteractionHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0135A608FFAD86E6674EE730 /* RoomScreenInteractionHandler.swift */; };
4807E8F51DB54F56B25E1C7E /* AppLockSetupSettingsScreenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1D8C38663020DF2EB2D13F5E /* AppLockSetupSettingsScreenViewModel.swift */; };
484202C5D50983442D24D061 /* AttributedString.swift in Sources */ = {isa = PBXBuildFile; fileRef = 52BD6ED18E2EB61E28C340AD /* AttributedString.swift */; };
491D62ACD19E6F134B1766AF /* RoomNotificationSettingsUserDefinedScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3203C6566DC17B7AECC1B7FD /* RoomNotificationSettingsUserDefinedScreen.swift */; };
@@ -1029,6 +1030,7 @@
/* Begin PBXFileReference section */
00245D40CD90FD71D6A05239 /* EmojiPickerScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiPickerScreen.swift; sourceTree = "<group>"; };
00E5B2CBEF8F96424F095508 /* RoomDetailsEditScreenViewModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomDetailsEditScreenViewModelTests.swift; sourceTree = "<group>"; };
0135A608FFAD86E6674EE730 /* RoomScreenInteractionHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RoomScreenInteractionHandler.swift; sourceTree = "<group>"; };
01C4C7DB37597D7D8379511A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
022E6BD64CB4610B9C95FC02 /* UserDetailsEditScreenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserDetailsEditScreenViewModel.swift; sourceTree = "<group>"; };
024F7398C5FC12586FB10E9D /* EffectsScene.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EffectsScene.swift; sourceTree = "<group>"; };
@@ -3066,6 +3068,7 @@
isa = PBXGroup;
children = (
B8108C8F0ACF6A7EB72D0117 /* RoomScreenCoordinator.swift */,
0135A608FFAD86E6674EE730 /* RoomScreenInteractionHandler.swift */,
C2886615BEBAE33A0AA4D5F8 /* RoomScreenModels.swift */,
9CE3C90E487B255B735D73C8 /* RoomScreenViewModel.swift */,
A00C7A331B72C0F05C00392F /* RoomScreenViewModelProtocol.swift */,
@@ -5706,6 +5709,7 @@
FA9C427FFB11B1AA2DCC5602 /* RoomProxyProtocol.swift in Sources */,
C55A44C99F64A479ABA85B46 /* RoomScreen.swift in Sources */,
A851635B3255C6DC07034A12 /* RoomScreenCoordinator.swift in Sources */,
47FF70C051A991FB65CDBCF3 /* RoomScreenInteractionHandler.swift in Sources */,
352C439BE0F75E101EF11FB1 /* RoomScreenModels.swift in Sources */,
7BB31E67648CF32D2AB5E502 /* RoomScreenViewModel.swift in Sources */,
617624A97BDBB75ED3DD8156 /* RoomScreenViewModelProtocol.swift in Sources */,

View File

@@ -332,8 +332,6 @@ class RoomFlowCoordinator: FlowCoordinatorProtocol {
let userID = userSession.clientProxy.userID
let mediaPlayerProvider = MediaPlayerProvider()
let timelineItemFactory = RoomTimelineItemFactory(userID: userID,
mediaProvider: userSession.mediaProvider,
attributedStringBuilder: AttributedStringBuilder(permalinkBaseURL: appSettings.permalinkBaseURL,
@@ -343,9 +341,6 @@ class RoomFlowCoordinator: FlowCoordinatorProtocol {
let timelineController = roomTimelineControllerFactory.buildRoomTimelineController(roomProxy: roomProxy,
timelineItemFactory: timelineItemFactory,
mediaProvider: userSession.mediaProvider,
mediaPlayerProvider: mediaPlayerProvider,
voiceMessageMediaManager: userSession.voiceMessageMediaManager,
secureBackupController: userSession.clientProxy.secureBackupController)
self.timelineController = timelineController
@@ -356,7 +351,8 @@ class RoomFlowCoordinator: FlowCoordinatorProtocol {
let parameters = RoomScreenCoordinatorParameters(roomProxy: roomProxy,
timelineController: timelineController,
mediaProvider: userSession.mediaProvider,
mediaPlayerProvider: mediaPlayerProvider,
mediaPlayerProvider: MediaPlayerProvider(),
voiceMessageMediaManager: userSession.voiceMessageMediaManager,
emojiProvider: emojiProvider,
completionSuggestionService: completionSuggestionService,
appSettings: appSettings)

View File

@@ -24,6 +24,7 @@ struct RoomScreenCoordinatorParameters {
let timelineController: RoomTimelineControllerProtocol
let mediaProvider: MediaProviderProtocol
let mediaPlayerProvider: MediaPlayerProviderProtocol
let voiceMessageMediaManager: VoiceMessageMediaManagerProtocol
let emojiProvider: EmojiProviderProtocol
let completionSuggestionService: CompletionSuggestionServiceProtocol
let appSettings: AppSettings
@@ -59,14 +60,16 @@ final class RoomScreenCoordinator: CoordinatorProtocol {
init(parameters: RoomScreenCoordinatorParameters) {
self.parameters = parameters
viewModel = RoomScreenViewModel(timelineController: parameters.timelineController,
viewModel = RoomScreenViewModel(roomProxy: parameters.roomProxy,
timelineController: parameters.timelineController,
mediaProvider: parameters.mediaProvider,
mediaPlayerProvider: parameters.mediaPlayerProvider,
roomProxy: parameters.roomProxy,
appSettings: parameters.appSettings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: parameters.voiceMessageMediaManager,
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: UIApplication.shared)
application: UIApplication.shared,
appSettings: parameters.appSettings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenter.default)
wysiwygViewModel = WysiwygComposerViewModel(minHeight: ComposerConstant.minHeight,
maxCompressedHeight: ComposerConstant.maxHeight,

View File

@@ -0,0 +1,652 @@
//
// Copyright 2023 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import Combine
import UIKit
enum RoomScreenInteractionHandlerAction {
case composer(action: RoomScreenComposerAction)
case displayError(RoomScreenErrorType)
case displayEmojiPicker(itemID: TimelineItemIdentifier, selectedEmojis: Set<String>)
case displayReportContent(itemID: TimelineItemIdentifier, senderID: String)
case displayMessageForwarding(itemID: TimelineItemIdentifier)
case displayMediaUploadPreviewScreen(url: URL)
case displayRoomMemberDetails(member: RoomMemberProxyProtocol)
case showActionMenu(TimelineItemActionMenuInfo)
case showDebugInfo(TimelineItemDebugInfo)
case showConfirmationAlert(AlertInfo<UUID>)
}
@MainActor
class RoomScreenInteractionHandler {
private let roomProxy: RoomProxyProtocol
private let timelineController: RoomTimelineControllerProtocol
private let mediaProvider: MediaProviderProtocol
private let mediaPlayerProvider: MediaPlayerProviderProtocol
private let voiceMessageRecorder: VoiceMessageRecorderProtocol
private let voiceMessageMediaManager: VoiceMessageMediaManagerProtocol
private let userIndicatorController: UserIndicatorControllerProtocol
private let application: ApplicationProtocol
private let appSettings: AppSettings
private let analyticsService: AnalyticsService
private let actionsSubject: PassthroughSubject<RoomScreenInteractionHandlerAction, Never> = .init()
var actions: AnyPublisher<RoomScreenInteractionHandlerAction, Never> {
actionsSubject.eraseToAnyPublisher()
}
private var voiceMessageRecorderObserver: AnyCancellable?
private var canCurrentUserRedact = false
private var resumeVoiceMessagePlaybackAfterScrubbing = false
init(roomProxy: RoomProxyProtocol,
timelineController: RoomTimelineControllerProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
voiceMessageRecorder: VoiceMessageRecorderProtocol,
userIndicatorController: UserIndicatorControllerProtocol,
application: ApplicationProtocol,
appSettings: AppSettings,
analyticsService: AnalyticsService) {
self.roomProxy = roomProxy
self.timelineController = timelineController
self.mediaProvider = mediaProvider
self.mediaPlayerProvider = mediaPlayerProvider
self.voiceMessageMediaManager = voiceMessageMediaManager
self.voiceMessageRecorder = voiceMessageRecorder
self.userIndicatorController = userIndicatorController
self.application = application
self.appSettings = appSettings
self.analyticsService = analyticsService
}
// MARK: Timeline Item Action Menu
func showTimelineItemActionMenu(for itemID: TimelineItemIdentifier) {
Task {
if case let .success(value) = await roomProxy.canUserRedact(userID: roomProxy.ownUserID) {
canCurrentUserRedact = value
} else {
canCurrentUserRedact = false
}
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
// Don't show a menu for non-event based items.
return
}
actionsSubject.send(.composer(action: .removeFocus))
actionsSubject.send(.showActionMenu(.init(item: eventTimelineItem)))
}
}
// swiftlint:disable:next cyclomatic_complexity
func timelineItemMenuActionsForItemId(_ itemID: TimelineItemIdentifier) -> TimelineItemMenuActions? {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let item = timelineItem as? EventBasedTimelineItemProtocol else {
// Don't show a context menu for non-event based items.
return nil
}
if timelineItem is StateRoomTimelineItem {
// Don't show a context menu for state events.
return nil
}
var debugActions: [TimelineItemMenuAction] = []
if appSettings.canShowDeveloperOptions || appSettings.viewSourceEnabled {
debugActions.append(.viewSource)
}
if let encryptedItem = timelineItem as? EncryptedRoomTimelineItem {
switch encryptedItem.encryptionType {
case .megolmV1AesSha2(let sessionID):
debugActions.append(.retryDecryption(sessionID: sessionID))
default:
break
}
return .init(actions: [.copyPermalink], debugActions: debugActions)
}
var actions: [TimelineItemMenuAction] = []
if item.canBeRepliedTo {
if let messageItem = item as? EventBasedMessageTimelineItemProtocol {
actions.append(.reply(isThread: messageItem.isThreaded))
} else {
actions.append(.reply(isThread: false))
}
}
if item.isForwardable {
actions.append(.forward(itemID: itemID))
}
if item.isEditable {
actions.append(.edit)
}
if item.isCopyable {
actions.append(.copy)
}
actions.append(.copyPermalink)
if canRedactItem(item), let poll = item.pollIfAvailable, !poll.hasEnded, let eventID = itemID.eventID {
actions.append(.endPoll(pollStartID: eventID))
}
if canRedactItem(item) {
actions.append(.redact)
}
if !item.isOutgoing {
actions.append(.report)
}
if item.hasFailedToSend {
actions = actions.filter(\.canAppearInFailedEcho)
}
if item.isRedacted {
actions = actions.filter(\.canAppearInRedacted)
}
return .init(actions: actions, debugActions: debugActions)
}
func processTimelineItemMenuAction(_ action: TimelineItemMenuAction, itemID: TimelineItemIdentifier) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
return
}
switch action {
case .copy:
guard let messageTimelineItem = timelineItem as? EventBasedMessageTimelineItemProtocol else {
return
}
UIPasteboard.general.string = messageTimelineItem.body
case .edit:
guard let messageTimelineItem = timelineItem as? EventBasedMessageTimelineItemProtocol else {
return
}
let text: String
switch messageTimelineItem.contentType {
case .text(let textItem):
if ServiceLocator.shared.settings.richTextEditorEnabled, let formattedBodyHTMLString = textItem.formattedBodyHTMLString {
text = formattedBodyHTMLString
} else {
text = messageTimelineItem.body
}
case .emote(let emoteItem):
if ServiceLocator.shared.settings.richTextEditorEnabled, let formattedBodyHTMLString = emoteItem.formattedBodyHTMLString {
text = "/me " + formattedBodyHTMLString
} else {
text = "/me " + messageTimelineItem.body
}
default:
text = messageTimelineItem.body
}
actionsSubject.send(.composer(action: .setText(text: text)))
actionsSubject.send(.composer(action: .setMode(mode: .edit(originalItemId: messageTimelineItem.id))))
case .copyPermalink:
do {
guard let eventID = eventTimelineItem.id.eventID else {
actionsSubject.send(.displayError(.alert(L10n.errorFailedCreatingThePermalink)))
break
}
let permalink = try PermalinkBuilder.permalinkTo(eventIdentifier: eventID, roomIdentifier: timelineController.roomID,
baseURL: appSettings.permalinkBaseURL)
UIPasteboard.general.url = permalink
} catch {
actionsSubject.send(.displayError(.alert(L10n.errorFailedCreatingThePermalink)))
}
case .redact:
Task {
if eventTimelineItem.hasFailedToSend {
await timelineController.cancelSending(itemID: itemID)
} else {
await timelineController.redact(itemID)
}
}
case .reply:
let replyInfo = buildReplyInfo(for: eventTimelineItem)
let replyDetails = TimelineItemReplyDetails.loaded(sender: eventTimelineItem.sender, contentType: replyInfo.type)
actionsSubject.send(.composer(action: .setMode(mode: .reply(itemID: eventTimelineItem.id, replyDetails: replyDetails, isThread: replyInfo.isThread))))
case .forward(let itemID):
actionsSubject.send(.displayMessageForwarding(itemID: itemID))
case .viewSource:
let debugInfo = timelineController.debugInfo(for: eventTimelineItem.id)
MXLog.info(debugInfo)
actionsSubject.send(.showDebugInfo(debugInfo))
case .retryDecryption(let sessionID):
Task {
await timelineController.retryDecryption(for: sessionID)
}
case .report:
actionsSubject.send(.displayReportContent(itemID: itemID, senderID: eventTimelineItem.sender.id))
case .react:
showEmojiPicker(for: itemID)
case .endPoll(let pollStartID):
endPoll(pollStartID: pollStartID)
}
if action.switchToDefaultComposer {
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
}
// MARK: Polls
func sendPollResponse(pollStartID: String, optionID: String) {
Task {
let sendPollResponseResult = await roomProxy.sendPollResponse(pollStartID: pollStartID, answers: [optionID])
analyticsService.trackPollVote()
switch sendPollResponseResult {
case .success:
break
case .failure:
actionsSubject.send(.displayError(.toast(L10n.errorUnknown)))
}
}
}
func endPoll(pollStartID: String) {
Task {
let endPollResult = await roomProxy.endPoll(pollStartID: pollStartID,
text: "The poll with event id: \(pollStartID) has ended")
analyticsService.trackPollEnd()
switch endPollResult {
case .success:
break
case .failure:
actionsSubject.send(.displayError(.toast(L10n.errorUnknown)))
}
}
}
// MARK: Pasting and dropping
func handlePasteOrDrop(_ provider: NSItemProvider) {
guard let contentType = provider.preferredContentType,
let preferredExtension = contentType.preferredFilenameExtension else {
MXLog.error("Invalid NSItemProvider: \(provider)")
actionsSubject.send(.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia)))
return
}
let providerSuggestedName = provider.suggestedName
let providerDescription = provider.description
_ = provider.loadDataRepresentation(for: contentType) { data, error in
Task { @MainActor in
let loadingIndicatorIdentifier = UUID().uuidString
self.userIndicatorController.submitIndicator(UserIndicator(id: loadingIndicatorIdentifier, type: .modal, title: L10n.commonLoading, persistent: true))
defer {
self.userIndicatorController.retractIndicatorWithId(loadingIndicatorIdentifier)
}
if let error {
self.actionsSubject.send(.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia)))
MXLog.error("Failed processing NSItemProvider: \(providerDescription) with error: \(error)")
return
}
guard let data else {
self.actionsSubject.send(.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia)))
MXLog.error("Invalid NSItemProvider data: \(providerDescription)")
return
}
do {
let url = try await Task.detached {
if let filename = providerSuggestedName {
let hasExtension = !(filename as NSString).pathExtension.isEmpty
let filename = hasExtension ? filename : "\(filename).\(preferredExtension)"
return try FileManager.default.writeDataToTemporaryDirectory(data: data, fileName: filename)
} else {
let filename = "\(UUID().uuidString).\(preferredExtension)"
return try FileManager.default.writeDataToTemporaryDirectory(data: data, fileName: filename)
}
}.value
self.actionsSubject.send(.displayMediaUploadPreviewScreen(url: url))
} catch {
self.actionsSubject.send(.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia)))
MXLog.error("Failed storing NSItemProvider data \(providerDescription) with error: \(error)")
}
}
}
}
// MARK: Voice messages
private func handleVoiceMessageRecorderAction(_ action: VoiceMessageRecorderAction) {
MXLog.debug("handling voice recorder action: \(action) - (audio)")
switch action {
case .didStartRecording(let audioRecorder):
let audioRecordState = AudioRecorderState()
audioRecordState.attachAudioRecorder(audioRecorder)
actionsSubject.send(.composer(action: .setMode(mode: .recordVoiceMessage(state: audioRecordState))))
case .didStopRecording(let previewAudioPlayerState, let url):
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: previewAudioPlayerState, waveform: .url(url), isUploading: false))))
case .didFailWithError(let error):
switch error {
case .audioRecorderError(.recordPermissionNotGranted):
MXLog.info("permission to record audio has not been granted.")
actionsSubject.send(.showConfirmationAlert(.init(id: .init(),
title: L10n.dialogPermissionMicrophoneTitleIos(InfoPlistReader.main.bundleDisplayName),
message: L10n.dialogPermissionMicrophoneDescriptionIos,
primaryButton: .init(title: L10n.commonSettings, action: { [weak self] in self?.openSystemSettings() }),
secondaryButton: .init(title: L10n.actionNotNow, role: .cancel, action: nil))))
default:
MXLog.error("failed to start voice message recording. \(error)")
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
}
}
func startRecordingVoiceMessage() async {
voiceMessageRecorderObserver = voiceMessageRecorder.actions
.receive(on: DispatchQueue.main)
.sink { [weak self] action in
self?.handleVoiceMessageRecorderAction(action)
}
await voiceMessageRecorder.startRecording()
}
func stopRecordingVoiceMessage() async {
await voiceMessageRecorder.stopRecording()
}
func cancelRecordingVoiceMessage() async {
await voiceMessageRecorder.cancelRecording()
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
func deleteCurrentVoiceMessage() async {
await voiceMessageRecorder.deleteRecording()
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
func sendCurrentVoiceMessage() async {
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState, let recordingURL = voiceMessageRecorder.recordingURL else {
actionsSubject.send(.displayError(.alert(L10n.errorFailedUploadingVoiceMessage)))
return
}
analyticsService.trackComposer(inThread: false,
isEditing: false,
isReply: false,
messageType: .voiceMessage,
startsThread: nil)
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL), isUploading: true))))
await voiceMessageRecorder.stopPlayback()
switch await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: AudioConverter()) {
case .success:
await deleteCurrentVoiceMessage()
case .failure(let error):
MXLog.error("failed to send the voice message. \(error)")
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL), isUploading: false))))
actionsSubject.send(.displayError(.alert(L10n.errorFailedUploadingVoiceMessage)))
}
}
func startPlayingRecordedVoiceMessage() async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
MXLog.error("failed to play recorded voice message. \(error)")
}
}
func pausePlayingRecordedVoiceMessage() {
voiceMessageRecorder.pausePlayback()
}
func seekRecordedVoiceMessage(to progress: Double) async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
await voiceMessageRecorder.seekPlayback(to: progress)
}
func scrubVoiceMessagePlayback(scrubbing: Bool) async {
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
return
}
if scrubbing {
if audioPlayerState.playbackState == .playing {
resumeVoiceMessagePlaybackAfterScrubbing = true
pausePlayingRecordedVoiceMessage()
}
} else {
if resumeVoiceMessagePlaybackAfterScrubbing {
resumeVoiceMessagePlaybackAfterScrubbing = false
await startPlayingRecordedVoiceMessage()
}
}
}
// MARK: Audio Playback
func playPauseAudio(for itemID: TimelineItemIdentifier) async {
MXLog.info("Toggle play/pause audio for itemID \(itemID)")
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID) else {
fatalError("TimelineItem \(itemID) not found")
}
guard let voiceMessageRoomTimelineItem = timelineItem as? VoiceMessageRoomTimelineItem else {
fatalError("Invalid TimelineItem type for itemID \(itemID) (expecting `VoiceMessageRoomTimelineItem` but found \(type(of: timelineItem)) instead")
}
guard let source = voiceMessageRoomTimelineItem.content.source else {
MXLog.error("Cannot start voice message playback, source is not defined for itemID \(itemID)")
return
}
guard case .success(let mediaPlayer) = mediaPlayerProvider.player(for: source), let audioPlayer = mediaPlayer as? AudioPlayerProtocol else {
MXLog.error("Cannot play a voice message without an audio player")
return
}
let audioPlayerState = audioPlayerState(for: itemID)
// Ensure this one is attached
if !audioPlayerState.isAttached {
audioPlayerState.attachAudioPlayer(audioPlayer)
}
// Detach all other states
await mediaPlayerProvider.detachAllStates(except: audioPlayerState)
guard audioPlayer.mediaSource == source, audioPlayer.state != .error else {
// Load content
do {
MXLog.info("Loading voice message audio content from source for itemID \(itemID)")
let url = try await voiceMessageMediaManager.loadVoiceMessageFromSource(source, body: nil)
// Make sure that the player is still attached, as it may have been detached while waiting for the voice message to be loaded.
if audioPlayerState.isAttached {
audioPlayer.load(mediaSource: source, using: url, autoplay: true)
}
} catch {
MXLog.error("Failed to load voice message: \(error)")
audioPlayerState.reportError(error)
}
return
}
if audioPlayer.state == .playing {
audioPlayer.pause()
} else {
audioPlayer.play()
}
}
func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async {
guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
return
}
await mediaPlayerProvider.detachAllStates(except: playerState)
await playerState.updateState(progress: progress)
}
func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID) else {
fatalError("TimelineItem \(itemID) not found")
}
guard let voiceMessageRoomTimelineItem = timelineItem as? VoiceMessageRoomTimelineItem else {
fatalError("Invalid TimelineItem type (expecting `VoiceMessageRoomTimelineItem` but found \(type(of: timelineItem)) instead")
}
if let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) {
return playerState
}
let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
duration: voiceMessageRoomTimelineItem.content.duration,
waveform: voiceMessageRoomTimelineItem.content.waveform)
mediaPlayerProvider.register(audioPlayerState: playerState)
return playerState
}
// MARK: Other
func showEmojiPicker(for itemID: TimelineItemIdentifier) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
timelineItem.isReactable,
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
return
}
let selectedEmojis = Set(eventTimelineItem.properties.reactions.compactMap { $0.isHighlighted ? $0.key : nil })
actionsSubject.send(.displayEmojiPicker(itemID: itemID, selectedEmojis: selectedEmojis))
}
func handleTappedUser(userID: String) async {
// This is generally fast but it could take some time for rooms with thousands of users on first load
// Show a loader only if it takes more than 0.1 seconds
showLoadingIndicator(with: .milliseconds(100))
let result = await roomProxy.getMember(userID: userID)
hideLoadingIndicator()
switch result {
case .success(let member):
actionsSubject.send(.displayRoomMemberDetails(member: member))
case .failure(let error):
actionsSubject.send(.displayError(.alert(L10n.screenRoomErrorFailedRetrievingUserDetails)))
MXLog.error("Failed retrieving the user given the following id \(userID) with error: \(error)")
}
}
func processItemTap(_ itemID: TimelineItemIdentifier) async -> RoomTimelineControllerAction {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID) else {
return .none
}
switch timelineItem {
case let item as LocationRoomTimelineItem:
guard let geoURI = item.content.geoURI else { return .none }
return .displayLocation(body: item.content.body, geoURI: geoURI, description: item.content.description)
default:
return await displayMediaActionIfPossible(timelineItem: timelineItem)
}
}
// MARK: - Private
private func canRedactItem(_ item: EventBasedTimelineItemProtocol) -> Bool {
item.isOutgoing || (canCurrentUserRedact && !roomProxy.isDirect)
}
private func buildReplyInfo(for item: EventBasedTimelineItemProtocol) -> ReplyInfo {
guard let messageItem = item as? EventBasedMessageTimelineItemProtocol else {
return .init(type: .text(.init(body: item.body)), isThread: false)
}
return .init(type: messageItem.contentType, isThread: messageItem.isThreaded)
}
private func openSystemSettings() {
guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
application.open(url)
}
private func displayMediaActionIfPossible(timelineItem: RoomTimelineItemProtocol) async -> RoomTimelineControllerAction {
var source: MediaSourceProxy?
var body: String
switch timelineItem {
case let item as ImageRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as VideoRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as FileRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as AudioRoomTimelineItem:
// For now we are just displaying audio messages with the File preview until we create a timeline player for them.
source = item.content.source
body = item.content.body
default:
return .none
}
guard let source else { return .none }
switch await mediaProvider.loadFileFromSource(source, body: body) {
case .success(let file):
return .displayMediaFile(file: file, title: body)
case .failure:
return .none
}
}
// MARK: User indicators
private static let loadingIndicatorIdentifier = "RoomScreenLoadingIndicator"
private func showLoadingIndicator(with delay: Duration) {
userIndicatorController.submitIndicator(UserIndicator(id: Self.loadingIndicatorIdentifier,
type: .modal(progress: .indeterminate, interactiveDismissDisabled: true, allowsInteraction: false),
title: L10n.commonLoading,
persistent: true),
delay: delay)
}
private func hideLoadingIndicator() {
userIndicatorController.retractIndicatorWithId(Self.loadingIndicatorIdentifier)
}
}
private struct ReplyInfo {
let type: EventBasedMessageTimelineItemContentType
let isThread: Bool
}

View File

@@ -28,40 +28,57 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
static let toastErrorID = "RoomScreenToastError"
}
private let timelineController: RoomTimelineControllerProtocol
private let roomProxy: RoomProxyProtocol
private let appSettings: AppSettings
private let analytics: AnalyticsService
private let application: ApplicationProtocol
private unowned let userIndicatorController: UserIndicatorControllerProtocol
private let notificationCenterProtocol: NotificationCenterProtocol
private let voiceMessageRecorder: VoiceMessageRecorderProtocol
private let composerFocusedSubject = PassthroughSubject<Bool, Never>()
private let timelineController: RoomTimelineControllerProtocol
private let mediaPlayerProvider: MediaPlayerProviderProtocol
private let userIndicatorController: UserIndicatorControllerProtocol
private let application: ApplicationProtocol
private let appSettings: AppSettings
private let analyticsService: AnalyticsService
private let notificationCenter: NotificationCenterProtocol
private let roomScreenInteractionHandler: RoomScreenInteractionHandler
private let composerFocusedSubject = PassthroughSubject<Bool, Never>()
private let actionsSubject: PassthroughSubject<RoomScreenViewModelAction, Never> = .init()
private var canCurrentUserRedact = false
var actions: AnyPublisher<RoomScreenViewModelAction, Never> {
actionsSubject.eraseToAnyPublisher()
}
private var paginateBackwardsTask: Task<Void, Never>?
private var resumeVoiceMessagePlaybackAfterScrubbing = false
private var voiceMessageRecorderObserver: AnyCancellable?
init(timelineController: RoomTimelineControllerProtocol,
init(roomProxy: RoomProxyProtocol,
timelineController: RoomTimelineControllerProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
roomProxy: RoomProxyProtocol,
appSettings: AppSettings,
analytics: AnalyticsService,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
userIndicatorController: UserIndicatorControllerProtocol,
application: ApplicationProtocol,
notificationCenterProtocol: NotificationCenterProtocol = NotificationCenter.default) {
self.roomProxy = roomProxy
appSettings: AppSettings,
analyticsService: AnalyticsService,
notificationCenter: NotificationCenterProtocol) {
self.timelineController = timelineController
self.appSettings = appSettings
self.analytics = analytics
self.userIndicatorController = userIndicatorController
self.notificationCenterProtocol = notificationCenterProtocol
self.mediaPlayerProvider = mediaPlayerProvider
self.roomProxy = roomProxy
self.appSettings = appSettings
self.analyticsService = analyticsService
self.userIndicatorController = userIndicatorController
self.application = application
voiceMessageRecorder = VoiceMessageRecorder(audioRecorder: AudioRecorder(), mediaPlayerProvider: mediaPlayerProvider)
self.notificationCenter = notificationCenter
let voiceMessageRecorder = VoiceMessageRecorder(audioRecorder: AudioRecorder(), mediaPlayerProvider: mediaPlayerProvider)
roomScreenInteractionHandler = RoomScreenInteractionHandler(roomProxy: roomProxy,
timelineController: timelineController,
mediaProvider: mediaProvider,
mediaPlayerProvider: mediaPlayerProvider,
voiceMessageMediaManager: voiceMessageMediaManager,
voiceMessageRecorder: voiceMessageRecorder,
userIndicatorController: userIndicatorController,
application: application,
appSettings: appSettings,
analyticsService: analyticsService)
super.init(initialViewState: RoomScreenViewState(roomID: timelineController.roomID,
roomTitle: roomProxy.roomTitle,
@@ -76,13 +93,13 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
setupSubscriptions()
setupDirectRoomSubscriptionsIfNeeded()
state.timelineItemMenuActionProvider = { [weak self] itemId -> TimelineItemMenuActions? in
guard let self else {
return nil
}
return self.timelineItemMenuActionsForItemId(itemId)
return self.roomScreenInteractionHandler.timelineItemMenuActionsForItemId(itemId)
}
state.audioPlayerStateProvider = { [weak self] itemId -> AudioPlayerState? in
@@ -90,7 +107,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
return nil
}
return self.audioPlayerState(for: itemId)
return self.roomScreenInteractionHandler.audioPlayerState(for: itemId)
}
buildTimelineViews()
@@ -101,10 +118,6 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
// MARK: - Public
var actions: AnyPublisher<RoomScreenViewModelAction, Never> {
actionsSubject.eraseToAnyPublisher()
}
func stop() {
// Work around QLPreviewController dismissal issues, see the InteractiveQuickLookModifier.
@@ -120,34 +133,27 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
case .itemDisappeared(let id):
Task { await timelineController.processItemDisappearance(id) }
case .itemTapped(let id):
Task { await itemTapped(with: id) }
Task { await handleItemTapped(with: id) }
case .toggleReaction(let emoji, let itemId):
Task { await timelineController.toggleReaction(emoji, to: itemId) }
case .sendReadReceiptIfNeeded(let lastVisibleItemID):
Task { await sendReadReceiptIfNeeded(for: lastVisibleItemID) }
case .timelineItemMenu(let itemID):
Task {
if case let .success(value) = await roomProxy.canUserRedact(userID: roomProxy.ownUserID) {
canCurrentUserRedact = value
} else {
canCurrentUserRedact = false
}
showTimelineItemActionMenu(for: itemID)
}
roomScreenInteractionHandler.showTimelineItemActionMenu(for: itemID)
case .timelineItemMenuAction(let itemID, let action):
processTimelineItemMenuAction(action, itemID: itemID)
roomScreenInteractionHandler.processTimelineItemMenuAction(action, itemID: itemID)
case .handlePasteOrDrop(let provider):
handlePasteOrDrop(provider)
roomScreenInteractionHandler.handlePasteOrDrop(provider)
case .tappedOnUser(userID: let userID):
Task { await handleTappedUser(userID: userID) }
Task { await roomScreenInteractionHandler.handleTappedUser(userID: userID) }
case .displayEmojiPicker(let itemID):
showEmojiPicker(for: itemID)
roomScreenInteractionHandler.showEmojiPicker(for: itemID)
case .reactionSummary(let itemID, let key):
showReactionSummary(for: itemID, selectedKey: key)
case .retrySend(let itemID):
Task { await handleRetrySend(itemID: itemID) }
Task { await timelineController.retrySending(itemID: itemID) }
case .cancelSend(let itemID):
Task { await handleCancelSend(itemID: itemID) }
Task { await timelineController.cancelSending(itemID: itemID) }
case .paginateBackwards:
paginateBackwards()
case .scrolledToBottom:
@@ -155,17 +161,17 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
renderPendingTimelineItems()
}
case let .selectedPollOption(pollStartID, optionID):
sendPollResponse(pollStartID: pollStartID, optionID: optionID)
roomScreenInteractionHandler.sendPollResponse(pollStartID: pollStartID, optionID: optionID)
case .playPauseAudio(let itemID):
Task { await timelineController.playPauseAudio(for: itemID) }
Task { await roomScreenInteractionHandler.playPauseAudio(for: itemID) }
case .seekAudio(let itemID, let progress):
Task { await timelineController.seekAudio(for: itemID, progress: progress) }
Task { await roomScreenInteractionHandler.seekAudio(for: itemID, progress: progress) }
case let .endPoll(pollStartID):
state.bindings.confirmationAlertInfo = .init(id: .init(),
title: L10n.actionEndPoll,
message: L10n.commonPollEndConfirmation,
primaryButton: .init(title: L10n.actionCancel, role: .cancel, action: nil),
secondaryButton: .init(title: L10n.actionOk, action: { self.endPoll(pollStartID: pollStartID) }))
secondaryButton: .init(title: L10n.actionOk, action: { self.roomScreenInteractionHandler.endPoll(pollStartID: pollStartID) }))
case .presentCall:
actionsSubject.send(.displayCallScreen)
}
@@ -191,7 +197,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
case .displayPollForm:
actionsSubject.send(.displayPollForm)
case .handlePasteOrDrop(let provider):
handlePasteOrDrop(provider)
roomScreenInteractionHandler.handlePasteOrDrop(provider)
case .composerModeChanged(mode: let mode):
trackComposerMode(mode)
case .composerFocusedChanged(isFocused: let isFocused):
@@ -199,24 +205,24 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
case .startVoiceMessageRecording:
Task {
await mediaPlayerProvider.detachAllStates(except: nil)
await startRecordingVoiceMessage()
await roomScreenInteractionHandler.startRecordingVoiceMessage()
}
case .stopVoiceMessageRecording:
Task { await stopRecordingVoiceMessage() }
Task { await roomScreenInteractionHandler.stopRecordingVoiceMessage() }
case .cancelVoiceMessageRecording:
Task { await cancelRecordingVoiceMessage() }
Task { await roomScreenInteractionHandler.cancelRecordingVoiceMessage() }
case .deleteVoiceMessageRecording:
Task { await deleteCurrentVoiceMessage() }
Task { await roomScreenInteractionHandler.deleteCurrentVoiceMessage() }
case .sendVoiceMessage:
Task { await sendCurrentVoiceMessage() }
Task { await roomScreenInteractionHandler.sendCurrentVoiceMessage() }
case .startVoiceMessagePlayback:
Task { await startPlayingRecordedVoiceMessage() }
Task { await roomScreenInteractionHandler.startPlayingRecordedVoiceMessage() }
case .pauseVoiceMessagePlayback:
pausePlayingRecordedVoiceMessage()
roomScreenInteractionHandler.pausePlayingRecordedVoiceMessage()
case .seekVoiceMessagePlayback(let progress):
Task { await seekRecordedVoiceMessage(to: progress) }
Task { await roomScreenInteractionHandler.seekRecordedVoiceMessage(to: progress) }
case .scrubVoiceMessagePlayback(let scrubbing):
Task { await scrubVoiceMessagePlayback(scrubbing: scrubbing) }
Task { await roomScreenInteractionHandler.scrubVoiceMessagePlayback(scrubbing: scrubbing) }
}
}
@@ -275,6 +281,36 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
.receive(on: DispatchQueue.main)
.weakAssign(to: \.state.members, on: self)
.store(in: &cancellables)
roomScreenInteractionHandler.actions
.receive(on: DispatchQueue.main)
.sink { [weak self] action in
guard let self else { return }
switch action {
case .composer(let action):
actionsSubject.send(.composer(action: action))
case .displayError(let type):
displayError(type)
case .displayEmojiPicker(let itemID, let selectedEmojis):
actionsSubject.send(.displayEmojiPicker(itemID: itemID, selectedEmojis: selectedEmojis))
case .displayMessageForwarding(let itemID):
actionsSubject.send(.displayMessageForwarding(itemID: itemID))
case .displayReportContent(let itemID, let senderID):
actionsSubject.send(.displayReportContent(itemID: itemID, senderID: senderID))
case .displayMediaUploadPreviewScreen(let url):
actionsSubject.send(.displayMediaUploadPreviewScreen(url: url))
case .displayRoomMemberDetails(let member):
actionsSubject.send(.displayRoomMemberDetails(member: member))
case .showActionMenu(let actionMenuInfo):
state.bindings.actionMenuInfo = actionMenuInfo
case .showDebugInfo(let debugInfo):
state.bindings.debugInfo = debugInfo
case .showConfirmationAlert(let alertInfo):
state.bindings.confirmationAlertInfo = alertInfo
}
}
.store(in: &cancellables)
}
private func setupDirectRoomSubscriptionsIfNeeded() {
@@ -342,7 +378,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
// Clear any notifications from notification center.
if lastVisibleItemID.timelineID == state.timelineViewState.timelineIDs.last {
notificationCenterProtocol.post(name: .roomMarkedAsRead, object: roomProxy.id)
notificationCenter.post(name: .roomMarkedAsRead, object: roomProxy.id)
}
switch await timelineController.sendReadReceipt(for: eventItemID) {
@@ -364,9 +400,9 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
return nearestItemID
}
private func itemTapped(with itemID: TimelineItemIdentifier) async {
private func handleItemTapped(with itemID: TimelineItemIdentifier) async {
state.showLoading = true
let action = await timelineController.processItemTap(itemID)
let action = await roomScreenInteractionHandler.processItemTap(itemID)
switch action {
case .displayMediaFile(let file, let title):
@@ -379,9 +415,65 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
state.showLoading = false
}
private func sendCurrentMessage(_ message: String, html: String?, mode: RoomScreenComposerMode, intentionalMentions: IntentionalMentions) async {
guard !message.isEmpty else {
fatalError("This message should never be empty")
}
actionsSubject.send(.composer(action: .clear))
switch mode {
case .reply(let itemId, _, _):
await timelineController.sendMessage(message,
html: html,
inReplyTo: itemId,
intentionalMentions: intentionalMentions)
case .edit(let originalItemId):
await timelineController.editMessage(message,
html: html,
original: originalItemId,
intentionalMentions: intentionalMentions)
case .default:
await timelineController.sendMessage(message,
html: html,
intentionalMentions: intentionalMentions)
case .recordVoiceMessage, .previewVoiceMessage:
fatalError("invalid composer mode.")
}
}
private func trackComposerMode(_ mode: RoomScreenComposerMode) {
var isEdit = false
var isReply = false
switch mode {
case .edit:
isEdit = true
case .reply:
isReply = true
default:
break
}
analyticsService.trackComposer(inThread: false, isEditing: isEdit, isReply: isReply, startsThread: nil)
}
// MARK: - Timeline Item Building
private func buildTimelineViews() {
var timelineItemsDictionary = OrderedDictionary<String, RoomTimelineItemViewState>()
timelineController.timelineItems.filter { $0 is RedactedRoomTimelineItem }.forEach { timelineItem in
// Stops the audio player when a voice message is redacted.
guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(timelineItem.id)) else {
return
}
Task { @MainActor in
playerState.detachAudioPlayer()
mediaPlayerProvider.unregister(audioPlayerState: playerState)
}
}
let itemsGroupedByTimelineDisplayStyle = timelineController.timelineItems.chunked { current, next in
canGroupItem(timelineItem: current, with: next)
@@ -487,352 +579,6 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
return eventTimelineItem.properties.reactions.isEmpty && eventTimelineItem.sender == otherEventTimelineItem.sender
}
private func sendCurrentMessage(_ message: String, html: String?, mode: RoomScreenComposerMode, intentionalMentions: IntentionalMentions) async {
guard !message.isEmpty else {
fatalError("This message should never be empty")
}
actionsSubject.send(.composer(action: .clear))
switch mode {
case .reply(let itemId, _, _):
await timelineController.sendMessage(message,
html: html,
inReplyTo: itemId,
intentionalMentions: intentionalMentions)
case .edit(let originalItemId):
await timelineController.editMessage(message,
html: html,
original: originalItemId,
intentionalMentions: intentionalMentions)
case .default:
await timelineController.sendMessage(message,
html: html,
intentionalMentions: intentionalMentions)
case .recordVoiceMessage, .previewVoiceMessage:
fatalError("invalid composer mode.")
}
}
private func trackComposerMode(_ mode: RoomScreenComposerMode) {
var isEdit = false
var isReply = false
switch mode {
case .edit:
isEdit = true
case .reply:
isReply = true
default:
break
}
analytics.trackComposer(inThread: false, isEditing: isEdit, isReply: isReply, startsThread: nil)
}
private func displayError(_ type: RoomScreenErrorType) {
switch type {
case .alert(let message):
state.bindings.alertInfo = AlertInfo(id: type,
title: L10n.commonError,
message: message)
case .toast(let message):
userIndicatorController.submitIndicator(UserIndicator(id: Constants.toastErrorID,
type: .toast,
title: message,
iconName: "xmark"))
}
}
// MARK: TimelineItemActionMenu
private func showTimelineItemActionMenu(for itemID: TimelineItemIdentifier) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
// Don't show a menu for non-event based items.
return
}
actionsSubject.send(.composer(action: .removeFocus))
state.bindings.actionMenuInfo = .init(item: eventTimelineItem)
}
// swiftlint:disable:next cyclomatic_complexity
private func timelineItemMenuActionsForItemId(_ itemID: TimelineItemIdentifier) -> TimelineItemMenuActions? {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let item = timelineItem as? EventBasedTimelineItemProtocol else {
// Don't show a context menu for non-event based items.
return nil
}
if timelineItem is StateRoomTimelineItem {
// Don't show a context menu for state events.
return nil
}
var debugActions: [TimelineItemMenuAction] = []
if appSettings.canShowDeveloperOptions || appSettings.viewSourceEnabled {
debugActions.append(.viewSource)
}
if let encryptedItem = timelineItem as? EncryptedRoomTimelineItem {
switch encryptedItem.encryptionType {
case .megolmV1AesSha2(let sessionID):
debugActions.append(.retryDecryption(sessionID: sessionID))
default:
break
}
return .init(actions: [.copyPermalink], debugActions: debugActions)
}
var actions: [TimelineItemMenuAction] = []
if item.canBeRepliedTo {
if let messageItem = item as? EventBasedMessageTimelineItemProtocol {
actions.append(.reply(isThread: messageItem.isThreaded))
} else {
actions.append(.reply(isThread: false))
}
}
if item.isForwardable {
actions.append(.forward(itemID: itemID))
}
if item.isEditable {
actions.append(.edit)
}
if item.isCopyable {
actions.append(.copy)
}
actions.append(.copyPermalink)
if canRedactItem(item), let poll = item.pollIfAvailable, !poll.hasEnded, let eventID = itemID.eventID {
actions.append(.endPoll(pollStartID: eventID))
}
if canRedactItem(item) {
actions.append(.redact)
}
if !item.isOutgoing {
actions.append(.report)
}
if item.hasFailedToSend {
actions = actions.filter(\.canAppearInFailedEcho)
}
if item.isRedacted {
actions = actions.filter(\.canAppearInRedacted)
}
return .init(actions: actions, debugActions: debugActions)
}
private func canRedactItem(_ item: EventBasedTimelineItemProtocol) -> Bool {
item.isOutgoing || (canCurrentUserRedact && !roomProxy.isDirect)
}
private func processTimelineItemMenuAction(_ action: TimelineItemMenuAction, itemID: TimelineItemIdentifier) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
return
}
switch action {
case .copy:
guard let messageTimelineItem = timelineItem as? EventBasedMessageTimelineItemProtocol else {
return
}
UIPasteboard.general.string = messageTimelineItem.body
case .edit:
guard let messageTimelineItem = timelineItem as? EventBasedMessageTimelineItemProtocol else {
return
}
let text: String
switch messageTimelineItem.contentType {
case .text(let textItem):
if ServiceLocator.shared.settings.richTextEditorEnabled, let formattedBodyHTMLString = textItem.formattedBodyHTMLString {
text = formattedBodyHTMLString
} else {
text = messageTimelineItem.body
}
case .emote(let emoteItem):
if ServiceLocator.shared.settings.richTextEditorEnabled, let formattedBodyHTMLString = emoteItem.formattedBodyHTMLString {
text = "/me " + formattedBodyHTMLString
} else {
text = "/me " + messageTimelineItem.body
}
default:
text = messageTimelineItem.body
}
actionsSubject.send(.composer(action: .setText(text: text)))
actionsSubject.send(.composer(action: .setMode(mode: .edit(originalItemId: messageTimelineItem.id))))
case .copyPermalink:
do {
guard let eventID = eventTimelineItem.id.eventID else {
displayError(.alert(L10n.errorFailedCreatingThePermalink))
break
}
let permalink = try PermalinkBuilder.permalinkTo(eventIdentifier: eventID, roomIdentifier: timelineController.roomID,
baseURL: appSettings.permalinkBaseURL)
UIPasteboard.general.url = permalink
} catch {
displayError(.alert(L10n.errorFailedCreatingThePermalink))
}
case .redact:
Task {
if eventTimelineItem.hasFailedToSend {
await timelineController.cancelSend(itemID)
} else {
await timelineController.redact(itemID)
}
}
case .reply:
let replyInfo = buildReplyInfo(for: eventTimelineItem)
let replyDetails = TimelineItemReplyDetails.loaded(sender: eventTimelineItem.sender, contentType: replyInfo.type)
actionsSubject.send(.composer(action: .setMode(mode: .reply(itemID: eventTimelineItem.id, replyDetails: replyDetails, isThread: replyInfo.isThread))))
case .forward(let itemID):
actionsSubject.send(.displayMessageForwarding(itemID: itemID))
case .viewSource:
let debugInfo = timelineController.debugInfo(for: eventTimelineItem.id)
MXLog.info(debugInfo)
state.bindings.debugInfo = debugInfo
case .retryDecryption(let sessionID):
Task {
await timelineController.retryDecryption(for: sessionID)
}
case .report:
actionsSubject.send(.displayReportContent(itemID: itemID, senderID: eventTimelineItem.sender.id))
case .react:
showEmojiPicker(for: itemID)
case .endPoll(let pollStartID):
endPoll(pollStartID: pollStartID)
}
if action.switchToDefaultComposer {
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
}
// Pasting and dropping
private func handlePasteOrDrop(_ provider: NSItemProvider) {
guard let contentType = provider.preferredContentType,
let preferredExtension = contentType.preferredFilenameExtension else {
MXLog.error("Invalid NSItemProvider: \(provider)")
displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia))
return
}
let providerSuggestedName = provider.suggestedName
let providerDescription = provider.description
_ = provider.loadDataRepresentation(for: contentType) { data, error in
Task { @MainActor in
let loadingIndicatorIdentifier = UUID().uuidString
self.userIndicatorController.submitIndicator(UserIndicator(id: loadingIndicatorIdentifier, type: .modal, title: L10n.commonLoading, persistent: true))
defer {
self.userIndicatorController.retractIndicatorWithId(loadingIndicatorIdentifier)
}
if let error {
self.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia))
MXLog.error("Failed processing NSItemProvider: \(providerDescription) with error: \(error)")
return
}
guard let data else {
self.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia))
MXLog.error("Invalid NSItemProvider data: \(providerDescription)")
return
}
do {
let url = try await Task.detached {
if let filename = providerSuggestedName {
let hasExtension = !(filename as NSString).pathExtension.isEmpty
let filename = hasExtension ? filename : "\(filename).\(preferredExtension)"
return try FileManager.default.writeDataToTemporaryDirectory(data: data, fileName: filename)
} else {
let filename = "\(UUID().uuidString).\(preferredExtension)"
return try FileManager.default.writeDataToTemporaryDirectory(data: data, fileName: filename)
}
}.value
self.actionsSubject.send(.displayMediaUploadPreviewScreen(url: url))
} catch {
self.displayError(.toast(L10n.screenRoomErrorFailedProcessingMedia))
MXLog.error("Failed storing NSItemProvider data \(providerDescription) with error: \(error)")
}
}
}
}
private func buildReplyInfo(for item: EventBasedTimelineItemProtocol) -> ReplyInfo {
guard let messageItem = item as? EventBasedMessageTimelineItemProtocol else {
return .init(type: .text(.init(body: item.body)), isThread: false)
}
return .init(type: messageItem.contentType, isThread: messageItem.isThreaded)
}
private func handleTappedUser(userID: String) async {
// This is generally fast but it could take some time for rooms with thousands of users on first load
// Show a loader only if it takes more than 0.1 seconds
showLoadingIndicator(with: .milliseconds(100))
let result = await roomProxy.getMember(userID: userID)
hideLoadingIndicator()
switch result {
case .success(let member):
actionsSubject.send(.displayRoomMemberDetails(member: member))
case .failure(let error):
displayError(.alert(L10n.screenRoomErrorFailedRetrievingUserDetails))
MXLog.error("Failed retrieving the user given the following id \(userID) with error: \(error)")
}
}
private func handleRetrySend(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
MXLog.error("Failed Retry Send: missing transaction ID")
return
}
await roomProxy.retrySend(transactionID: transactionID)
}
private func handleCancelSend(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
MXLog.error("Failed Cancel Send: missing transaction ID")
return
}
await roomProxy.cancelSend(transactionID: transactionID)
}
private static let loadingIndicatorIdentifier = "RoomScreenLoadingIndicator"
private func showLoadingIndicator(with delay: Duration) {
userIndicatorController.submitIndicator(UserIndicator(id: Self.loadingIndicatorIdentifier,
type: .modal(progress: .indeterminate, interactiveDismissDisabled: true, allowsInteraction: false),
title: L10n.commonLoading,
persistent: true),
delay: delay)
}
private func hideLoadingIndicator() {
userIndicatorController.retractIndicatorWithId(Self.loadingIndicatorIdentifier)
}
// MARK: - Direct chats logics
private func showInviteAlert() {
@@ -878,17 +624,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
}
// MARK: - Reactions
private func showEmojiPicker(for itemID: TimelineItemIdentifier) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
timelineItem.isReactable,
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
return
}
let selectedEmojis = Set(eventTimelineItem.properties.reactions.compactMap { $0.isHighlighted ? $0.key : nil })
actionsSubject.send(.displayEmojiPicker(itemID: itemID, selectedEmojis: selectedEmojis))
}
private func showReactionSummary(for itemID: TimelineItemIdentifier, selectedKey: String) {
guard let timelineItem = timelineController.timelineItems.firstUsingStableID(itemID),
let eventTimelineItem = timelineItem as? EventBasedTimelineItemProtocol else {
@@ -897,157 +633,22 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
state.bindings.reactionSummaryInfo = .init(reactions: eventTimelineItem.properties.reactions, selectedKey: selectedKey)
}
// MARK: - Polls
private func sendPollResponse(pollStartID: String, optionID: String) {
Task {
let sendPollResponseResult = await roomProxy.sendPollResponse(pollStartID: pollStartID, answers: [optionID])
analytics.trackPollVote()
switch sendPollResponseResult {
case .success:
break
case .failure:
displayError(.toast(L10n.errorUnknown))
}
// MARK: - User Indicators
private func displayError(_ type: RoomScreenErrorType) {
switch type {
case .alert(let message):
state.bindings.alertInfo = AlertInfo(id: type,
title: L10n.commonError,
message: message)
case .toast(let message):
userIndicatorController.submitIndicator(UserIndicator(id: Constants.toastErrorID,
type: .toast,
title: message,
iconName: "xmark"))
}
}
private func endPoll(pollStartID: String) {
Task {
let endPollResult = await roomProxy.endPoll(pollStartID: pollStartID,
text: "The poll with event id: \(pollStartID) has ended")
analytics.trackPollEnd()
switch endPollResult {
case .success:
break
case .failure:
displayError(.toast(L10n.errorUnknown))
}
}
}
// MARK: - Audio
private func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState {
timelineController.audioPlayerState(for: itemID)
}
// MARK: - Voice message
private func handleVoiceMessageRecorderAction(_ action: VoiceMessageRecorderAction) {
MXLog.debug("handling voice recorder action: \(action) - (audio)")
switch action {
case .didStartRecording(let audioRecorder):
let audioRecordState = AudioRecorderState()
audioRecordState.attachAudioRecorder(audioRecorder)
actionsSubject.send(.composer(action: .setMode(mode: .recordVoiceMessage(state: audioRecordState))))
case .didStopRecording(let previewAudioPlayerState, let url):
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: previewAudioPlayerState, waveform: .url(url), isUploading: false))))
case .didFailWithError(let error):
switch error {
case .audioRecorderError(.recordPermissionNotGranted):
MXLog.info("permission to record audio has not been granted.")
state.bindings.confirmationAlertInfo = .init(id: .init(),
title: L10n.dialogPermissionMicrophoneTitleIos(InfoPlistReader.main.bundleDisplayName),
message: L10n.dialogPermissionMicrophoneDescriptionIos,
primaryButton: .init(title: L10n.commonSettings, action: { [weak self] in self?.openSystemSettings() }),
secondaryButton: .init(title: L10n.actionNotNow, role: .cancel, action: nil))
default:
MXLog.error("failed to start voice message recording. \(error)")
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
}
}
private func startRecordingVoiceMessage() async {
voiceMessageRecorderObserver = voiceMessageRecorder.actions
.receive(on: DispatchQueue.main)
.sink { [weak self] action in
self?.handleVoiceMessageRecorderAction(action)
}
await voiceMessageRecorder.startRecording()
}
private func stopRecordingVoiceMessage() async {
await voiceMessageRecorder.stopRecording()
}
private func cancelRecordingVoiceMessage() async {
await voiceMessageRecorder.cancelRecording()
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
private func deleteCurrentVoiceMessage() async {
await voiceMessageRecorder.deleteRecording()
voiceMessageRecorderObserver = nil
actionsSubject.send(.composer(action: .setMode(mode: .default)))
}
private func sendCurrentVoiceMessage() async {
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState, let recordingURL = voiceMessageRecorder.recordingURL else {
displayError(.alert(L10n.errorFailedUploadingVoiceMessage))
return
}
analytics.trackComposer(inThread: false,
isEditing: false,
isReply: false,
messageType: .voiceMessage,
startsThread: nil)
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL), isUploading: true))))
await voiceMessageRecorder.stopPlayback()
switch await voiceMessageRecorder.sendVoiceMessage(inRoom: roomProxy, audioConverter: AudioConverter()) {
case .success:
await deleteCurrentVoiceMessage()
case .failure(let error):
MXLog.error("failed to send the voice message. \(error)")
actionsSubject.send(.composer(action: .setMode(mode: .previewVoiceMessage(state: audioPlayerState, waveform: .url(recordingURL), isUploading: false))))
displayError(.alert(L10n.errorFailedUploadingVoiceMessage))
}
}
private func startPlayingRecordedVoiceMessage() async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
MXLog.error("failed to play recorded voice message. \(error)")
}
}
private func pausePlayingRecordedVoiceMessage() {
voiceMessageRecorder.pausePlayback()
}
private func seekRecordedVoiceMessage(to progress: Double) async {
await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
await voiceMessageRecorder.seekPlayback(to: progress)
}
private func scrubVoiceMessagePlayback(scrubbing: Bool) async {
guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
return
}
if scrubbing {
if audioPlayerState.playbackState == .playing {
resumeVoiceMessagePlaybackAfterScrubbing = true
pausePlayingRecordedVoiceMessage()
}
} else {
if resumeVoiceMessagePlaybackAfterScrubbing {
resumeVoiceMessagePlaybackAfterScrubbing = false
await startPlayingRecordedVoiceMessage()
}
}
}
private func openSystemSettings() {
guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
application.open(url)
}
}
private extension RoomProxyProtocol {
@ -1057,35 +658,19 @@ private extension RoomProxyProtocol {
}
}
extension RoomScreenViewModel.Context {
/// A function to make it easier to bind to a reaction group's expanded/collapsed state
/// - Parameter itemID: The id of the timeline item that was reacted to
/// - Returns: Whether the reactions should show in the collapsed state; true by default.
func reactionsCollapsedBinding(for itemID: TimelineItemIdentifier) -> Binding<Bool> {
Binding(get: {
self.reactionsCollapsed[itemID] ?? true
}, set: {
self.reactionsCollapsed[itemID] = $0
})
}
}
// MARK: - Mocks
extension RoomScreenViewModel {
static let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
static let mock = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
}
private struct ReplyInfo {
let type: EventBasedMessageTimelineItemContentType
let isThread: Bool
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
}
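Because the old and new initialiser lines are interleaved above (the +/- diff markers were lost in extraction), here is the updated mock consolidated into one call, using only argument labels and values that appear in this diff; roomProxy moves to the front and the appSettings/analyticsService/notificationCenter parameters move to the end:

// Consolidated sketch of the reordered RoomScreenViewModel initialiser as used by the mock.
static let mock = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
                                      timelineController: MockRoomTimelineController(),
                                      mediaProvider: MockMediaProvider(),
                                      mediaPlayerProvider: MediaPlayerProviderMock(),
                                      voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
                                      userIndicatorController: ServiceLocator.shared.userIndicatorController,
                                      application: ApplicationMock.default,
                                      appSettings: ServiceLocator.shared.settings,
                                      analyticsService: ServiceLocator.shared.analytics,
                                      notificationCenter: NotificationCenterMock())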
private struct RoomContextKey: EnvironmentKey {

View File

@ -55,14 +55,16 @@ struct RoomHeaderView_Previews: PreviewProvider, TestablePreview {
@ViewBuilder
static var bodyPlain: some View {
let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name", avatarURL: URL.picturesDirectory)),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name", avatarURL: URL.picturesDirectory)),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
RoomHeaderView(context: viewModel.context)
.previewLayout(.sizeThatFits)
@ -71,14 +73,16 @@ struct RoomHeaderView_Previews: PreviewProvider, TestablePreview {
@ViewBuilder
static var bodyEncrypted: some View {
let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name")),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Some Room name")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
RoomHeaderView(context: viewModel.context)
.previewLayout(.sizeThatFits)

View File

@ -187,14 +187,16 @@ struct RoomScreen: View {
// MARK: - Previews
struct RoomScreen_Previews: PreviewProvider, TestablePreview {
static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
static let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Preview room", isCallOngoing: true)),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Preview room", isCallOngoing: true)),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
static var previews: some View {
NavigationStack {

View File

@ -103,10 +103,10 @@ struct TimelineItemBubbledStylerView<Content: View>: View {
.timelineAccessibility(timelineItem)
if !timelineItem.properties.reactions.isEmpty {
TimelineReactionsView(itemID: timelineItem.id,
TimelineReactionsView(context: context,
itemID: timelineItem.id,
reactions: timelineItem.properties.reactions,
isLayoutRTL: timelineItem.isOutgoing,
collapsed: context.reactionsCollapsedBinding(for: timelineItem.id))
isLayoutRTL: timelineItem.isOutgoing)
// Workaround to stop the message long press stealing the touch from the reaction buttons
.onTapGesture { }
}

View File

@ -129,9 +129,9 @@ struct TimelineItemPlainStylerView<Content: View>: View {
}
if !timelineItem.properties.reactions.isEmpty {
TimelineReactionsView(itemID: timelineItem.id,
reactions: timelineItem.properties.reactions,
collapsed: context.reactionsCollapsedBinding(for: timelineItem.id))
TimelineReactionsView(context: context,
itemID: timelineItem.id,
reactions: timelineItem.properties.reactions)
// Workaround to stop the message long press stealing the touch from the reaction buttons
.onTapGesture { }
}

View File

@ -16,17 +16,35 @@
import SwiftUI
@MainActor
struct TimelineReactionsView: View {
private static let horizontalSpacing: CGFloat = 4
private static let verticalSpacing: CGFloat = 4
private let feedbackGenerator = UIImpactFeedbackGenerator(style: .heavy)
@EnvironmentObject private var context: RoomScreenViewModel.Context
@Environment(\.layoutDirection) private var layoutDirection: LayoutDirection
let context: RoomScreenViewModel.Context
let itemID: TimelineItemIdentifier
let reactions: [AggregatedReaction]
var isLayoutRTL = false
@Binding var collapsed: Bool
let isLayoutRTL: Bool
private var collapsed: Binding<Bool>
init(context: RoomScreenViewModel.Context,
itemID: TimelineItemIdentifier,
reactions: [AggregatedReaction],
isLayoutRTL: Bool = false) {
self.context = context
self.itemID = itemID
self.reactions = reactions
self.isLayoutRTL = isLayoutRTL
collapsed = Binding(get: {
context.reactionsCollapsed[itemID] ?? true
}, set: {
context.reactionsCollapsed[itemID] = $0
})
}
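For comparison with the styler views earlier in this diff, a short usage sketch (context and item are placeholders for the room context and an event timeline item): callers no longer supply a collapsed binding, the view derives one from context.reactionsCollapsed and defaults to collapsed.

// Hypothetical call site after this change; note the absence of a `collapsed:` argument.
TimelineReactionsView(context: context,
                      itemID: item.id,
                      reactions: item.properties.reactions,
                      isLayoutRTL: item.isOutgoing)
// Toggling the expand/collapse button writes back through the derived binding,
// i.e. context.reactionsCollapsed[item.id] is updated per item.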
var reactionsLayoutDirection: LayoutDirection {
guard isLayoutRTL else { return layoutDirection }
@ -48,9 +66,9 @@ struct TimelineReactionsView: View {
if isCollapsible {
Button {
collapsed.toggle()
collapsed.wrappedValue.toggle()
} label: {
TimelineCollapseButtonLabel(collapsed: collapsed)
TimelineCollapseButtonLabel(collapsed: collapsed.wrappedValue)
.transaction { $0.animation = nil }
}
.reactionLayoutItem(.expandCollapse)
@ -79,7 +97,7 @@ struct TimelineReactionsView: View {
if isCollapsible {
return AnyLayout(CollapsibleReactionLayout(itemSpacing: 4,
rowSpacing: 4,
collapsed: collapsed,
collapsed: collapsed.wrappedValue,
rowsBeforeCollapsible: 2))
}
@ -188,21 +206,25 @@ struct TimelineReactionAddMoreButtonLabel: View {
}
struct TimelineReactionViewPreviewsContainer: View {
@State private var collapseState1 = false
@State private var collapseState2 = true
var body: some View {
VStack {
TimelineReactionsView(itemID: .init(timelineID: "1"),
TimelineReactionsView(context: RoomScreenViewModel.mock.context,
itemID: .init(timelineID: "1"),
reactions: [AggregatedReaction.mockReactionWithLongText,
AggregatedReaction.mockReactionWithLongTextRTL],
collapsed: .constant(true))
AggregatedReaction.mockReactionWithLongTextRTL])
Divider()
TimelineReactionsView(itemID: .init(timelineID: "2"), reactions: Array(AggregatedReaction.mockReactions.prefix(3)), collapsed: .constant(true))
TimelineReactionsView(context: RoomScreenViewModel.mock.context,
itemID: .init(timelineID: "2"),
reactions: Array(AggregatedReaction.mockReactions.prefix(3)))
Divider()
TimelineReactionsView(itemID: .init(timelineID: "3"), reactions: AggregatedReaction.mockReactions, collapsed: $collapseState1)
TimelineReactionsView(context: RoomScreenViewModel.mock.context,
itemID: .init(timelineID: "3"),
reactions: AggregatedReaction.mockReactions)
Divider()
TimelineReactionsView(itemID: .init(timelineID: "4"), reactions: AggregatedReaction.mockReactions, isLayoutRTL: true, collapsed: $collapseState2)
TimelineReactionsView(context: RoomScreenViewModel.mock.context,
itemID: .init(timelineID: "4"),
reactions: AggregatedReaction.mockReactions,
isLayoutRTL: true)
}
.background(Color.red)
.frame(maxWidth: 250, alignment: .leading)

View File

@ -58,15 +58,16 @@ struct TimelineReadReceiptsView_Previews: PreviewProvider, TestablePreview {
.mockMe
]
static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
static let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Test", members: members)),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Test",
members: members)),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
static let singleReceipt = [ReadReceipt(userID: RoomMemberProxyMock.mockAlice.userID, formattedTimestamp: "Now")]
static let doubleReceipt = [ReadReceipt(userID: RoomMemberProxyMock.mockAlice.userID, formattedTimestamp: "Now"),

View File

@ -80,14 +80,16 @@ struct UITimelineView: UIViewControllerRepresentable {
// MARK: - Previews
struct UITimelineView_Previews: PreviewProvider, TestablePreview {
static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
static let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
static var previews: some View {
NavigationStack {

View File

@ -168,15 +168,17 @@ struct TimelineView: View {
// MARK: - Previews
struct TimelineView_Previews: PreviewProvider, TestablePreview {
static let viewModel = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
static let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "Preview room")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
static var previews: some View {
NavigationStack {
RoomScreen(context: viewModel.context, composerToolbar: ComposerToolbar.mock())

View File

@ -63,8 +63,6 @@ class MockRoomTimelineController: RoomTimelineControllerProtocol {
func processItemAppearance(_ itemID: TimelineItemIdentifier) async { }
func processItemDisappearance(_ itemID: TimelineItemIdentifier) async { }
func processItemTap(_ itemID: TimelineItemIdentifier) async -> RoomTimelineControllerAction { .none }
func sendMessage(_ message: String,
html: String?,
@ -79,8 +77,6 @@ class MockRoomTimelineController: RoomTimelineControllerProtocol {
intentionalMentions: IntentionalMentions) async { }
func redact(_ itemID: TimelineItemIdentifier) async { }
func cancelSend(_ itemID: TimelineItemIdentifier) async { }
func debugInfo(for itemID: TimelineItemIdentifier) -> TimelineItemDebugInfo {
.init(model: "Mock debug description", originalJSON: nil, latestEditJSON: nil)
@ -88,18 +84,21 @@ class MockRoomTimelineController: RoomTimelineControllerProtocol {
func retryDecryption(for sessionID: String) async { }
func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState {
AudioPlayerState(id: .timelineItemIdentifier(itemID),
duration: 10.0,
waveform: nil,
progress: 0.0)
func retrySending(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
return
}
await roomProxy?.retrySend(transactionID: transactionID)
}
func playPauseAudio(for itemID: TimelineItemIdentifier) async { }
func pauseAudio() { }
func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async { }
func cancelSending(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
return
}
await roomProxy?.cancelSend(transactionID: transactionID)
}
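Since the mock now forwards through its roomProxy, a hedged unit-test sketch of the forwarding path follows; the exact RoomProxyMock spy property used in the final assertion is an assumption about the generated mock's naming, so it is left as a comment:

// Sketch of a test inside an XCTestCase; the spy accessor name on RoomProxyMock is assumed.
func testMockControllerForwardsRetry() async {
    let proxy = RoomProxyMock(with: .init(displayName: "Test"))
    let controller = MockRoomTimelineController()
    controller.roomProxy = proxy
    await controller.retrySending(itemID: .init(timelineID: "1", transactionID: "txn-1"))
    // Expect the proxy to have received "txn-1" for its retrySend call
    // (e.g. via the generated received-arguments spy on RoomProxyMock).
}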
// MARK: - UI Test signalling

View File

@ -17,12 +17,8 @@
import Foundation
struct MockRoomTimelineControllerFactory: RoomTimelineControllerFactoryProtocol {
// swiftlint:disable:next function_parameter_count
func buildRoomTimelineController(roomProxy: RoomProxyProtocol,
timelineItemFactory: RoomTimelineItemFactoryProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
secureBackupController: SecureBackupControllerProtocol) -> RoomTimelineControllerProtocol {
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = RoomTimelineItemFixtures.largeChunk

View File

@ -22,9 +22,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
private let roomProxy: RoomProxyProtocol
private let timelineProvider: RoomTimelineProviderProtocol
private let timelineItemFactory: RoomTimelineItemFactoryProtocol
private let mediaProvider: MediaProviderProtocol
private let mediaPlayerProvider: MediaPlayerProviderProtocol
private let voiceMessageMediaManager: VoiceMessageMediaManagerProtocol
private let appSettings: AppSettings
private let secureBackupController: SecureBackupControllerProtocol
private let serialDispatchQueue: DispatchQueue
@ -46,17 +43,11 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
init(roomProxy: RoomProxyProtocol,
timelineItemFactory: RoomTimelineItemFactoryProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
appSettings: AppSettings,
secureBackupController: SecureBackupControllerProtocol) {
self.roomProxy = roomProxy
timelineProvider = roomProxy.timelineProvider
self.timelineItemFactory = timelineItemFactory
self.mediaProvider = mediaProvider
self.mediaPlayerProvider = mediaPlayerProvider
self.voiceMessageMediaManager = voiceMessageMediaManager
self.appSettings = appSettings
self.secureBackupController = secureBackupController
serialDispatchQueue = DispatchQueue(label: "io.element.elementx.roomtimelineprovider", qos: .utility)
@ -117,20 +108,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
}
func processItemDisappearance(_ itemID: TimelineItemIdentifier) { }
func processItemTap(_ itemID: TimelineItemIdentifier) async -> RoomTimelineControllerAction {
guard let timelineItem = timelineItems.firstUsingStableID(itemID) else {
return .none
}
switch timelineItem {
case let item as LocationRoomTimelineItem:
guard let geoURI = item.content.geoURI else { return .none }
return .displayLocation(body: item.content.body, geoURI: geoURI, description: item.content.description)
default:
return await displayMediaActionIfPossible(timelineItem: timelineItem)
}
}
func sendMessage(_ message: String,
html: String?,
@ -182,7 +159,7 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
let item = timelineItem as? EventBasedTimelineItemProtocol,
item.hasFailedToSend {
MXLog.info("Editing a failed echo, will cancel and resend it as a new message")
await cancelSend(itemID)
await cancelSending(itemID: itemID)
await sendMessage(newMessage, html: html, intentionalMentions: intentionalMentions)
} else if let eventID = itemID.eventID {
switch await roomProxy.editMessage(newMessage,
@ -211,15 +188,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
MXLog.error("Failed redacting message with error: \(error)")
}
}
func cancelSend(_ itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
MXLog.error("Failed cancelling send, missing transaction ID")
return
}
MXLog.info("Cancelling send in \(roomID)")
await roomProxy.cancelSend(transactionID: transactionID)
}
// Handle this parallel to the timeline items so we're not forced
// to bundle the Rust side objects within them
@ -242,87 +210,24 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
await roomProxy.retryDecryption(for: sessionID)
}
func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState {
guard let timelineItem = timelineItems.firstUsingStableID(itemID) else {
fatalError("TimelineItem \(itemID) not found")
func retrySending(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
MXLog.error("Failed Retry Send: missing transaction ID")
return
}
guard let voiceMessageRoomTimelineItem = timelineItem as? VoiceMessageRoomTimelineItem else {
fatalError("Invalid TimelineItem type (expecting `VoiceMessageRoomTimelineItem` but found \(type(of: timelineItem)) instead")
}
if let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) {
return playerState
}
let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
duration: voiceMessageRoomTimelineItem.content.duration,
waveform: voiceMessageRoomTimelineItem.content.waveform)
mediaPlayerProvider.register(audioPlayerState: playerState)
return playerState
MXLog.info("Retry sending in \(roomID)")
await roomProxy.retrySend(transactionID: transactionID)
}
func playPauseAudio(for itemID: TimelineItemIdentifier) async {
MXLog.info("Toggle play/pause audio for itemID \(itemID)")
guard let timelineItem = timelineItems.firstUsingStableID(itemID) else {
fatalError("TimelineItem \(itemID) not found")
}
guard let voiceMessageRoomTimelineItem = timelineItem as? VoiceMessageRoomTimelineItem else {
fatalError("Invalid TimelineItem type for itemID \(itemID) (expecting `VoiceMessageRoomTimelineItem` but found \(type(of: timelineItem)) instead")
}
guard let source = voiceMessageRoomTimelineItem.content.source else {
MXLog.error("Cannot start voice message playback, source is not defined for itemID \(itemID)")
func cancelSending(itemID: TimelineItemIdentifier) async {
guard let transactionID = itemID.transactionID else {
MXLog.error("Failed Cancel Send: missing transaction ID")
return
}
guard case .success(let mediaPlayer) = mediaPlayerProvider.player(for: source), let audioPlayer = mediaPlayer as? AudioPlayerProtocol else {
MXLog.error("Cannot play a voice message without an audio player")
return
}
let audioPlayerState = audioPlayerState(for: itemID)
// Ensure this one is attached
if !audioPlayerState.isAttached {
audioPlayerState.attachAudioPlayer(audioPlayer)
}
// Detach all other states
await mediaPlayerProvider.detachAllStates(except: audioPlayerState)
guard audioPlayer.mediaSource == source, audioPlayer.state != .error else {
// Load content
do {
MXLog.info("Loading voice message audio content from source for itemID \(itemID)")
let url = try await voiceMessageMediaManager.loadVoiceMessageFromSource(source, body: nil)
// Make sure that the player is still attached, as it may have been detached while waiting for the voice message to be loaded.
if audioPlayerState.isAttached {
audioPlayer.load(mediaSource: source, using: url, autoplay: true)
}
} catch {
MXLog.error("Failed to load voice message: \(error)")
audioPlayerState.reportError(error)
}
return
}
if audioPlayer.state == .playing {
audioPlayer.pause()
} else {
audioPlayer.play()
}
}
func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async {
guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
return
}
await mediaPlayerProvider.detachAllStates(except: playerState)
await playerState.updateState(progress: progress)
MXLog.info("Cancelling send in \(roomID)")
await roomProxy.cancelSend(transactionID: transactionID)
}
// MARK: - Private
@ -331,37 +236,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
// Recompute all attributed strings on content size changes -> DynamicType support
updateTimelineItems()
}
private func displayMediaActionIfPossible(timelineItem: RoomTimelineItemProtocol) async -> RoomTimelineControllerAction {
var source: MediaSourceProxy?
var body: String
switch timelineItem {
case let item as ImageRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as VideoRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as FileRoomTimelineItem:
source = item.content.source
body = item.content.body
case let item as AudioRoomTimelineItem:
// For now we are just displaying audio messages with the File preview until we create a timeline player for them.
source = item.content.source
body = item.content.body
default:
return .none
}
guard let source else { return .none }
switch await mediaProvider.loadFileFromSource(source, body: body) {
case .success(let file):
return .displayMediaFile(file: file, title: body)
case .failure:
return .none
}
}
private func updateTimelineItems() {
var newTimelineItems = [RoomTimelineItemProtocol]()
@ -404,17 +278,6 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
lastEncryptedHistoryItemIndex = newTimelineItems.endIndex
}
// Stops the audio player when a voice message is redacted.
if timelineItem is RedactedRoomTimelineItem {
guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(timelineItem.id)) else {
continue
}
Task { @MainActor in
playerState.detachAudioPlayer()
mediaPlayerProvider.unregister(audioPlayerState: playerState)
}
}
newTimelineItems.append(timelineItem)
} else {
newTimelineItems.append(CollapsibleTimelineItem(items: items))

View File

@ -17,18 +17,11 @@
import Foundation
struct RoomTimelineControllerFactory: RoomTimelineControllerFactoryProtocol {
// swiftlint:disable:next function_parameter_count
func buildRoomTimelineController(roomProxy: RoomProxyProtocol,
timelineItemFactory: RoomTimelineItemFactoryProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
secureBackupController: SecureBackupControllerProtocol) -> RoomTimelineControllerProtocol {
RoomTimelineController(roomProxy: roomProxy,
timelineItemFactory: timelineItemFactory,
mediaProvider: mediaProvider,
mediaPlayerProvider: mediaPlayerProvider,
voiceMessageMediaManager: voiceMessageMediaManager,
appSettings: ServiceLocator.shared.settings,
secureBackupController: secureBackupController)
}

View File

@ -18,11 +18,7 @@ import Foundation
@MainActor
protocol RoomTimelineControllerFactoryProtocol {
// swiftlint:disable:next function_parameter_count
func buildRoomTimelineController(roomProxy: RoomProxyProtocol,
timelineItemFactory: RoomTimelineItemFactoryProtocol,
mediaProvider: MediaProviderProtocol,
mediaPlayerProvider: MediaPlayerProviderProtocol,
voiceMessageMediaManager: VoiceMessageMediaManagerProtocol,
secureBackupController: SecureBackupControllerProtocol) -> RoomTimelineControllerProtocol
}
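Because the removed parameters are interleaved with the kept ones in the hunk above, the slimmed-down factory requirement plausibly ends up as below. This is an inference from the hunk sizes and the commit message (the media provider, media player provider and voice message media manager all drop out of the timeline controller), not a verbatim quote of the new file:

// Inferred shape of the reduced factory requirement; the exact parameter list is an assumption.
func buildRoomTimelineController(roomProxy: RoomProxyProtocol,
                                 timelineItemFactory: RoomTimelineItemFactoryProtocol,
                                 secureBackupController: SecureBackupControllerProtocol) -> RoomTimelineControllerProtocol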

View File

@ -44,8 +44,6 @@ protocol RoomTimelineControllerProtocol {
func processItemAppearance(_ itemID: TimelineItemIdentifier) async
func processItemDisappearance(_ itemID: TimelineItemIdentifier) async
func processItemTap(_ itemID: TimelineItemIdentifier) async -> RoomTimelineControllerAction
func paginateBackwards(requestSize: UInt, untilNumberOfItems: UInt) async -> Result<Void, RoomTimelineControllerError>
@ -64,18 +62,14 @@ protocol RoomTimelineControllerProtocol {
func toggleReaction(_ reaction: String, to itemID: TimelineItemIdentifier) async
func redact(_ itemID: TimelineItemIdentifier) async
func cancelSend(_ itemID: TimelineItemIdentifier) async
func debugInfo(for itemID: TimelineItemIdentifier) -> TimelineItemDebugInfo
func retryDecryption(for sessionID: String) async
func audioPlayerState(for itemID: TimelineItemIdentifier) -> AudioPlayerState
func retrySending(itemID: TimelineItemIdentifier) async
func playPauseAudio(for itemID: TimelineItemIdentifier) async
func seekAudio(for itemID: TimelineItemIdentifier, progress: Double) async
func cancelSending(itemID: TimelineItemIdentifier) async
}
extension RoomTimelineControllerProtocol {

View File

@ -280,6 +280,7 @@ class MockScreen: Identifiable {
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -292,6 +293,7 @@ class MockScreen: Identifiable {
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -306,6 +308,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -320,6 +323,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -335,6 +339,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -352,6 +357,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -369,6 +375,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -386,6 +393,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -404,6 +412,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -421,6 +430,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -438,6 +448,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -455,6 +466,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)
@ -472,6 +484,7 @@ class MockScreen: Identifiable {
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
emojiProvider: EmojiProvider(),
completionSuggestionService: CompletionSuggestionServiceMock(configuration: .init()),
appSettings: ServiceLocator.shared.settings)

View File

@ -26,14 +26,16 @@ class PillContextTests: XCTestCase {
let proxyMock = RoomProxyMock(with: .init(displayName: "Test"))
let subject = CurrentValueSubject<[RoomMemberProxyProtocol], Never>([])
proxyMock.members = subject.asCurrentValuePublisher()
let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
let mock = RoomScreenViewModel(roomProxy: proxyMock,
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: proxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
let context = PillContext(roomContext: mock.context, data: PillTextAttachmentData(type: .user(userID: id), font: .preferredFont(forTextStyle: .body)))
XCTAssertFalse(context.viewState.isOwnMention)
@ -53,14 +55,16 @@ class PillContextTests: XCTestCase {
let proxyMock = RoomProxyMock(with: .init(displayName: "Test", ownUserID: id))
let subject = CurrentValueSubject<[RoomMemberProxyProtocol], Never>([])
proxyMock.members = subject.asCurrentValuePublisher()
let mock = RoomScreenViewModel(timelineController: MockRoomTimelineController(),
let mock = RoomScreenViewModel(roomProxy: proxyMock,
timelineController: MockRoomTimelineController(),
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: proxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
let context = PillContext(roomContext: mock.context, data: PillTextAttachmentData(type: .user(userID: id), font: .preferredFont(forTextStyle: .body)))
XCTAssertTrue(context.viewState.isOwnMention)
@ -73,14 +77,16 @@ class PillContextTests: XCTestCase {
let proxyMock = RoomProxyMock(with: .init(id: id, displayName: displayName, avatarURL: avatarURL))
let mockController = MockRoomTimelineController()
mockController.roomProxy = proxyMock
let mock = RoomScreenViewModel(timelineController: mockController,
let mock = RoomScreenViewModel(roomProxy: proxyMock,
timelineController: mockController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: proxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: ServiceLocator.shared.userIndicatorController,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
let context = PillContext(roomContext: mock.context, data: PillTextAttachmentData(type: .allUsers, font: .preferredFont(forTextStyle: .body)))
XCTAssertTrue(context.viewState.isOwnMention)

View File

@ -49,14 +49,16 @@ class RoomScreenViewModelTests: XCTestCase {
// When showing them in a timeline.
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = items
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "")),
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
// Then the messages should be grouped together.
XCTAssertEqual(viewModel.state.timelineViewState.itemViewStates[0].groupStyle, .first, "Nothing should prevent the first message from being grouped.")
@ -84,14 +86,16 @@ class RoomScreenViewModelTests: XCTestCase {
// When showing them in a timeline.
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = items
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "")),
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
// Then the messages should be grouped by sender.
XCTAssertEqual(viewModel.state.timelineViewState.itemViewStates[0].groupStyle, .single, "A message should not be grouped when the sender changes.")
@ -117,14 +121,16 @@ class RoomScreenViewModelTests: XCTestCase {
// When showing them in a timeline.
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = items
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "")),
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
// Then the first message should not be grouped but the other two should.
XCTAssertEqual(viewModel.state.timelineViewState.itemViewStates[0].groupStyle, .single, "When the first message has reactions it should not be grouped.")
@ -147,14 +153,16 @@ class RoomScreenViewModelTests: XCTestCase {
// When showing them in a timeline.
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = items
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "")),
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
// Then the first and second messages should be grouped and the last one should not.
XCTAssertEqual(viewModel.state.timelineViewState.itemViewStates[0].groupStyle, .first, "Nothing should prevent the first message from being grouped.")
@ -177,14 +185,16 @@ class RoomScreenViewModelTests: XCTestCase {
// When showing them in a timeline.
let timelineController = MockRoomTimelineController()
timelineController.timelineItems = items
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: RoomProxyMock(with: .init(displayName: "")),
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: RoomProxyMock(with: .init(displayName: "")),
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
// Then the messages should be grouped together.
XCTAssertEqual(viewModel.state.timelineViewState.itemViewStates[0].groupStyle, .first, "Nothing should prevent the first message from being grouped.")
@ -203,14 +213,16 @@ class RoomScreenViewModelTests: XCTestCase {
roomMemberMock.userID = "bob"
roomProxyMock.getMemberUserIDReturnValue = .success(roomMemberMock)
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.actions
.sink { action in
switch action {
@ -243,14 +255,16 @@ class RoomScreenViewModelTests: XCTestCase {
.success(roomMemberMock)
}
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.actions
.sink { action in
@ -284,14 +298,16 @@ class RoomScreenViewModelTests: XCTestCase {
.failure(.failedRetrievingMember)
}
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.actions
.sink { _ in
XCTFail("Should not receive any action")
@ -316,15 +332,18 @@ class RoomScreenViewModelTests: XCTestCase {
func testRetrySend() async throws {
let timelineController = MockRoomTimelineController()
let roomProxyMock = RoomProxyMock(with: .init(displayName: ""))
timelineController.roomProxy = roomProxyMock
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.context.send(viewAction: .retrySend(itemID: .init(timelineID: UUID().uuidString, transactionID: "test retry send id")))
@ -338,14 +357,16 @@ class RoomScreenViewModelTests: XCTestCase {
let timelineController = MockRoomTimelineController()
let roomProxyMock = RoomProxyMock(with: .init(displayName: ""))
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.context.send(viewAction: .retrySend(itemID: .random))
@ -357,15 +378,18 @@ class RoomScreenViewModelTests: XCTestCase {
func testCancelSend() async {
let timelineController = MockRoomTimelineController()
let roomProxyMock = RoomProxyMock(with: .init(displayName: ""))
timelineController.roomProxy = roomProxyMock
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.context.send(viewAction: .cancelSend(itemID: .init(timelineID: UUID().uuidString, transactionID: "test cancel send id")))
@ -379,14 +403,16 @@ class RoomScreenViewModelTests: XCTestCase {
let timelineController = MockRoomTimelineController()
let roomProxyMock = RoomProxyMock(with: .init(displayName: ""))
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxyMock,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxyMock,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default)
application: ApplicationMock.default,
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: NotificationCenterMock())
viewModel.context.send(viewAction: .cancelSend(itemID: .random))
@ -517,15 +543,16 @@ class RoomScreenViewModelTests: XCTestCase {
timelineController.timelineItems = items
timelineController.roomProxy = roomProxy
let viewModel = RoomScreenViewModel(timelineController: timelineController,
let viewModel = RoomScreenViewModel(roomProxy: roomProxy,
timelineController: timelineController,
mediaProvider: MockMediaProvider(),
mediaPlayerProvider: MediaPlayerProviderMock(),
roomProxy: roomProxy,
appSettings: ServiceLocator.shared.settings,
analytics: ServiceLocator.shared.analytics,
voiceMessageMediaManager: VoiceMessageMediaManagerMock(),
userIndicatorController: userIndicatorControllerMock,
application: ApplicationMock.default,
notificationCenterProtocol: notificationCenter)
appSettings: ServiceLocator.shared.settings,
analyticsService: ServiceLocator.shared.analytics,
notificationCenter: notificationCenter)
return (viewModel, roomProxy, timelineController, notificationCenter)
}

View File

@ -25,8 +25,8 @@ class TracingConfigurationTests: XCTestCase {
let filterComponents = configuration.filter.components(separatedBy: ",")
XCTAssertEqual(filterComponents.first, "info")
XCTAssertTrue(filterComponents.contains("matrix_sdk_base::sliding_sync=trace"))
XCTAssertTrue(filterComponents.contains("matrix_sdk::http_client=trace"))
XCTAssertTrue(filterComponents.contains("matrix_sdk_crypto=trace"))
XCTAssertTrue(filterComponents.contains("matrix_sdk::http_client=debug"))
XCTAssertTrue(filterComponents.contains("matrix_sdk_crypto=debug"))
XCTAssertTrue(filterComponents.contains("hyper=warn"))
}
}
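For reference, the updated assertions imply a tracing filter string roughly of this shape; component order and any targets not asserted on are assumptions, since the test only checks that the first component is "info" and that the listed targets are present:

// Assumed example of the comma-separated filter the test now expects.
info,hyper=warn,matrix_sdk_base::sliding_sync=trace,matrix_sdk::http_client=debug,matrix_sdk_crypto=debug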

Binary file not shown.