Reply to a poll (#2087)

* Refactor TimelineItemReplyDetails for polls

* Fix reply for polls

* Update reply details factory

* Revert Poll Hashable conformance

* PR comments
Alfonso Grillo 2023-11-15 16:57:34 +01:00 committed by GitHub
parent 0b9da83470
commit 9f1092b0b6
8 changed files with 179 additions and 150 deletions
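
The commit summary above boils down to one model change: reply details no longer carry an EventBasedMessageTimelineItemContentType directly, but a new TimelineEventContent wrapper that can also describe a poll. Below is a minimal, self-contained sketch of the resulting model; the two enums are copied from the TimelineItemReplyDetails diff further down, while TimelineItemSender and the message content type are simplified stand-ins rather than the project's real definitions.

struct TimelineItemSender: Hashable {
    let id: String
    var displayName: String? = nil
}

// Stand-in: the real EventBasedMessageTimelineItemContentType also has cases
// for audio, emote, file, image, notice, video, voice and location.
struct TextContent: Hashable { let body: String }

enum EventBasedMessageTimelineItemContentType: Hashable {
    case text(TextContent)
}

// New in this commit: a reply target is either a message or a poll question.
enum TimelineEventContent: Hashable {
    case message(EventBasedMessageTimelineItemContentType)
    case poll(question: String)
}

// The .loaded case now carries eventContent instead of contentType.
enum TimelineItemReplyDetails: Hashable {
    case notLoaded(eventID: String)
    case loading(eventID: String)
    case loaded(sender: TimelineItemSender, eventContent: TimelineEventContent)
    case error(eventID: String, message: String)
}

With this wrapper in place, call sites that previously passed contentType: now pass eventContent: .message(...), and a reply to a poll is represented as eventContent: .poll(question:).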

View File

@@ -32,7 +32,7 @@ struct MessageComposer: View {
let editCancellationAction: () -> Void
let onAppearAction: () -> Void
@FocusState private var focused: Bool
@State private var composerTranslation: CGFloat = 0
var body: some View {
@@ -40,7 +40,7 @@ struct MessageComposer: View {
if showResizeGrabber {
resizeGrabber
}
let borderRadius: CGFloat = 21
mainContent
.padding(.horizontal, 12.0)
@@ -60,11 +60,11 @@ struct MessageComposer: View {
}
.gesture(showResizeGrabber ? dragGesture : nil)
}
// MARK: - Private
@State private var composerFrame = CGRect.zero
private var mainContent: some View {
VStack(alignment: .leading, spacing: -6) {
header
@@ -187,34 +187,34 @@ struct MessageComposer_Previews: PreviewProvider, TestablePreview {
static let viewModel = RoomScreenViewModel.mock
static let replyTypes: [TimelineItemReplyDetails] = [
.loaded(sender: .init(id: "Dave"), contentType: .audio(.init(body: "Audio: Ride the lightning", duration: 100, waveform: nil, source: nil, contentType: nil))),
.loaded(sender: .init(id: "James"), contentType: .emote(.init(body: "Emote: James thinks he's the phantom lord"))),
.loaded(sender: .init(id: "Robert"), contentType: .file(.init(body: "File: Crash course in brain surgery.pdf", source: nil, thumbnailSource: nil, contentType: nil))),
.loaded(sender: .init(id: "Cliff"), contentType: .image(.init(body: "Image: Pushead",
source: .init(url: .picturesDirectory, mimeType: nil),
thumbnailSource: .init(url: .picturesDirectory, mimeType: nil)))),
.loaded(sender: .init(id: "Jason"), contentType: .notice(.init(body: "Notice: Too far gone?"))),
.loaded(sender: .init(id: "Kirk"), contentType: .text(.init(body: "Text: Where the wild things are"))),
.loaded(sender: .init(id: "Lars"), contentType: .video(.init(body: "Video: Through the never",
duration: 100,
source: nil,
thumbnailSource: .init(url: .picturesDirectory, mimeType: nil)))),
.loaded(sender: .init(id: "Dave"), eventContent: .message(.audio(.init(body: "Audio: Ride the lightning", duration: 100, waveform: nil, source: nil, contentType: nil)))),
.loaded(sender: .init(id: "James"), eventContent: .message(.emote(.init(body: "Emote: James thinks he's the phantom lord")))),
.loaded(sender: .init(id: "Robert"), eventContent: .message(.file(.init(body: "File: Crash course in brain surgery.pdf", source: nil, thumbnailSource: nil, contentType: nil)))),
.loaded(sender: .init(id: "Cliff"), eventContent: .message(.image(.init(body: "Image: Pushead",
source: .init(url: .picturesDirectory, mimeType: nil),
thumbnailSource: .init(url: .picturesDirectory, mimeType: nil))))),
.loaded(sender: .init(id: "Jason"), eventContent: .message(.notice(.init(body: "Notice: Too far gone?")))),
.loaded(sender: .init(id: "Kirk"), eventContent: .message(.text(.init(body: "Text: Where the wild things are")))),
.loaded(sender: .init(id: "Lars"), eventContent: .message(.video(.init(body: "Video: Through the never",
duration: 100,
source: nil,
thumbnailSource: .init(url: .picturesDirectory, mimeType: nil))))),
.loading(eventID: "")
]
static func messageComposer(_ content: String = "",
sendingDisabled: Bool = false,
mode: RoomScreenComposerMode = .default) -> MessageComposer {
let viewModel = WysiwygComposerViewModel(minHeight: 22,
maxExpandedHeight: 250)
viewModel.setMarkdownContent(content)
let composerView = WysiwygComposerView(placeholder: L10n.richTextEditorComposerPlaceholder,
viewModel: viewModel,
itemProviderHelper: nil,
keyCommandHandler: nil,
pasteHandler: nil)
return MessageComposer(composerView: composerView,
mode: mode,
showResizeGrabber: false,
@@ -225,20 +225,21 @@ struct MessageComposer_Previews: PreviewProvider, TestablePreview {
editCancellationAction: { },
onAppearAction: { viewModel.setup() })
}
static var previews: some View {
VStack(spacing: 8) {
messageComposer(sendingDisabled: true)
messageComposer("Some message",
mode: .edit(originalItemId: .random))
messageComposer(mode: .reply(itemID: .random,
replyDetails: .loaded(sender: .init(id: "Kirk"),
contentType: .text(.init(body: "Text: Where the wild things are"))), isThread: false))
eventContent: .message(.text(.init(body: "Text: Where the wild things are")))),
isThread: false))
}
.padding(.horizontal)
ScrollView {
VStack(spacing: 8) {
ForEach(replyTypes, id: \.self) { replyDetails in

View File

@@ -232,8 +232,8 @@ class RoomScreenInteractionHandler {
}
case .reply:
let replyInfo = buildReplyInfo(for: eventTimelineItem)
let replyDetails = TimelineItemReplyDetails.loaded(sender: eventTimelineItem.sender, contentType: replyInfo.type)
let replyDetails = TimelineItemReplyDetails.loaded(sender: eventTimelineItem.sender, eventContent: replyInfo.type)
actionsSubject.send(.composer(action: .setMode(mode: .reply(itemID: eventTimelineItem.id, replyDetails: replyDetails, isThread: replyInfo.isThread))))
case .forward(let itemID):
actionsSubject.send(.displayMessageForwarding(itemID: itemID))
@@ -596,11 +596,14 @@ class RoomScreenInteractionHandler {
}
private func buildReplyInfo(for item: EventBasedTimelineItemProtocol) -> ReplyInfo {
guard let messageItem = item as? EventBasedMessageTimelineItemProtocol else {
return .init(type: .text(.init(body: item.body)), isThread: false)
switch item {
case let messageItem as EventBasedMessageTimelineItemProtocol:
return .init(type: .message(messageItem.contentType), isThread: messageItem.isThreaded)
case let pollItem as PollRoomTimelineItem:
return .init(type: .poll(question: pollItem.poll.question), isThread: false)
default:
return .init(type: .message(.text(.init(body: item.body))), isThread: false)
}
return .init(type: messageItem.contentType, isThread: messageItem.isThreaded)
}
private func openSystemSettings() {
@@ -611,7 +614,7 @@ class RoomScreenInteractionHandler {
private func displayMediaActionIfPossible(timelineItem: RoomTimelineItemProtocol) async -> RoomTimelineControllerAction {
var source: MediaSourceProxy?
var body: String
switch timelineItem {
case let item as ImageRoomTimelineItem:
source = item.content.source
@@ -657,6 +660,6 @@ class RoomScreenInteractionHandler {
}
private struct ReplyInfo {
let type: EventBasedMessageTimelineItemContentType
let type: TimelineEventContent
let isThread: Bool
}

View File

@@ -31,48 +31,56 @@ struct TimelineReplyView: View {
switch timelineItemReplyDetails {
case .loaded(let sender, let content):
switch content {
case .audio(let content):
case .message(let content):
switch content {
case .audio(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: .init(kind: .systemIcon("waveform"), cornerRadii: iconCornerRadii))
case .emote(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .file(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: .init(kind: .iconAsset(Asset.Images.document), cornerRadii: iconCornerRadii))
case .image(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: .init(kind: .mediaSource(content.thumbnailSource ?? content.source), cornerRadii: iconCornerRadii))
case .notice(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .text(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .video(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: content.thumbnailSource.map { .init(kind: .mediaSource($0), cornerRadii: iconCornerRadii) })
case .voice:
ReplyView(sender: sender,
plainBody: L10n.commonVoiceMessage,
formattedBody: nil,
icon: .init(kind: .icon(\.micOnOutline), cornerRadii: iconCornerRadii))
case .location:
ReplyView(sender: sender,
plainBody: L10n.commonSharedLocation,
formattedBody: nil,
icon: .init(kind: .iconAsset(Asset.Images.addLocation), cornerRadii: iconCornerRadii))
}
case .poll(let question):
ReplyView(sender: sender,
plainBody: content.body,
plainBody: question,
formattedBody: nil,
icon: .init(kind: .systemIcon("waveform"), cornerRadii: iconCornerRadii))
case .emote(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .file(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: .init(kind: .iconAsset(Asset.Images.document), cornerRadii: iconCornerRadii))
case .image(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: .init(kind: .mediaSource(content.thumbnailSource ?? content.source), cornerRadii: iconCornerRadii))
case .notice(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .text(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: content.formattedBody)
case .video(let content):
ReplyView(sender: sender,
plainBody: content.body,
formattedBody: nil,
icon: content.thumbnailSource.map { .init(kind: .mediaSource($0), cornerRadii: iconCornerRadii) })
case .voice:
ReplyView(sender: sender,
plainBody: L10n.commonVoiceMessage,
formattedBody: nil,
icon: .init(kind: .icon(\.micOnOutline), cornerRadii: iconCornerRadii))
case .location:
ReplyView(sender: sender,
plainBody: L10n.commonSharedLocation,
formattedBody: nil,
icon: .init(kind: .iconAsset(Asset.Images.addLocation), cornerRadii: iconCornerRadii))
icon: .init(kind: .iconAsset(Asset.Images.timelinePoll), cornerRadii: iconCornerRadii))
}
default:
LoadingReplyView()
@@ -223,7 +231,7 @@ struct TimelineReplyView_Previews: PreviewProvider, TestablePreview {
static var previewItems: [TimelineReplyView] {
let imageSource = MediaSourceProxy(url: "https://mock.com", mimeType: "image/png")
return [
TimelineReplyView(placement: .timeline, timelineItemReplyDetails: .notLoaded(eventID: "")),
@@ -231,60 +239,60 @@ struct TimelineReplyView_Previews: PreviewProvider, TestablePreview {
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "This is a reply")))),
eventContent: .message(.text(.init(body: "This is a reply"))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .emote(.init(body: "says hello")))),
eventContent: .message(.emote(.init(body: "says hello"))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Bob"),
contentType: .notice(.init(body: "Hello world")))),
eventContent: .message(.notice(.init(body: "Hello world"))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .audio(.init(body: "Some audio",
duration: 0,
waveform: nil,
source: nil,
contentType: nil)))),
eventContent: .message(.audio(.init(body: "Some audio",
duration: 0,
waveform: nil,
source: nil,
contentType: nil))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .file(.init(body: "Some file",
source: nil,
thumbnailSource: nil,
contentType: nil)))),
eventContent: .message(.file(.init(body: "Some file",
source: nil,
thumbnailSource: nil,
contentType: nil))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .image(.init(body: "Some image",
source: imageSource,
thumbnailSource: imageSource)))),
eventContent: .message(.image(.init(body: "Some image",
source: imageSource,
thumbnailSource: imageSource))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .video(.init(body: "Some video",
duration: 0,
source: nil,
thumbnailSource: imageSource)))),
eventContent: .message(.video(.init(body: "Some video",
duration: 0,
source: nil,
thumbnailSource: imageSource))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .location(.init(body: "")))),
eventContent: .message(.location(.init(body: ""))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .voice(.init(body: "Some voice message",
duration: 0,
waveform: nil,
source: nil,
contentType: nil)))),
eventContent: .message(.voice(.init(body: "Some voice message",
duration: 0,
waveform: nil,
source: nil,
contentType: nil))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Bob"),
contentType: .notice(.init(body: "", formattedBody: attributedStringWithMention)))),
eventContent: .message(.notice(.init(body: "", formattedBody: attributedStringWithMention))))),
TimelineReplyView(placement: .timeline,
timelineItemReplyDetails: .loaded(sender: .init(id: "", displayName: "Bob"),
contentType: .notice(.init(body: "", formattedBody: attributedStringWithAtRoomMention))))
eventContent: .message(.notice(.init(body: "", formattedBody: attributedStringWithAtRoomMention)))))
]
}

View File

@@ -421,7 +421,8 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: "whoever"),
content: .init(body: "A long message that should be on multiple lines."),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))), groupStyle: .single))
eventContent: .message(.text(.init(body: "Short"))))),
groupStyle: .single))
AudioRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
@@ -436,7 +437,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
FileRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
@@ -450,7 +451,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
thumbnailSource: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
ImageRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
isOutgoing: true,
@@ -460,7 +461,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: ""),
content: .init(body: "Some image", source: MediaSourceProxy(url: .picturesDirectory, mimeType: "image/png"), thumbnailSource: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
LocationRoomTimelineView(timelineItem: .init(id: .random,
timestamp: "Now",
isOutgoing: false,
@@ -473,7 +474,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
longitude: 12.496366),
description: "Location description description description description description description description description"),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
LocationRoomTimelineView(timelineItem: .init(id: .random,
timestamp: "Now",
isOutgoing: false,
@@ -484,7 +485,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
content: .init(body: "Fallback geo uri description",
geoURI: .init(latitude: 41.902782, longitude: 12.496366), description: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
VoiceMessageRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
@@ -499,7 +500,7 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))),
eventContent: .message(.text(.init(body: "Short"))))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
@@ -530,7 +531,8 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: "whoever"),
content: .init(body: "A long message that should be on multiple lines."),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))), groupStyle: .single))
eventContent: .message(.text(.init(body: "Short"))))),
groupStyle: .single))
RoomTimelineItemView(viewState: .init(item: TextRoomTimelineItem(id: .init(timelineID: ""),
timestamp: "10:42",
@@ -541,7 +543,8 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
sender: .init(id: "whoever"),
content: .init(body: "Short message"),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "A long message that should be on more than 2 lines and so will be clipped by the layout.")))), groupStyle: .single))
eventContent: .message(.text(.init(body: "A long message that should be on more than 2 lines and so will be clipped by the layout."))))),
groupStyle: .single))
}
.environmentObject(viewModel.context)
}

View File

@@ -156,8 +156,9 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
sender: .init(id: "whoever"),
content: .init(body: "A long message that should be on multiple lines."),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))), groupStyle: .single))
eventContent: .message(.text(.init(body: "Short"))))),
groupStyle: .single))
AudioRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
isOutgoing: true,
@@ -171,7 +172,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
FileRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
isOutgoing: false,
@@ -184,7 +185,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
thumbnailSource: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
ImageRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
isOutgoing: true,
@@ -194,7 +195,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
sender: .init(id: ""),
content: .init(body: "Some image", source: MediaSourceProxy(url: .picturesDirectory, mimeType: "image/png"), thumbnailSource: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
LocationRoomTimelineView(timelineItem: .init(id: .random,
timestamp: "Now",
isOutgoing: false,
@@ -207,7 +208,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
longitude: 12.496366),
description: "Location description description description description description description description description"),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
LocationRoomTimelineView(timelineItem: .init(id: .random,
timestamp: "Now",
isOutgoing: false,
@@ -218,7 +219,7 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
content: .init(body: "Fallback geo uri description",
geoURI: .init(latitude: 41.902782, longitude: 12.496366), description: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))))
eventContent: .message(.text(.init(body: "Short"))))))
VoiceMessageRoomTimelineView(timelineItem: .init(id: .init(timelineID: ""),
timestamp: "10:42",
isOutgoing: true,
@@ -232,12 +233,12 @@ struct TimelineItemPlainStylerView_Previews: PreviewProvider, TestablePreview {
source: nil,
contentType: nil),
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
contentType: .text(.init(body: "Short")))),
eventContent: .message(.text(.init(body: "Short"))))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.init(timelineID: "")), duration: 10, waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}
static var previews: some View {
VStack(alignment: .leading, spacing: 0) {
ForEach(1..<MockRoomTimelineController().timelineItems.count, id: \.self) { index in

View File

@@ -136,6 +136,6 @@ struct LocationRoomTimelineView_Previews: PreviewProvider, TestablePreview {
content: .init(body: "Fallback geo uri description",
geoURI: .init(latitude: 41.902782, longitude: 12.496366), description: "Location description description description description description description description description"),
replyDetails: .loaded(sender: .init(id: "Someone"),
contentType: .text(.init(body: "The thread content goes 'ere.")))))
eventContent: .message(.text(.init(body: "The thread content goes 'ere."))))))
}
}

View File

@@ -19,6 +19,11 @@ import Foundation
enum TimelineItemReplyDetails: Hashable {
case notLoaded(eventID: String)
case loading(eventID: String)
case loaded(sender: TimelineItemSender, contentType: EventBasedMessageTimelineItemContentType)
case loaded(sender: TimelineItemSender, eventContent: TimelineEventContent)
case error(eventID: String, message: String)
}
enum TimelineEventContent: Hashable {
case message(EventBasedMessageTimelineItemContentType)
case poll(question: String)
}
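
A short usage sketch of the enums above, reusing the stand-in types from the sketch near the top of this page; the helper function, sender IDs and strings are purely illustrative, the real consumers being TimelineReplyView and RoomTimelineItemFactory.

// Hypothetical helper showing how call sites branch on the new case;
// the real rendering lives in TimelineReplyView.
func replyPreviewText(for details: TimelineItemReplyDetails) -> String {
    switch details {
    case .loaded(_, .message(.text(let content))):
        return content.body       // message replies preview the body
    case .loaded(_, .poll(let question)):
        return question           // poll replies preview the question
    case .error(_, let message):
        return message
    default:                      // .notLoaded / .loading
        return "Loading…"
    }
}

// Building details for a reply to a poll, as the factory now does…
let pollReply: TimelineItemReplyDetails =
    .loaded(sender: .init(id: "@alice:example.org"),
            eventContent: .poll(question: "Which song should open the set?"))

// …and for a reply to an ordinary text message.
let textReply: TimelineItemReplyDetails =
    .loaded(sender: .init(id: "@bob:example.org"),
            eventContent: .message(.text(.init(body: "Short"))))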

View File

@@ -628,47 +628,55 @@ struct RoomTimelineItemFactory: RoomTimelineItemFactoryProtocol {
avatarURL: nil)
}
let replyContent: EventBasedMessageTimelineItemContentType
let replyContent: TimelineEventContent
switch timelineItem.kind() {
case .message:
switch timelineItem.asMessage()?.msgtype() {
case .audio(let content):
if content.voice != nil {
replyContent = .voice(buildAudioTimelineItemContent(content))
} else {
replyContent = .audio(buildAudioTimelineItemContent(content))
}
case .emote(let content):
replyContent = .emote(buildEmoteTimelineItemContent(senderDisplayName: sender.displayName, senderID: sender.id, messageContent: content))
case .file(let content):
replyContent = .file(buildFileTimelineItemContent(content))
case .image(let content):
replyContent = .image(buildImageTimelineItemContent(content))
case .notice(let content):
replyContent = .notice(buildNoticeTimelineItemContent(content))
case .text(let content):
replyContent = .text(buildTextTimelineItemContent(content))
case .video(let content):
replyContent = .video(buildVideoTimelineItemContent(content))
case .location(let content):
replyContent = .location(buildLocationTimelineItemContent(content))
case .other, .none:
replyContent = .text(.init(body: L10n.commonUnsupportedEvent))
}
return timelineItemReplyDetails(for: timelineItem.asMessage()?.msgtype(), sender: sender)
case .poll(let question, _, _, _, _, _):
replyContent = .text(.init(body: question))
replyContent = .poll(question: question)
case .sticker(let body, _, _):
replyContent = .text(.init(body: body))
replyContent = .message(.text(.init(body: body)))
default:
replyContent = .text(.init(body: L10n.commonUnsupportedEvent))
replyContent = .message(.text(.init(body: L10n.commonUnsupportedEvent)))
}
return .loaded(sender: sender, contentType: replyContent)
return .loaded(sender: sender, eventContent: replyContent)
case let .error(message):
return .error(eventID: details.eventId, message: message)
}
}
private func timelineItemReplyDetails(for messageType: MessageType?, sender: TimelineItemSender) -> TimelineItemReplyDetails {
let replyContent: EventBasedMessageTimelineItemContentType
switch messageType {
case .audio(let content):
if content.voice != nil {
replyContent = .voice(buildAudioTimelineItemContent(content))
} else {
replyContent = .audio(buildAudioTimelineItemContent(content))
}
case .emote(let content):
replyContent = .emote(buildEmoteTimelineItemContent(senderDisplayName: sender.displayName, senderID: sender.id, messageContent: content))
case .file(let content):
replyContent = .file(buildFileTimelineItemContent(content))
case .image(let content):
replyContent = .image(buildImageTimelineItemContent(content))
case .notice(let content):
replyContent = .notice(buildNoticeTimelineItemContent(content))
case .text(let content):
replyContent = .text(buildTextTimelineItemContent(content))
case .video(let content):
replyContent = .video(buildVideoTimelineItemContent(content))
case .location(let content):
replyContent = .location(buildLocationTimelineItemContent(content))
case .other, .none:
replyContent = .text(.init(body: L10n.commonUnsupportedEvent))
}
return .loaded(sender: sender, eventContent: .message(replyContent))
}
}
private extension LocationRoomTimelineItemContent.AssetType {