Remove feature flag for voice messages (#1964)

* Delete voice messages feature flag

* Fix preview tests

* Update snapshots
This commit is contained in:
Alfonso Grillo 2023-10-27 13:45:15 +02:00 committed by GitHub
parent adfe855b99
commit acfc564490
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
68 changed files with 128 additions and 149 deletions

View File

@@ -42,7 +42,6 @@ final class AppSettings {
case userSuggestionsEnabled
case readReceiptsEnabled
case swiftUITimelineEnabled
case voiceMessageEnabled
case mentionsEnabled
case appLockFlowEnabled
case elementCallEnabled
@@ -270,9 +269,6 @@ final class AppSettings {
@UserPreference(key: UserDefaultsKeys.swiftUITimelineEnabled, defaultValue: false, storageType: .volatile)
var swiftUITimelineEnabled
@UserPreference(key: UserDefaultsKeys.voiceMessageEnabled, defaultValue: false, storageType: .userDefaults(store))
var voiceMessageEnabled
@UserPreference(key: UserDefaultsKeys.mentionsEnabled, defaultValue: false, storageType: .userDefaults(store))
var mentionsEnabled

View File

@@ -69,8 +69,6 @@ struct ComposerToolbarViewState: BindableState {
var composerEmpty = true
var areSuggestionsEnabled = true
var suggestions: [SuggestionItem] = []
var enableVoiceMessageComposer: Bool
var audioPlayerState: AudioPlayerState
var audioRecorderState: AudioRecorderState
@@ -92,11 +90,7 @@ struct ComposerToolbarViewState: BindableState {
case .previewVoiceMessage:
return true
default:
if enableVoiceMessageComposer {
return !composerEmpty
} else {
return true
}
return !composerEmpty
}
}

View File

@@ -46,7 +46,6 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
self.appSettings = appSettings
super.init(initialViewState: ComposerToolbarViewState(areSuggestionsEnabled: completionSuggestionService.areSuggestionsEnabled,
enableVoiceMessageComposer: appSettings.voiceMessageEnabled,
audioPlayerState: .init(id: .recorderPreview, duration: 0),
audioRecorderState: .init(),
bindings: .init()),

View File

@@ -85,7 +85,7 @@ struct ComposerToolbar: View {
} else if context.viewState.showSendButton {
sendButton
.padding(.leading, 3)
} else if context.viewState.enableVoiceMessageComposer {
} else {
voiceMessageRecordingButton
.background {
ViewFrameReader(frame: $voiceMessageRecordingButtonFrame, coordinateSpace: .global)
@@ -246,10 +246,10 @@ struct ComposerToolbar: View {
private var voiceMessageContent: some View {
// Display the voice message composer above to keep the focus and keep the keyboard open if it's already open.
switch context.viewState.composerMode {
case .recordVoiceMessage(let state) where context.viewState.enableVoiceMessageComposer:
case .recordVoiceMessage(let state):
VoiceMessageRecordingComposer(recorderState: state)
.padding(.leading, 12)
case .previewVoiceMessage(let state, let waveform, let isUploading) where context.viewState.enableVoiceMessageComposer:
case .previewVoiceMessage(let state, let waveform, let isUploading):
topBarLayout {
voiceMessageTrashButton
voiceMessagePreviewComposer(audioPlayerState: state, waveform: waveform)
@@ -380,7 +380,6 @@ extension ComposerToolbar {
appSettings: ServiceLocator.shared.settings,
mentionDisplayHelper: ComposerMentionDisplayHelper.mock)
model.state.composerEmpty = focused
model.state.enableVoiceMessageComposer = true
return model
}
return ComposerToolbar(context: composerViewModel.context,
@@ -397,7 +396,6 @@ extension ComposerToolbar {
appSettings: ServiceLocator.shared.settings,
mentionDisplayHelper: ComposerMentionDisplayHelper.mock)
model.state.composerMode = .recordVoiceMessage(state: AudioRecorderState())
model.state.enableVoiceMessageComposer = true
return model
}
return ComposerToolbar(context: composerViewModel.context,
@@ -415,7 +413,6 @@ extension ComposerToolbar {
appSettings: ServiceLocator.shared.settings,
mentionDisplayHelper: ComposerMentionDisplayHelper.mock)
model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: uploading)
model.state.enableVoiceMessageComposer = true
return model
}
return ComposerToolbar(context: composerViewModel.context,

View File

@@ -49,7 +49,6 @@ protocol DeveloperOptionsProtocol: AnyObject {
var userSuggestionsEnabled: Bool { get set }
var readReceiptsEnabled: Bool { get set }
var swiftUITimelineEnabled: Bool { get set }
var voiceMessageEnabled: Bool { get set }
var mentionsEnabled: Bool { get set }
var appLockFlowEnabled: Bool { get set }
var elementCallEnabled: Bool { get set }

View File

@@ -74,12 +74,6 @@ struct DeveloperOptionsScreen: View {
}
}
Section("Voice message") {
Toggle(isOn: $context.voiceMessageEnabled) {
Text("Enable voice messages")
}
}
Section {
Button {
showConfetti = true

View File

@@ -98,7 +98,7 @@ struct RoomTimelineItemFactory: RoomTimelineItemFactoryProtocol {
case .emote(content: let content):
return buildEmoteTimelineItem(for: eventItemProxy, messageTimelineItem, content, isOutgoing, isThreaded)
case .audio(let content):
if appSettings.voiceMessageEnabled, content.voice != nil {
if content.voice != nil {
return buildVoiceTimelineItem(for: eventItemProxy, messageTimelineItem, content, isOutgoing, isThreaded)
} else {
return buildAudioTimelineItem(for: eventItemProxy, messageTimelineItem, content, isOutgoing, isThreaded)
@@ -634,7 +634,7 @@ struct RoomTimelineItemFactory: RoomTimelineItemFactoryProtocol {
case .message:
switch timelineItem.asMessage()?.msgtype() {
case .audio(let content):
if appSettings.voiceMessageEnabled, content.voice != nil {
if content.voice != nil {
replyContent = .voice(buildAudioTimelineItemContent(content))
} else {
replyContent = .audio(buildAudioTimelineItemContent(content))

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.