Fix: Keep the progress indicator visible after pausing or scrubbing. #1969

Merged 5 commits on Oct 27, 2023
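
The page preserves no pull-request description, so what follows is an editor's condensed sketch of the mechanism the diff below introduces: AudioPlayerState gains a published showProgressIndicator flag that is raised when playback starts or the user seeks/scrubs, stays raised while playback is paused or stopped, and is cleared only when playback finishes or the player is detached. Views then drive the waveform cursor from this flag instead of from playbackState == .playing. The class below is an illustrative reduction written for this summary, not code from the repository; only the property name and the transitions mirror the diff.

import Combine

// Illustrative reduction of the new AudioPlayerState behaviour (not the real class).
final class AudioPlayerStateSketch: ObservableObject {
    enum PlaybackState { case stopped, playing, error }

    @Published private(set) var playbackState: PlaybackState = .stopped
    @Published private(set) var progress = 0.0
    // New flag: the waveform cursor is shown whenever this is true,
    // no longer only while playbackState == .playing.
    @Published private(set) var showProgressIndicator = false

    func didStartPlaying() {
        playbackState = .playing
        showProgressIndicator = true
    }

    func didPauseOrStop() {
        // Pausing or stopping no longer hides the cursor.
        playbackState = .stopped
    }

    func didFinishPlaying() {
        playbackState = .stopped
        progress = 0.0
        showProgressIndicator = false
    }

    func updateState(progress: Double) {
        // Seeking or scrubbing also reveals the cursor.
        self.progress = max(0.0, min(progress, 1.0))
        showProgressIndicator = true
    }

    func detachAudioPlayer() {
        playbackState = .stopped
        showProgressIndicator = false
    }
}

In the diff, VoiceMessagePreviewComposer and VoiceMessageRoomPlaybackView read this flag through the showCursor: parameter, and the composer additionally reports scrubbing so that RoomScreenViewModel can pause playback during a drag and resume it afterwards.
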
@@ -35,11 +35,6 @@ private struct WaveformInteractionModifier: ViewModifier {
     func body(content: Content) -> some View {
         GeometryReader { geometry in
             content
-                .gesture(SpatialTapGesture()
-                    .onEnded { tapGesture in
-                        let progress = tapGesture.location.x / geometry.size.width
-                        onSeek(max(0, min(progress, 1.0)))
-                    })
                 .progressCursor(progress: progress) {
                     WaveformCursorView(color: .compound.iconAccentTertiary)
                         .frame(width: cursorVisibleWidth, height: cursorVisibleHeight)
@@ -56,6 +51,11 @@
                         )
                         .offset(x: -cursorInteractiveSize / 2, y: 0)
                 }
+                .gesture(SpatialTapGesture()
+                    .onEnded { tapGesture in
+                        let progress = tapGesture.location.x / geometry.size.width
+                        onSeek(max(0, min(progress, 1.0)))
+                    })
         }
         .coordinateSpace(name: Self.namespaceName)
         .animation(nil, value: progress)

@@ -37,6 +37,7 @@ enum ComposerToolbarViewModelAction {
     case deleteVoiceMessageRecording
     case startVoiceMessagePlayback
     case pauseVoiceMessagePlayback
+    case scrubVoiceMessagePlayback(scrubbing: Bool)
     case seekVoiceMessagePlayback(progress: Double)
     case sendVoiceMessage
 }
@@ -61,6 +62,7 @@ enum ComposerToolbarViewAction {
     case deleteVoiceMessageRecording
     case startVoiceMessagePlayback
     case pauseVoiceMessagePlayback
+    case scrubVoiceMessagePlayback(scrubbing: Bool)
     case seekVoiceMessagePlayback(progress: Double)
 }

@@ -155,6 +155,8 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
             actionsSubject.send(.pauseVoiceMessagePlayback)
         case .seekVoiceMessagePlayback(let progress):
             actionsSubject.send(.seekVoiceMessagePlayback(progress: progress))
+        case .scrubVoiceMessagePlayback(let scrubbing):
+            actionsSubject.send(.scrubVoiceMessagePlayback(scrubbing: scrubbing))
         }
     }
 
@@ -325,6 +325,8 @@ struct ComposerToolbar: View {
                 context.send(viewAction: .pauseVoiceMessagePlayback)
             } onSeek: { progress in
                 context.send(viewAction: .seekVoiceMessagePlayback(progress: progress))
+            } onScrubbing: { isScrubbing in
+                context.send(viewAction: .scrubVoiceMessagePlayback(scrubbing: isScrubbing))
             }
         }
     }

@@ -30,6 +30,7 @@ struct VoiceMessagePreviewComposer: View {
     let onPlay: () -> Void
     let onPause: () -> Void
     let onSeek: (Double) -> Void
+    let onScrubbing: (Bool) -> Void
 
     var timeLabelContent: String {
         // Display the duration if progress is 0.0
@@ -39,10 +40,6 @@
         return DateFormatter.elapsedTimeFormatter.string(from: elapsed)
     }
 
-    var showWaveformCursor: Bool {
-        playerState.playbackState == .playing || isDragging
-    }
-
     var body: some View {
         HStack {
             HStack {
@@ -60,9 +57,12 @@
                 waveformView
                     .waveformInteraction(isDragging: $isDragging,
                                          progress: playerState.progress,
-                                         showCursor: showWaveformCursor,
+                                         showCursor: playerState.showProgressIndicator,
                                          onSeek: onSeek)
             }
+            .onChange(of: isDragging) { isDragging in
+                onScrubbing(isDragging)
+            }
             .padding(.vertical, 4.0)
             .padding(.horizontal, 6.0)
             .background {
@@ -133,7 +133,7 @@ struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
 
     static var previews: some View {
         VStack {
-            VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in })
+            VoiceMessagePreviewComposer(playerState: playerState, waveform: .data(waveformData), onPlay: { }, onPause: { }, onSeek: { _ in }, onScrubbing: { _ in })
                 .fixedSize(horizontal: false, vertical: true)
         }
     }

ElementX/Sources/Screens/RoomScreen/RoomScreenViewModel.swift (24 additions, 5 deletions)
@@ -41,6 +41,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     private let actionsSubject: PassthroughSubject<RoomScreenViewModelAction, Never> = .init()
     private var canCurrentUserRedact = false
     private var paginateBackwardsTask: Task<Void, Never>?
+    private var resumeVoiceMessagePlaybackAfterScrubbing = false
 
     init(timelineController: RoomTimelineControllerProtocol,
         mediaProvider: MediaProviderProtocol,
@@ -208,14 +209,13 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
         case .sendVoiceMessage:
             Task { await sendCurrentVoiceMessage() }
         case .startVoiceMessagePlayback:
-            Task {
-                await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
-                await startPlayingRecordedVoiceMessage()
-            }
+            Task { await startPlayingRecordedVoiceMessage() }
         case .pauseVoiceMessagePlayback:
             pausePlayingRecordedVoiceMessage()
         case .seekVoiceMessagePlayback(let progress):
             Task { await seekRecordedVoiceMessage(to: progress) }
+        case .scrubVoiceMessagePlayback(let scrubbing):
+            Task { await scrubVoiceMessagePlayback(scrubbing: scrubbing) }
         }
     }
 
@@ -349,7 +349,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
         }
 
         switch await timelineController.sendReadReceipt(for: eventItemID) {
-        case .success():
+        case .success:
             break
         case let .failure(error):
             MXLog.error("[TimelineViewController] Failed to send read receipt: \(error)")
@@ -1015,6 +1015,7 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     }
 
     private func startPlayingRecordedVoiceMessage() async {
+        await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
         if case .failure(let error) = await voiceMessageRecorder.startPlayback() {
             MXLog.error("failed to play recorded voice message. \(error)")
         }
@@ -1025,9 +1026,27 @@ class RoomScreenViewModel: RoomScreenViewModelType, RoomScreenViewModelProtocol
     }
 
     private func seekRecordedVoiceMessage(to progress: Double) async {
+        await mediaPlayerProvider.detachAllStates(except: voiceMessageRecorder.previewAudioPlayerState)
         await voiceMessageRecorder.seekPlayback(to: progress)
     }
 
+    private func scrubVoiceMessagePlayback(scrubbing: Bool) async {
+        guard let audioPlayerState = voiceMessageRecorder.previewAudioPlayerState else {
+            return
+        }
+        if scrubbing {
+            if audioPlayerState.playbackState == .playing {
+                resumeVoiceMessagePlaybackAfterScrubbing = true
+                pausePlayingRecordedVoiceMessage()
+            }
+        } else {
+            if resumeVoiceMessagePlaybackAfterScrubbing {
+                resumeVoiceMessagePlaybackAfterScrubbing = false
+                await startPlayingRecordedVoiceMessage()
+            }
+        }
+    }
+
     private func openSystemSettings() {
         guard let url = URL(string: UIApplication.openSettingsURLString) else { return }
         application.open(url)

ElementX/Sources/Services/Audio/Player/AudioPlayerState.swift (15 additions, 1 deletion)
@@ -38,6 +38,7 @@ class AudioPlayerState: ObservableObject, Identifiable {
     let waveform: EstimatedWaveform
     @Published private(set) var playbackState: AudioPlayerPlaybackState
     @Published private(set) var progress: Double
+    @Published private(set) var showProgressIndicator: Bool
 
     private weak var audioPlayer: AudioPlayerProtocol?
     private var cancellables: Set<AnyCancellable> = []
@@ -60,6 +61,7 @@
         self.duration = duration
         self.waveform = waveform ?? EstimatedWaveform(data: [])
         self.progress = progress
+        showProgressIndicator = false
         playbackState = .stopped
     }
 
@@ -71,8 +73,17 @@
     func updateState(progress: Double) async {
         let progress = max(0.0, min(progress, 1.0))
         self.progress = progress
+        showProgressIndicator = true
         if let audioPlayer {
+            var shouldResumeProgressPublishing = false
+            if audioPlayer.state == .playing {
+                shouldResumeProgressPublishing = true
+                stopPublishProgress()
+            }
             await audioPlayer.seek(to: progress)
+            if shouldResumeProgressPublishing, audioPlayer.state == .playing {
+                startPublishProgress()
+            }
         }
     }
 
@@ -86,12 +97,12 @@
     }
 
     func detachAudioPlayer() {
-        guard audioPlayer != nil else { return }
         audioPlayer?.stop()
         stopPublishProgress()
         cancellables = []
         audioPlayer = nil
         playbackState = .stopped
+        showProgressIndicator = false
     }
 
     func reportError(_ error: Error) {
@@ -127,14 +138,17 @@
             }
             startPublishProgress()
             playbackState = .playing
+            showProgressIndicator = true
         case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
             stopPublishProgress()
             playbackState = .stopped
             if case .didFinishPlaying = action {
                 progress = 0.0
+                showProgressIndicator = false
             }
         case .didFailWithError:
+            stopPublishProgress()
             playbackState = .error
         }
     }
 
@@ -322,6 +322,7 @@ class RoomTimelineController: RoomTimelineControllerProtocol {
         guard let playerState = mediaPlayerProvider.playerState(for: .timelineItemIdentifier(itemID)) else {
             return
         }
+        await mediaPlayerProvider.detachAllStates(except: playerState)
         await playerState.updateState(progress: progress)
     }
 
@@ -40,10 +40,6 @@ struct VoiceMessageRoomPlaybackView: View {
         }
     }
 
-    var showWaveformCursor: Bool {
-        playerState.playbackState == .playing || isDragging
-    }
-
     var body: some View {
         HStack {
             HStack {
@@ -61,7 +57,7 @@
                 waveformView
                     .waveformInteraction(isDragging: $isDragging,
                                          progress: playerState.progress,
-                                         showCursor: showWaveformCursor,
+                                         showCursor: playerState.showProgressIndicator,
                                          onSeek: onSeek)
             }
             .padding(.leading, 2)

@@ -91,12 +91,15 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
             return .failure(.previewNotAvailable)
         }
 
+        if await !previewAudioPlayerState.isAttached {
+            await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
+        }
+
         if audioPlayer.url == url {
             audioPlayer.play()
             return .success(())
         }
 
-        await previewAudioPlayerState.attachAudioPlayer(audioPlayer)
         let pendingMediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
         audioPlayer.load(mediaSource: pendingMediaSource, using: url, autoplay: true)
         return .success(())

UnitTests/Sources/AudioPlayerStateTests.swift (47 additions, 0 deletions)
@@ -37,6 +37,7 @@ class AudioPlayerStateTests: XCTestCase {
     private func buildAudioPlayerMock() -> AudioPlayerMock {
         let audioPlayerMock = AudioPlayerMock()
         audioPlayerMock.underlyingActions = audioPlayerActions
+        audioPlayerMock.state = .stopped
         audioPlayerMock.currentTime = 0.0
         audioPlayerMock.seekToClosure = { [audioPlayerSeekCallsSubject] progress in
             audioPlayerSeekCallsSubject?.send(progress)
@@ -65,6 +66,7 @@
         XCTAssert(audioPlayerMock.stopCalled)
         XCTAssertFalse(audioPlayerState.isAttached)
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
+        XCTAssertFalse(audioPlayerState.showProgressIndicator)
     }
 
     func testReportError() async throws {
@@ -91,9 +93,19 @@
         }
 
         do {
+            audioPlayerMock.state = .stopped
             await audioPlayerState.updateState(progress: 0.4)
             XCTAssertEqual(audioPlayerState.progress, 0.4)
             XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
+            XCTAssertFalse(audioPlayerState.isPublishingProgress)
         }
+
+        do {
+            audioPlayerMock.state = .playing
+            await audioPlayerState.updateState(progress: 0.4)
+            XCTAssertEqual(audioPlayerState.progress, 0.4)
+            XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
+            XCTAssert(audioPlayerState.isPublishingProgress)
+        }
     }
 
@@ -153,6 +165,7 @@
         XCTAssertEqual(audioPlayerMock.seekToReceivedProgress, 0.4)
         XCTAssertEqual(audioPlayerState.playbackState, .playing)
         XCTAssert(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testHandlingAudioPlayerActionDidPausePlaying() async throws {
@@ -173,6 +186,7 @@
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
         XCTAssertEqual(audioPlayerState.progress, 0.4)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testHandlingAudioPlayerActionsidStopPlaying() async throws {
@@ -193,6 +207,7 @@
         XCTAssertEqual(audioPlayerState.playbackState, .stopped)
         XCTAssertEqual(audioPlayerState.progress, 0.4)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssert(audioPlayerState.showProgressIndicator)
     }
 
     func testAudioPlayerActionsDidFinishPlaying() async throws {
@@ -214,5 +229,37 @@
         // Progress should be reset to 0
         XCTAssertEqual(audioPlayerState.progress, 0.0)
         XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssertFalse(audioPlayerState.showProgressIndicator)
     }
+
+    func testAudioPlayerActionsDidFailed() async throws {
+        audioPlayerState.attachAudioPlayer(audioPlayerMock)
+
+        let deferredPlayingState = deferFulfillment(audioPlayerState.$playbackState) { action in
+            switch action {
+            case .playing:
+                return true
+            default:
+                return false
+            }
+        }
+        audioPlayerActionsSubject.send(.didStartPlaying)
+        try await deferredPlayingState.fulfill()
+        XCTAssertTrue(audioPlayerState.showProgressIndicator)
+
+        let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
+            switch action {
+            case .error:
+                return true
+            default:
+                return false
+            }
+        }
+
+        audioPlayerActionsSubject.send(.didFailWithError(error: AudioPlayerError.genericError))
+        try await deferred.fulfill()
+        XCTAssertEqual(audioPlayerState.playbackState, .error)
+        XCTAssertFalse(audioPlayerState.isPublishingProgress)
+        XCTAssertTrue(audioPlayerState.showProgressIndicator)
+    }
 }

UnitTests/Sources/VoiceMessageRecorderTests.swift (1 addition, 0 deletions)
@@ -43,6 +43,7 @@ class VoiceMessageRecorderTests: XCTestCase {
         audioRecorder.averagePowerForChannelNumberReturnValue = 0
         audioPlayer = AudioPlayerMock()
         audioPlayer.actions = audioPlayerActions
+        audioPlayer.state = .stopped
 
         mediaPlayerProvider = MediaPlayerProviderMock()
         mediaPlayerProvider.playerForClosure = { _ in