diff --git a/nxdrive/engine/processor.py b/nxdrive/engine/processor.py
index ddf5d8e31a..552c59bb18 100644
--- a/nxdrive/engine/processor.py
+++ b/nxdrive/engine/processor.py
@@ -291,8 +291,7 @@ def _get_next_doc_pair(self, item: DocPair) -> Optional[DocPair]:
         try:
             return self.dao.acquire_state(self.thread_id, item.id)
         except sqlite3.OperationalError:
-            state = self.dao.get_state_from_id(item.id)
-            if state:
+            if state := self.dao.get_state_from_id(item.id):
                 if (
                     WINDOWS
                     and state.pair_state == "locally_moved"
@@ -460,9 +459,7 @@ def _execute(self) -> None:
                     log.info("The document does not exist anymore locally")
                     self.dao.remove_state(doc_pair)
                 elif error in LONG_FILE_ERRORS:
-                    self.dao.remove_filter(
-                        doc_pair.remote_parent_path + "/" + doc_pair.remote_ref
-                    )
+                    self.dao.remove_filter(f"{doc_pair.remote_parent_path}/{doc_pair.remote_ref}")
                     self.engine.longPathError.emit(doc_pair)
                 elif hasattr(exc, "trash_issue"):
                     """
@@ -475,18 +472,19 @@ def _execute(self) -> None:
                 else:
                     self._handle_pair_handler_exception(doc_pair, handler_name, exc)
             except RuntimeError as exc:
-                if "but the refreshed credentials are still expired" in str(exc):
-                    log.warning(
-                        "AWS credentials were refreshed, but the refreshed credentials are still expired"
-                    )
-                    log.info("Reinitializing the upload")
-                    self.dao.remove_transfer(
-                        "upload",
-                        doc_pair=doc_pair.id,
-                        is_direct_transfer=doc_pair.local_state == "direct",
-                    )
-                else:
+                if "but the refreshed credentials are still expired" not in str(
+                    exc
+                ):
                     raise
+                log.warning(
+                    "AWS credentials were refreshed, but the refreshed credentials are still expired"
+                )
+                log.info("Reinitializing the upload")
+                self.dao.remove_transfer(
+                    "upload",
+                    doc_pair=doc_pair.id,
+                    is_direct_transfer=doc_pair.local_state == "direct",
+                )
             except Exception as exc:
                 # Workaround to forward unhandled exceptions to sys.excepthook between all Qthreads
                 sys.excepthook(*sys.exc_info())
@@ -563,12 +561,7 @@ def _synchronize_direct_transfer(self, doc_pair: DocPair, /) -> None:
             log.debug(f"The session is paused, skipping ")
             return
 
-        if WINDOWS:
-            path = doc_pair.local_path
-        else:
-            # The path retrieved from the database will have its starting slash trimmed, restore it
-            path = Path(f"/{doc_pair.local_path}")
-
+        path = doc_pair.local_path if WINDOWS else Path(f"/{doc_pair.local_path}")
         if not path.exists():
             self.engine.directTranferError.emit(path)
             if session:
@@ -616,9 +609,7 @@ def _direct_transfer_end(
         # Clean-up
         self.dao.remove_state(doc_pair, recursive=recursive)
 
-        # Update session then handle the status
-        session = self.dao.get_session(doc_pair.session)
-        if session:
+        if session := self.dao.get_session(doc_pair.session):
             if (
                 not cancelled_transfer
                 and session.status is not TransferStatus.CANCELLED
@@ -657,8 +648,7 @@ def _synchronize_if_not_remotely_dirty(
             remote_info.name != doc_pair.local_name
             or remote_info.digest != doc_pair.local_digest
         ):
-            modified = self.dao.get_state_from_local(doc_pair.local_path)
-            if modified:
+            if modified := self.dao.get_state_from_local(doc_pair.local_path):
                 log.info(
                     f"Forcing remotely_modified for pair={modified!r} "
                     f"with info={remote_info!r}"
diff --git a/nxdrive/tracing.py b/nxdrive/tracing.py
index 2d2018512e..9164351a01 100644
--- a/nxdrive/tracing.py
+++ b/nxdrive/tracing.py
@@ -42,11 +42,7 @@ def should_ignore(event: _Event) -> bool:
 
 def before_send(event: _Event, _: _Hint, /) -> Any:
     """Alter an event before sending to the Sentry server."""
-    if should_ignore(event):
-        # The event will not be sent if None is returned
-        return None
-
-    return event
+    return None if should_ignore(event) else event
 
 
 def setup_sentry() -> None: