format with black
Gautzilla committed Oct 30, 2024
1 parent 76ce7ca commit a28932f
Showing 4 changed files with 52 additions and 21 deletions.
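For context: this commit only runs the Black code formatter over the files below. Black rewraps calls that exceed its default 88-character line limit (mostly long logger calls here) without changing behavior. A minimal standalone sketch of the effect, using a hypothetical logger call rather than code from this repository:

    import logging

    logger = logging.getLogger("dataset")

    # Before formatting: a single call longer than Black's default 88-character limit.
    logger.warning("It seems you are on a non-Unix operating system (probably Windows); permissions might be incorrectly set.")

    # After running `black example.py`: the argument is wrapped onto its own line.
    logger.warning(
        "It seems you are on a non-Unix operating system (probably Windows); permissions might be incorrectly set."
    )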
48 changes: 35 additions & 13 deletions src/OSmOSE/Dataset.py
@@ -99,7 +99,9 @@ def __init__(
self.__original_folder = original_folder

if skip_perms:
self.logger.debug("It seems you are on a non-Unix operating system (probably Windows). The build() method will not work as intended and permission might be incorrectly set.")
self.logger.debug(
"It seems you are on a non-Unix operating system (probably Windows). The build() method will not work as intended and permission might be incorrectly set."
)

pd.set_option("display.float_format", lambda x: "%.0f" % x)

@@ -231,7 +233,9 @@ def depth(
def owner_group(self):
"""str: The Unix group able to interact with the dataset."""
if self.__group is None:
self.logger.warning("The OSmOSE group name is not defined. Please specify the group name before trying to build the dataset.")
self.logger.warning(
"The OSmOSE group name is not defined. Please specify the group name before trying to build the dataset."
)
return self.__group

@owner_group.setter
@@ -244,7 +248,9 @@ def owner_group(self, value):
try:
gid = grp.getgrnam(value).gr_gid
except KeyError as e:
self.logger.error(f"The group {value} does not exist on the system. Full error trace: {e}")
self.logger.error(
f"The group {value} does not exist on the system. Full error trace: {e}"
)
raise KeyError(
f"The group {value} does not exist on the system. Full error trace: {e}"
)
@@ -322,7 +328,9 @@ def build(
and pd.read_csv(metadata_path)["is_built"][0]
and not force_upload
):
self.logger.warning("This dataset has already been built. To run the build() method on an already built dataset, you have to use the force_upload parameter.")
self.logger.warning(
"This dataset has already been built. To run the build() method on an already built dataset, you have to use the force_upload parameter."
)
sys.exit()

if self.gps_coordinates is None:
@@ -346,7 +354,9 @@ def build(
try:
os.chown(self.path, -1, gid)
except PermissionError:
self.logger.error(f"You have not the permission to change the owner of the {self.path} folder. This might be because you are trying to rebuild an existing dataset. The group owner has not been changed.")
self.logger.error(
f"You have not the permission to change the owner of the {self.path} folder. This might be because you are trying to rebuild an existing dataset. The group owner has not been changed."
)

# Add the setgid bid to the folder's permissions, in order for subsequent created files to be created by the same user group.
chmod_if_needed(path=self.path, mode=DPDEFAULT)
@@ -417,7 +427,9 @@ def build(
path_raw_audio.joinpath(cur_filename)
)
if ind_dt == 0:
self.logger.warning("We do not accept the sign '-' in our filenames, we transformed them into '_'. In case you have to rebuild your dataset be careful to change your timestamp template accordingly...")
self.logger.warning(
"We do not accept the sign '-' in our filenames, we transformed them into '_'. In case you have to rebuild your dataset be careful to change your timestamp template accordingly..."
)
else:
cur_filename = audio_file.name

@@ -434,7 +446,9 @@ def build(
sf_meta = sf.info(path_raw_audio / cur_filename)

except Exception as e:
self.logger.error(f"error message making status read header False : \n {e}")
self.logger.error(
f"error message making status read header False : \n {e}"
)
# append audio metadata read from header for files with corrupted headers
audio_metadata = pd.concat(
[
@@ -537,12 +551,16 @@ def build(
if (
len(list_tests_level0) - sum(list_tests_level0) > 0
): # if presence of anomalies of level 0
self.logger.warning(f"Your dataset failed {len(list_tests_level0)-sum(list_tests_level0)} anomaly test of level 0 (over {len(list_tests_level0)}); see details below. \n Anomalies of level 0 block dataset uploading as long as they are present. Please correct your anomalies first, and try uploading it again after. \n You can inspect your metadata saved here {path_raw_audio.joinpath('file_metadata.csv')} using the notebook /home/datawork-osmose/osmose-datarmor/notebooks/metadata_analyzer.ipynb.")
self.logger.warning(
f"Your dataset failed {len(list_tests_level0)-sum(list_tests_level0)} anomaly test of level 0 (over {len(list_tests_level0)}); see details below. \n Anomalies of level 0 block dataset uploading as long as they are present. Please correct your anomalies first, and try uploading it again after. \n You can inspect your metadata saved here {path_raw_audio.joinpath('file_metadata.csv')} using the notebook /home/datawork-osmose/osmose-datarmor/notebooks/metadata_analyzer.ipynb."
)

if (
len(list_tests_level1) - sum(list_tests_level1) > 0
): # if also presence of anomalies of level 1
self.logger.warning(f"Your dataset also failed {len(list_tests_level1)-sum(list_tests_level1)} anomaly test of level 1 (over {len(list_tests_level1)}).")
self.logger.warning(
f"Your dataset also failed {len(list_tests_level1)-sum(list_tests_level1)} anomaly test of level 1 (over {len(list_tests_level1)})."
)

with open(resume_test_anomalies) as f:
self.logger.warning(f.read())
@@ -557,7 +575,9 @@ def build(
len(list_tests_level1) - sum(list_tests_level1) > 0
) and not force_upload: # if presence of anomalies of level 1

self.logger.warning(f"Your dataset failed {len(list_tests_level1)-sum(list_tests_level1)} anomaly test of level 1 (over {len(list_tests_level1)}); see details below. \n Anomalies of level 1 block dataset uploading, but anyone can force it by setting the variable `force_upload` to True. \n You can inspect your metadata saved here {path_raw_audio.joinpath('file_metadata.csv')} using the notebook /home/datawork-osmose/osmose-datarmor/notebooks/metadata_analyzer.ipynb.")
self.logger.warning(
f"Your dataset failed {len(list_tests_level1)-sum(list_tests_level1)} anomaly test of level 1 (over {len(list_tests_level1)}); see details below. \n Anomalies of level 1 block dataset uploading, but anyone can force it by setting the variable `force_upload` to True. \n You can inspect your metadata saved here {path_raw_audio.joinpath('file_metadata.csv')} using the notebook /home/datawork-osmose/osmose-datarmor/notebooks/metadata_analyzer.ipynb."
)

with open(resume_test_anomalies) as f:
self.logger.warning(f.read())
@@ -666,10 +686,12 @@ def _write_timestamp_csv_from_audio_files(
def _create_logger(self):
logs_directory = self.__path / "log"
if not logs_directory.exists():
os.mkdir(logs_directory, mode = DPDEFAULT)
os.mkdir(logs_directory, mode=DPDEFAULT)
self.logger = logging.getLogger(f"dataset").getChild(self.__name)
self.file_handler = logging.FileHandler(logs_directory / "logs.log", mode = "w")
self.file_handler.setFormatter(logging.getLogger("dataset").handlers[0].formatter)
self.file_handler = logging.FileHandler(logs_directory / "logs.log", mode="w")
self.file_handler.setFormatter(
logging.getLogger("dataset").handlers[0].formatter
)
self.logger.setLevel(logging.DEBUG)
self.file_handler.setLevel(logging.DEBUG)
self.logger.addHandler(self.file_handler)
2 changes: 1 addition & 1 deletion src/OSmOSE/LoggingContext.py
@@ -1,6 +1,7 @@
from contextlib import contextmanager
import logging


class LoggingContext:
def __init__(self):
self.logger = logging.root
@@ -33,4 +34,3 @@ def log_something():
yield
finally:
self.logger = previous_logger
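The hunk above shows only the head and tail of LoggingContext; for orientation, a minimal sketch of the logger-swapping context-manager pattern it implements (the method name `set_logger` and the body between the fragments shown are assumptions, not the repository's actual code):

    from contextlib import contextmanager
    import logging


    class LoggingContext:
        def __init__(self):
            self.logger = logging.root

        @contextmanager
        def set_logger(self, logger):
            # Temporarily route logging through the given logger,
            # then restore the previous one even if an error occurs.
            previous_logger = self.logger
            self.logger = logger
            try:
                yield
            finally:
                self.logger = previous_logger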

21 changes: 15 additions & 6 deletions src/OSmOSE/Spectrogram.py
@@ -535,7 +535,9 @@ def extract_spectro_params(self):
def check_spectro_size(self):
"""Verify if the parameters will generate a spectrogram that can fit one screen properly"""
if self.nfft > 2048:
self.logger.warning(f"Your spectra contain more than 1024 bin (ie {self.nfft/2}).\nNote that unless you have a 4K screen, unwanted numerical compression might occur when visualizing your spectrograms..")
self.logger.warning(
f"Your spectra contain more than 1024 bin (ie {self.nfft/2}).\nNote that unless you have a 4K screen, unwanted numerical compression might occur when visualizing your spectrograms.."
)

temporal_resolution, frequency_resolution, Nbwin = self.extract_spectro_params()

@@ -544,7 +546,8 @@ def check_spectro_size(self):
)

if Nbwin > 3500:
self.logger.warning(f"Note that unless you have a 4K screen, unwanted numerical compression might occur when visualizing your spectrograms.\nYour resolutions: \n\ttime = {temporal_resolution} s\n\tfrequency = {frequency_resolution} Hz",
self.logger.warning(
f"Note that unless you have a 4K screen, unwanted numerical compression might occur when visualizing your spectrograms.\nYour resolutions: \n\ttime = {temporal_resolution} s\n\tfrequency = {frequency_resolution} Hz",
)

def prepare_paths(self, force_init: bool = False):
@@ -1144,7 +1147,9 @@ def process_file(
return

if audio_file not in os.listdir(self.audio_path):
self.logger.error(f"The file {audio_file} must be in {self.audio_path} in order to be processed.")
self.logger.error(
f"The file {audio_file} must be in {self.audio_path} in order to be processed."
)
raise FileNotFoundError(
f"The file {audio_file} must be in {self.audio_path} in order to be processed."
)
@@ -1243,7 +1248,7 @@ def gen_tiles(
f"- data max : {np.max(data):.3f}\n"
f"- data mean : {np.mean(data):.3f}\n"
f"- data std : {np.std(data):.3f}"
)
)

duration = len(data) / int(sample_rate)

@@ -1735,7 +1740,9 @@ def generate_and_save_LTAS(
ax.tick_params(axis="x", rotation=20)

# Saving spectrogram plot to file
self.logger.debug("Saving", output_file, "\nNumber of welch:", str(log_spectro.shape[1]))
self.logger.debug(
"Saving", output_file, "\nNumber of welch:", str(log_spectro.shape[1])
)
plt.savefig(output_file, bbox_inches="tight", pad_inches=0)
plt.close()

@@ -1940,7 +1947,9 @@ def build_EPD(self, time_resolution: str, sample_rate: int, show_fig: bool = Fal

# save as png figure
output_file = self.path / OSMOSE_PATH.EPD / "EPD.png"
self.logger.debug(f"Saving {output_file}\nNumber of welch: {all_welch.shape[0]}")
self.logger.debug(
f"Saving {output_file}\nNumber of welch: {all_welch.shape[0]}"
)
plt.savefig(output_file, bbox_inches="tight", pad_inches=0)
plt.close()

2 changes: 1 addition & 1 deletion src/OSmOSE/config.py
@@ -31,4 +31,4 @@
FPDEFAULT = 0o664 # Default file permissions
DPDEFAULT = stat.S_ISGID | 0o775 # Default directory permissions

global_logging_context = LoggingContext()
global_logging_context = LoggingContext()
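As a side note on the constants visible in this hunk: DPDEFAULT combines the setgid bit with 0o775 so that files created inside a dataset directory inherit the directory's group. A small standalone illustration of applying such a mode (a sketch, not the repository's chmod_if_needed helper):

    import stat
    from pathlib import Path

    FPDEFAULT = 0o664                 # default file permissions: rw-rw-r--
    DPDEFAULT = stat.S_ISGID | 0o775  # default directory permissions: rwxrwxr-x + setgid

    folder = Path("my_dataset")
    folder.mkdir(exist_ok=True)
    folder.chmod(DPDEFAULT)  # new files created inside keep the folder's group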
