Commit

ruff linting
KristinaGomoryova committed Jul 3, 2024
1 parent 5a715d9 commit 6c7e67f
Showing 4 changed files with 116 additions and 177 deletions.
25 changes: 13 additions & 12 deletions docs/conf.py
@@ -17,9 +17,9 @@

# -- Project information -----------------------------------------------------

-project = u"rcx_tk"
-copyright = u"2024, RECETOX, Masaryk University"
-author = u"Zargham Ahmad"
+project = "rcx_tk"
+copyright = "2024, RECETOX, Masaryk University"
+author = "Zargham Ahmad"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -61,7 +61,7 @@

# -- Use autoapi.extension to run sphinx-apidoc -------

-autoapi_dirs = ['../src/rcx_tk']
+autoapi_dirs = ["../src/rcx_tk"]

# -- Options for HTML output ----------------------------------------------

@@ -78,11 +78,12 @@

# -- Options for Intersphinx

-intersphinx_mapping = {'python': ('https://docs.python.org/3', None),
-                       # Commonly used libraries, uncomment when used in package
-                       # 'numpy': ('http://docs.scipy.org/doc/numpy/', None),
-                       # 'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
-                       # 'scikit-learn': ('https://scikit-learn.org/stable/', None),
-                       # 'matplotlib': ('https://matplotlib.org/stable/', None),
-                       # 'pandas': ('http://pandas.pydata.org/docs/', None),
-                       }
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    # Commonly used libraries, uncomment when used in package
+    # 'numpy': ('http://docs.scipy.org/doc/numpy/', None),
+    # 'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
+    # 'scikit-learn': ('https://scikit-learn.org/stable/', None),
+    # 'matplotlib': ('https://matplotlib.org/stable/', None),
+    # 'pandas': ('http://pandas.pydata.org/docs/', None),
+}
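
Aside (not part of this commit): once one of the commented mappings above is uncommented, intersphinx resolves cross-references in docstrings to the external documentation. A minimal, hypothetical sketch — the function and docstring below are illustrative only, not code from this repository:

# Illustrative only; assumes the 'pandas' intersphinx entry above has been enabled.
# Sphinx then links :class:`pandas.DataFrame` in docstrings to the pandas docs.
def example(file_path: str):
    """Read a metadata file.

    Returns:
        :class:`pandas.DataFrame`: Dataframe containing the metadata.
    """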
1 change: 1 addition & 0 deletions src/rcx_tk/__init__.py
@@ -1,4 +1,5 @@
"""Documentation about rcx_tk."""

import logging

logging.getLogger(__name__).addHandler(logging.NullHandler())
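
A short note on the NullHandler pattern above. The snippet below is hedged, application-side example code, not part of this commit: the library itself emits no log output, and a consuming application opts in by configuring logging on its side.

# Example application code (assumes rcx_tk is installed); not part of this commit.
import logging

import rcx_tk  # importing the package attaches a NullHandler to the "rcx_tk" logger

logging.basicConfig(level=logging.INFO)               # application-wide logging config
logging.getLogger("rcx_tk").setLevel(logging.DEBUG)   # opt in to library debug logs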
37 changes: 17 additions & 20 deletions src/rcx_tk/process_metadata_file.py
@@ -3,7 +3,7 @@


def read_file(file_path: str) -> pd.DataFrame:
"""Imports the metadata file to pandas dataframe.
"""Imports the metadata file to pandas dataframe.
Args:
file_path (str): The path to the input data.
@@ -14,17 +14,17 @@ def read_file(file_path: str) -> pd.DataFrame:
    Returns:
        pd.DataFrame: Dataframe containing the metadata.
    """

    file_extension = os.path.splitext(file_path)[1].lower()
-    if file_extension == '.csv':
-        return pd.read_csv(file_path, encoding='UTF-8')
-    elif file_extension in ['.xls', '.xlsx']:
+    if file_extension == ".csv":
+        return pd.read_csv(file_path, encoding="UTF-8")
+    elif file_extension in [".xls", ".xlsx"]:
        return pd.read_excel(file_path)
-    elif file_extension in ['.tsv', '.txt']:
-        return pd.read_csv(file_path, sep='\t')
+    elif file_extension in [".tsv", ".txt"]:
+        return pd.read_csv(file_path, sep="\t")
    else:
        raise ValueError("Unsupported file format. Please provide a CSV, Excel, or TSV file.")


def save_dataframe_as_tsv(df: pd.DataFrame, file_path: str) -> None:
"""Saves the dataframe as a TSV file.
Expand All @@ -37,8 +37,8 @@ def save_dataframe_as_tsv(df: pd.DataFrame, file_path: str) -> None:
"""
if os.path.splitext(file_path)[1] != ".tsv":
raise ValueError("Unsupported file format. Please point to a TSV file.")
df.to_csv(file_path, sep='\t', index=False)
df.to_csv(file_path, sep="\t", index=False)


def process_metadata_file(file_path: str, out_path: str) -> None:
"""Processes a metadata file, keeping and renaming specific columns.
Expand All @@ -48,32 +48,29 @@ def process_metadata_file(file_path: str, out_path: str) -> None:
out_path (str): A path where processed metadata dataframe is exported.
"""
columns_to_keep = {
'File name': 'sampleName',
'Type': 'sampleType',
'Class ID': 'class',
'Batch': 'batch',
'Analytical order': 'injectionOrder'
"File name": "sampleName",
"Type": "sampleType",
"Class ID": "class",
"Batch": "batch",
"Analytical order": "injectionOrder",
}

df = read_file(file_path)
df = df[list(columns_to_keep.keys())].rename(columns=columns_to_keep)
df['sampleName'] = df['sampleName'].str.replace(' ', '_')
df["sampleName"] = df["sampleName"].str.replace(" ", "_")
save_dataframe_as_tsv(df, out_path)
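
A hedged end-to-end sketch of the function above; the paths and input file are made up, and the input must contain the five source columns listed in columns_to_keep:

# Hypothetical paths; behaviour follows the function shown in the hunk above.
from rcx_tk.process_metadata_file import process_metadata_file

process_metadata_file("batch01_metadata.xlsx", "batch01_metadata.tsv")
# The resulting TSV keeps only: sampleName, sampleType, class, batch, injectionOrder,
# with spaces in sampleName replaced by underscores.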


def process_alkane_ri_file(file_path: str, out_path: str) -> None:
"""Processes an alkane file, keeping and renaming specific columns.
Args:
file_path (str): A path to the alkane file.
out_path (str): A path where processed alkane file is exported.
"""
columns_to_keep = {
'Carbon number': 'carbon_number',
'RT (min)': 'rt'
}
columns_to_keep = {"Carbon number": "carbon_number", "RT (min)": "rt"}

df = read_file(file_path)
df.columns = df.columns.str.strip()
df = df.rename(columns=columns_to_keep)
save_dataframe_as_tsv(df, out_path)
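
The df.columns.str.strip() call above matters because exported alkane tables can carry stray whitespace around their headers, which would otherwise keep the rename mapping from matching. A small self-contained sketch with illustrative column names:

# Illustrative data only; stripping header whitespace lets the rename mapping match.
import pandas as pd

df = pd.DataFrame({" Carbon number": [10, 11], "RT (min) ": [5.20, 6.05]})
df.columns = df.columns.str.strip()
df = df.rename(columns={"Carbon number": "carbon_number", "RT (min)": "rt"})
print(list(df.columns))  # ['carbon_number', 'rt']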

