style: Run black and isort
jvfe committed Oct 19, 2023
1 parent 0f7ebf8 commit 82d9444
Showing 3 changed files with 5 additions and 4 deletions.
1 change: 0 additions & 1 deletion microview/file_finder.py
@@ -120,7 +120,6 @@ def detect_report_type(report_paths: List[Path], console) -> List[Sample]:
         console.print("\n Could not find any valid reports", style="red")
         raise Exception("Could not find any valid files.")
     else:
-
         kraken_not_in_kaiju = list(
             filter(lambda report: report not in kaiju_reports, kraken_reports)
         )
1 change: 0 additions & 1 deletion microview/parse_taxonomy.py
@@ -46,7 +46,6 @@ def parse_kaiju2table(sample_name: str, df, parsed_stats: Dict) -> None:
     Parses kaiju report
     """
     for row in df.itertuples():
-
         row_dict = {"n_reads": row.reads, "percent": row.percent}
         if row.taxon_name == "unclassified":
             parsed_stats[sample_name].update({"unclassified": row_dict})
7 changes: 5 additions & 2 deletions microview/plotting.py
@@ -1,5 +1,6 @@
-from typing import Dict, Optional
 from pathlib import Path
+from typing import Dict, Optional
+
 from plotly import io
 from plotly.express import bar, colors, line, scatter
 from plotly.graph_objects import Figure
@@ -156,7 +157,9 @@ def generate_taxo_plots(tax_data: Dict, contrast_df=None) -> Dict:
 
     # TODO: Improve this check
     if contrast_df is not None and "group" in contrast_df.columns:
-        contrast_df['sample'] = [str(Path(s).name) for s in contrast_df['sample'].to_list()]
+        contrast_df["sample"] = [
+            str(Path(s).name) for s in contrast_df["sample"].to_list()
+        ]
         merged_taxas_df = merge_with_contrasts(tax_data["common taxas"], contrast_df)
 
         common_taxas = plot_common_taxas(merged_taxas_df, facet_col="group")
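For context, the formatting applied in this commit can also be reproduced programmatically through the two tools' Python APIs. The sketch below is illustrative only: the sample source string is made up, not taken from this repository, and it assumes isort >= 5 (isort.code) and a recent black release (black.format_str, black.Mode) are installed.

# Minimal sketch: run isort and then black on a source string,
# mirroring what this commit does to the repository files.
# Assumes isort >= 5 and a recent black release are installed.
import black
import isort

source = (
    "from typing import Dict, Optional\n"
    "from pathlib import Path\n"
    "def f(xs):\n"
    "\n"
    "    return [str(x) for x in xs]\n"
)

# isort sorts and groups imports (pathlib before typing, as in plotting.py above).
sorted_source = isort.code(source)

# black normalizes whitespace, quotes and line wrapping.
formatted = black.format_str(sorted_source, mode=black.Mode())
print(formatted)

In practice these formatters are usually invoked as command-line tools or pre-commit hooks; the API calls above are simply the closest Python-only equivalent.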
