Skip to content

Commit

Permalink
Merge branch 'main' into colored-nodes
Browse files Browse the repository at this point in the history
  • Loading branch information
antonneubauer committed Nov 25, 2023
2 parents d56b4eb + ce23960 commit 60d739b
Show file tree
Hide file tree
Showing 58 changed files with 1,421 additions and 304 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/issue.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: Issue Template
about: Standard Issue Template
title:
labels:
project: PROTzilla 2
project: "PROTzilla"

---

Expand Down
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/todo_issue.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: TODO
about: TODO Issue Template
title: '# TODO <issue_nr>'
labels: todo
project: 'PROTzilla2'
project: 'PROTzilla'

---

Expand Down
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ coverage.xml
# Django stuff:
*.log
local_settings.py
db.sqlite3
#db.sqlite3 #tracked to remove migrations warning. The database is not actually in use
db.sqlite3-journal

# Flask stuff:
Expand Down
Empty file added db.sqlite3
Empty file.
20 changes: 20 additions & 0 deletions docs/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation.
#
# Fix: recipe lines below MUST begin with a hard TAB character — in the
# pasted version the tabs were lost, which makes GNU Make fail with
# "*** missing separator". Restored here.

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS  ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR   = source
BUILDDIR    = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
8 changes: 8 additions & 0 deletions docs/build_docs.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
## Build docs with Sphinx ##
- Docs are built with sphinx-autoapi
- after installing the packages in requirements.txt, all necessary dependencies for building the docs should be installed (sphinx==7.2.6, sphinx-autoapi==3.0.0, requests==2.31.0)
- to build the docs open the docs\ folder in a terminal and run "make html" to create the html documentation
- in case the error "Could not import extension sphinx.builders.linkcheck" occurs, try reinstalling python requests (pip install requests==2.31.0)
- warnings might occur, they usually do not prevent the successful build of the docs
- To open the docs open the index.html in the docs\build\html folder
- when adding docstrings to the code, they should follow the correct syntax (https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html) in order to be formatted correctly in the generated documentation
35 changes: 35 additions & 0 deletions docs/make.bat
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
@ECHO OFF

REM Windows command-file equivalent of the Sphinx docs/Makefile:
REM forwards the first argument to sphinx-build's "make mode" (-M).

REM Run from this script's own directory regardless of the caller's cwd.
pushd %~dp0

REM Command file for Sphinx documentation

REM Default to the sphinx-build found on PATH unless the caller pre-set
REM SPHINXBUILD in the environment.
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build

REM Probe that sphinx-build is runnable; errorlevel 9009 is the Windows
REM "command not found" code.
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)

REM No build target given: fall through to Sphinx's help output.
if "%1" == "" goto help

REM Delegate the requested target (html, latex, ...) to sphinx-build -M.
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
REM Restore the caller's original working directory.
popd
27 changes: 27 additions & 0 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
"""Sphinx configuration for the PROTzilla documentation build.

For the full list of built-in configuration values, see:
https://www.sphinx-doc.org/en/master/usage/configuration.html
"""

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "PROTzilla"
copyright = "2023, BP22/23"
author = "BP22/23"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
# napoleon parses Google/NumPy-style docstrings; autoapi.extension generates
# API pages directly from the source tree two directories up.
extensions = [
    "sphinx.ext.napoleon",
    "autoapi.extension",
]
autoapi_dirs = ["../../"]

templates_path = ["_templates"]
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = "alabaster"
html_static_path = ["_static"]
20 changes: 20 additions & 0 deletions docs/source/index.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
.. PROTzilla documentation master file, created by
   sphinx-quickstart on Wed Sep 27 18:24:09 2023.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to PROTzilla's documentation!
=====================================

.. toctree::
:maxdepth: 2
:caption: Contents:



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
32 changes: 22 additions & 10 deletions protzilla/constants/location_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,9 @@
)
from ..importing import metadata_import, ms_data_import, peptide_import

"""
In this data structure, a method is associated with a location. The location is
determined by the section, step, and method keys found in the workflow_meta
file that correspond to the method.
"""
# In this data structure, a method is associated with a location. The location is
# determined by the section, step, and method keys found in the workflow_meta
# file that correspond to the method.
method_map = {
(
"importing",
Expand All @@ -41,6 +39,11 @@
"metadata_import",
"metadata_import_method",
): metadata_import.metadata_import_method,
(
"importing",
"metadata_import",
"metadata_column_assignment",
): metadata_import.metadata_column_assignment,
("importing", "peptide_import", "peptide_import"): peptide_import.peptide_import,
(
"data_preprocessing",
Expand Down Expand Up @@ -127,6 +130,11 @@
"imputation",
"min_value_per_dataset",
): imputation.by_min_per_dataset,
(
"data_preprocessing",
"imputation",
"normal_distribution_sampling",
): imputation.by_normal_distribution_sampling,
(
"data_preprocessing",
"filter_peptides",
Expand Down Expand Up @@ -237,11 +245,10 @@
# reversed mapping of method callable and location
location_map = {v: k for k, v in method_map.items()}

"""
In this data structure, a plot for a given method is associated with a
location. The location is determined by the section, step, and method keys
found in the workflow_meta file that correspond to the method.
"""

# In this data structure, a plot for a given method is associated with a
# location. The location is determined by the section, step, and method keys
# found in the workflow_meta file that correspond to the method.
plot_map = {
(
"data_preprocessing",
Expand Down Expand Up @@ -313,6 +320,11 @@
"imputation",
"min_value_per_dataset",
): imputation.by_min_per_dataset_plot,
(
"data_preprocessing",
"imputation",
"normal_distribution_sampling",
): imputation.by_normal_distribution_sampling_plot,
(
"data_preprocessing",
"outlier_detection",
Expand Down
107 changes: 98 additions & 9 deletions protzilla/constants/workflow_meta.json
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,32 @@
"default": "Columns (samples in rows, features in columns)"
}
}
},
"metadata_column_assignment": {
"name": "Metadata Column Assignment",
"description": "Assign columns to metadata categories, repeatable for each category",
"parameters": {
"metadata_df": {
"type": "empty",
"name": "metadata_df",
"default": null
},
"metadata_required_column": {
"name": "Missing, but required Metadata Columns",
"fill": "metadata_required_columns",
"type": "categorical",
"categories": [],
"default": null
},
"metadata_unknown_column": {
"name": "Existing, but unknown Metadata Columns",
"fill": "metadata_unknown_columns",
"type": "categorical",
"categories": [],
"default": null
}

}
}
},
"peptide_import": {
Expand Down Expand Up @@ -677,6 +703,69 @@
}
}
]
},
"normal_distribution_sampling": {
"name": "Normal Distribution Sampling",
"description": "Imputation methods include normal distribution sampling per Protein or over Dataset",
"parameters": {
"strategy": {
"name": "Strategy:",
"type": "categorical",
"categories": [
"perProtein",
"perDataset"
],
"default": "perProtein"
},
"down_shift": {
"name": "Downshift:",
"type": "numeric",
"min": -10,
"max": 10,
"default": -1
},
"scaling_factor": {
"name": "Scaling Factor:",
"type": "numeric",
"min": 0,
"max": 1,
"default": 1
}
},
"graphs": [
{
"graph_type": {
"name": "Graph type:",
"type": "categorical",
"categories": [
"Boxplot",
"Histogram"
],
"default": "Boxplot"
},
"group_by": {
"name": "Group by:",
"type": "categorical",
"categories": [
"None",
"Sample",
"Protein ID"
],
"default": "None"
}
},
{
"graph_type_quantities": {
"name": "Graph type Imputed Values:",
"type": "categorical",
"categories": [
"Bar chart",
"Pie chart"
],
"default": "Pie chart"
}
}
]
}
},
"filter_peptides": {
Expand Down Expand Up @@ -744,7 +833,7 @@
"grouping": {
"name": "Grouping:",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"selected_groups"
],
Expand Down Expand Up @@ -810,7 +899,7 @@
"grouping": {
"name": "Grouping",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"group1",
"group2"
Expand Down Expand Up @@ -876,7 +965,7 @@
"grouping": {
"name": "Grouping",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"group1",
"group2"
Expand Down Expand Up @@ -1109,7 +1198,7 @@
"labels_column": {
"name": "Choose labels column from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"positive_label"
],
Expand Down Expand Up @@ -1245,7 +1334,7 @@
"labels_column": {
"name": "Choose labels column from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"positive_label"
],
Expand Down Expand Up @@ -1381,7 +1470,7 @@
"labels_column": {
"name": "Choose labels column from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"positive_label"
],
Expand Down Expand Up @@ -1502,7 +1591,7 @@
"labels_column": {
"name": "Choose labels column from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"positive_label"
],
Expand Down Expand Up @@ -1708,7 +1797,7 @@
"labels_column": {
"name": "Choose labels column from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"categories": [],
"default": null
},
Expand Down Expand Up @@ -2457,7 +2546,7 @@
"grouping": {
"name": "Grouping from metadata",
"type": "categorical",
"fill": "metadata_columns",
"fill": "metadata_non_sample_columns",
"fill_dynamic": [
"group1",
"group2"
Expand Down
Loading

0 comments on commit 60d739b

Please sign in to comment.