diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..f611e879 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +node_modules +.dockerignore +**/.DS_Store +**/node_modules \ No newline at end of file diff --git a/.gitignore b/.gitignore index 00d61681..6cb42dbc 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # .* +!.dockerignore # Selected file. *.iml diff --git a/administration/Dockerfile b/administration/Dockerfile index 8816e895..cab3be22 100644 --- a/administration/Dockerfile +++ b/administration/Dockerfile @@ -2,7 +2,7 @@ # java-tools # -FROM debian:bookworm-20211115 as java-tools +FROM debian:bookworm-20231009 as java-tools RUN apt-get update \ && apt-get -y --no-install-recommends install openjdk-17-jdk @@ -17,17 +17,17 @@ RUN chmod +x ./gradlew && ./gradlew installDist \ # administration # -FROM debian:bookworm-20211115 +FROM debian:bookworm-20231009 ARG UID=5988 ARG GID=5988 RUN apt-get update \ - && apt-get -y --no-install-recommends install \ - wget curl \ - python3 python3-pip python3-venv \ - openjdk-17-jre-headless \ - libgomp1 vim + && apt-get -y --no-install-recommends install \ + wget curl \ + python3 python3-pip python3-venv \ + openjdk-17-jre-headless \ + libgomp1 vim # https://github.com/python/cpython/issues/102134 ENV VIRTUAL_ENV=/opt/venv diff --git a/docker-compose-prankweb.yml b/docker-compose-prankweb.yml index 9afad214..c7c8d963 100644 --- a/docker-compose-prankweb.yml +++ b/docker-compose-prankweb.yml @@ -46,7 +46,9 @@ services: GID: ${GID} environment: CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" - PRANKWEB_DATA: "/data/prankweb" + PRANKWEB_DATA_PREDICTIONS: "/data/prankweb/predictions/" + PRANKWEB_DATA_DOCKING: "/data/prankweb/docking/" restart: unless-stopped volumes: - - predictions:/data/prankweb + - predictions:/data/prankweb/predictions + - docking:/data/prankweb/docking @@ -60,17 +63,35 @@ services: args: UID: ${UID} GID: ${GID} - command: ["celery", "--app=celery_p2rank", "worker", "--concurrency=4",
"--hostname=executor-p2rank"] + command: ["celery", "--app=celery_p2rank", "worker", "--queues=p2rank", "--concurrency=4", "--hostname=executor-p2rank"] + depends_on: + rabbitmq: + condition: service_healthy environment: CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" LOCK_DIRECTORY: "/data/prankweb/lock" restart: unless-stopped volumes: - conservation:/data/conservation - - predictions:/data/prankweb + - predictions:/data/prankweb/predictions + - docking:/data/prankweb/docking + executor-docking: + build: + context: ./ + dockerfile: ./executor-docking/Dockerfile + args: + UID: ${UID} + GID: ${GID} + command: ["celery", "--app=celery_docking", "worker", "--queues=docking", "--concurrency=4", "--hostname=executor-docking"] + environment: + CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" + restart: unless-stopped + volumes: + - docking:/data/prankweb/docking + - predictions:/data/prankweb/predictions depends_on: rabbitmq: - condition: service_healthy + condition: service_healthy monitor: build: context: ./ @@ -101,3 +120,6 @@ volumes: services: external: True name: prankweb_services + docking: + external: True + name: prankweb_docking \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index bcc7ee8d..045624fe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -31,7 +31,7 @@ services: build: https://github.com/mher/flower.git command: "celery flower --url_prefix=service/flower" environment: - CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" + CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER:-user}:${RABBITMQ_DEFAULT_PASS:-1234}@rabbitmq:5672" depends_on: rabbitmq: condition: service_healthy @@ -46,10 +46,12 @@ services: rabbitmq: condition: service_healthy environment: - CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" - PRANKWEB_DATA: "/data/prankweb" + 
CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER:-user}:${RABBITMQ_DEFAULT_PASS:-1234}@rabbitmq:5672" + PRANKWEB_DATA_PREDICTIONS: "/data/prankweb/predictions/" + PRANKWEB_DATA_DOCKING: "/data/prankweb/docking/" volumes: - - predictions:/data/prankweb + - predictions:/data/prankweb/predictions + - docking:/data/prankweb/docking executor-p2rank: build: context: ./ @@ -57,16 +59,34 @@ services: args: UID: ${UID:-5988} GID: ${GID:-5988} - command: ["celery", "--app=celery_p2rank", "worker", "--concurrency=4", "--hostname=executor-p2rank"] + command: ["celery", "--app=celery_p2rank", "worker", "--queues=p2rank", "--concurrency=4", "--hostname=executor-p2rank"] depends_on: rabbitmq: condition: service_healthy environment: - CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq:5672" + CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER:-user}:${RABBITMQ_DEFAULT_PASS:-1234}@rabbitmq:5672" LOCK_DIRECTORY: "/data/prankweb/lock" volumes: - conservation:/data/conservation - - predictions:/data/prankweb + - predictions:/data/prankweb/predictions + - docking:/data/prankweb/docking + executor-docking: + build: + context: ./ + dockerfile: ./executor-docking/Dockerfile + args: + UID: ${UID:-5988} + GID: ${GID:-5988} + command: ["celery", "--app=celery_docking", "worker", "--queues=docking", "--concurrency=4", "--hostname=executor-docking"] + environment: + CELERY_BROKER_URL: "amqp://${RABBITMQ_DEFAULT_USER:-user}:${RABBITMQ_DEFAULT_PASS:-1234}@rabbitmq:5672" + restart: unless-stopped + volumes: + - docking:/data/prankweb/docking + - predictions:/data/prankweb/predictions + depends_on: + rabbitmq: + condition: service_healthy prometheus: build: context: ./ @@ -92,3 +112,6 @@ volumes: services: external: True name: prankweb_services + docking: + external: True + name: prankweb_docking \ No newline at end of file diff --git a/documentation/prankweb.open-api.yaml b/documentation/prankweb.open-api.yaml index 0278f5cd..10411608 100644 --- 
a/documentation/prankweb.open-api.yaml +++ b/documentation/prankweb.open-api.yaml @@ -1,14 +1,14 @@ -openapi: 3.0.0 +openapi: 3.0.2 info: title: Prankweb API - version: 1.0.0 + version: 1.0.1 servers: - - url: https://prankweb.cz/api/v2/predictions/ + - url: https://prankweb.cz/api/v2/ paths: - /{database}/{task}/: + /predictions/{database}/{task}/: get: parameters: - in: path @@ -28,7 +28,7 @@ paths: application/json: schema: $ref: '#/components/schemas/Task' - /{database}/{task}/log: + /predictions/{database}/{task}/log: get: parameters: - in: path @@ -48,7 +48,7 @@ paths: text/plain: schema: type: string - /{database}/{task}/public/prediction.json: + /predictions/{database}/{task}/public/prediction.json: get: parameters: - in: path @@ -68,6 +68,53 @@ paths: application/json: schema: $ref: '#/components/schemas/Prediction' + /docking/{database}/{task}/tasks: + get: + parameters: + - in: path + name: database + required: true + schema: + $ref: '#/components/schemas/DatabaseId' + - in: path + name: task + required: true + schema: + $ref: '#/components/schemas/TaskId' + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/DockingTaskList' + /docking/{database}/{task}/public/result.json: + post: + parameters: + - in: path + name: database + required: true + schema: + $ref: '#/components/schemas/DatabaseId' + - in: path + name: task + required: true + schema: + $ref: '#/components/schemas/TaskId' + requestBody: + description: The request needs to contain the hash and the pocket number. 
+ required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DockingRequest' + responses: + '200': + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/DockingResponse' components: schemas: @@ -148,3 +195,45 @@ components: type: number end: type: number + DockingTaskList: + type: object + properties: + identifier: + type: string + tasks: + type: array + items: + type: object + properties: + id: + type: string + created: + type: string + format: date-time + lastChange: + type: string + format: date-time + status: + type: string + enum: ['queued', 'running', 'failed', 'successful'] + initialData: + type: object + properties: + hash: + type: string + pocket: + type: string + DockingRequest: + type: object + properties: + hash: + type: string + pocket: + type: number + DockingResponse: + type: array + items: + type: object + properties: + url: + type: string \ No newline at end of file diff --git a/executor-docking/Dockerfile b/executor-docking/Dockerfile new file mode 100644 index 00000000..be6f10ec --- /dev/null +++ b/executor-docking/Dockerfile @@ -0,0 +1,103 @@ +# +# executor-docking +# + +FROM ubuntu:23.10 AS base + +# to prevent apt-get install from asking questions +ARG DEBIAN_FRONTEND=noninteractive + +ARG CELERY_BROKER_URL="amqp://user-develop:develop@localhost:5672" + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + curl \ + wget \ + python3 \ + python3-pip \ + python3-venv \ + openbabel \ + libopenbabel-dev nano \ + software-properties-common \ + build-essential \ + libssl-dev \ + zlib1g-dev \ + libncurses5-dev \ + libncursesw5-dev \ + libreadline-dev \ + libsqlite3-dev \ + libgdbm-dev \ + libdb5.3-dev \ + libbz2-dev \ + libexpat1-dev \ + liblzma-dev \ + tk-dev \ + libffi-dev + +ARG UID=5988 +ARG GID=5988 + +ENV VIRTUAL_ENV=/opt/venv +RUN python3 -m venv $VIRTUAL_ENV +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +RUN groupadd --gid ${GID} user \ + && useradd --uid ${UID} 
--gid ${GID} user + +# Since WORKDIR created with current user (root) we precreate the folders. +RUN mkdir /data \ + && mkdir /data/prankweb && chown user:user /data/prankweb \ + && mkdir /opt/executor-docking && chown user:user /opt/executor-docking + +# prankweb executor-docking +WORKDIR /opt/executor-docking + +RUN wget https://www.python.org/ftp/python/2.7.18/Python-2.7.18.tgz && \ + tar xzf Python-2.7.18.tgz && \ + rm Python-2.7.18.tgz && \ + cd Python-2.7.18 && \ + ./configure --enable-optimizations && \ + make altinstall + +RUN curl https://bootstrap.pypa.io/pip/2.7/get-pip.py -o get-pip.py && \ + python2.7 get-pip.py + +RUN python2.7 -m pip install numpy + +RUN wget -q https://vina.scripps.edu/wp-content/uploads/sites/55/2020/12/autodock_vina_1_1_2_linux_x86.tgz && \ + tar -xzvf autodock_vina_1_1_2_linux_x86.tgz && \ + rm autodock_vina_1_1_2_linux_x86.tgz && \ + mv autodock_vina_1_1_2_linux_x86 /opt/vina && \ + ln -s /opt/vina/bin/vina /usr/local/bin/vina + +RUN curl -L -o mgltools_x86_64Linux2_1.5.7p1.tar.gz https://ccsb.scripps.edu/mgltools/download/491/ && \ + tar -xzf mgltools_x86_64Linux2_1.5.7p1.tar.gz && \ + rm mgltools_x86_64Linux2_1.5.7p1.tar.gz && \ + mv mgltools_x86_64Linux2_1.5.7 /opt/mgltools_x86_64Linux2_1.5.7 && \ + tar -xzf /opt/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs.tar.gz -C /opt/mgltools_x86_64Linux2_1.5.7/ && \ + rm /opt/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs.tar.gz + +ENV PATH="/opt/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs/AutoDockTools/Utilities24/:${PATH}" +ENV PYTHONPATH="/opt/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs/:${PYTHONPATH}" + +# a trick to work around a bug in MolKit MMCIF parser (MolKit/__init__.py calls MMCIFParser with an argument modelsAs=True, which is not supported by the parser) +RUN sed -i 's/^ parser = MMCIFParser(filename, modelsAs=modelsAs)$/ parser = MMCIFParser(filename)/' /opt/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs/MolKit/__init__.py + +# stage 2 - install docking scripts +FROM base AS final + +# 
install dodo (docking in docker) +# https://github.com/kiarka7/DODO +RUN wget -q https://raw.githubusercontent.com/kiarka7/DODO/1a9001a2996325f78a656c14fcf1503b8bfc0674/run_docking.py -O /opt/executor-docking/run_docking.py + +COPY --chown=user:user ./executor-docking/requirements.txt ./ +RUN pip3 install -r requirements.txt + +COPY --chown=user:user ./executor-docking/ ./ +RUN chmod a+x ./run_task.py + +# +# environment WORKDIR /opt/executor-docking + +USER ${UID}:${GID} diff --git a/executor-docking/celery_docking.py b/executor-docking/celery_docking.py new file mode 100644 index 00000000..8f75c4b8 --- /dev/null +++ b/executor-docking/celery_docking.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +import os +import celery.signals +import run_task + +prankweb = celery.Celery("prankweb") + +if "CELERY_BROKER_URL" in os.environ: + prankweb.conf.update( + { + "broker_url": os.environ["CELERY_BROKER_URL"], + "broker_connection_retry_on_startup": True, + } + ) +elif "CELERY_BROKER_PATH" in os.environ: + folder = os.environ["CELERY_BROKER_PATH"] + prankweb.conf.update( + { + "broker_url": "filesystem://", + "broker_transport_options": { + "data_folder_in": folder + "/queue/", + "data_folder_out": folder + "/queue/", + "data_folder_processed": folder + "/processed/", + }, + "broker_connection_retry_on_startup": True, + } + ) + + +# This will disable logging for Celery (including prints to stdout). +@celery.signals.setup_logging.connect +def setup_celery_logging(**kwargs): + # We do nothing here to disable logging. + ...
+ + +# https://github.com/celery/celery/issues/2509 +prankweb.log.setup() + + +@prankweb.task(name="docking") +def celery_run_docking(directory: str, taskId): + if os.path.isdir(directory): + run_task.execute_directory_task(directory, taskId) + else: + print(f"Given directory does not exist {directory}") diff --git a/executor-docking/requirements.txt b/executor-docking/requirements.txt new file mode 100644 index 00000000..f7487bed --- /dev/null +++ b/executor-docking/requirements.txt @@ -0,0 +1,4 @@ +requests==2.31.0 +eventlet==0.33.3 +celery==5.3.4 +biopython==1.82 diff --git a/executor-docking/run_task.py b/executor-docking/run_task.py new file mode 100644 index 00000000..76004e21 --- /dev/null +++ b/executor-docking/run_task.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +# +# Run a sample task. +# +import os +import sys +import datetime +import enum +import json +import time +import glob +import gzip +import shutil + +from run_docking import run_docking + +class Status(enum.Enum): + """ + A class to represent a task status. + Notice that those values are written to the info.json file that is used by the frontend, + so any changes here should be reflected in the frontend as well. + """ + QUEUED = "queued" + RUNNING = "running" + FAILED = "failed" + SUCCESSFUL = "successful" + +def _load_json(path: str): + """ + Method to load a json file from a given path. + """ + with open(path, encoding="utf-8") as stream: + return json.load(stream) + +def _save_status_file(path: str, status: any, taskId: int): + """ + Method to save the status file. It will update the lastChange field with the current time. + """ + now = datetime.datetime.today() + status["tasks"][taskId]["lastChange"] = now.strftime('%Y-%m-%dT%H:%M:%S') + _save_json(path, status) + +def _save_json(path: str, content: any): + """ + Method to save a json file to a given path. 
+ """ + path_swp = path + ".swp" + if(os.path.exists(path_swp)): + time.sleep(1) + with open(path_swp, "w", encoding="utf-8") as stream: + json.dump(content, stream, ensure_ascii=True) + os.replace(path_swp, path) + +def get_prediction_directory(docking_directory: str): + """ + Method to get the path to the prediction directory from the docking directory. + """ + #currently assuming that the docking and predictions paths are different just by the name + return str.replace(docking_directory, "docking", "predictions") + +def get_prediction_path(docking_directory: str): + """ + Method to get the path to the prediction file from the docking directory. + """ + #currently assuming that the docking and predictions paths are different just by the name + return os.path.join(get_prediction_directory(docking_directory), "public", "prediction.json") + +def prepare_docking(input_file: str, structure_file_gzip: str, task_directory: str): + # unzip the pdb/mmCIF file + extension = structure_file_gzip.split(".")[-2] + structureFile = os.path.join(task_directory, ("structure." 
+ extension)) + + with gzip.open(structure_file_gzip, 'rb') as f_in: + with open(structureFile, 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + # create a smiles file from the ligand + ligandFile = os.path.join(task_directory, "ligand.smi") + with open(input_file) as inp, open(ligandFile, "w") as f: + input_json = json.load(inp) + f.write(input_json["hash"]) + + # prepare the input file + new_input_file = os.path.join(task_directory, "docking_parameters.json") + with open(input_file) as inp, open(new_input_file, "w") as out: + input_json = json.load(inp) + out_json = {} + + out_json["receptor"] = structureFile + out_json["ligand"] = ligandFile + out_json["output"] = os.path.join(task_directory, "public", "out_vina.pdbqt") + out_json["center"] = input_json["bounding_box"]["center"] + out_json["size"] = input_json["bounding_box"]["size"] + + json.dump(out_json, out) + +def execute_directory_task(docking_directory: str, taskId: int): + """ + Method to execute a task for a given directory and a given taskId. + """ + + result_file = os.path.join(docking_directory, str(taskId), "public", "result.json") + + #check if the directory exists - if not, we did not ask for this task + #check if the result file exists - if it does, we already calculated it + if not os.path.exists(docking_directory) or not os.path.isdir(docking_directory) or os.path.exists(result_file): + return + + #first update the status file + status_file = os.path.join(docking_directory, "info.json") + status = _load_json(status_file) + + status["tasks"][taskId]["status"] = Status.RUNNING.value + _save_status_file(status_file, status, taskId) + + #do the actual work here! 
+ #first, look for the gz file with the structure + structure_file = "" + for file_path in glob.glob(os.path.join(get_prediction_directory(docking_directory), "public") + "/*.gz"): + structure_file = file_path + break + + if structure_file == "": + #no structure file found, we cannot do anything + #this should not happen because the structure has to be downloadable for the prediction... + status["tasks"][taskId]["status"] = Status.FAILED.value + _save_status_file(status_file, status, taskId) + return + + #try to dock the molecule + try: + prepare_docking(os.path.join(docking_directory, str(taskId), "input.json"), structure_file, os.path.join(docking_directory, str(taskId))) + run_docking(os.path.join(docking_directory, str(taskId), "docking_parameters.json"), os.path.join(docking_directory, str(taskId)), os.path.join(docking_directory, str(taskId)), "public") + except Exception as e: + print(repr(e)) + print(str(e)) + #something went wrong during the docking + #TODO: add some more error handling here, provide a log? + status["tasks"][taskId]["status"] = Status.FAILED.value + _save_status_file(status_file, status, taskId) + return + + #parse the prediction file and do some calculations - in this case just counting the number of residues per pocket + #API is /docking///public/ + #split docking_directory to get database_name and prediction_name + result = [] + database_name = docking_directory.split("/")[4] + if "user-upload" in database_name: + prediction_name = docking_directory.split("/")[5] + else: + prediction_name = docking_directory.split("/")[6] + + result_url = "./api/v2/docking/" + database_name + "/" + prediction_name + "/public/results.zip" + result.append({ + "url": result_url + }) + result_json = json.dumps(result) + + #save the result file (this directory should already exist, though...) 
+ os.makedirs(os.path.join(docking_directory, str(taskId), "public"), exist_ok=True) + + with open(result_file, "w", encoding="utf-8") as stream: + try: + stream.write(result_json) + finally: + stream.flush() + + #update the status file, reload it first to make sure we don't overwrite any changes + status = _load_json(status_file) + + status["tasks"][taskId]["status"] = Status.SUCCESSFUL.value + _save_status_file(status_file, status, taskId) + +def main(arguments): + pass + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/executor-p2rank/Dockerfile b/executor-p2rank/Dockerfile index 60f12f9d..e3d9b25e 100644 --- a/executor-p2rank/Dockerfile +++ b/executor-p2rank/Dockerfile @@ -2,10 +2,10 @@ # java-tools # -FROM debian:bookworm-20211115 as java-tools +FROM debian:bookworm-20231009 as java-tools RUN apt-get update \ - && apt-get -y --no-install-recommends install openjdk-17-jdk + && apt-get -y --no-install-recommends install openjdk-17-jdk WORKDIR /opt/java-tools COPY ./java-tools ./ @@ -21,7 +21,7 @@ RUN chmod +x ./gradlew && ./gradlew installDist \ # * /opt/alignment-based-conservation-dependencies # -FROM debian:bookworm-20211115 as alignment-based-conservation +FROM debian:bookworm-20231009 as alignment-based-conservation RUN apt-get update \ && apt-get -y --no-install-recommends install \ @@ -67,7 +67,7 @@ RUN chmod a+x /opt/alignment-based-conservation/conservation_alignment_based.py # * /opt/hmm-based-conservation-dependencies # -FROM debian:bookworm-20211115 as hmm-based-conservation +FROM debian:bookworm-20231009 as hmm-based-conservation RUN apt-get update \ && apt-get -y --no-install-recommends install \ @@ -96,7 +96,7 @@ RUN chmod a+x /opt/hmm-based-conservation/conservation_hmm_based.py \ # executor-p2rank # -FROM debian:bookworm-20211115 +FROM debian:bookworm-20231009 ARG UID=5988 ARG GID=5988 @@ -107,7 +107,7 @@ RUN apt-get update \ && apt-get -y --no-install-recommends install \ wget curl \ python3 python3-pip python3-venv \ - 
openjdk-17-jre-headless \ + default-jre \ libgomp1 vim # https://github.com/python/cpython/issues/102134 diff --git a/executor-p2rank/celery_p2rank.py b/executor-p2rank/celery_p2rank.py index 2a3959e3..9d3d3d97 100644 --- a/executor-p2rank/celery_p2rank.py +++ b/executor-p2rank/celery_p2rank.py @@ -11,7 +11,8 @@ if "CELERY_BROKER_URL" in os.environ: prankweb.conf.update({ - "broker_url": os.environ["CELERY_BROKER_URL"] + "broker_url": os.environ["CELERY_BROKER_URL"], + "broker_connection_retry_on_startup": True, }) elif "CELERY_BROKER_PATH" in os.environ: folder = os.environ["CELERY_BROKER_PATH"] @@ -22,6 +23,7 @@ "data_folder_out": folder + "/queue/", "data_folder_processed": folder + "/processed/" }, + "broker_connection_retry_on_startup": True, }) diff --git a/executor-p2rank/output_prankweb.py b/executor-p2rank/output_prankweb.py index 623245dd..11903eea 100644 --- a/executor-p2rank/output_prankweb.py +++ b/executor-p2rank/output_prankweb.py @@ -68,6 +68,16 @@ def _extension(file_name: str) -> str: """For 'name.ext' return 'ext'.""" return file_name[file_name.rindex(".") + 1:] +def _get_p2rank_version(file_name: str) -> str: + """Returns the P2rank version from the params file. 
+ Supposes that one of the parameters starts with 'version:', + otherwise returns 'unknown'.""" + + with open(file_name) as stream: + for line in stream: + if line.startswith("version:"): + return line.split(":")[1].strip() + return "unknown" def _prepare_prediction_file( output_file: str, @@ -82,6 +92,9 @@ def _prepare_prediction_file( structure_file = os.path.join( configuration.working_directory, "structure-information.json") + parameters_file = os.path.join( + p2rank_output, "params.txt") + configuration.execute_command( f"{configuration.java_tools} structure-info" f" --input={structure.raw_structure_file}" @@ -94,6 +107,7 @@ def _prepare_prediction_file( "pockets": load_pockets(predictions_file), "metadata": { **structure.metadata, + "p2rank_version": _get_p2rank_version(parameters_file) }, }, stream, indent=2) diff --git a/executor-p2rank/requirements.txt b/executor-p2rank/requirements.txt index f80062c8..163f1c5b 100644 --- a/executor-p2rank/requirements.txt +++ b/executor-p2rank/requirements.txt @@ -1,3 +1,3 @@ -celery==5.2.1 -requests==2.24.0 -eventlet==0.33.0 +celery==5.3.4 +requests==2.31.0 +eventlet==0.33.3 diff --git a/flower/Dockerfile b/flower/Dockerfile index 346997dc..83faa6dd 100644 --- a/flower/Dockerfile +++ b/flower/Dockerfile @@ -1,16 +1,16 @@ -FROM debian:bookworm-20211115 +FROM debian:bookworm-20231009 ARG UID ARG GID RUN apt-get update \ - && apt-get -y --no-install-recommends install \ - curl python3 python3-pip vim wget + && apt-get -y --no-install-recommends install \ + curl python3 python3-pip vim wget RUN addgroup --gid ${GID} user \ - && useradd --uid ${UID} --gid ${GID} user + && useradd --uid ${UID} --gid ${GID} user -RUN mkdir /opt/flower && chown user:user /opt/flower \ +RUN mkdir /opt/flower && chown user:user /opt/flower WORKDIR /opt/flower COPY --chown=user:user ./web-server/requirements.txt ./ diff --git a/flower/requirements.txt b/flower/requirements.txt index 38e02499..4c39f5e6 100644 --- a/flower/requirements.txt +++ 
b/flower/requirements.txt @@ -1,3 +1,3 @@ -flask==2.0.2 -celery==5.2.1 -gunicorn==20.1.0 +flask==3.0.0 +celery==5.3.4 +gunicorn==21.2.0 diff --git a/frontend/babel.config.js b/frontend/babel.config.js deleted file mode 100644 index 03198c65..00000000 --- a/frontend/babel.config.js +++ /dev/null @@ -1,31 +0,0 @@ -module.exports = (api) => { - - api.cache.using(() => process.env.NODE_ENV); - - const presets = [ - "@babel/preset-react", - ["@babel/preset-env", { - "targets": { - "chrome": 41, - }, - "useBuiltIns": "usage", - "corejs": { - "version": 3, - "proposals": true, - }, - }], - ]; - - const plugins = []; - - const ignore = [ ]; - - // We exclude node_modules as they would cause a lot of warnings. - ignore.push("node_modules"); - - return { - "presets": presets, - "plugins": plugins, - "ignore": ignore, - } -}; \ No newline at end of file diff --git a/frontend/build/webpack.common.js b/frontend/build/webpack.common.js index 32a5eb5a..b60c786d 100644 --- a/frontend/build/webpack.common.js +++ b/frontend/build/webpack.common.js @@ -28,13 +28,12 @@ module.exports = { "module": { "rules": [ { - "test": /\.jsx?$/, - "use": "babel-loader", - }, - { - "test": /\.tsx?$/, - "use": "ts-loader", - "exclude": /node_modules/, + "test": /\.[jt]sx?$/, + "loader": "esbuild-loader", + "options": { + "loader": "tsx", + "target": "es2019" + } }, { "test": /\.html$/, diff --git a/frontend/build/webpack.develop.js b/frontend/build/webpack.develop.js index d77affaf..3da6ec63 100644 --- a/frontend/build/webpack.develop.js +++ b/frontend/build/webpack.develop.js @@ -6,15 +6,5 @@ module.exports = merge(common, { "devtool": "eval", "devServer": { "hot": true, - }, - "module": { - "rules": [ - { - "enforce": "pre", - "test": /\.(js|jsx)$/, - "exclude": [/node_modules/, /assets/], - "use": ["babel-loader"], - }, - ], - }, + } }); diff --git a/frontend/client/about/about.html b/frontend/client/about/about.html index c40f4c16..811be45a 100644 --- a/frontend/client/about/about.html +++ 
b/frontend/client/about/about.html @@ -1,187 +1,175 @@ + -@require("../partials/head.html") + @require("../partials/head.html") + -@require("../partials/navigation.html") -
+ @require("../partials/navigation.html") +
-

About

-

- Proteins are fundamental building blocks of all living organisms. They - perform their function by binding to other molecules. This project deals - with interactions between proteins and small molecules (so called - ligands) because most of the currently used drugs are small molecules. - While there are several tools that can predict these interactions, they - are almost none for their visualization. Thus, we built a new - visualization website by combining several protein visualizers together. - Since evolutionary homology correlates with binding sites, our web - interface also displays homology for comparison. We developed several - ways how to calculate homology, and used it to improve detection of - protein-ligand binding sites. Here we present PrankWeb, a modern web - application for structure and sequence visualization of a protein and - its protein-ligand binding sites as well as evolutionary homology. We - hope that it will provide a quick and convenient way for scientists to - analyze proteins. -

+

About

+

+ Proteins are fundamental building blocks of all living organisms. They + perform their function by binding to other molecules. This project deals + with interactions between proteins and small molecules (so called + ligands) because most of the currently used drugs are small molecules. + While there are several tools that can predict these interactions, there + are almost none for their visualization. Thus, we have built a new + visualization website by combining several protein visualizers together. + Since evolutionary homology correlates with binding sites, our web + interface also displays homology for comparison. We developed several + ways how to calculate homology, and used it to improve detection of + protein-ligand binding sites. Here we present PrankWeb, a modern web + application for structure and sequence visualization of a protein and + its protein-ligand binding sites as well as evolutionary homology. We + hope that it will provide a quick and convenient way for scientists to + analyze proteins. +

-

Underlying method

-

- PrankWeb is web interface based on - - P2Rank standalone method - . - For batch-processing, it is recommended to download - - standalone version - - of P2Rank and run experiments locally. -

+

Underlying method

+

+ PrankWeb is web interface based on + P2Rank standalone method. + For batch-processing, it is recommended to download + standalone version + of P2Rank and run experiments locally. +

-

Training Datasets

-

- PrankWeb classification models have been trained and validated on - - publicly available datasets - . -

+

Training Datasets

+

+ PrankWeb classification models have been trained and validated on + publicly available datasets. +

-

Precomputed predictions

-

- We have computed the ligand-binding sites predictions for two components of the AlphaFold DB, the “model organism proteomes” and “Swiss-Prot”, as well as PDB. - For each database, AlphaFold DB and PDB, we computed the prediction with and without conservations. - What is available for download is the content of the predictions directory created as described at wiki page - Large scale Predictions. -

- +

Precomputed predictions

+

+ We have computed the ligand-binding sites predictions for two components of the AlphaFold DB, the “model organism + proteomes” and “Swiss-Prot”, as well as PDB. + For each database, AlphaFold DB and PDB, we computed the prediction with and without conservations. + What is available for download is the content of the predictions directory created as described at wiki + page + Large scale Predictions. +

+ -

Citing

-

If you use P2Rank online service, please cite: -

+

Citing

+

If you use P2Rank online service, please cite: +

-

Authors

-
-
-
- +

Authors

+
+
+
+ +
+

+ David Hoksza
+ (contact person) +

+

Faculty of Mathematics and Physics, Charles University

+

+ + david.hoksza (at) matfyz.cuni.cz. +

-

- David Hoksza
- (contact person) -

-

Faculty of Mathematics and Physics, Charles University

-

- - david.hoksza (at) matfyz.cuni.cz. -

-
-
-
- +
+
+ +
+

Lukas Jendele

+

Faculty of Mathematics and Physics, Charles University

+

Department of Computer Science, ETH Zurich

+

+ + lukas.jendele (at) gmail.com +

-

Lukas Jendele

-

Faculty of Mathematics and Physics, Charles University

-

Department of Computer Science, ETH Zurich

-

- - lukas.jendele (at) gmail.com -

-
-
-
- +
+
+ +
+

Radoslav Krivák

+

Faculty of Mathematics and Physics, Charles University

+

+ + rkrivak (at) gmail.com +

-

Radoslav Krivák

-

Faculty of Mathematics and Physics, Charles University

-

- - rkrivak (at) gmail.com -

-
-
-
- +
+
+ +
+

Marian Novotný

+

Faculty of Science, Charles University

+

+ + marian.novotny (at) natur.cuni.cz +

-

Marian Novotný

-

Faculty of Science, Charles University

-

- - marian.novotny (at) natur.cuni.cz -

-
-
-
- +
+
+ +
+

Petr Škoda

+

Faculty of Mathematics and Physics, Charles University

+

+ + petr.skoda (at) matfyz.cuni.cz +

-

Petr Škoda

-

Faculty of Mathematics and Physics, Charles University

-

- - petr.skoda (at) matfyz.cuni.cz -

-
-
-
- +
+
+ +
+

Lukáš Polák

+

Faculty of Mathematics and Physics, Charles University

+

+ + admin (at) lukaspolak.cz +

-

Lukáš Polák

-

Faculty of Mathematics and Physics, Charles University

-

- - admin (at) lukaspolak.cz -

-
- @require("../partials/footer.html") + @require("../partials/footer.html") -
+
+ \ No newline at end of file diff --git a/frontend/client/analyze/analyze.html b/frontend/client/analyze/analyze.html index b83d68c7..0d97e6c0 100644 --- a/frontend/client/analyze/analyze.html +++ b/frontend/client/analyze/analyze.html @@ -1,52 +1,51 @@ + - - - - @require("../partials/head.html") + -@require("../partials/navigation.html") - -