Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Web UI #7

Merged
merged 19 commits into from
Aug 26, 2024
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ COPY ./bioimageio_colab/register_sam_service.py /app/register_sam_service.py
# Change ownership of the application directory to the non-root user
RUN chown -R bioimageio_colab:bioimageio_colab /app/

# Add a build argument for cache invalidation
ARG CACHEBUST=1

# Fetch the Hypha server version and reinstall or upgrade hypha-rpc to the matching version
RUN HYPHA_VERSION=$(curl -s https://hypha.aicell.io/config.json | jq -r '.hypha_version') && \
pip install --upgrade "hypha-rpc<=$HYPHA_VERSION"
Expand Down
3 changes: 1 addition & 2 deletions bioimageio_colab/register_sam_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,8 +223,7 @@ async def register_service(args: dict) -> None:
# remove the user id from the storage
# returns True if the user was removed successfully
"remove_user_id": remove_user_id, # TODO: add a timeout to remove a user after a certain time
},
overwrite=True,
}
)
sid = service_info["id"]
assert sid == f"{args.workspace_name}/{args.client_id}:{args.service_id}"
Expand Down
5 changes: 4 additions & 1 deletion build_and_push.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,10 @@ IMAGE_NAME=ghcr.io/${GITHUB_REPOSITORY}:latest
# Log in to GHCR
echo "$GHCR_PAT" | docker login ghcr.io -u "$GITHUB_ACTOR" --password-stdin

# Build the Docker image using Docker Compose
# Generate a dynamic CACHEBUST value (timestamp)
export CACHEBUST=$(date +%Y%m%d%H%M%S)

# Build the Docker image using Docker Compose with the CACHEBUST argument
docker-compose build

# Push the Docker image to GHCR
Expand Down
2 changes: 1 addition & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ services:
context: .
dockerfile: Dockerfile
args:
SOURCE_LABEL: "https://github.com/bioimage-io/bioimageio-colab"
CACHEBUST: ${CACHEBUST}
image: ghcr.io/bioimage-io/bioimageio-colab:latest
env_file:
- .env
Expand Down
97 changes: 97 additions & 0 deletions docs/data-providing-service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import os
import json
from functools import partial
from typing import Tuple

import numpy as np
from hypha_rpc import connect_to_server
from kaibu_utils import features_to_mask
from tifffile import imread, imwrite


def list_image_files(image_folder: str, supported_file_types: Tuple[str, ...]):
    """List the file names in *image_folder* whose extension matches.

    Args:
        image_folder: Directory to scan (non-recursive).
        supported_file_types: Tuple of accepted suffixes, e.g. (".tif", ".png").
            Annotated as ``Tuple[str, ...]`` (variable length); the previous
            ``Tuple[str]`` annotation incorrectly declared a 1-tuple.

    Returns:
        List of matching file names (not full paths), in os.listdir order.
    """
    # str.endswith accepts a tuple of suffixes, so one call covers all types.
    return [f for f in os.listdir(image_folder) if f.endswith(supported_file_types)]


def read_image(file_path: str):
    """Read a TIFF image and return it as a numpy array.

    Channel-first RGB volumes (shape (3, H, W)) are rearranged to the
    channel-last layout (H, W, 3) expected by the annotation front end.
    Other shapes are returned untouched.
    """
    data = imread(file_path)
    # tifffile yields a numpy array; move a leading 3-channel axis to the end.
    if data.ndim == 3 and data.shape[0] == 3:
        data = np.transpose(data, [1, 2, 0])
    return data


def get_random_image(image_folder: str, supported_file_types: Tuple[str, ...]):
    """Pick a uniformly random supported image from *image_folder*.

    Args:
        image_folder: Directory containing the candidate images.
        supported_file_types: Tuple of accepted file suffixes.

    Returns:
        Tuple of (image array, file name without extension).

    Raises:
        FileNotFoundError: If no supported image file is present.
    """
    filenames = list_image_files(image_folder, supported_file_types)
    if not filenames:
        raise FileNotFoundError(f"No supported image files found in {image_folder}")
    # np.random.randint's upper bound is exclusive, so the bound must be
    # len(filenames): the previous `len(filenames) - 1` could never return the
    # last file and raised ValueError when exactly one file was present.
    r = np.random.randint(len(filenames))
    file_name = filenames[r]
    image = read_image(os.path.join(image_folder, file_name))
    return (image, file_name.split(".")[0])


def save_annotation(annotations_folder: str, image_name: str, features, image_shape):
    """Rasterize annotation features to a mask and save it as a TIFF.

    Args:
        annotations_folder: Directory the mask file is written into.
        image_name: Base name of the annotated image; masks are saved as
            ``{image_name}_mask_{k}.tif`` with k counting up per image.
        features: Annotation features as produced by the kaibu viewer.
        image_shape: Shape of the annotated image, used to size the mask.
    """
    mask = features_to_mask(features, image_shape)
    # Number the new mask after the masks already stored for this image.
    n_image_masks = len(
        [f for f in os.listdir(annotations_folder) if f.startswith(image_name)]
    )
    # Fixed typo: `os.pth.join` raised AttributeError, so no annotation could
    # ever be saved.
    mask_name = os.path.join(
        annotations_folder, f"{image_name}_mask_{n_image_masks + 1}.tif"
    )
    imwrite(mask_name, mask)


def upload_image_to_s3():
    """Upload a local image dataset to S3 (not implemented yet).

    Planned steps:
    - Create a user prefix on S3
    - Create a data and annotation prefix
    - For every image:
      - Load the image from the data folder into a numpy array
      - Upload the image to the data prefix

    Return:
    - The user prefix

    # TODO: register a data providing service on K8S cluster that uses the user prefix (get_random_image_s3, save_annotation_s3)
    """
    raise NotImplementedError


async def register_service(
    server_url: str,
    token: str,
    supported_file_types_json: str,
):
    """Register the collaborative-annotation data provider on a Hypha server.

    Args:
        server_url: URL of the Hypha server to connect to.
        token: Authentication token for the server.
        supported_file_types_json: JSON-encoded list of accepted file
            suffixes, e.g. '[".tif", ".tiff"]'.

    Raises:
        FileNotFoundError: If the mounted images folder is missing.
    """
    # Define path to images and annotations
    images_path = "/mnt"
    annotations_path = "/mnt/annotations"

    # Check if the images folder exists
    if not os.path.isdir(images_path):
        raise FileNotFoundError("Mounted images folder not found")

    # Ensure the annotations folder exists: save_annotation lists and writes
    # into it, and would otherwise fail on the first saved mask.
    os.makedirs(annotations_path, exist_ok=True)

    # Decode the JSON string to a Python tuple
    supported_file_types = tuple(json.loads(supported_file_types_json))

    # Connect to the server link
    server = await connect_to_server({"server_url": server_url, "token": token})

    # Register the service
    await server.register_service(
        {
            "name": "Collaborative Annotation",
            "id": "data-provider",
            "config": {
                "visibility": "public",  # TODO: make protected
                "run_in_executor": True,
            },
            # Exposed functions:
            # get a random image from the dataset
            # returns the image as a numpy image
            "get_random_image": partial(
                get_random_image, images_path, supported_file_types
            ),
            # save the annotation mask
            # pass the filename of the image, the new filename, the features and the image shape
            "save_annotation": partial(save_annotation, annotations_path),
        }
    )
Loading
Loading