add Docker Container example #94

Open · wants to merge 2 commits into base: dev
Changes from 1 commit
20 changes: 13 additions & 7 deletions examples/docker_submission/README.md
@@ -4,24 +4,30 @@ TODO: Add a description of the submission process here.


## Launching the submission container
TODO: Create a docker-compose file
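Until that file exists, a minimal sketch of what it could look like (file name, service name, and build context are assumptions, not part of this PR):
```yaml
# docker-compose.yml (hypothetical)
services:
  sample_pysaliency:
    build: ./docker          # directory containing the Dockerfile
    ports:
      - "4000:4000"          # matches the HTTP_PORT exposed by the image
```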

First, we have to build the container:
```bash
cd ./http_submission
-docker build -t sample_pysaliency .
docker build -t sample_pysaliency docker
```

Then we can start it:
```bash
docker run --rm -it -p 4000:4000 sample_pysaliency
```
The above command will launch the image as an interactive container in the foreground
and expose port `4000` to the host machine.
If you prefer to run it in the background, use
```bash
docker run --name sample_pysaliency -dp 4000:4000 sample_pysaliency
```
-The above command will launch a container named `sample_pysaliency` and expose the port `4000` to the host machine. The container will be running in the background.
which will launch a container named `sample_pysaliency`. The container will be running in the background.

To test the model server, run the sample_evaluation script (make sure you have the `pysaliency` package installed):
```bash
-python ./http_evaluation/sample_evaluation.py
python ./sample_evaluation.py
```


-To delete the container, run the following command:
To delete the background container, run the following command:
```bash
docker stop sample_pysaliency && docker rm sample_pysaliency
```
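To verify that the server is reachable, you can also query its `/type` endpoint directly (a quick check, not part of the PR):
```bash
curl http://localhost:4000/type
# expected output: {"type":"ScanpathModel","version":"v1.0.0"}
```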
examples/docker_submission/docker/Dockerfile
@@ -8,16 +8,19 @@ WORKDIR /app
ENV HTTP_PORT=4000

RUN apt-get update \
-    && apt-get -y install gcc
    && apt-get -y install gcc \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /var/cache/apt/*

COPY ./requirements.txt ./
-RUN python -m pip install -U pip \
-    && python -m pip install -r requirements.txt
RUN python -m pip install --no-cache-dir -U pip \
    && python -m pip install --no-cache-dir -r requirements.txt

-COPY . ./
COPY ./model_server.py ./
COPY ./sample_submission.py ./

# This is needed for Singularity builds.
EXPOSE $HTTP_PORT

# The entrypoint for the container
-CMD ["gunicorn", "-w", "1", "-b", "0.0.0.0:4000", "--pythonpath", ".", "model_server:app"]
CMD ["gunicorn", "-w", "1", "-b", "0.0.0.0:4000", "--pythonpath", ".", "--access-logfile", "-", "model_server:app"]
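Since `--access-logfile -` makes gunicorn write its access log to stdout, you can follow the requests from the background container described in the README (standard Docker CLI, shown here for convenience):
```bash
docker logs -f sample_pysaliency
```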
examples/docker_submission/docker/model_server.py
@@ -1,20 +1,22 @@
from flask import Flask, request, jsonify
from flask_orjson import OrjsonProvider
import numpy as np
import json
from PIL import Image
from io import BytesIO
# import pickle
import orjson


# Import your model here
from sample_submission import MySimpleScanpathModel

app = Flask("saliency-model-server")
app.json_provider = OrjsonProvider(app)
> **Owner (review comment):** Are we even using the OrjsonProvider? I guess not, since we're manually calling orjson. In this case I would remove the dependency. Alternatively, you can try to keep it, use jsonify and set the config option to prevent pretty printing.
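If the provider is kept instead, the reviewer's alternative might look like the following sketch (assumes Flask's JSON-provider API, where `flask_orjson`'s provider is installed via `app.json`; orjson output is compact by default, so no pretty-printing needs to be disabled):
```python
from flask import Flask, jsonify
from flask_orjson import OrjsonProvider

app = Flask("saliency-model-server")
app.json = OrjsonProvider(app)  # jsonify() now serializes via orjson

@app.route('/type', methods=['GET'])
def type():
    # jsonify uses the installed provider, no manual orjson.dumps needed
    return jsonify({'type': "ScanpathModel", 'version': "v1.0.0"})
```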

app.logger.setLevel("DEBUG")

# TODO - replace this with your model
model = MySimpleScanpathModel()


@app.route('/conditional_log_density', methods=['POST'])
def conditional_log_density():
    data = json.loads(request.form['json_data'])
@@ -28,14 +30,16 @@ def conditional_log_density():
    stimulus = np.array(image)

    log_density = model.conditional_log_density(stimulus, x_hist, y_hist, t_hist, attributes)
-    return jsonify({'log_density': log_density.tolist()})
    log_density_list = log_density.tolist()
    response = orjson.dumps({'log_density': log_density_list})
    return response


@app.route('/type', methods=['GET'])
def type():
    type = "ScanpathModel"
    version = "v1.0.0"
-    return jsonify({'type': type, 'version': version})
    return orjson.dumps({'type': type, 'version': version})


def main():
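The wire format accepted by this server can also be exercised without `pysaliency`; here is a sketch of a raw request (the form-field names follow the handler above; the image path and history values are made up):
```python
import json
import requests

# hypothetical standalone client for /conditional_log_density
with open("stimulus.png", "rb") as f:  # any PNG image
    image_bytes = f.read()

json_data = {"x_hist": [100.0], "y_hist": [120.0], "t_hist": [0.0], "attributes": {}}
response = requests.post(
    "http://localhost:4000/conditional_log_density",
    data={"json_data": json.dumps(json_data)},
    files={"stimulus": image_bytes},
)
response.raise_for_status()
log_density = response.json()["log_density"]  # nested list, one row per image row
```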
10 changes: 10 additions & 0 deletions examples/docker_submission/docker/requirements.txt
@@ -0,0 +1,10 @@
cython
flask
gunicorn
numpy

# Add additional dependencies here
pysaliency
scipy
torch
flask_orjson
79 changes: 79 additions & 0 deletions examples/docker_submission/docker/sample_submission.py
@@ -0,0 +1,79 @@
import numpy as np
import sys
from typing import Union
from scipy.ndimage import gaussian_filter
import pysaliency


class LocalContrastModel(pysaliency.Model):
    def __init__(self, bandwidth=0.05, **kwargs):
        super().__init__(**kwargs)
        self.bandwidth = bandwidth

    def _log_density(self, stimulus: Union[pysaliency.datasets.Stimulus, np.ndarray]):

        # _log_density can either take pysaliency Stimulus objects, or, for convenience, simply numpy arrays
        # `as_stimulus` ensures that we have a Stimulus object
        stimulus_object = pysaliency.datasets.as_stimulus(stimulus)

        # grayscale image
        gray_stimulus = np.mean(stimulus_object.stimulus_data, axis=2)

        # size contains the height and width of the image, but not potential color channels
        height, width = stimulus_object.size

        # define kernel size based on image size
        kernel_size = np.round(self.bandwidth * max(width, height)).astype(int)
        sigma = (kernel_size - 1) / 6

        # apply Gaussian blur and calculate squared difference between blurred and original image
        blurred_stimulus = gaussian_filter(gray_stimulus, sigma)

        prediction = gaussian_filter((gray_stimulus - blurred_stimulus)**2, sigma)

        # normalize to [1, 255]
        prediction = (254 * (prediction / prediction.max())).astype(int) + 1

        density = prediction / prediction.sum()

        return np.log(density)

class MySimpleScanpathModel(pysaliency.ScanpathModel):
    def __init__(self, spatial_model_bandwidth: float=0.05, saccade_width: float=0.1):
        self.spatial_model_bandwidth = spatial_model_bandwidth
        self.saccade_width = saccade_width
        self.spatial_model = LocalContrastModel(spatial_model_bandwidth)
        # self.spatial_model = pysaliency.UniformModel()


    def conditional_log_density(self, stimulus, x_hist, y_hist, t_hist, attributes=None, out=None,):
        stimulus_object = pysaliency.datasets.as_stimulus(stimulus)

        # size contains the height and width of the image, but not potential color channels
        height, width = stimulus_object.size

        spatial_prior_log_density = self.spatial_model.log_density(stimulus)
        spatial_prior_density = np.exp(spatial_prior_log_density)

        # compute saccade bias
        last_x = x_hist[-1]
        last_y = y_hist[-1]

        xs = np.arange(width, dtype=float)
        ys = np.arange(height, dtype=float)
        XS, YS = np.meshgrid(xs, ys)

        XS -= last_x
        YS -= last_y

        # compute prior
        max_size = max(width, height)
        actual_kernel_size = self.saccade_width * max_size

        saccade_bias = np.exp(-0.5 * (XS ** 2 + YS ** 2) / actual_kernel_size ** 2)

        prediction = spatial_prior_density * saccade_bias

        density = prediction / prediction.sum()
        return np.log(density)
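A quick plausibility check for the sample model (a sketch, not part of the PR; stimulus size and scanpath history are made up):
```python
import numpy as np

# the predicted conditional density should sum to one over the image
model = MySimpleScanpathModel()
stimulus = np.random.randint(0, 255, (240, 320, 3), dtype=np.uint8)
log_density = model.conditional_log_density(
    stimulus, x_hist=[100.0], y_hist=[120.0], t_hist=[0.0]
)
assert log_density.shape == (240, 320)
assert np.isclose(np.exp(log_density).sum(), 1.0)
```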

6 changes: 0 additions & 6 deletions examples/docker_submission/requirements.txt

This file was deleted.

12 changes: 11 additions & 1 deletion examples/docker_submission/sample_evaluation.py
@@ -6,6 +6,9 @@
import pysaliency


from tqdm import tqdm


if __name__ == "__main__":
    http_model = HTTPScanpathModel("http://localhost:4000")
    http_model.check_type()
@@ -16,9 +19,16 @@
    # get MIT1003 dataset
    stimuli, fixations = pysaliency.get_mit1003(location='pysaliency_datasets')

    # access each stimulus once so the image data is loaded before evaluation
    for stimulus in tqdm(stimuli):
        stimulus.stimulus_data

    eval_fixations = fixations[fixations.scanpath_history_length > 0]
    eval_fixations = eval_fixations[:10]

    information_gain = http_model.information_gain(stimuli, eval_fixations, average="image", verbose=True)
    print("IG:", information_gain)

-    for fixation_index in range(10):
    for fixation_index in tqdm(range(10)):
        # get server response for one stimulus
        server_density = http_model.conditional_log_density(
            stimulus=stimuli.stimuli[eval_fixations.n[fixation_index]],
2 changes: 1 addition & 1 deletion examples/docker_submission/sample_submission.py
@@ -2,7 +2,7 @@
import sys
from typing import Union
from scipy.ndimage import gaussian_filter
-sys.path.insert(0, '..')
sys.path.insert(0, '../..')
import pysaliency


34 changes: 27 additions & 7 deletions pysaliency/http_models.py
@@ -4,8 +4,20 @@
import requests
import json
import numpy as np
import orjson

from .datasets import as_stimulus

class HTTPScanpathModel(ScanpathModel):
"""
A scanpath model that uses a HTTP server to make predictions.

The model is provided with an URL where it expects a server with the following API:

/conditional_log_density: expects a POST request with a file attachtment `stimulus`
containing the stimulus and a json body containing x_hist, y_hist, t_hist and a dictionary with other attributes
/type: returns the model type and version
"""
def __init__(self, url):
self.url = url
self.check_type()
@@ -20,30 +32,38 @@ def type_url(self):

    def conditional_log_density(self, stimulus, x_hist, y_hist, t_hist, attributes=None, out=None):
        # build request
-        pil_image = Image.fromarray(stimulus)
        stimulus_object = as_stimulus(stimulus)

        # TODO: check for file stimuli, in this case use original file to save encoding time
        pil_image = Image.fromarray(stimulus_object.stimulus_data)
        image_bytes = BytesIO()
        pil_image.save(image_bytes, format='png')

        def _convert_attribute(attribute):
            if isinstance(attribute, np.ndarray):
                return attribute.tolist()
            if isinstance(attribute, (np.int64, np.int32)):
                return int(attribute)
            if isinstance(attribute, (np.float64, np.float32)):
                return float(attribute)
            return attribute

        json_data = {
-            "x_hist": list(x_hist),
-            "y_hist": list(y_hist),
-            "t_hist": list(t_hist),
            "x_hist": list(x_hist.tolist()),
            "y_hist": list(y_hist.tolist()),
            "t_hist": list(t_hist.tolist()),
            "attributes": {key: _convert_attribute(value) for key, value in (attributes or {}).items()}
        }

        # send request
-        response = requests.post(f"{self.log_density_url}", data={'json_data': json.dumps(json_data)}, files={'stimulus': image_bytes.getvalue()})
        response = requests.post(f"{self.log_density_url}", data={'json_data': orjson.dumps(json_data)}, files={'stimulus': image_bytes.getvalue()})

        # parse response
        if response.status_code != 200:
            raise ValueError(f"Server returned status code {response.status_code}")

-        return np.array(response.json()['log_density'])
        json_data = orjson.loads(response.text)
        prediction = np.array(json_data['log_density'])
        return prediction

    def check_type(self):
        response = requests.get(f"{self.type_url}").json()
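A minimal usage sketch for this class against the container from the README (the import path and history values are assumptions):
```python
import numpy as np
from pysaliency.http_models import HTTPScanpathModel

# assumes the model server from the README is listening on port 4000
model = HTTPScanpathModel("http://localhost:4000")  # __init__ also runs check_type()
stimulus = np.random.randint(0, 255, (240, 320, 3), dtype=np.uint8)
log_density = model.conditional_log_density(
    stimulus,
    x_hist=np.array([100.0]),
    y_hist=np.array([120.0]),
    t_hist=np.array([0.0]),
)
print(log_density.shape)  # (240, 320)
```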