Skip to content

Commit

Permalink
Server responding to MIT1003 stimulus
Browse files Browse the repository at this point in the history
  • Loading branch information
JanRiedelsheimer committed Nov 12, 2024
1 parent 43d8543 commit a37a645
Show file tree
Hide file tree
Showing 3 changed files with 140 additions and 111 deletions.
49 changes: 24 additions & 25 deletions http_submission/model_server.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,34 @@
from flask import Flask, request, jsonify
import numpy as np
import json
from PIL import Image
from io import BytesIO

# Import your model here
from sample_submission import MySimpleScanpathModel

app = Flask("saliency-model-server")
app.logger.setLevel("DEBUG")

# TODO - replace this with your model
# NOTE(review): the old pickle-based /predict endpoint was removed; it called
# pickle.loads on the raw request body, which is unsafe on untrusted input.
model = MySimpleScanpathModel()


@app.route('/conditional_log_density', methods=['POST'])
def conditional_log_density():
    """Serve one conditional log-density query.

    Expects a multipart POST with a 'json_data' form field (JSON-encoded
    fixation history and attributes) and a 'stimulus' file upload (the
    image). Returns the model's log-density grid as JSON.
    """
    # The fixation history arrives JSON-encoded inside a form field.
    parsed = json.loads(request.form['json_data'])
    x_hist = np.array(parsed['x_hist'])
    y_hist = np.array(parsed['y_hist'])
    t_hist = np.array(parsed['t_hist'])
    attributes = parsed.get('attributes', {})

    # Decode the uploaded stimulus image into a numpy array.
    stimulus = np.array(Image.open(BytesIO(request.files['stimulus'].read())))

    log_density = model.conditional_log_density(stimulus, x_hist, y_hist, t_hist, attributes)
    return jsonify({'log_density': log_density.tolist()})


def main():
Expand Down
103 changes: 52 additions & 51 deletions http_submission/sample_evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,71 +2,72 @@
import pickle
import requests
import sys
from sample_submission import SampleScanpathModel
from sample_submission import MySimpleScanpathModel
from PIL import Image
from io import BytesIO
import json
import matplotlib.pyplot as plt
from pysaliency.plotting import plot_scanpath
sys.path.insert(0, '..')
import pysaliency

class HTTPScanpathModel(MySimpleScanpathModel):
    """Client-side proxy that forwards conditional_log_density queries to a
    model server over HTTP and parses the JSON response."""

    def __init__(self, url):
        # Full endpoint URL, e.g. "http://localhost:4000/conditional_log_density".
        self.url = url

    def conditional_log_density(self, stimulus, x_hist, y_hist, t_hist, attributes=None, out=None):
        """POST the stimulus image plus fixation history to the server and
        return the predicted log-density as a numpy array.

        Raises ValueError if the server responds with a non-200 status.
        """
        # build request: encode the stimulus as a PNG file upload
        pil_image = Image.fromarray(stimulus)
        image_bytes = BytesIO()
        pil_image.save(image_bytes, format='png')

        def _convert_attribute(attribute):
            # numpy arrays are not JSON serializable; send them as lists
            if isinstance(attribute, np.ndarray):
                return attribute.tolist()
            return attribute

        json_data = {
            "x_hist": list(x_hist),
            "y_hist": list(y_hist),
            "t_hist": list(t_hist),
            "attributes": {key: _convert_attribute(value) for key, value in (attributes or {}).items()},
        }

        # send request (self.url directly; the f-string wrapper was redundant)
        response = requests.post(self.url, data={'json_data': json.dumps(json_data)}, files={'stimulus': image_bytes.getvalue()})

        # parse response
        if response.status_code != 200:
            raise ValueError(f"Server returned status code {response.status_code}")

        return np.array(response.json()['log_density'])

# # parse response
if __name__ == "__main__":
    # Smoke test: query the local model server with one MIT1003 scanpath
    # and visualize the returned log-density next to the stimulus.
    http_model = HTTPScanpathModel("http://localhost:4000/conditional_log_density")

    # get MIT1003 dataset
    stimuli, fixations = pysaliency.get_mit1003(location='pysaliency_datasets')
    fixation_index = 32185

    # get server response for one stimulus
    server_response = http_model.conditional_log_density(
        stimulus=stimuli.stimuli[fixations.n[fixation_index]],
        x_hist=fixations.x_hist[fixation_index],
        y_hist=fixations.y_hist[fixation_index],
        t_hist=fixations.t_hist[fixation_index],
    )

    # TODO: delete plotting part
    # plot server response, only for testing
    fig, axs = plt.subplots(1, 2, figsize=(12, 6))
    axs[0].set_axis_off()
    axs[1].set_axis_off()

    axs[0].imshow(stimuli.stimuli[fixations.n[fixation_index]])
    plot_scanpath(stimuli, fixations, fixation_index, visualize_next_saccade=True, ax=axs[0])
    axs[0].set_title("Image")

    axs[1].imshow(server_response)
    axs[1].set_title("http_model_log_density")

    fig.savefig("test.png")
99 changes: 64 additions & 35 deletions http_submission/sample_submission.py
Original file line number Diff line number Diff line change
@@ -1,50 +1,79 @@
import numpy as np
# import pysaliency
import sys
from typing import Union
from scipy.ndimage import gaussian_filter
sys.path.insert(0, '..')
import pysaliency

class LocalContrastModel(pysaliency.Model):
    """Static saliency model based on local contrast: the smoothed squared
    difference between an image and its Gaussian-blurred version, normalized
    into a probability density."""

    def __init__(self, bandwidth=0.05, **kwargs):
        super().__init__(**kwargs)
        # Blur kernel size as a fraction of the larger image dimension.
        self.bandwidth = bandwidth

    def _log_density(self, stimulus: Union[pysaliency.datasets.Stimulus, np.ndarray]):
        """Return a (height, width) array of log-probabilities for `stimulus`."""
        # _log_density can either take pysaliency Stimulus objects, or, for
        # convenience, simply numpy arrays; `as_stimulus` ensures a Stimulus object.
        stimulus_object = pysaliency.datasets.as_stimulus(stimulus)

        # grayscale image
        # NOTE(review): assumes a trailing color-channel axis — a 2D grayscale
        # stimulus would raise here; confirm inputs are always HxWxC.
        gray_stimulus = np.mean(stimulus_object.stimulus_data, axis=2)

        # size contains the height and width of the image, but not potential color channels
        height, width = stimulus_object.size

        # define kernel size based on image size
        kernel_size = np.round(self.bandwidth * max(width, height)).astype(int)
        sigma = (kernel_size - 1) / 6

        # apply Gaussian blur and calculate squared difference between blurred
        # and original image; smooth the squared difference again
        blurred_stimulus = gaussian_filter(gray_stimulus, sigma)
        prediction = gaussian_filter((gray_stimulus - blurred_stimulus) ** 2, sigma)

        # normalize to [1, 255] so every pixel gets strictly positive mass
        # (avoids -inf in the log below)
        prediction = (254 * (prediction / prediction.max())).astype(int) + 1

        density = prediction / prediction.sum()
        return np.log(density)

class MySimpleScanpathModel(pysaliency.ScanpathModel):
    """Scanpath model combining a static local-contrast prior with a
    Gaussian saccade bias centered on the last fixation."""

    def __init__(self, spatial_model_bandwidth: float = 0.05, saccade_width: float = 0.1):
        # Initialize the pysaliency base class, matching LocalContrastModel
        # above which also chains to super().
        super().__init__()
        self.spatial_model_bandwidth = spatial_model_bandwidth
        # Saccade-bias Gaussian width as a fraction of the larger image dimension.
        self.saccade_width = saccade_width
        self.spatial_model = LocalContrastModel(spatial_model_bandwidth)

    def conditional_log_density(self, stimulus, x_hist, y_hist, t_hist, attributes=None, out=None):
        """Return the (height, width) log-density of the next fixation given
        the fixation history (x_hist, y_hist, t_hist).

        NOTE(review): indexes x_hist[-1]/y_hist[-1], so an empty history
        would raise — confirm callers always provide at least one fixation.
        """
        stimulus_object = pysaliency.datasets.as_stimulus(stimulus)

        # size contains the height and width of the image, but not potential color channels
        height, width = stimulus_object.size

        # static spatial prior from the local-contrast model
        spatial_prior_log_density = self.spatial_model.log_density(stimulus)
        spatial_prior_density = np.exp(spatial_prior_log_density)

        # compute saccade bias: a Gaussian centered on the last fixation
        last_x = x_hist[-1]
        last_y = y_hist[-1]

        xs = np.arange(width, dtype=float)
        ys = np.arange(height, dtype=float)
        XS, YS = np.meshgrid(xs, ys)

        XS -= last_x
        YS -= last_y

        max_size = max(width, height)
        actual_kernel_size = self.saccade_width * max_size

        saccade_bias = np.exp(-0.5 * (XS ** 2 + YS ** 2) / actual_kernel_size ** 2)

        # combine prior and saccade bias, then renormalize to a density
        prediction = spatial_prior_density * saccade_bias

        density = prediction / prediction.sum()
        return np.log(density)

0 comments on commit a37a645

Please sign in to comment.