adds new function+rule to calculate distances
billbrod committed Nov 6, 2020
1 parent b7e2106 commit faf0f62
Showing 5 changed files with 111 additions and 0 deletions.
46 changes: 46 additions & 0 deletions Snakefile
@@ -1073,3 +1073,49 @@ rule synthesis_video:
        with open(log[0], 'w', buffering=1) as log_file:
            with contextlib.redirect_stdout(log_file), contextlib.redirect_stderr(log_file):
                met.figures.synthesis_video(input[0], wildcards.model_name)


rule compute_distances:
    input:
        ref_image = lambda wildcards: utils.get_ref_image_full_path(wildcards.image_name),
        windows = get_windows,
        norm_dict = get_norm_dict,
        synth_images = lambda wildcards: [m.replace('.png', '-16.png') for m in
                                          utils.generate_metamer_paths(wildcards.synth_model_name,
                                                                       image_name=wildcards.image_name)],
    output:
        op.join(config["DATA_DIR"], 'distances', '{model_name}', 'scaling-{scaling}',
                'synth-{synth_model_name}', '{image_name}_e0-{min_ecc}_em-{max_ecc}_distances.csv'),
    log:
        op.join(config["DATA_DIR"], 'logs', 'distances', '{model_name}', 'scaling-{scaling}',
                'synth-{synth_model_name}', '{image_name}_e0-{min_ecc}_em-{max_ecc}_distances.log')
    benchmark:
        op.join(config["DATA_DIR"], 'logs', 'distances', '{model_name}', 'scaling-{scaling}',
                'synth-{synth_model_name}', '{image_name}_e0-{min_ecc}_em-{max_ecc}_distances_benchmark.txt')
    params:
        cache_dir = lambda wildcards: op.join(config['DATA_DIR'], 'windows_cache'),
    resources:
        mem = get_mem_estimate,
    run:
        import foveated_metamers as met
        import plenoptic as po
        import torch
        import pandas as pd
        ref_image = po.load_images(input.ref_image)
        if input.norm_dict:
            norm_dict = torch.load(input.norm_dict)
        else:
            norm_dict = None
        model = met.create_metamers.setup_model(wildcards.model_name, float(wildcards.scaling),
                                                ref_image, float(wildcards.min_ecc),
                                                float(wildcards.max_ecc), params.cache_dir,
                                                input.norm_dict)[0]
        synth_scaling = config[wildcards.synth_model_name.split('_')[0]]['scaling']
        df = []
        for sc in synth_scaling:
            df.append(met.distances.model_distance(model, wildcards.synth_model_name,
                                                   wildcards.image_name, sc))
        df = pd.concat(df).reset_index(drop=True)
        df['distance_model'] = wildcards.model_name
        df['distance_scaling'] = float(wildcards.scaling)
        df.to_csv(output[0], index=False)
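
The rule above writes one CSV per combination of distance model, distance scaling, synthesis model, and reference image. A minimal sketch (not part of the commit) of loading such a CSV with pandas; the path components below stand in for hypothetical wildcard values and are purely illustrative:

    import os.path as op
    import pandas as pd

    # Hypothetical DATA_DIR and wildcard values, just to show the path pattern.
    data_dir = '/path/to/data'
    csv = op.join(data_dir, 'distances', 'RGC_norm_gaussian', 'scaling-0.5',
                  'synth-V1_norm_s6_gaussian', 'azulejos_e0-0.5_em-41_distances.csv')
    df = pd.read_csv(csv)
    # Columns set in model_distance: distance, image_1, image_2, synthesis_model,
    # synthesis_scaling, ref_image, image_1_seed, image_2_seed; plus distance_model
    # and distance_scaling, which the rule adds before saving.
    print(df.groupby(['synthesis_scaling', 'image_2_seed']).distance.mean())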
1 change: 1 addition & 0 deletions foveated_metamers/__init__.py
@@ -3,3 +3,4 @@
from . import analysis
from . import utils
from . import figures
from . import distances
56 changes: 56 additions & 0 deletions foveated_metamers/distances.py
@@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""functions related to calculating distances
"""

import pandas as pd
import plenoptic as po
from . import utils
import os.path as op
import itertools
import re


def _find_seed(x):
    """Grab the seed from an image name.

    If the seed can't be found, return 'ref'.
    """
    try:
        return re.findall(r'seed-(\d)_', x)[0]
    except IndexError:
        return 'ref'
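
For example (the filenames below are hypothetical, just to show the behavior):

    from foveated_metamers.distances import _find_seed
    _find_seed('V1_norm_s6_gaussian_azulejos_seed-1_metamer.png')  # -> '1'
    _find_seed('azulejos_ref_image.png')                           # -> 'ref'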


def model_distance(model, synth_model_name, ref_image_name, scaling):
    """Calculate distances between images, as measured by a model.

    We want to reason about the distances between the reference image and the
    synthesized images (and between the synthesized images themselves), as
    measured by the model's representation.
    """
    paths = utils.generate_metamer_paths(synth_model_name,
                                         image_name=ref_image_name,
                                         scaling=scaling)
    synth_images = po.load_images(paths)
    ref_image = po.load_images(utils.get_ref_image_full_path(ref_image_name))
    ref_image_rep = model(ref_image)
    df = []
    reps = {}
    # distance between each synthesized image and the reference image
    for i, (im, p) in enumerate(zip(synth_images, paths)):
        image_name = op.splitext(op.basename(p))[0]
        reps[image_name] = model(im)
        dist = po.optim.l2_norm(reps[image_name], ref_image_rep).item()
        df.append(pd.DataFrame({'distance': dist, 'image_1': image_name,
                                'image_2': ref_image_name}, index=[0]))
    # distance between each pair of synthesized images
    for im_1, im_2 in itertools.combinations(reps, 2):
        dist = po.optim.l2_norm(reps[im_1], reps[im_2]).item()
        df.append(pd.DataFrame({'distance': dist, 'image_1': im_1,
                                'image_2': im_2}, index=[0]))
    df = pd.concat(df).reset_index(drop=True)
    # df['distance_model'] = model
    df['synthesis_model'] = synth_model_name
    df['synthesis_scaling'] = scaling
    df['ref_image'] = ref_image_name.split('_')[0]
    df['image_1_seed'] = df.image_1.apply(_find_seed)
    df['image_2_seed'] = df.image_2.apply(_find_seed)
    return df
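
A minimal usage sketch, assuming a model built the same way as in the compute_distances rule above; the model names, image name, eccentricity values, and cache directory here are placeholders, not taken from the commit:

    import plenoptic as po
    import foveated_metamers as met

    ref_image_name = 'azulejos'  # hypothetical reference image name
    ref_image = po.load_images(met.utils.get_ref_image_full_path(ref_image_name))
    # setup_model returns a tuple; the model is its first element (as in the Snakefile rule)
    model = met.create_metamers.setup_model('RGC_norm_gaussian', 0.5, ref_image,
                                            0.5, 41, '/tmp/windows_cache', None)[0]
    df = met.distances.model_distance(model, 'V1_norm_s6_gaussian', ref_image_name, 0.25)
    print(df[['image_1_seed', 'image_2_seed', 'distance']].head())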
4 changes: 4 additions & 0 deletions prince.json
@@ -38,5 +38,9 @@
    {
        "mem": "{resources.mem}GB",
        "time": "4:00:00"
    },
    "compute_distances":
    {
        "mem": "{resources.mem}GB"
    }
}
4 changes: 4 additions & 0 deletions rusty.json
@@ -39,5 +39,9 @@
    {
        "mem": "{resources.mem}GB",
        "time": "04:00:00"
    },
    "compute_distances":
    {
        "mem": "{resources.mem}GB"
    }
}
