Sync master into develop. Triggered by PR (#1278)
* point to pypi instead of git (#1236)

* update dependency names w/ hyphens (#1238)

* point to pypi instead of git

* Update pyproject.toml with hyphens

* Custom trained scaling models (#1206)

* Custom trained models

* Fix missing library

* Fix a missing library

* Not every model needs timm

* Trigger build tests

* Use model_id instead of model_name to ensure correct weights are loaded

* timm is missing from deit models

* timm is missing from deit models

* Fix requirements.txt newline

* Package models into a single plugin

* Update requirements.txt

* Update requirements.txt

* Update requirements.txt

* Change the order of models

* Update model.py

* Disable resultcaching for vits

* Tests reordering

* Revert tests back

* Update model.py

* Update model.py

* Update to sync workflow (#1248)

* Update to sync workflow

Adds a dynamic PR title and includes the commit history in the PR description.

Adds `No changes detected` job.

* Update no_changes condition and text

Refined the condition so it does not require PR_merge, since that is precisely when this condition should be triggered to report the correct status of the workflow.

* enable unverified SSL for scaling models (#1263)

* Add pathfinder benchmark (#1193)

* add pathfinder task

* update data packaging

* modify benchmark file

* add benchmark tests

* upload datasets

* add some tests and fix some bugs

* add dataset tests

* fix errors with plugin imports

* add error with stimulus set naming test

* fix bug with field name test

* remove field from test that shouldn't be there

* fix model loading bug in test

* fix ceiled test value -> raw test value

---------

Co-authored-by: Martin Schrimpf <[email protected]>

* Ep/add fixres resnext101 32x48d wsl (#1103)

* Add model

* Modify test

* Fix imports

---------

Co-authored-by: Ethan Pellegrini <[email protected]>
Co-authored-by: Kartik Pradeepan <[email protected]>

---------

Co-authored-by: Sam Winebrake <[email protected]>
Co-authored-by: Abdulkadir Gokce <[email protected]>
Co-authored-by: Ben Lonnqvist <[email protected]>
Co-authored-by: Martin Schrimpf <[email protected]>
Co-authored-by: pellegreene <[email protected]>
Co-authored-by: Ethan Pellegrini <[email protected]>
7 people authored Sep 26, 2024
1 parent 3061d4c commit 42e0d45
Showing 4 changed files with 76 additions and 0 deletions.
7 changes: 7 additions & 0 deletions brainscore_vision/models/fixres_resnext101_32x48d_wsl/__init__.py
@@ -0,0 +1,7 @@
from brainscore_vision import model_registry
from brainscore_vision.model_helpers.brain_transformation import ModelCommitment
from .model import get_model, get_layers

model_registry['fixres_resnext101_32x48d_wsl'] = lambda: ModelCommitment(identifier='fixres_resnext101_32x48d_wsl',
                                                                          activations_model=get_model(),
                                                                          layers=get_layers('fixres_resnext101_32x48d_wsl'))
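
For context (not part of the diff): the registry maps the identifier to a zero-argument factory, so the ModelCommitment is only constructed when the model is first requested. A minimal sketch of how this entry is typically consumed, using the `load_model` entry point that the test file below also relies on:

from brainscore_vision import load_model

# resolves the registry entry and invokes the lambda, which builds the ModelCommitment
model = load_model('fixres_resnext101_32x48d_wsl')
assert model.identifier == 'fixres_resnext101_32x48d_wsl'
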
57 changes: 57 additions & 0 deletions brainscore_vision/models/fixres_resnext101_32x48d_wsl/model.py
@@ -0,0 +1,57 @@
from brainscore_vision.model_helpers.activations.pytorch import PytorchWrapper
from fixres.hubconf import load_state_dict_from_url
from fixres.transforms_v2 import get_transforms
from brainscore_vision.model_helpers.activations.pytorch import load_images
import numpy as np
from importlib import import_module
import ssl


ssl._create_default_https_context = ssl._create_unverified_context


def get_model():
    module = import_module('fixres.imnet_evaluate.resnext_wsl')
    model_ctr = getattr(module, 'resnext101_32x48d_wsl')
    model = model_ctr(pretrained=False)  # the pretrained flag here corresponds to standard resnext weights
    pretrained_dict = load_state_dict_from_url('https://dl.fbaipublicfiles.com/FixRes_data/FixRes_Pretrained_Models/ResNeXt_101_32x48d.pth',
                                               map_location=lambda storage, loc: storage)['model']
    model_dict = model.state_dict()
    for k in model_dict.keys():
        assert ('module.' + k) in pretrained_dict.keys()
        model_dict[k] = pretrained_dict.get(('module.' + k))
    model.load_state_dict(model_dict)

    # preprocessing
    # 320 for ResNeXt:
    # https://github.com/mschrimpf/FixRes/tree/4ddcf11b29c118dfb8a48686f75f572450f67e5d#example-evaluation-procedure
    input_size = 320
    # https://github.com/mschrimpf/FixRes/blob/0dc15ab509b9cb9d7002ca47826dab4d66033668/fixres/imnet_evaluate/train.py#L159-L160
    transformation = get_transforms(input_size=input_size, test_size=input_size,
                                    kind='full', need=('val',),
                                    # this is different from standard ImageNet evaluation to show the whole image
                                    crop=False,
                                    # no backbone parameter for ResNeXt following
                                    # https://github.com/mschrimpf/FixRes/blob/0dc15ab509b9cb9d7002ca47826dab4d66033668/fixres/imnet_evaluate/train.py#L154-L156
                                    backbone=None)
    transform = transformation['val']

    def load_preprocess_images(image_filepaths):
        images = load_images(image_filepaths)
        images = [transform(image) for image in images]
        images = [image.unsqueeze(0) for image in images]
        images = np.concatenate(images)
        return images

    wrapper = PytorchWrapper(identifier='resnext101_32x48d_wsl', model=model, preprocessing=load_preprocess_images,
                             batch_size=4)  # doesn't fit into 12 GB GPU memory otherwise
    wrapper.image_size = input_size
    return wrapper


def get_layers(name):
    return (['conv1'] +
            # note that while relu is used multiple times, by default the last one will overwrite all previous ones
            [f"layer{block + 1}.{unit}.relu"
             for block, block_units in enumerate([3, 4, 23, 3]) for unit in range(block_units)] +
            ['avgpool'])
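
As an aside (not part of this commit): `get_layers` ignores its `name` argument and always returns the same 35 layer names, following ResNeXt-101's 3/4/23/3 block layout. A self-contained sketch that mirrors the comprehension above, to make the naming scheme concrete:

# illustrative only: reproduces the list comprehension from get_layers
layers = (['conv1'] +
          [f"layer{block + 1}.{unit}.relu"
           for block, block_units in enumerate([3, 4, 23, 3]) for unit in range(block_units)] +
          ['avgpool'])
print(len(layers))   # 35
print(layers[:3])    # ['conv1', 'layer1.0.relu', 'layer1.1.relu']
print(layers[-2:])   # ['layer4.2.relu', 'avgpool']
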
5 changes: 5 additions & 0 deletions brainscore_vision/models/fixres_resnext101_32x48d_wsl/requirements.txt
@@ -0,0 +1,5 @@
torchvision
torch
numpy
importlib
Fixing-the-train-test-resolution-discrepancy-scripts@ git+https://github.com/mschrimpf/FixRes.git
7 changes: 7 additions & 0 deletions brainscore_vision/models/fixres_resnext101_32x48d_wsl/test.py
@@ -0,0 +1,7 @@
import pytest
import brainscore_vision

@pytest.mark.travis_slow
def test_has_identifier():
    model = brainscore_vision.load_model('fixres_resnext101_32x48d_wsl')
    assert model.identifier == 'fixres_resnext101_32x48d_wsl'
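
Usage note: since the test carries the `travis_slow` marker, it can be selected locally with pytest's marker filter, e.g. `pytest -m travis_slow brainscore_vision/models/fixres_resnext101_32x48d_wsl/test.py`, assuming the plugin's requirements (including the FixRes dependency from requirements.txt) are installed.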
