generation_utils.py
from torch import nn

from a2a.generators.processors.processor import Processor
from a2a.generators.factory import processor
from a2a.networks import network
from a2a.networks.utils import Reshape, initialize_weights


@processor('MRIRawData')
class MRIRawData(Processor):
    """Combine a magnitude image, a phase map and coil sensitivity maps into complex coil images."""
    def __init__(self, dataset=[], output_dataset=None, dataset_phase='', dataset_csm='', delete_phase=True, delete_csm=True):
        super().__init__(dataset=dataset, output_dataset=output_dataset)
        self.dataset_phase = dataset_phase
        self.dataset_csm = dataset_csm
        self.delete_phase = delete_phase
        self.delete_csm = delete_csm

    def __call__(self, data, attrs):
        im = data[self.dataset[0]]
        phase = data[self.dataset_phase]
        csm = data[self.dataset_csm]
        # im:    B 1 YX   (magnitude)
        # phase: B 2 YX   (real/imag)
        # csm:   B C*2 YX (real coils, imag coils)
        ncoils = csm.shape[1] // 2
        # Build a unit-magnitude complex phase map from the real/imag channels.
        phase = phase[:, :1] + 1j * phase[:, 1:]
        phase /= (abs(phase) + 1e-12)
        # Complex image per coil: magnitude * phase * (complex) coil sensitivity.
        data[self.output_dataset[0]] = (im * phase) * (csm[:, :ncoils] + 1j * csm[:, ncoils:])
        # out: B C YX complex
        if self.delete_phase:
            del data[self.dataset_phase]
        if self.delete_csm:
            del data[self.dataset_csm]
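
# Usage sketch for MRIRawData (comments only, not part of the original file):
# the dataset keys 'mag', 'phase' and 'csm' are hypothetical, and the calling
# convention (a dict of tensors plus an attrs dict) is assumed from __call__ above.
#
#   proc = MRIRawData(dataset=['mag'], output_dataset=['coil_images'],
#                     dataset_phase='phase', dataset_csm='csm')
#   data = {'mag':   torch.rand(4, 1, 64, 64),    # B 1 YX magnitude
#           'phase': torch.rand(4, 2, 64, 64),    # B 2 YX real/imag phase
#           'csm':   torch.rand(4, 16, 64, 64)}   # B C*2 YX, 8 coils
#   proc(data, attrs={})                          # adds 'coil_images': B 8 YX complex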


@network('FCN2DMLP', normalization_parameters=['normalization'], activation_parameters=['activation'])
class FCN2DMLP(nn.Module):
    """Small fully convolutional encoder followed by an MLP head."""
    def __init__(self, input_channels=1, hidden_channels_mlp=32, hidden_channels_fcn=32, output_channels=1,
                 image_shape=[64, 64], filter_size=(3, 3), padding=(1, 1), padding_mode='zeros',
                 bias=None, activation=nn.SELU, normalization=None):
        super().__init__()
        # Disable the convolution bias by default when a normalization layer follows.
        if bias is None:
            bias = not normalization
        layerlist = [nn.Conv2d(input_channels, hidden_channels_fcn, kernel_size=7, padding='same',
                               padding_mode=padding_mode, bias=bias),
                     activation(),
                     # First downsampling stage: stride-2 convolution halves each spatial dimension.
                     nn.Conv2d(hidden_channels_fcn, hidden_channels_fcn, kernel_size=4, padding=1, stride=2,
                               padding_mode=padding_mode, bias=bias)]
        if normalization:
            layerlist += [normalization(hidden_channels_fcn)]
        for i in range(3):
            layerlist += [activation(),
                          nn.Conv2d(hidden_channels_fcn, hidden_channels_fcn, kernel_size=7, padding='same',
                                    padding_mode=padding_mode, bias=bias)]
            if normalization:
                layerlist += [normalization(hidden_channels_fcn)]
        # Second downsampling stage.
        layerlist += [activation(),
                      nn.Conv2d(hidden_channels_fcn, hidden_channels_fcn, kernel_size=4, padding=1, stride=2,
                                padding_mode=padding_mode, bias=bias)]
        if normalization:
            layerlist += [normalization(hidden_channels_fcn)]
        # Two stride-2 convolutions reduce each spatial dimension by a factor of 4,
        # so the flattened feature size is (H//4) * (W//4) * channels.
        layerlist += [activation(),
                      Reshape(shape=[-1]),
                      nn.Linear(image_shape[0]//4 * image_shape[1]//4 * hidden_channels_fcn, hidden_channels_mlp*2),
                      activation(),
                      nn.Linear(hidden_channels_mlp*2, hidden_channels_mlp),
                      activation(),
                      nn.Linear(hidden_channels_mlp, output_channels)]
        self.model = nn.Sequential(*layerlist)
        self.model.apply(initialize_weights)

    def forward(self, x):
        return self.model(x)
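

# Minimal smoke test, not part of the original file: a sketch assuming the default
# 64x64 input size and that Reshape flattens each sample to (batch, features).
if __name__ == '__main__':
    import torch

    net = FCN2DMLP(input_channels=1, output_channels=3, image_shape=[64, 64])
    x = torch.rand(2, 1, 64, 64)   # B C Y X
    print(net(x).shape)            # expected: torch.Size([2, 3])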