diff --git a/gluoncv/model_zoo/danet.py b/gluoncv/model_zoo/danet.py
index 313fce727f..a35aa4b8f0 100644
--- a/gluoncv/model_zoo/danet.py
+++ b/gluoncv/model_zoo/danet.py
@@ -90,15 +90,15 @@ def __init__(self, in_channels, out_channels, norm_layer=nn.BatchNorm, norm_kwar
         self.conv52.add(nn.Activation('relu'))
 
         self.conv6 = nn.HybridSequential()
-        self.conv6.add(nn.Dropout(0.1))
+        # self.conv6.add(nn.Dropout(0.1))
         self.conv6.add(nn.Conv2D(in_channels=512, channels=out_channels, kernel_size=1))
 
         self.conv7 = nn.HybridSequential()
-        self.conv7.add(nn.Dropout(0.1))
+        # self.conv7.add(nn.Dropout(0.1))
         self.conv7.add(nn.Conv2D(in_channels=512, channels=out_channels, kernel_size=1))
 
         self.conv8 = nn.HybridSequential()
-        self.conv8.add(nn.Dropout(0.1))
+        # self.conv8.add(nn.Dropout(0.1))
         self.conv8.add(nn.Conv2D(in_channels=512, channels=out_channels, kernel_size=1))
 
     def hybrid_forward(self, F, x):
diff --git a/gluoncv/model_zoo/model_zoo.py b/gluoncv/model_zoo/model_zoo.py
index b738869b63..527133f640 100644
--- a/gluoncv/model_zoo/model_zoo.py
+++ b/gluoncv/model_zoo/model_zoo.py
@@ -215,7 +215,7 @@
     'resnext50_32x4d': resnext50_32x4d,
     'resnext101_32x4d': resnext101_32x4d,
     'resnext101_64x4d': resnext101_64x4d,
-    'resnext101b_64x4d': resnext101e_64x4d,
+    'resnext101e_64x4d': resnext101e_64x4d,
     'se_resnext50_32x4d': se_resnext50_32x4d,
     'se_resnext101_32x4d': se_resnext101_32x4d,
     'se_resnext101_64x4d': se_resnext101_64x4d,
diff --git a/gluoncv/model_zoo/resnext.py b/gluoncv/model_zoo/resnext.py
index 1f64807431..3be1fb63f2 100644
--- a/gluoncv/model_zoo/resnext.py
+++ b/gluoncv/model_zoo/resnext.py
@@ -363,6 +363,7 @@ def resnext101_64x4d(**kwargs):
 
 
 def resnext101e_64x4d(**kwargs):
+    # pylint: disable=line-too-long
     r"""ResNext101e 64x4d model modified from
     `"Aggregated Residual Transformations for Deep Neural Network"
     <http://arxiv.org/abs/1611.05431>`_ paper.
@@ -388,6 +389,9 @@ def resnext101e_64x4d(**kwargs):
         for :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
     """
     kwargs['use_se'] = False
+    if kwargs.get('pretrained', False):
+        msg = 'GluonCV does not have pretrained weights for resnext101e_64x4d at this moment. Please set pretrained=False.'
+        raise RuntimeError(msg)
     return get_resnext(101, 64, 4, deep_stem=True, avg_down=True, **kwargs)
 
 
@@ -479,6 +483,7 @@ def se_resnext101_64x4d(**kwargs):
 
 
 def se_resnext101e_64x4d(**kwargs):
+    # pylint: disable=line-too-long
     r"""SE-ResNext101e 64x4d model modified from
     `"Aggregated Residual Transformations for Deep Neural Network"
     <http://arxiv.org/abs/1611.05431>`_ paper.
@@ -504,4 +509,7 @@ def se_resnext101e_64x4d(**kwargs):
         for :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
     """
     kwargs['use_se'] = True
+    if kwargs.get('pretrained', False):
+        msg = 'GluonCV does not have pretrained weights for se_resnext101e_64x4d at this moment. Please set pretrained=False.'
+        raise RuntimeError(msg)
     return get_resnext(101, 64, 4, deep_stem=True, avg_down=True, **kwargs)
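
For reviewers, a small usage sketch, not part of the diff, showing how the corrected model-zoo key and the new pretrained guard are expected to behave. It assumes GluonCV's standard gluoncv.model_zoo.get_model entry point and a build that includes this patch.

from gluoncv.model_zoo import get_model

# The corrected dictionary key now resolves to resnext101e_64x4d.
net = get_model('resnext101e_64x4d', pretrained=False)
print(type(net).__name__)

# Requesting pretrained weights should trip the guard added in resnext.py.
try:
    get_model('resnext101e_64x4d', pretrained=True)
except RuntimeError as err:
    print('Expected error:', err)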