@@ -101,11 +101,12 @@ def _dcfg(url='', **kwargs):
         url='', pool_size=(15, 15), input_size=(3, 480, 480), test_input_size=(3, 608, 608)),
 
     nfnet_l0a=_dcfg(
-        url='', pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256)),
+        url='', pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 288, 288)),
     nfnet_l0b=_dcfg(
-        url='', pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256)),
+        url='', pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 288, 288)),
     nfnet_l0c=_dcfg(
-        url='', pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256)),
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/nfnet_l0c-ad1045c2.pth',
+        pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 288, 288), crop_pct=1.0),
 
     nf_regnet_b0=_dcfg(
         url='', pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256), first_conv='stem.conv'),
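Note on the cfg change above (not part of the diff): `input_size`, `test_input_size`, and `crop_pct` are consumed by timm's data config resolution when building transforms. A minimal sketch, assuming a timm revision that includes this commit; the expected value in the comment comes from the updated `nfnet_l0c` entry:

import timm
from timm.data import resolve_data_config, create_transform

# Sketch only: model name and cfg values assume the timm revision this commit lands in.
model = timm.create_model('nfnet_l0c', pretrained=False)
data_cfg = resolve_data_config({}, model=model)   # reads input_size, crop_pct, etc. from default_cfg
transform = create_transform(**data_cfg)
print(data_cfg['input_size'])                     # expect (3, 224, 224) per the updated cfg above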
@@ -376,9 +377,9 @@ def forward(self, x):
         return out
 
 
-def create_stem(in_chs, out_chs, stem_type='', conv_layer=None, act_layer=None):
+def create_stem(in_chs, out_chs, stem_type='', conv_layer=None, act_layer=None, preact_feature=True):
     stem_stride = 2
-    stem_feature = dict(num_chs=out_chs, reduction=2, module='')
+    stem_feature = dict(num_chs=out_chs, reduction=2, module='stem.conv')
     stem = OrderedDict()
     assert stem_type in ('', 'deep', 'deep_tiered', 'deep_quad', '3x3', '7x7', 'deep_pool', '3x3_pool', '7x7_pool')
     if 'deep' in stem_type:
@@ -388,14 +389,14 @@ def create_stem(in_chs, out_chs, stem_type='', conv_layer=None, act_layer=None):
             stem_chs = (out_chs // 8, out_chs // 4, out_chs // 2, out_chs)
             strides = (2, 1, 1, 2)
             stem_stride = 4
-            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.act4')
+            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.conv3')
         else:
             if 'tiered' in stem_type:
                 stem_chs = (3 * out_chs // 8, out_chs // 2, out_chs)  # 'T' resnets in resnet.py
             else:
                 stem_chs = (out_chs // 2, out_chs // 2, out_chs)  # 'D' ResNets
             strides = (2, 1, 1)
-            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.act3')
+            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.conv2')
         last_idx = len(stem_chs) - 1
         for i, (c, s) in enumerate(zip(stem_chs, strides)):
             stem[f'conv{i + 1}'] = conv_layer(in_chs, c, kernel_size=3, stride=s)
@@ -477,7 +478,7 @@ def __init__(self, cfg: NfCfg, num_classes=1000, in_chans=3, global_pool='avg',
         self.stem, stem_stride, stem_feat = create_stem(
             in_chans, stem_chs, cfg.stem_type, conv_layer=conv_layer, act_layer=act_layer)
 
-        self.feature_info = [stem_feat] if stem_stride == 4 else []
+        self.feature_info = [stem_feat]
         drop_path_rates = [x.tolist() for x in torch.linspace(0, drop_path_rate, sum(cfg.depths)).split(cfg.depths)]
         prev_chs = stem_chs
         net_stride = stem_stride
@@ -486,8 +487,6 @@ def __init__(self, cfg: NfCfg, num_classes=1000, in_chans=3, global_pool='avg',
         stages = []
         for stage_idx, stage_depth in enumerate(cfg.depths):
             stride = 1 if stage_idx == 0 and stem_stride > 2 else 2
-            if stride == 2:
-                self.feature_info += [dict(num_chs=prev_chs, reduction=net_stride, module=f'stages.{stage_idx}.0.act1')]
             if net_stride >= output_stride and stride > 1:
                 dilation *= stride
                 stride = 1
@@ -522,18 +521,19 @@ def __init__(self, cfg: NfCfg, num_classes=1000, in_chans=3, global_pool='avg',
                 expected_var += cfg.alpha ** 2  # Even if reset occurs, increment expected variance
                 first_dilation = dilation
                 prev_chs = out_chs
+            self.feature_info += [dict(num_chs=prev_chs, reduction=net_stride, module=f'stages.{stage_idx}')]
             stages += [nn.Sequential(*blocks)]
         self.stages = nn.Sequential(*stages)
 
         if cfg.num_features:
             # The paper NFRegNet models have an EfficientNet-like final head convolution.
             self.num_features = make_divisible(cfg.width_factor * cfg.num_features, cfg.ch_div)
             self.final_conv = conv_layer(prev_chs, self.num_features, 1)
+            self.feature_info[-1] = dict(num_chs=self.num_features, reduction=net_stride, module=f'final_conv')
         else:
             self.num_features = prev_chs
             self.final_conv = nn.Identity()
         self.final_act = act_layer(inplace=cfg.num_features > 0)
-        self.feature_info += [dict(num_chs=self.num_features, reduction=net_stride, module='final_act')]
 
         self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate)
 
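Net effect of the `feature_info` changes above: one entry for the stem plus one per stage, with the last stage entry re-pointed at `final_conv` when `cfg.num_features` is set. A purely illustrative sketch of the resulting list for a 4-stage model with a 'deep_quad' stem; the channel counts are placeholders, not taken from the diff:

# Illustrative only; num_chs values are placeholders.
feature_info = [
    dict(num_chs=64,   reduction=2,  module='stem.conv3'),  # stem tap from create_stem()
    dict(num_chs=256,  reduction=4,  module='stages.0'),
    dict(num_chs=512,  reduction=8,  module='stages.1'),
    dict(num_chs=1536, reduction=16, module='stages.2'),
    dict(num_chs=3072, reduction=32, module='final_conv'),  # replaced last stage entry (head conv present)
]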
@@ -572,10 +572,6 @@ def forward(self, x):
 def _create_normfreenet(variant, pretrained=False, **kwargs):
     model_cfg = model_cfgs[variant]
     feature_cfg = dict(flatten_sequential=True)
-    feature_cfg['feature_cls'] = 'hook'  # pre-act models need hooks to grab feat from act1 in bottleneck blocks
-    if 'pool' in model_cfg.stem_type and 'deep' not in model_cfg.stem_type:
-        feature_cfg['out_indices'] = (1, 2, 3, 4)  # no stride 2 feat for stride 4, 1 layer maxpool stems
-
     return build_model_with_cfg(
         NormFreeNet, variant, pretrained,
         default_cfg=default_cfgs[variant],
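With the hook-based extraction removed above, feature maps come straight from the flattened sequential modules named in `feature_info`. A hedged usage sketch, relying only on timm's generic `features_only` path; the model name is just an example variant from this file:

import torch
import timm

# Sketch only: assumes a timm revision that includes this commit.
model = timm.create_model('nf_regnet_b1', features_only=True, pretrained=False)
print(model.feature_info.channels())    # per-entry num_chs
print(model.feature_info.reduction())   # per-entry reduction (output stride)

x = torch.randn(1, 3, 224, 224)
features = model(x)                     # one tensor per feature_info entry
for f in features:
    print(f.shape)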