|
25 | 25 | NON_STD_FILTERS = [ |
26 | 26 | 'vit_*', 'tnt_*', 'pit_*', 'swin_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*', |
27 | 27 | 'convit_*', 'levit*', 'visformer*', 'deit*', 'jx_nest_*', 'nest_*', 'xcit_*', 'crossvit_*', 'beit_*', |
28 | | - 'poolformer_*', 'volo_*'] |
| 28 | + 'poolformer_*', 'volo_*', 'sequencer2d_*'] |
29 | 29 | NUM_NON_STD = len(NON_STD_FILTERS) |
30 | 30 |
|
31 | 31 | # exclude models that cause specific test failures |
@@ -202,28 +202,32 @@ def test_model_default_cfgs_non_std(model_name, batch_size): |
202 | 202 | pytest.skip("Fixed input size model > limit.") |
203 | 203 |
|
204 | 204 | input_tensor = torch.randn((batch_size, *input_size)) |
| 205 | + feat_dim = getattr(model, 'feature_dim', None) |
205 | 206 |
|
206 | 207 | outputs = model.forward_features(input_tensor) |
207 | 208 | if isinstance(outputs, (tuple, list)): |
208 | 209 | # cannot currently verify multi-tensor output. |
209 | 210 | pass |
210 | 211 | else: |
211 | | - feat_dim = -1 if outputs.ndim == 3 else 1 |
| 212 | + if feat_dim is None: |
| 213 | + feat_dim = -1 if outputs.ndim == 3 else 1 |
212 | 214 | assert outputs.shape[feat_dim] == model.num_features |
213 | 215 |
|
214 | 216 | # test forward after deleting the classifier, output should be pooled, size(-1) == model.num_features |
215 | 217 | model.reset_classifier(0) |
216 | 218 | outputs = model.forward(input_tensor) |
217 | 219 | if isinstance(outputs, (tuple, list)): |
218 | 220 | outputs = outputs[0] |
219 | | - feat_dim = -1 if outputs.ndim == 3 else 1 |
| 221 | + if feat_dim is None: |
| 222 | + feat_dim = -1 if outputs.ndim == 3 else 1 |
220 | 223 | assert outputs.shape[feat_dim] == model.num_features, 'pooled num_features != config' |
221 | 224 |
|
222 | 225 | model = create_model(model_name, pretrained=False, num_classes=0).eval() |
223 | 226 | outputs = model.forward(input_tensor) |
224 | 227 | if isinstance(outputs, (tuple, list)): |
225 | 228 | outputs = outputs[0] |
226 | | - feat_dim = -1 if outputs.ndim == 3 else 1 |
| 229 | + if feat_dim is None: |
| 230 | + feat_dim = -1 if outputs.ndim == 3 else 1 |
227 | 231 | assert outputs.shape[feat_dim] == model.num_features |
228 | 232 |
|
229 | 233 | # check classifier name matches default_cfg |
|
0 commit comments