Skip to content

Commit 913b3fd

Browse files
committed
Make meta private
1 parent f3a1f51 commit 913b3fd

36 files changed

+216
-216
lines changed

docs/source/conf.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -326,7 +326,7 @@ def inject_weight_metadata(app, what, name, obj, options, lines):
326326
lines += [f"This weight is also available as ``{obj.__name__}.DEFAULT``.", ""]
327327

328328
table = []
329-
for k, v in field.meta.items():
329+
for k, v in field._meta.items():
330330
if k == "categories":
331331
continue
332332
elif k == "recipe":
@@ -347,10 +347,10 @@ def generate_classification_table():
347347
content = [
348348
(
349349
f":class:`{w} <{type(w).__name__}>`",
350-
w.meta["acc@1"],
351-
w.meta["acc@5"],
352-
f"{w.meta['num_params']/1e6:.1f}M",
353-
f"`link <{w.meta['recipe']}>`__",
350+
w._meta["acc@1"],
351+
w._meta["acc@5"],
352+
f"{w._meta['num_params'] / 1e6:.1f}M",
353+
f"`link <{w._meta['recipe']}>`__",
354354
)
355355
for w in weights
356356
]

test/test_extended_models.py

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -101,21 +101,21 @@ def test_schema_meta_validation(model_fn):
101101
incorrect_params = []
102102
bad_names = []
103103
for w in weights_enum:
104-
missing_fields = fields - set(w.meta.keys())
104+
missing_fields = fields - set(w._meta.keys())
105105
if missing_fields:
106106
problematic_weights[w] = missing_fields
107107
if w == weights_enum.DEFAULT:
108108
if module_name == "quantization":
109109
# parameters() count doesn't work well with quantization, so we check against the non-quantized
110-
unquantized_w = w.meta.get("unquantized")
111-
if unquantized_w is not None and w.meta.get("num_params") != unquantized_w.meta.get("num_params"):
110+
unquantized_w = w._meta.get("unquantized")
111+
if unquantized_w is not None and w._meta.get("num_params") != unquantized_w._meta.get("num_params"):
112112
incorrect_params.append(w)
113113
else:
114-
if w.meta.get("num_params") != sum(p.numel() for p in model_fn(weights=w).parameters()):
114+
if w._meta.get("num_params") != sum(p.numel() for p in model_fn(weights=w).parameters()):
115115
incorrect_params.append(w)
116116
else:
117-
if w.meta.get("num_params") != weights_enum.DEFAULT.meta.get("num_params"):
118-
if w.meta.get("num_params") != sum(p.numel() for p in model_fn(weights=w).parameters()):
117+
if w._meta.get("num_params") != weights_enum.DEFAULT._meta.get("num_params"):
118+
if w._meta.get("num_params") != sum(p.numel() for p in model_fn(weights=w).parameters()):
119119
incorrect_params.append(w)
120120
if not w.name.isupper():
121121
bad_names.append(w)

torchvision/models/_api.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -35,7 +35,7 @@ class Weights:
3535

3636
url: str
3737
transforms: Callable
38-
meta: Dict[str, Any]
38+
_meta: Dict[str, Any]
3939

4040

4141
class WeightsEnum(StrEnum):

torchvision/models/alexnet.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -56,7 +56,7 @@ class AlexNet_Weights(WeightsEnum):
5656
IMAGENET1K_V1 = Weights(
5757
url="https://download.pytorch.org/models/alexnet-owt-7be5be79.pth",
5858
transforms=partial(ImageClassification, crop_size=224),
59-
meta={
59+
_meta={
6060
"task": "image_classification",
6161
"architecture": "AlexNet",
6262
"publication_year": 2012,
@@ -86,7 +86,7 @@ def alexnet(*, weights: Optional[AlexNet_Weights] = None, progress: bool = True,
8686
weights = AlexNet_Weights.verify(weights)
8787

8888
if weights is not None:
89-
_ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))
89+
_ovewrite_named_param(kwargs, "num_classes", len(weights._meta["categories"]))
9090

9191
model = AlexNet(**kwargs)
9292

torchvision/models/convnext.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -193,7 +193,7 @@ def _convnext(
193193
**kwargs: Any,
194194
) -> ConvNeXt:
195195
if weights is not None:
196-
_ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))
196+
_ovewrite_named_param(kwargs, "num_classes", len(weights._meta["categories"]))
197197

198198
model = ConvNeXt(block_setting, stochastic_depth_prob=stochastic_depth_prob, **kwargs)
199199

@@ -219,7 +219,7 @@ class ConvNeXt_Tiny_Weights(WeightsEnum):
219219
IMAGENET1K_V1 = Weights(
220220
url="https://download.pytorch.org/models/convnext_tiny-983f1562.pth",
221221
transforms=partial(ImageClassification, crop_size=224, resize_size=236),
222-
meta={
222+
_meta={
223223
**_COMMON_META,
224224
"num_params": 28589128,
225225
"acc@1": 82.520,
@@ -233,7 +233,7 @@ class ConvNeXt_Small_Weights(WeightsEnum):
233233
IMAGENET1K_V1 = Weights(
234234
url="https://download.pytorch.org/models/convnext_small-0c510722.pth",
235235
transforms=partial(ImageClassification, crop_size=224, resize_size=230),
236-
meta={
236+
_meta={
237237
**_COMMON_META,
238238
"num_params": 50223688,
239239
"acc@1": 83.616,
@@ -247,7 +247,7 @@ class ConvNeXt_Base_Weights(WeightsEnum):
247247
IMAGENET1K_V1 = Weights(
248248
url="https://download.pytorch.org/models/convnext_base-6075fbad.pth",
249249
transforms=partial(ImageClassification, crop_size=224, resize_size=232),
250-
meta={
250+
_meta={
251251
**_COMMON_META,
252252
"num_params": 88591464,
253253
"acc@1": 84.062,
@@ -261,7 +261,7 @@ class ConvNeXt_Large_Weights(WeightsEnum):
261261
IMAGENET1K_V1 = Weights(
262262
url="https://download.pytorch.org/models/convnext_large-ea097f82.pth",
263263
transforms=partial(ImageClassification, crop_size=224, resize_size=232),
264-
meta={
264+
_meta={
265265
**_COMMON_META,
266266
"num_params": 197767336,
267267
"acc@1": 84.414,

torchvision/models/densenet.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -255,7 +255,7 @@ def _densenet(
255255
**kwargs: Any,
256256
) -> DenseNet:
257257
if weights is not None:
258-
_ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))
258+
_ovewrite_named_param(kwargs, "num_classes", len(weights._meta["categories"]))
259259

260260
model = DenseNet(growth_rate, block_config, num_init_features, **kwargs)
261261

@@ -281,7 +281,7 @@ class DenseNet121_Weights(WeightsEnum):
281281
IMAGENET1K_V1 = Weights(
282282
url="https://download.pytorch.org/models/densenet121-a639ec97.pth",
283283
transforms=partial(ImageClassification, crop_size=224),
284-
meta={
284+
_meta={
285285
**_COMMON_META,
286286
"num_params": 7978856,
287287
"acc@1": 74.434,
@@ -295,7 +295,7 @@ class DenseNet161_Weights(WeightsEnum):
295295
IMAGENET1K_V1 = Weights(
296296
url="https://download.pytorch.org/models/densenet161-8d451a50.pth",
297297
transforms=partial(ImageClassification, crop_size=224),
298-
meta={
298+
_meta={
299299
**_COMMON_META,
300300
"num_params": 28681000,
301301
"acc@1": 77.138,
@@ -309,7 +309,7 @@ class DenseNet169_Weights(WeightsEnum):
309309
IMAGENET1K_V1 = Weights(
310310
url="https://download.pytorch.org/models/densenet169-b2777c0a.pth",
311311
transforms=partial(ImageClassification, crop_size=224),
312-
meta={
312+
_meta={
313313
**_COMMON_META,
314314
"num_params": 14149480,
315315
"acc@1": 75.600,
@@ -323,7 +323,7 @@ class DenseNet201_Weights(WeightsEnum):
323323
IMAGENET1K_V1 = Weights(
324324
url="https://download.pytorch.org/models/densenet201-c1103571.pth",
325325
transforms=partial(ImageClassification, crop_size=224),
326-
meta={
326+
_meta={
327327
**_COMMON_META,
328328
"num_params": 20013928,
329329
"acc@1": 76.896,

torchvision/models/detection/faster_rcnn.py

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -382,7 +382,7 @@ class FasterRCNN_ResNet50_FPN_Weights(WeightsEnum):
382382
COCO_V1 = Weights(
383383
url="https://download.pytorch.org/models/fasterrcnn_resnet50_fpn_coco-258fb6c6.pth",
384384
transforms=ObjectDetection,
385-
meta={
385+
_meta={
386386
**_COMMON_META,
387387
"num_params": 41755286,
388388
"recipe": "https://github.com/pytorch/vision/tree/main/references/detection#faster-r-cnn-resnet-50-fpn",
@@ -396,7 +396,7 @@ class FasterRCNN_ResNet50_FPN_V2_Weights(WeightsEnum):
396396
COCO_V1 = Weights(
397397
url="https://download.pytorch.org/models/fasterrcnn_resnet50_fpn_v2_coco-dd69338a.pth",
398398
transforms=ObjectDetection,
399-
meta={
399+
_meta={
400400
**_COMMON_META,
401401
"publication_year": 2021,
402402
"num_params": 43712278,
@@ -411,7 +411,7 @@ class FasterRCNN_MobileNet_V3_Large_FPN_Weights(WeightsEnum):
411411
COCO_V1 = Weights(
412412
url="https://download.pytorch.org/models/fasterrcnn_mobilenet_v3_large_fpn-fb6a3cc7.pth",
413413
transforms=ObjectDetection,
414-
meta={
414+
_meta={
415415
**_COMMON_META,
416416
"num_params": 19386354,
417417
"recipe": "https://github.com/pytorch/vision/tree/main/references/detection#faster-r-cnn-mobilenetv3-large-fpn",
@@ -425,7 +425,7 @@ class FasterRCNN_MobileNet_V3_Large_320_FPN_Weights(WeightsEnum):
425425
COCO_V1 = Weights(
426426
url="https://download.pytorch.org/models/fasterrcnn_mobilenet_v3_large_320_fpn-907ea3f9.pth",
427427
transforms=ObjectDetection,
428-
meta={
428+
_meta={
429429
**_COMMON_META,
430430
"num_params": 19386354,
431431
"recipe": "https://github.com/pytorch/vision/tree/main/references/detection#faster-r-cnn-mobilenetv3-large-320-fpn",
@@ -519,7 +519,7 @@ def fasterrcnn_resnet50_fpn(
519519

520520
if weights is not None:
521521
weights_backbone = None
522-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
522+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
523523
elif num_classes is None:
524524
num_classes = 91
525525

@@ -570,7 +570,7 @@ def fasterrcnn_resnet50_fpn_v2(
570570

571571
if weights is not None:
572572
weights_backbone = None
573-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
573+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
574574
elif num_classes is None:
575575
num_classes = 91
576576

@@ -610,7 +610,7 @@ def _fasterrcnn_mobilenet_v3_large_fpn(
610610
) -> FasterRCNN:
611611
if weights is not None:
612612
weights_backbone = None
613-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
613+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
614614
elif num_classes is None:
615615
num_classes = 91
616616

torchvision/models/detection/fcos.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -650,7 +650,7 @@ class FCOS_ResNet50_FPN_Weights(WeightsEnum):
650650
COCO_V1 = Weights(
651651
url="https://download.pytorch.org/models/fcos_resnet50_fpn_coco-99b0c9b7.pth",
652652
transforms=ObjectDetection,
653-
meta={
653+
_meta={
654654
"task": "image_object_detection",
655655
"architecture": "FCOS",
656656
"publication_year": 2019,
@@ -729,7 +729,7 @@ def fcos_resnet50_fpn(
729729

730730
if weights is not None:
731731
weights_backbone = None
732-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
732+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
733733
elif num_classes is None:
734734
num_classes = 91
735735

torchvision/models/detection/keypoint_rcnn.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -321,7 +321,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
321321
COCO_LEGACY = Weights(
322322
url="https://download.pytorch.org/models/keypointrcnn_resnet50_fpn_coco-9f466800.pth",
323323
transforms=ObjectDetection,
324-
meta={
324+
_meta={
325325
**_COMMON_META,
326326
"num_params": 59137258,
327327
"recipe": "https://github.com/pytorch/vision/issues/1606",
@@ -332,7 +332,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
332332
COCO_V1 = Weights(
333333
url="https://download.pytorch.org/models/keypointrcnn_resnet50_fpn_coco-fc266e95.pth",
334334
transforms=ObjectDetection,
335-
meta={
335+
_meta={
336336
**_COMMON_META,
337337
"num_params": 59137258,
338338
"recipe": "https://github.com/pytorch/vision/tree/main/references/detection#keypoint-r-cnn",
@@ -423,8 +423,8 @@ def keypointrcnn_resnet50_fpn(
423423

424424
if weights is not None:
425425
weights_backbone = None
426-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
427-
num_keypoints = _ovewrite_value_param(num_keypoints, len(weights.meta["keypoint_names"]))
426+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
427+
num_keypoints = _ovewrite_value_param(num_keypoints, len(weights._meta["keypoint_names"]))
428428
else:
429429
if num_classes is None:
430430
num_classes = 2

torchvision/models/detection/mask_rcnn.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -362,7 +362,7 @@ class MaskRCNN_ResNet50_FPN_Weights(WeightsEnum):
362362
COCO_V1 = Weights(
363363
url="https://download.pytorch.org/models/maskrcnn_resnet50_fpn_coco-bf2d0c1e.pth",
364364
transforms=ObjectDetection,
365-
meta={
365+
_meta={
366366
**_COMMON_META,
367367
"publication_year": 2017,
368368
"num_params": 44401393,
@@ -378,7 +378,7 @@ class MaskRCNN_ResNet50_FPN_V2_Weights(WeightsEnum):
378378
COCO_V1 = Weights(
379379
url="https://download.pytorch.org/models/maskrcnn_resnet50_fpn_v2_coco-73cbd019.pth",
380380
transforms=ObjectDetection,
381-
meta={
381+
_meta={
382382
**_COMMON_META,
383383
"publication_year": 2021,
384384
"num_params": 46359409,
@@ -464,7 +464,7 @@ def maskrcnn_resnet50_fpn(
464464

465465
if weights is not None:
466466
weights_backbone = None
467-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
467+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
468468
elif num_classes is None:
469469
num_classes = 91
470470

@@ -515,7 +515,7 @@ def maskrcnn_resnet50_fpn_v2(
515515

516516
if weights is not None:
517517
weights_backbone = None
518-
num_classes = _ovewrite_value_param(num_classes, len(weights.meta["categories"]))
518+
num_classes = _ovewrite_value_param(num_classes, len(weights._meta["categories"]))
519519
elif num_classes is None:
520520
num_classes = 91
521521

0 commit comments

Comments (0)