@@ -12,7 +12,7 @@ def is_simple_tensor(inpt: Any) -> bool:
     return isinstance(inpt, torch.Tensor) and not isinstance(inpt, datapoints.Datapoint)


-# {dispatcher: {input_type: type_specific_kernel}}
+# {functional: {input_type: type_specific_kernel}}
 _KERNEL_REGISTRY: Dict[Callable, Dict[Type, Callable]] = {}

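For readers skimming the diff: the registry is a two-level mapping from each public functional to a per-input-type kernel table. A minimal, self-contained sketch of that pattern (toy names, not the torchvision API):

```python
from typing import Callable, Dict, Type

# Toy replica of the {functional: {input_type: type_specific_kernel}} table.
_REGISTRY: Dict[Callable, Dict[Type, Callable]] = {}

def register(functional: Callable, input_type: Type):
    def decorator(kernel: Callable) -> Callable:
        _REGISTRY.setdefault(functional, {})[input_type] = kernel
        return kernel
    return decorator

def scale(inpt, factor):
    # The "functional" only dispatches; the per-type kernel does the work.
    return _REGISTRY[scale][type(inpt)](inpt, factor)

@register(scale, int)
def scale_int(inpt: int, factor: int) -> int:
    return inpt * factor

print(scale(2, 3))  # 6, resolved through the two-level table
```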
@@ -27,10 +27,10 @@ def wrapper(inpt, *args, **kwargs):
     return wrapper


-def _register_kernel_internal(dispatcher, input_type, *, datapoint_wrapper=True):
-    registry = _KERNEL_REGISTRY.setdefault(dispatcher, {})
+def _register_kernel_internal(functional, input_type, *, datapoint_wrapper=True):
+    registry = _KERNEL_REGISTRY.setdefault(functional, {})
     if input_type in registry:
-        raise ValueError(f"Dispatcher {dispatcher} already has a kernel registered for type {input_type}.")
+        raise ValueError(f"Functional {functional} already has a kernel registered for type {input_type}.")

     def decorator(kernel):
         registry[input_type] = (
@@ -43,14 +43,14 @@ def decorator(kernel):
     return decorator


-def _name_to_dispatcher(name):
+def _name_to_functional(name):
     import torchvision.transforms.v2.functional  # noqa

     try:
         return getattr(torchvision.transforms.v2.functional, name)
     except AttributeError:
         raise ValueError(
-            f"Could not find dispatcher with name '{name}' in torchvision.transforms.v2.functional."
+            f"Could not find functional with name '{name}' in torchvision.transforms.v2.functional."
         ) from None

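The string lookup above is just `getattr` on the public namespace, which is what lets `register_kernel` below accept either the functional object or its name; for example (assuming the public `resize` functional):

```python
import torchvision.transforms.v2.functional as F

# What _name_to_functional does, minus the error handling:
assert getattr(F, "resize") is F.resize
```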
@@ -59,21 +59,21 @@ def _name_to_dispatcher(name):
 }


-def register_kernel(dispatcher, datapoint_cls):
-    """Decorate a kernel to register it for a dispatcher and a (custom) datapoint type.
+def register_kernel(functional, datapoint_cls):
+    """Decorate a kernel to register it for a functional and a (custom) datapoint type.

     See :ref:`sphx_glr_auto_examples_plot_custom_datapoints.py` for usage
     details.
     """
-    if isinstance(dispatcher, str):
-        dispatcher = _name_to_dispatcher(name=dispatcher)
+    if isinstance(functional, str):
+        functional = _name_to_functional(name=functional)
     elif not (
-        callable(dispatcher)
-        and getattr(dispatcher, "__module__", "").startswith("torchvision.transforms.v2.functional")
+        callable(functional)
+        and getattr(functional, "__module__", "").startswith("torchvision.transforms.v2.functional")
     ):
         raise ValueError(
-            f"Kernels can only be registered on dispatchers from the torchvision.transforms.v2.functional namespace, "
-            f"but got {dispatcher}."
+            f"Kernels can only be registered on functionals from the torchvision.transforms.v2.functional namespace, "
+            f"but got {functional}."
         )

     if not (isinstance(datapoint_cls, type) and issubclass(datapoint_cls, datapoints.Datapoint)):
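For reference, the registration flow this decorator enables, along the lines of the linked `plot_custom_datapoints.py` example; `MyDatapoint` and the kernel body are made up for illustration:

```python
import torch
from torchvision import datapoints
from torchvision.transforms.v2 import functional as F

class MyDatapoint(datapoints.Datapoint):
    pass

@F.register_kernel(F.resize, MyDatapoint)  # passing the name "resize" works too
def resize_my_datapoint(inpt, size, **kwargs):
    # Made-up kernel: fall back to the plain-tensor implementation.
    return F.resize(inpt.as_subclass(torch.Tensor), size, **kwargs)

# F.resize(MyDatapoint(...), size) now routes here via _get_kernel.
```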
@@ -85,13 +85,13 @@ def register_kernel(dispatcher, datapoint_cls):
     if datapoint_cls in _BUILTIN_DATAPOINT_TYPES:
         raise ValueError(f"Kernels cannot be registered for the builtin datapoint classes, but got {datapoint_cls}")

-    return _register_kernel_internal(dispatcher, datapoint_cls, datapoint_wrapper=False)
+    return _register_kernel_internal(functional, datapoint_cls, datapoint_wrapper=False)


-def _get_kernel(dispatcher, input_type, *, allow_passthrough=False):
-    registry = _KERNEL_REGISTRY.get(dispatcher)
+def _get_kernel(functional, input_type, *, allow_passthrough=False):
+    registry = _KERNEL_REGISTRY.get(functional)
     if not registry:
-        raise ValueError(f"No kernel registered for dispatcher {dispatcher.__name__}.")
+        raise ValueError(f"No kernel registered for functional {functional.__name__}.")

     # In case we have an exact type match, we take a shortcut.
     if input_type in registry:
@@ -113,17 +113,17 @@ def _get_kernel(dispatcher, input_type, *, allow_passthrough=False):
         return lambda inpt, *args, **kwargs: inpt

     raise TypeError(
-        f"Dispatcher F.{dispatcher.__name__} supports inputs of type {registry.keys()}, "
+        f"Functional F.{functional.__name__} supports inputs of type {registry.keys()}, "
         f"but got {input_type} instead."
     )


 # This basically replicates _register_kernel_internal, but with a specialized wrapper for five_crop / ten_crop
-# We could get rid of this by letting _register_kernel_internal take arbitrary dispatchers rather than wrap_kernel: bool
-def _register_five_ten_crop_kernel_internal(dispatcher, input_type):
-    registry = _KERNEL_REGISTRY.setdefault(dispatcher, {})
+# We could get rid of this by letting _register_kernel_internal take arbitrary functionals rather than wrap_kernel: bool
+def _register_five_ten_crop_kernel_internal(functional, input_type):
+    registry = _KERNEL_REGISTRY.setdefault(functional, {})
     if input_type in registry:
-        raise TypeError(f"Dispatcher '{dispatcher}' already has a kernel registered for type '{input_type}'.")
+        raise TypeError(f"Functional '{functional}' already has a kernel registered for type '{input_type}'.")

     def wrap(kernel):
         @functools.wraps(kernel)
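The type-resolution fallback in `_get_kernel` (only partially visible in the hunk above) means a kernel registered for a base datapoint class also serves its subclasses. A self-contained sketch of that resolution order, separate from the torchvision code:

```python
from typing import Callable, Dict, Type

kernels: Dict[Type, Callable] = {}

class Base: ...
class Child(Base): ...

kernels[Base] = lambda inpt: f"base kernel handled {type(inpt).__name__}"

def get_kernel(input_type: Type) -> Callable:
    # input_type itself is first in __mro__, so an exact match wins;
    # otherwise the nearest registered base class's kernel is used.
    for cls in input_type.__mro__:
        if cls in kernels:
            return kernels[cls]
    raise TypeError(f"No kernel registered for {input_type}")

print(get_kernel(Child)(Child()))  # "base kernel handled Child"
```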