from typing import Callable, Union

import torch

from pytorch_lightning.metrics.metric import Metric


class CompositionalMetric(Metric):
    """Composition of two metrics with a specific operator,
    which will be executed upon the metrics' ``compute``.
    """

    def __init__(
        self,
        operator: Callable,
        metric_a: Union[Metric, int, float, torch.Tensor],
        metric_b: Union[Metric, int, float, torch.Tensor, None],
    ):
        """
        Args:
            operator: the operator taking in one (if ``metric_b`` is None)
                or two arguments. It is applied to the output of ``metric_a.compute()``
                and, if ``metric_b`` is not None, to the output of ``metric_b.compute()``.
            metric_a: first metric whose ``compute()`` result is the first argument of ``operator``
            metric_b: second metric whose ``compute()`` result is the second argument of ``operator``.
                For operators taking in only one input, this should be None.
        """
        super().__init__()

        self.op = operator

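        # Tensor operands are registered as buffers so they follow the metric across
        # devices; Metric instances and plain numbers are kept as regular attributes.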
        if isinstance(metric_a, torch.Tensor):
            self.register_buffer("metric_a", metric_a)
        else:
            self.metric_a = metric_a

        if isinstance(metric_b, torch.Tensor):
            self.register_buffer("metric_b", metric_b)
        else:
            self.metric_b = metric_b

    def _sync_dist(self, dist_sync_fn=None):
        # No syncing required here: syncing is done within metric_a and metric_b.
        pass

    def update(self, *args, **kwargs):
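        # Forward the raw inputs to each wrapped metric, keeping only the keyword
        # arguments that the respective ``update`` signature accepts.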
        if isinstance(self.metric_a, Metric):
            self.metric_a.update(*args, **self.metric_a._filter_kwargs(**kwargs))

        if isinstance(self.metric_b, Metric):
            self.metric_b.update(*args, **self.metric_b._filter_kwargs(**kwargs))

    def compute(self):
        # TODO: should kwargs also be parsed/forwarded here?
        if isinstance(self.metric_a, Metric):
            val_a = self.metric_a.compute()
        else:
            val_a = self.metric_a

        if isinstance(self.metric_b, Metric):
            val_b = self.metric_b.compute()
        else:
            val_b = self.metric_b

        if val_b is None:
            return self.op(val_a)

        return self.op(val_a, val_b)

    def reset(self):
        if isinstance(self.metric_a, Metric):
            self.metric_a.reset()

        if isinstance(self.metric_b, Metric):
            self.metric_b.reset()

    def persistent(self, mode: bool = False):
        if isinstance(self.metric_a, Metric):
            self.metric_a.persistent(mode=mode)
        if isinstance(self.metric_b, Metric):
            self.metric_b.persistent(mode=mode)

    def __repr__(self):
        repr_str = (
            self.__class__.__name__
            + f"(\n {self.op.__name__}(\n {repr(self.metric_a)},\n {repr(self.metric_b)}\n )\n)"
        )

        return repr_str
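

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of this module's API).
    # ``_Sum`` is a hypothetical metric defined just for this demo; the composition
    # built below is what an expression like ``metric * 2`` is expected to produce.
    class _Sum(Metric):
        def __init__(self):
            super().__init__()
            self.add_state("total", default=torch.tensor(0.0), dist_reduce_fx="sum")

        def update(self, x: torch.Tensor):
            self.total += x.sum()

        def compute(self):
            return self.total

    doubled = CompositionalMetric(torch.mul, _Sum(), torch.tensor(2.0))
    doubled.update(torch.tensor([1.0, 2.0, 3.0]))
    print(doubled.compute())  # tensor(12.) == 2 * (1 + 2 + 3)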