|
 
 Example:
     >>> from anomalib.metrics import AUROC
+    >>> from anomalib.data import ImageBatch
     >>> import torch
-    >>> # Create sample data
-    >>> labels = torch.tensor([0, 0, 1, 1])  # Binary labels
-    >>> scores = torch.tensor([0.1, 0.2, 0.8, 0.9])  # Anomaly scores
+    >>> # Create sample batch
+    >>> batch = ImageBatch(
+    ...     image=torch.rand(4, 3, 32, 32),
+    ...     pred_score=torch.tensor([0.1, 0.2, 0.8, 0.9]),
+    ...     gt_label=torch.tensor([0, 0, 1, 1])
+    ... )
     >>> # Initialize and compute AUROC
-    >>> metric = AUROC()
-    >>> auroc_score = metric(scores, labels)
-    >>> auroc_score
+    >>> auroc = AUROC(fields=["pred_score", "gt_label"])
+    >>> auroc(batch)
     tensor(1.0)
 
-The metric can also be updated incrementally with batches:
+The metric can also be updated incrementally:
 
-    >>> for batch_scores, batch_labels in dataloader:
-    ...     metric.update(batch_scores, batch_labels)
-    >>> final_score = metric.compute()
+    >>> for batch in dataloader:
+    ...     auroc.update(batch)
+    >>> final_score = auroc.compute()
 
 Once computed, the ROC curve can be visualized:
 
|
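The hunk above stops just before the visualization example, so here is a minimal end-to-end sketch of the new batch-based API. It assumes only what the diff shows: the `fields`-based constructor, `ImageBatch`, and the `generate_figure` method named in a hunk header further below; a single hand-built batch stands in for batches coming from a dataloader.

    import torch

    from anomalib.data import ImageBatch
    from anomalib.metrics import AUROC

    # Tell the wrapper which batch attributes hold predictions and targets.
    auroc = AUROC(fields=["pred_score", "gt_label"])

    batch = ImageBatch(
        image=torch.rand(4, 3, 32, 32),
        pred_score=torch.tensor([0.1, 0.2, 0.8, 0.9]),
        gt_label=torch.tensor([0, 0, 1, 1]),
    )

    auroc.update(batch)      # call once per batch when looping over a dataloader
    score = auroc.compute()  # tensor(1.0) here: the scores separate the labels perfectly
    fig, title = auroc.generate_figure()  # matplotlib Figure of the ROC curve
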
@@ -56,12 +59,11 @@ class _AUROC(BinaryROC):
 
     Examples:
         To compute the metric for a set of predictions and ground truth targets:
-
+        >>> from anomalib.metrics.auroc import _AUROC
        >>> import torch
-        >>> from anomalib.metrics import AUROC
        >>> preds = torch.tensor([0.13, 0.26, 0.08, 0.92, 0.03])
        >>> target = torch.tensor([0, 0, 1, 1, 0])
-        >>> auroc = AUROC()
+        >>> auroc = _AUROC()
        >>> auroc(preds, target)
        tensor(0.6667)
 
|
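Since the renamed `_AUROC` keeps the plain torchmetrics tensor interface, a short sketch of incremental use may help; it reuses the values from the doctest above, split across two `update` calls, so the accumulated result should match the doctest's expected score.

    import torch

    from anomalib.metrics.auroc import _AUROC

    metric = _AUROC()
    # State accumulates across updates, like any torchmetrics metric.
    metric.update(torch.tensor([0.13, 0.26]), torch.tensor([0, 0]))
    metric.update(torch.tensor([0.08, 0.92, 0.03]), torch.tensor([1, 1, 0]))
    print(metric.compute())  # tensor(0.6667), same data as the doctest above
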
@@ -148,4 +150,23 @@ def generate_figure(self) -> tuple[Figure, str]:
 
 
 class AUROC(AnomalibMetric, _AUROC):  # type: ignore[misc]
-    """Wrapper to add AnomalibMetric functionality to AUROC metric."""
+    """Wrapper to add AnomalibMetric functionality to AUROC metric.
+
+    This class wraps the internal ``_AUROC`` metric to make it compatible with
+    Anomalib's batch processing capabilities.
+
+    Example:
+        >>> from anomalib.metrics import AUROC
+        >>> from anomalib.data import ImageBatch
+        >>> import torch
+        >>> # Create sample batch
+        >>> batch = ImageBatch(
+        ...     image=torch.rand(4, 3, 32, 32),
+        ...     pred_score=torch.tensor([0.1, 0.2, 0.8, 0.9]),
+        ...     gt_label=torch.tensor([0, 0, 1, 1])
+        ... )
+        >>> # Initialize and compute AUROC
+        >>> auroc = AUROC(fields=["pred_score", "gt_label"])
+        >>> auroc(batch)
+        tensor(1.0)
+    """
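The `fields` argument is what makes the wrapper reusable beyond image-level scoring. As a purely illustrative sketch, pointing it at per-pixel batch attributes would give a pixel-level AUROC; the field names `anomaly_map` and `gt_mask` are assumptions here, not taken from this diff.

    import torch

    from anomalib.data import ImageBatch
    from anomalib.metrics import AUROC

    # Hypothetical: "anomaly_map" and "gt_mask" are assumed field names,
    # not shown in this diff.
    pixel_auroc = AUROC(fields=["anomaly_map", "gt_mask"])

    batch = ImageBatch(
        image=torch.rand(4, 3, 32, 32),
        anomaly_map=torch.rand(4, 32, 32),                # per-pixel anomaly scores
        gt_mask=torch.randint(0, 2, (4, 32, 32)).bool(),  # per-pixel ground truth
    )

    pixel_auroc.update(batch)
    print(pixel_auroc.compute())  # area under the pixel-wise ROC curve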