|
21 | 21 | isclose,
|
22 | 22 | kron,
|
23 | 23 | nunique,
|
| 24 | + one_hot, |
24 | 25 | pad,
|
25 | 26 | setdiff1d,
|
26 | 27 | sinc,
|
|
44 | 45 | lazy_xp_function(expand_dims)
|
45 | 46 | lazy_xp_function(kron)
|
46 | 47 | lazy_xp_function(nunique)
|
| 48 | +lazy_xp_function(one_hot) |
47 | 49 | lazy_xp_function(pad)
|
48 | 50 | # FIXME calls in1d which calls xp.unique_values without size
|
49 | 51 | lazy_xp_function(setdiff1d, jax_jit=False)
|
@@ -448,6 +450,78 @@ def test_xp(self, xp: ModuleType):
|
448 | 450 | )
|
449 | 451 |
|
450 | 452 |
|
@pytest.mark.skip_xp_backend(
    Backend.SPARSE, reason="read-only backend without .at support"
)
@pytest.mark.skip_xp_backend(
    Backend.DASK, reason="backend does not yet support indexed assignment"
)
class TestOneHot:
    """Tests for `one_hot`: one-hot encoding of integer class indices."""

    @pytest.mark.parametrize("n_dim", range(4))
    @pytest.mark.parametrize("num_classes", [1, 3, 10])
    def test_dims_and_classes(self, xp: ModuleType, n_dim: int, num_classes: int):
        # Input shape grows with n_dim: (), (2,), (2, 3), (2, 3, 4).
        shape = tuple(range(2, 2 + n_dim))
        rng = np.random.default_rng(2347823)
        x = xp.asarray(rng.integers(num_classes, size=shape))
        encoded = one_hot(x, num_classes)
        # One extra trailing axis of length num_classes is appended.
        assert encoded.shape == (*x.shape, num_classes)
        # Element-wise: encoded[..., cls] is 1 exactly where x equals cls.
        for *idx, cls in ndindex(*shape, num_classes):
            pos = tuple(idx)
            assert float(encoded[(*pos, cls)]) == (int(x[pos]) == cls)

    def test_basic(self, xp: ModuleType):
        # Each (indices, expected one-hot table) pair uses 3 classes.
        cases = [
            ([0, 1, 2], [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]),
            ([1, 2, 0], [[0.0, 1.0, 0.0], [0.0, 0.0, 1.0], [1.0, 0.0, 0.0]]),
        ]
        for indices, table in cases:
            xp_assert_equal(one_hot(xp.asarray(indices), 3), xp.asarray(table))

    @pytest.mark.skip_xp_backend(
        Backend.TORCH_GPU, reason="Puts Pytorch into a bad state."
    )
    def test_out_of_bound(self, xp: ModuleType):
        # Out-of-range indices are undefined behavior: a backend may either
        # raise IndexError or yield all-zero rows. Accept both outcomes.
        try:
            result = one_hot(xp.asarray([-1, 3]), 3)
        except IndexError:
            return
        xp_assert_equal(result, xp.asarray([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]))

    @pytest.mark.parametrize(
        "int_dtype",
        ["int8", "int16", "int32", "int64", "uint8", "uint16", "uint32", "uint64"],
    )
    def test_int_types(self, xp: ModuleType, int_dtype: str):
        # Every integer dtype of the backend must be accepted as input.
        indices = xp.asarray([0, 1, 2], dtype=getattr(xp, int_dtype))
        xp_assert_equal(
            one_hot(indices, 3),
            xp.asarray([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]),
        )

    def test_custom_dtype(self, xp: ModuleType):
        # The dtype= keyword controls the output dtype (here: boolean).
        result = one_hot(xp.asarray([0, 1, 2], dtype=xp.int32), 3, dtype=xp.bool)
        want = xp.asarray(
            [[True, False, False], [False, True, False], [False, False, True]]
        )
        xp_assert_equal(result, want)

    def test_axis(self, xp: ModuleType):
        # axis=0 and the equivalent negative axis both place the class
        # dimension first, i.e. the transpose of the default layout.
        want = xp.asarray([[0.0, 1.0, 0.0], [0.0, 0.0, 1.0], [1.0, 0.0, 0.0]]).T
        for ax in (0, -2):
            xp_assert_equal(one_hot(xp.asarray([1, 2, 0]), 3, axis=ax), want)

    def test_non_integer(self, xp: ModuleType):
        # Floating-point index arrays are rejected.
        with pytest.raises(TypeError):
            _ = one_hot(xp.asarray([1.0]), 3)
| 524 | + |
451 | 525 | @pytest.mark.skip_xp_backend(
|
452 | 526 | Backend.SPARSE, reason="read-only backend without .at support"
|
453 | 527 | )
|
|
0 commit comments