-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest_flatten.py
59 lines (44 loc) · 1.83 KB
/
test_flatten.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import sys; import os; sys.path.insert(1, os.path.join(os.getcwd(), "numpy_nn"))
import unittest
from typing import Callable, List
import torch
import numpy as np
from test_layer import TestLayer
from numpy_nn.modules.np_nn import Flatten
# np_seed = 0
# torch_seed = 0
# np.random.seed(np_seed)
# torch.manual_seed(torch_seed)
class TestMaxPool2d(TestLayer):
    """Tests for the custom ``Flatten`` module against ``torch.nn.Flatten``.

    NOTE(review): the class name appears copy-pasted from a maxpool test
    suite — it actually exercises ``Flatten``. The name is kept unchanged so
    that any external test selection by name keeps working; consider renaming
    to ``TestFlatten`` in a dedicated change.
    """

    def setUp(self) -> None:
        # No per-test fixtures needed; TestLayer supplies the comparison
        # machinery (see _test_module_randomly).
        pass

    def _test_flatten_with_args(self,
                                input_shape: List[int],
                                atol: float = 1e-6,
                                random_sampler: Callable = np.random.rand,
                                print_tensors: bool = False,
                                print_results: bool = False) -> None:
        """Compare ``Flatten`` to ``torch.nn.Flatten`` on one random input.

        Args:
            input_shape: Full input shape, batch dimension first.
            atol: Absolute tolerance for the numeric comparison.
            random_sampler: NumPy sampler used to draw the input values.
            print_tensors: Verbosity flag forwarded to the base-class helper.
            print_results: Verbosity flag forwarded to the base-class helper.
        """
        batch_size, *rest_input_dim = input_shape
        # Flatten keeps the batch dimension and collapses everything else.
        # int(...) because np.prod returns a numpy scalar (np.int64), and a
        # shape should be a plain list of Python ints.
        output_shape = [batch_size, int(np.prod(rest_input_dim))]
        self._test_module_randomly(Flatten(),
                                   torch.nn.Flatten(),
                                   input_shape=input_shape,
                                   output_shape=output_shape,
                                   atol=atol,
                                   random_sampler=random_sampler,
                                   print_tensors=print_tensors,
                                   print_results=print_results)

    def test_flatten(self):
        """Flatten test: repeat with uniform and normal samplers.

        (Original docstring said "Maxpool test" — a copy-paste error; this
        method tests Flatten.)
        """
        input_shape = [2, 3, 5, 5]
        n_iters = 3
        for sampler in (np.random.rand, np.random.randn):
            for _ in range(n_iters):
                with self.subTest(sampler=sampler):
                    self._test_flatten_with_args(input_shape,
                                                 atol=1e-6,
                                                 random_sampler=sampler)
# Allow running this test file directly (python test_flatten.py) in addition
# to discovery via a test runner.
if __name__ == "__main__":
    unittest.main()