test_activations.py
import os
import sys

# Put the numpy_nn directory on the import path so its modules can be found
# when this file is run from the repository root.
sys.path.insert(1, os.path.join(os.getcwd(), "numpy_nn"))

import unittest
from typing import Callable, List

import numpy as np
import torch

from test_layer import TestLayer
from numpy_nn.modules.np_nn import ReLULayer, SigmoidLayer
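
# The randomized forward/backward comparison against PyTorch is provided by
# TestLayer._test_module_randomly (see test_layer.py); ReLULayer and
# SigmoidLayer are the numpy implementations under test.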
# np_seed = 0
# torch_seed = 0
# np.random.seed(np_seed)
# torch.manual_seed(torch_seed)


class TestActivations(TestLayer):

    def setUp(self) -> None:
        pass

    def _test_activation(self,
                         my_activation: Callable,
                         torch_activation: Callable,
                         input_dim: List[int],
                         atol: float = 1e-5,
                         random_sampler: Callable = np.random.rand,
                         print_tensors: bool = False,
                         print_results: bool = False):
        """
        Samples input data and an output gradient with the given
        random_sampler (uniform by default) and checks that the layer's
        output and input gradient are close to those computed by PyTorch.
        """
        self._test_module_randomly(my_activation(), torch_activation(),
                                   input_shape=input_dim, output_shape=input_dim,
                                   atol=atol, random_sampler=random_sampler,
                                   print_tensors=print_tensors,
                                   print_results=print_results)
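
    # A minimal sketch of what the inherited helper is assumed to do here
    # (the real implementation lives in test_layer.py; the names below are
    # illustrative, not the actual API):
    #
    #     x = random_sampler(*input_shape).astype(np.float32)
    #     grad_out = random_sampler(*output_shape).astype(np.float32)
    #     x_torch = torch.tensor(x, requires_grad=True)
    #     y_torch = torch_module(x_torch)
    #     y_torch.backward(torch.tensor(grad_out))
    #     assert np.allclose(my_module.forward(x), y_torch.detach().numpy(), atol=atol)
    #     assert np.allclose(my_module.backward(grad_out), x_torch.grad.numpy(), atol=atol)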

    def _test_relu_with_args(self,
                             input_dim: List[int],
                             atol: float = 1e-5,
                             random_sampler: Callable = np.random.rand,
                             print_tensors: bool = False,
                             print_results: bool = False):
        self._test_activation(ReLULayer, torch.nn.ReLU, input_dim,
                              atol=atol, random_sampler=random_sampler,
                              print_tensors=print_tensors,
                              print_results=print_results)

    def _test_sigmoid_with_args(self,
                                input_dim: List[int],
                                atol: float = 1e-5,
                                random_sampler: Callable = np.random.rand,
                                print_tensors: bool = False,
                                print_results: bool = False):
        self._test_activation(SigmoidLayer, torch.nn.Sigmoid, input_dim,
                              atol=atol, random_sampler=random_sampler,
                              print_tensors=print_tensors,
                              print_results=print_results)

    def test_relu(self):
        """
        ReLU test: runs the randomized comparison several times with both
        uniform (rand) and standard-normal (randn) samplers.
        """
        batch_size = 5
        n_channels = 6
        height = 4
        width = 4
        input_dim = (batch_size, n_channels, height, width)
        atol = 1e-10
        n_iters = 3
        for sampler in (np.random.rand, np.random.randn):
            for _ in range(n_iters):
                with self.subTest(sampler=sampler):
                    self._test_relu_with_args(input_dim, atol, sampler)

    def test_sigmoid(self):
        """
        Sigmoid test: runs the randomized comparison several times with both
        uniform (rand) and standard-normal (randn) samplers.
        """
        batch_size = 5
        n_channels = 6
        height = 4
        width = 4
        input_dim = (batch_size, n_channels, height, width)
        atol = 1e-10
        n_iters = 3
        for sampler in (np.random.rand, np.random.randn):
            for _ in range(n_iters):
                with self.subTest(sampler=sampler):
                    self._test_sigmoid_with_args(input_dim, atol, sampler)


if __name__ == "__main__":
    unittest.main()
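
# Usage: run directly with `python test_activations.py`, or from the
# directory containing this file with `python -m unittest test_activations`.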