-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathhyperparameters_selection.py
93 lines (73 loc) · 3.06 KB
/
hyperparameters_selection.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 8 09:30:16 2020
@author: rfuchs
"""
import autograd.numpy as np
def M_growth(it_nb, r_1L, numobs):
    '''
    Control the growth rate of the number of MC points M over the iterations.

    it_nb (int): The current iteration number
    r_1L (dict of list): The dimensions of each layer of each head and tail
    numobs (int): The number of observations of the dataset
    ---------------------------------------------------------------------
    returns (dict of arrays): The number of MC points to sample on each layer
                              of each head and tail
    '''
    # Common scale factor: grows linearly with the iteration count and
    # shrinks with the log of the dataset size.
    scale = (40 / np.log(numobs)) * it_nb

    # One entry per head ('c' continuous, 'd' discrete); int cast truncates.
    M = {head: (scale * np.sqrt(r_1L[head])).astype(int) for head in ['c', 'd']}

    # The first continuous layer always uses one MC point per observation.
    M['c'][0] = numobs
    return M
def look_for_simpler_network(it_num):
    '''
    Return whether a new (simpler) architecture of the network has to be
    looked for at the current iteration.

    it_num (int): The current iteration number
    -------------------------------------------------------------------------
    returns (bool): True if a simpler architecture has to be looked for,
                    False otherwise
    '''
    # Architecture search is only triggered at these fixed iterations;
    # returning the membership test directly replaces the redundant
    # if/else True/False branches of the original.
    return it_num in (0, 1, 7, 10)
def is_min_architecture_reached(k, r, n_clusters):
    '''
    k (dict of list): The number of components on each layer of each head and tail
    r (dict of list): The dimensions of each layer of each head and tail
    n_clusters (int): The number of clusters to look for in the data
    ------------------------------------------------------------------------
    returns (Bool): True if the minimal network architecture has been reached
                    False otherwise
    '''
    Lt = len(k['t'])

    # --- Common tail minimality ---
    # k is minimal when the first tail layer matches the number of clusters
    # (geq or eq ?) and every deeper tail layer has at most 2 components.
    tail_k_min = (k['t'][0] == n_clusters) \
        & np.all([kk <= 2 for kk in k['t'][1:]])
    # r is minimal when the tail dimensions are exactly [2, 1]
    tail_r_min = r['t'] == [2, 1]
    tail_min = tail_k_min & tail_r_min

    # --- Head minimality (checked per head: 'c' continuous, 'd' discrete) ---
    head_k_min = {'c': True, 'd': True}
    head_r_min = {'c': True, 'd': True}

    for head in ['c', 'd']:
        n_layers = len(k[head])

        # A head with more than one layer is never minimal
        if n_layers >= 2:
            head_k_min[head] = False
            head_r_min[head] = False
            continue

        for layer in range(n_layers):
            # Any layer with more than one component is not minimal
            if k[head][layer] > 1:
                head_k_min[head] = False

            # The first dimension of the continuous head is imposed, so r is
            # only checked for the discrete head / deeper layers
            if (head == 'd') | (layer > 0):
                # r is min if r3 = r2 + 1 = r1 + 2 ... (one unit per layer)
                if r[head][layer] > n_layers + Lt - layer:
                    head_r_min[head] = False

    heads_min = np.all(list(head_k_min.values())) \
        & np.all(list(head_r_min.values()))

    return heads_min & tail_min