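# RN06-Poke10.yml
# Training, quantization, and hls4ml conversion configuration for a quantized
# ResNetv1-eembc model targeting a PYNQ-Z2 (xc7z020) board.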
save_dir: resnet_v1_eembc_RN06_bilinear_croptrain
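# Model architecture: quantized resnet_v1_eembc with the listed filters, kernel
# sizes, and strides; l1/l2 set kernel regularization strengths (the stride
# string '411' presumably encodes one stride digit per conv layer).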
model:
  name: resnet_v1_eembc_quantized
  filters:
    - 32
    - 32
  l1: 0
  l2: 1e-4
  kernels:
    - 3
    - 3
    - 3
  strides:
    - '411'
  skip: 0
  avg_pooling: 0
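# Pruning configuration (target sparsity), assuming the training script applies
# magnitude pruning.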
pruning:
  sparsity: 1.0
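# Training hyperparameters: Adam with exponential learning-rate decay,
# categorical cross-entropy loss, 200 epochs, batch size 32; patience is
# presumably used for early stopping and/or learning-rate reduction.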
fit:
  compile:
    initial_lr: 0.001
    lr_decay: 0.99
    optimizer: Adam
    loss: categorical_crossentropy
  epochs: 200
  # 40 patience
  patience: 2
  batch_size: 32
  verbose: 1
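# QKeras quantizer settings: quantized_bits / quantized_relu with 8 total bits
# and 2 integer bits; alpha and stochastic rounding are passed to the quantizers.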
quantization:
  logit_total_bits: 8
  logit_int_bits: 2
  logit_quantizer: quantized_bits
  activation_total_bits: 8
  activation_int_bits: 2
  activation_quantizer: quantized_relu
  alpha: 1
  use_stochastic_rounding: 0
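# hls4ml conversion and Vivado HLS build options for the PYNQ-Z2 target:
# io_stream dataflow, Resource strategy with ReuseFactor 16384, 8-bit m_axi
# interface, global precision ap_fixed<8,6>, 10 ns clock.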
convert:
  ProjectName: resnet
  RemoveSoftmax: 1
  OutputDir: ../inference/hls/pynqz2_resnet_m_axi_8_serial_prj
  XilinxPart: xc7z020clg400-1
  ApplyPatches: 1
  AxiWidth: 8
  Interface: m_axi
  Implementation: serial
  BoardName: pynqz2
  Backend: Pynq
  IOType: io_stream
  Precision: ap_fixed<8,6>
  ReuseFactor: 16384
  Trace: 0
  Build: 1
  ClockPeriod: 10
  Strategy: Resource
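  # Per-layer overrides of the global precision: fixed-point types for the
  # weights, biases, and results of each quantized conv+BN, activation, and
  # dense layer. AP_RND/AP_SAT select rounding and saturation modes; force_dsp
  # and accum_t appear to be repo-specific (patched) hls4ml options.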
  Override:
    input_1:
      Precision: ap_ufixed<8,0>
    q_conv2d_batchnorm:
      force_dsp: 0
      accum_t: ap_fixed<14,6>
      Precision:
        weight: ap_fixed<8,3>
        bias: ap_fixed<8,3>
        result: ap_fixed<9,6>
        default: ap_fixed<9,6>
    q_conv2d_batchnorm_linear:
      Precision:
        result: ap_fixed<9,6>
        default: ap_fixed<9,6>
    q_activation:
      Precision:
        default: ap_fixed<9,6>
        result: ap_fixed<8,3,AP_RND,AP_SAT>
    q_conv2d_batchnorm_1:
      force_dsp: 0
      accum_t: ap_fixed<14,6>
      Precision:
        weight: ap_fixed<8,3>
        bias: ap_fixed<8,3>
        result: ap_fixed<9,6>
        default: ap_fixed<9,6>
    q_conv2d_batchnorm_1_linear:
      Precision:
        result: ap_fixed<8,6>
        default: ap_fixed<8,6>
    q_activation_1:
      Precision:
        default: ap_fixed<9,6>
        result: ap_fixed<8,3,AP_RND,AP_SAT>
    q_conv2d_batchnorm_2:
      force_dsp: 0
      accum_t: ap_fixed<14,6>
      Precision:
        weight: ap_fixed<8,3>
        bias: ap_fixed<8,3>
        result: ap_fixed<9,6>
        default: ap_fixed<9,6>
    q_conv2d_batchnorm_2_linear:
      Precision:
        result: ap_fixed<9,6>
        default: ap_fixed<9,6>
    q_activation_2:
      Precision:
        default: ap_fixed<9,6>
        result: ap_fixed<8,3,AP_RND,AP_SAT>
    q_dense:
      force_dsp: 0
      accum_t: ap_fixed<12,6>
      Precision:
        weight: ap_fixed<8,3>
        bias: ap_fixed<8,3>
        result: ap_fixed<8,6>
        default: ap_fixed<8,6>