darknet/cfg/lstm.train.cfg
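
# Training configuration for a small character-level recurrent text model
# built from stacked LSTM layers (typically driven by Darknet's `rnn train`
# subcommand). Parameter notes below follow standard Darknet semantics and
# may differ slightly between forks.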

[net]
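# batch=128 sequences per iteration, processed in 8 subdivisions to limit
# memory use; inputs=256 is the per-step input width (one slot per byte
# value for character models) and time_steps=576 is the unroll length
# through time.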
subdivisions=8
inputs=256
batch = 128
momentum=0.9
decay=0.001
max_batches = 2000
time_steps=576
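
# Learning-rate schedule: 0.5 after a 10-iteration burn-in ramp, then
# multiplied by 0.1 at iterations 1000 and 1500 (steps policy), out of
# max_batches=2000 total iterations.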
learning_rate=0.5
policy=steps
burn_in=10
steps=1000,1500
scales=.1,.1
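
# Three stacked LSTM layers, each with 1024 hidden units and batch
# normalization applied to the layer's internal connected (gate) layers.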
[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024
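
# Output head: a fully connected layer projects back to 256 units (one per
# input symbol), followed by a softmax and a sum-of-squared-error cost.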
[connected]
output=256
activation=leaky

[softmax]

[cost]
type=sse