[net]
subdivisions=8
inputs=256
batch = 128
momentum=0.9
decay=0.001
max_batches = 2000
time_steps=576
learning_rate=0.1
policy=steps
steps=1000,1500
scales=.1,.1

[rnn]
batch_normalize=1
output = 1024
hidden=1024
activation=leaky

[crnn]
batch_normalize=1
size=1
pad=0
output = 1024
hidden=1024
activation=leaky

[crnn]
batch_normalize=1
size=1
pad=0
output = 1024
hidden=1024
activation=leaky

[connected]
output=256
activation=leaky

[softmax]

[cost]
type=sse
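
# Note: the comments below are editorial annotations, not part of the original
# config; they sketch how these [net] settings are commonly interpreted by
# Darknet, stated here as assumptions rather than a definitive reading.
#
# Effective batching, from the values above:
#   batch / subdivisions = 128 / 8 = 16 sequences per forward/backward pass,
#   each unrolled for time_steps = 576 steps, with inputs = 256 presumably a
#   one-hot vector over byte values.
#
# Learning-rate schedule under policy=steps, assuming each scale is applied
# cumulatively once the corresponding step count in steps= is passed:
#   batches    0 -  999:  lr = 0.1
#   batches 1000 - 1499:  lr = 0.1  * 0.1 = 0.01
#   batches 1500 - 2000:  lr = 0.01 * 0.1 = 0.001  (training stops at max_batches = 2000)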