nn4.def: Add reasoning behind LRN layers.

Brandon Amos 2015-12-28 21:28:06 -05:00
parent 4098a586ae
commit 80135c737c
1 changed file with 11 additions and 0 deletions


@@ -28,6 +28,17 @@ function createModel()
 net:add(nn.SpatialBatchNormalization(64))
 net:add(nn.ReLU())
+-- The FaceNet paper just says `norm` and that the models are based
+-- heavily on the inception paper (http://arxiv.org/pdf/1409.4842.pdf),
+-- which uses pooling and normalization in the same way in the early layers.
+--
+-- The Caffe and official versions of this network both use LRN:
+--
+-- + https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet
+-- + https://github.com/google/inception/blob/master/inception.ipynb
+--
+-- The Caffe docs at http://caffe.berkeleyvision.org/tutorial/layers.html
+-- define LRN to be across channels.
 net:add(nn.SpatialMaxPooling(3, 3, 2, 2, 1, 1))
 net:add(nn.CrossMapNormalization(5, 0.0001, 0.75))
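
For reference, below is a minimal sketch of what across-channel LRN computes at a single spatial position, in the AlexNet/Caffe-style formulation the comments point to. It is not part of this commit and does not use nn.CrossMapNormalization's internals; the helper name lrnAcrossChannels and the k offset are illustrative assumptions, the (size, alpha, beta) arguments mirror the values passed above, and implementations differ on whether alpha is divided by the window size.

-- Illustrative only: across-channel LRN for one (x, y) position.
-- `a` is a 1D Torch tensor holding that position's activations across
-- all channels; `size`, `alpha`, `beta` correspond to the arguments
-- used above; `k` is the additive offset some formulations include.
local function lrnAcrossChannels(a, size, alpha, beta, k)
   k = k or 1
   local n = a:size(1)
   local b = a:clone()
   local half = math.floor(size / 2)
   for i = 1, n do
      -- Sum of squares over the `size` channels centered on channel i,
      -- clipped at the channel boundaries.
      local sumsq = 0
      for j = math.max(1, i - half), math.min(n, i + half) do
         sumsq = sumsq + a[j] * a[j]
      end
      -- Caffe-style scaling divides alpha by the window size.
      b[i] = a[i] / math.pow(k + (alpha / size) * sumsq, beta)
   end
   return b
end

With size = 5, alpha = 0.0001, and beta = 0.75 as in the layer above, each channel's activation is damped by the summed energy of its five neighboring channels at the same spatial location, which is the "across channels" behavior the Caffe docs describe.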