mirror of https://github.com/AlexeyAB/darknet.git
fix memory error in batchnorm layer (#7619)
This commit is contained in:
parent 4f794aa152
commit c9f2c5f0e9
@@ -36,6 +36,12 @@ layer make_batchnorm_layer(int batch, int w, int h, int c, int train)
    layer.rolling_mean = (float*)xcalloc(c, sizeof(float));
    layer.rolling_variance = (float*)xcalloc(c, sizeof(float));

    layer.mean_delta = (float*)xcalloc(c, sizeof(float));
    layer.variance_delta = (float*)xcalloc(c, sizeof(float));

    layer.x = (float*)xcalloc(layer.batch*layer.outputs, sizeof(float));
    layer.x_norm = (float*)xcalloc(layer.batch*layer.outputs, sizeof(float));

    layer.forward = forward_batchnorm_layer;
    layer.backward = backward_batchnorm_layer;
    layer.update = update_batchnorm_layer;
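For context, a minimal self-contained sketch (not the darknet source; toy_batchnorm, xcalloc_f, make_toy_layer, and toy_backward are hypothetical stand-ins) of the failure mode the commit title describes: if a layer's statistics/delta buffers are left unallocated at construction time, the first pass that writes into them dereferences a NULL pointer, which is exactly the kind of memory error that allocating every buffer with xcalloc in the constructor avoids.

/* Minimal illustration, assuming the buffers are written during a backward pass. */
#include <stdio.h>
#include <stdlib.h>

typedef struct {
    int c;                 /* number of channels                          */
    float *rolling_mean;   /* allocated in the constructor                */
    float *mean_delta;     /* left NULL when the constructor forgets it   */
} toy_batchnorm;

/* xcalloc-style helper: zero-initializing allocation that aborts on failure. */
static float *xcalloc_f(size_t n)
{
    float *p = (float *)calloc(n, sizeof(float));
    if (!p) { fprintf(stderr, "calloc failed\n"); exit(1); }
    return p;
}

static toy_batchnorm make_toy_layer(int c, int allocate_deltas)
{
    toy_batchnorm l = {0};
    l.c = c;
    l.rolling_mean = xcalloc_f((size_t)c);
    /* The commit above amounts to always doing this allocation: */
    if (allocate_deltas) l.mean_delta = xcalloc_f((size_t)c);
    return l;
}

/* Stand-in for a backward pass: writes one value per channel. */
static void toy_backward(toy_batchnorm *l)
{
    for (int i = 0; i < l->c; ++i) l->mean_delta[i] = 1.0f; /* crashes if NULL */
}

int main(void)
{
    toy_batchnorm ok = make_toy_layer(4, 1);
    toy_backward(&ok);                      /* fine: the buffer exists */
    printf("mean_delta[0] = %f\n", ok.mean_delta[0]);

    /* toy_batchnorm bad = make_toy_layer(4, 0);
       toy_backward(&bad);   <- NULL dereference: the unallocated-buffer
                                error the commit fixes */
    free(ok.rolling_mean);
    free(ok.mean_delta);
    return 0;
}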