fix memory error in batchnorm layer (#7619)

deoksangkim 2021-04-26 06:58:22 +09:00 committed by GitHub
parent 4f794aa152
commit c9f2c5f0e9
1 changed file with 6 additions and 0 deletions

src/batchnorm_layer.c

@@ -36,6 +36,12 @@ layer make_batchnorm_layer(int batch, int w, int h, int c, int train)
     layer.rolling_mean = (float*)xcalloc(c, sizeof(float));
     layer.rolling_variance = (float*)xcalloc(c, sizeof(float));
 
+    layer.mean_delta = (float*)xcalloc(c, sizeof(float));
+    layer.variance_delta = (float*)xcalloc(c, sizeof(float));
+
+    layer.x = (float*)xcalloc(layer.batch*layer.outputs, sizeof(float));
+    layer.x_norm = (float*)xcalloc(layer.batch*layer.outputs, sizeof(float));
+
     layer.forward = forward_batchnorm_layer;
     layer.backward = backward_batchnorm_layer;
     layer.update = update_batchnorm_layer;
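
For context, the allocated buffers are the ones the batchnorm forward and backward passes write into: mean_delta and variance_delta hold per-channel gradients, while x and x_norm cache the layer input and the normalized activations for the backward pass. A standalone batchnorm layer created without them writes through NULL pointers, which is the memory error this commit fixes. Below is a minimal, self-contained C sketch of the allocation pattern; the bn_layer struct and make_bn_layer function are illustrative stand-ins, not darknet's actual API.

#include <stdlib.h>
#include <stdio.h>

/* Illustrative stand-in for darknet's layer struct (simplified). */
typedef struct {
    int batch;    /* minibatch size */
    int outputs;  /* activations per batch item (w*h*c) */
    int c;        /* number of channels */
    float *mean_delta, *variance_delta;  /* per-channel gradient buffers */
    float *x, *x_norm;                   /* cached input / normalized output */
} bn_layer;

/* Mirrors the patch: every buffer the forward/backward passes write to
   must be allocated when the layer is created, or those writes hit NULL. */
static bn_layer make_bn_layer(int batch, int w, int h, int c)
{
    bn_layer l = {0};
    l.batch = batch;
    l.c = c;
    l.outputs = w * h * c;
    l.mean_delta     = calloc(c, sizeof(float));
    l.variance_delta = calloc(c, sizeof(float));
    l.x      = calloc((size_t)l.batch * l.outputs, sizeof(float));
    l.x_norm = calloc((size_t)l.batch * l.outputs, sizeof(float));
    return l;
}

int main(void)
{
    bn_layer l = make_bn_layer(4, 16, 16, 8);
    /* The backward pass writes per-channel stats here; without the
       calloc above this would be a NULL dereference. */
    l.mean_delta[0] = 1.0f;
    printf("mean_delta[0] = %.1f\n", l.mean_delta[0]);
    free(l.mean_delta); free(l.variance_delta);
    free(l.x); free(l.x_norm);
    return 0;
}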