From 6168781ab926667bd07067e5bfc59802263679ad Mon Sep 17 00:00:00 2001
From: Lucas Clemente Vella
Date: Sat, 10 Sep 2016 15:31:01 -0300
Subject: [PATCH] Adding specific parameters interface on fc_ layer (#213)

---
 dlib/dnn/layers.h          | 24 ++++++++++++++++
 dlib/dnn/layers_abstract.h | 56 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 80 insertions(+)

diff --git a/dlib/dnn/layers.h b/dlib/dnn/layers.h
index a8d26775e..8d9a1f193 100644
--- a/dlib/dnn/layers.h
+++ b/dlib/dnn/layers.h
@@ -1086,6 +1086,30 @@ namespace dlib
             tt::gemm(1,sub.get_gradient_input(), 1,gradient_input,false, w,true);
         }
 
+        alias_tensor_instance get_weights()
+        {
+            return weights(params, 0);
+        }
+
+        alias_tensor_const_instance get_weights() const
+        {
+            return weights(params, 0);
+        }
+
+        alias_tensor_instance get_biases()
+        {
+            static_assert(bias_mode == FC_HAS_BIAS, "This fc_ layer doesn't have a bias vector "
+                "to be retrieved, as per template parameter 'bias_mode'.");
+            return biases(params, weights.size());
+        }
+
+        alias_tensor_const_instance get_biases() const
+        {
+            static_assert(bias_mode == FC_HAS_BIAS, "This fc_ layer doesn't have a bias vector "
+                "to be retrieved, as per template parameter 'bias_mode'.");
+            return biases(params, weights.size());
+        }
+
         const tensor& get_layer_params() const { return params; }
         tensor& get_layer_params() { return params; }
 
diff --git a/dlib/dnn/layers_abstract.h b/dlib/dnn/layers_abstract.h
index d41e4fa89..aec57f97e 100644
--- a/dlib/dnn/layers_abstract.h
+++ b/dlib/dnn/layers_abstract.h
@@ -539,6 +539,62 @@ namespace dlib
                 - #get_bias_weight_decay_multiplier() == val
         !*/
 
+        alias_tensor_const_instance get_weights(
+        ) const;
+        /*!
+            ensures
+                - returns an alias of get_layer_params(), containing the weights matrix of
+                  the fully connected layer.
+                - #get_weights().num_samples() is the number of elements in each input
+                  sample, i.e. the sublayer's output's k * nc * nr.
+                - #get_weights().k() == get_num_outputs()
+                - if get_bias_mode() == FC_HAS_BIAS:
+                    - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
+                - else:
+                    - #get_layer_params().size() == #get_weights().size()
+        !*/
+
+        alias_tensor_instance get_weights(
+        );
+        /*!
+            ensures
+                - returns an alias of get_layer_params(), containing the weights matrix of
+                  the fully connected layer.
+                - #get_weights().num_samples() is the number of elements in each input
+                  sample, i.e. the sublayer's output's k * nc * nr.
+                - #get_weights().k() == get_num_outputs()
+                - if get_bias_mode() == FC_HAS_BIAS:
+                    - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
+                - else:
+                    - #get_layer_params().size() == #get_weights().size()
+        !*/
+
+        alias_tensor_const_instance get_biases(
+        ) const;
+        /*!
+            requires
+                - get_bias_mode() == FC_HAS_BIAS
+            ensures
+                - returns an alias of get_layer_params(), containing the bias vector of
+                  the fully connected layer.
+                - #get_biases().num_samples() == 1
+                - #get_biases().k() == get_num_outputs()
+                - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
+        !*/
+
+        alias_tensor_instance get_biases(
+        );
+        /*!
+            requires
+                - get_bias_mode() == FC_HAS_BIAS
+            ensures
+                - returns an alias of get_layer_params(), containing the bias vector of
+                  the fully connected layer.
+                - #get_biases().num_samples() == 1
+                - #get_biases().k() == get_num_outputs()
+                - #get_layer_params().size() == (#get_weights().size() + #get_biases().size())
+        !*/
+
         template <typename SUBNET> void setup (const SUBNET& sub);
         template <typename SUBNET> void forward(const SUBNET& sub, resizable_tensor& output);
         template <typename SUBNET> void backward(const tensor& gradient_input, SUBNET& sub, tensor& params_grad);
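
For reference, a minimal usage sketch of the accessors added by this patch. It is not part of the patch itself: the network layout, the loss layer, and the 3x3 input shape are arbitrary toy choices. The one real requirement it illustrates is that the fc_ layer must have been set up (here via a forward pass; training works too) before the aliases point at real parameter memory.

    // Hypothetical usage sketch for the new fc_ parameter accessors.
    #include <dlib/dnn.h>
    #include <iostream>

    using namespace dlib;

    int main()
    {
        // A toy network: a 5-output fully connected layer on 3x3 float inputs.
        using net_type = loss_multiclass_log<fc<5, input<matrix<float>>>>;
        net_type net;

        // Run one forward pass so the fc_ layer allocates and sets up its
        // parameter tensor; before setup the aliases would be empty.
        matrix<float> x(3, 3);
        x = 0;
        net(x);

        // layer<1>(net) is the fc layer (layer<0> is the whole loss network);
        // layer_details() exposes the underlying fc_ object.
        auto& fc1 = layer<1>(net).layer_details();

        // get_weights(): num_samples() == 9 (the 3*3 input elements), k() == 5.
        alias_tensor_instance w = fc1.get_weights();
        // get_biases(): num_samples() == 1, k() == 5. Only compiles when the
        // layer's bias_mode is FC_HAS_BIAS (the default for fc<>).
        alias_tensor_instance b = fc1.get_biases();

        std::cout << "weights: " << w.num_samples() << "x" << w.k() << "\n";
        std::cout << "biases:  " << b.num_samples() << "x" << b.k() << "\n";
    }

Because both accessors return aliases into get_layer_params() rather than copies, writing through them (e.g. to initialize or clamp parameters) modifies the layer's actual weights and biases in place.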