#include "tiny_dnn/util/util.h"
#include "tiny_dnn/layers/layer.h"

namespace tiny_dnn {

/**
 * element-wise pow: y = scale * x^factor
 **/
class power_layer : public layer {
 public:
  power_layer(const shape3d &in_shape, float_t factor, float_t scale = 1.0f)
    : layer({vector_type::data}, {vector_type::data}),
      in_shape_(in_shape),
      factor_(factor),
      scale_(scale) {}
  power_layer(const layer &prev_layer, float_t factor, float_t scale = 1.0f)
    : layer({vector_type::data}, {vector_type::data}),
      in_shape_(prev_layer.out_shape()[0]),
      factor_(factor),
      scale_(scale) {}
  // name of layer, should be unique for each concrete class
  std::string layer_type() const override { return "power"; }

  // array of input shapes (width x height x depth)
  std::vector<shape3d> in_shape() const override { return {in_shape_}; }

  // array of output shapes (width x height x depth)
  std::vector<shape3d> out_shape() const override { return {in_shape_}; }

  void forward_propagation(const std::vector<tensor_t *> &in_data,
                           std::vector<tensor_t *> &out_data) override {
    const tensor_t &x = *in_data[0];
    tensor_t &y       = *out_data[0];

    for (serial_size_t i = 0; i < x.size(); i++) {
      std::transform(x[i].begin(), x[i].end(), y[i].begin(),
                     [=](float_t x) { return scale_ * std::pow(x, factor_); });
    }
  }
  // returns the delta of the previous layer (delta = dE/da, a = wx in a fully-connected layer)
  void back_propagation(const std::vector<tensor_t *> &in_data,
                        const std::vector<tensor_t *> &out_data,
                        std::vector<tensor_t *> &out_grad,
                        std::vector<tensor_t *> &in_grad) override {
    tensor_t &dx       = *in_grad[0];
    const tensor_t &dy = *out_grad[0];
    const tensor_t &x  = *in_data[0];
    const tensor_t &y  = *out_data[0];
    for (serial_size_t i = 0; i < x.size(); i++) {
      for (serial_size_t j = 0; j < x[i].size(); j++) {
        // y = scale * x^factor  =>  dy/dx = scale * factor * x^(factor-1) = factor * y / x
        if (std::abs(x[i][j]) > 1e-10) {
          dx[i][j] = dy[i][j] * factor_ * y[i][j] / x[i][j];
        } else {
          // near x == 0, avoid dividing by x and use the derivative directly
          dx[i][j] = dy[i][j] * scale_ * factor_ * std::pow(x[i][j], factor_ - 1.0f);
        }
      }
    }
  }
  template <class Archive>
  static void load_and_construct(Archive &ar,
                                 cereal::construct<power_layer> &construct) {
    shape3d in_shape;
    float_t factor;
    float_t scale(1.0f);

    ar(cereal::make_nvp("in_size", in_shape),
       cereal::make_nvp("factor", factor),
       cereal::make_nvp("scale", scale));
    construct(in_shape, factor, scale);
  }
  template <class Archive>
  void serialize(Archive &ar) {
    layer::serialize_prolog(ar);
    ar(cereal::make_nvp("in_size", in_shape_),
       cereal::make_nvp("factor", factor_),
       cereal::make_nvp("scale", scale_));
  }
  float_t factor() const { return factor_; }

  float_t scale() const { return scale_; }

 private:
  shape3d in_shape_;
  float_t factor_;
  float_t scale_;
};

}  // namespace tiny_dnn
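The closed form used in back_propagation, dy/dx = factor * y / x, can be sanity-checked against a central finite difference. The following is a minimal standalone sketch (plain C++, no tiny_dnn dependency; the parameter values and step size are arbitrary illustrations):

#include <cmath>
#include <cstdio>

int main() {
  const double scale = 0.5, factor = 3.0;  // arbitrary example parameters
  const double x = 1.7, eps = 1e-6;

  // analytic gradient in the form used by power_layer::back_propagation
  const double y    = scale * std::pow(x, factor);
  const double grad = factor * y / x;

  // central finite difference of y(x) = scale * x^factor
  const double fd = (scale * std::pow(x + eps, factor) -
                     scale * std::pow(x - eps, factor)) / (2.0 * eps);

  std::printf("analytic: %.8f  finite-diff: %.8f\n", grad, fd);
  return 0;
}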
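For reference, a minimal construction sketch using only the interface shown above. It assumes the header is reachable as tiny_dnn/layers/power_layer.h and that shape3d is constructible from width, height and depth as elsewhere in tiny_dnn; it is illustrative rather than a build verified against a particular version:

#include "tiny_dnn/layers/power_layer.h"

#include <iostream>

int main() {
  using namespace tiny_dnn;

  // element-wise y = 0.5 * x^2 over a 32x32x3 input tensor
  power_layer pl(shape3d(32, 32, 3), 2.0f, 0.5f);

  std::cout << "type   : " << pl.layer_type() << "\n"
            << "factor : " << pl.factor() << "\n"
            << "scale  : " << pl.scale() << "\n"
            << "in size: " << pl.in_shape()[0].size() << " elements\n";
  return 0;
}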