28 #include "tiny_dnn/util/util.h"
29 #include "tiny_dnn/layers/layer.h"
// Constructor taking explicit per-input shapes: registers one data-type
// input slot per shape on the base layer and a single data output.
// NOTE(review): extraction fragment — the signature line and the body
// following the opening brace are cut from view here.
50 :
layer(std::vector<vector_type>(in_shapes.size(), vector_type::data), {vector_type::data}),
51 in_shapes_(in_shapes) {
// Convenience constructor: concatenates `num_args` flat inputs, each
// treated as a (ndim, 1, 1) shape.
// NOTE(review): extraction fragment — the signature line and the body
// following the opening brace are cut from view here.
60 :
layer(std::vector<vector_type>(num_args, vector_type::data), { vector_type::data }),
61 in_shapes_(std::vector<shape3d>(num_args,
shape3d(ndim,1,1))) {
// Computes the concatenated output shape: start from the first input shape
// and accumulate depth over the remaining inputs. All inputs must share the
// same WxH area; otherwise an nn_error is thrown.
// NOTE(review): fragment — the enclosing function signature is cut from view.
66 out_shape_ = in_shapes_.front();
67 for (
size_t i = 1; i < in_shapes_.size(); i++) {
68 if (in_shapes_[i].area() != out_shape_.area())
69 throw nn_error(
// NOTE(review): message grammar is off ("each input shapes ... same WxH
// size"); left as-is because it is a runtime string, not a comment.
"each input shapes to concat must have same WxH size");
70 out_shape_.depth_ += in_shapes_[i].depth_;
/// Array of input shapes (width x height x depth) accepted by this layer.
/// NOTE(review): fragment — the function body is cut from view.
78 std::vector<shape3d>
in_shape()
const override {
87 std::vector<tensor_t*>& out_data)
override {
88 serial_size_t num_samples =
static_cast<serial_size_t
>((*out_data[0]).size());
90 for (serial_size_t s = 0; s < num_samples; s++) {
91 float_t* outs = &(*out_data[0])[s][0];
93 for (serial_size_t i = 0; i < in_shapes_.size(); i++) {
94 const float_t* ins = &(*in_data[i])[s][0];
95 serial_size_t dim = in_shapes_[i].size();
96 outs = std::copy(ins, ins + dim, outs);
102 const std::vector<tensor_t*>& out_data,
103 std::vector<tensor_t*>& out_grad,
104 std::vector<tensor_t*>& in_grad)
override {
105 CNN_UNREFERENCED_PARAMETER(in_data);
106 CNN_UNREFERENCED_PARAMETER(out_data);
108 size_t num_samples = (*out_grad[0]).size();
110 for (
size_t s = 0; s < num_samples; s++) {
111 const float_t* outs = &(*out_grad[0])[s][0];
113 for (serial_size_t i = 0; i < in_shapes_.size(); i++) {
114 serial_size_t dim = in_shapes_[i].size();
115 float_t* ins = &(*in_grad[i])[s][0];
116 std::copy(outs, outs + dim, ins);
122 template <
class Archive>
123 static void load_and_construct(Archive & ar, cereal::construct<concat_layer> & construct) {
124 std::vector<shape3d> in_shapes;
126 ar(cereal::make_nvp(
"in_size", in_shapes));
127 construct(in_shapes);
/// Serializes this layer with cereal.
/// NOTE(review): fragment — the body is cut after serialize_prolog; the
/// archived fields themselves are not visible here.
130 template <
class Archive>
131 void serialize(Archive & ar) {
132 layer::serialize_prolog(ar);
// Shapes of each input connection, in order; read by both propagation passes.
137 std::vector<shape3d> in_shapes_;
Concatenates N inputs along the depth (channel) axis.
Definition: concat_layer.h:44
void back_propagation(const std::vector< tensor_t * > &in_data, const std::vector< tensor_t * > &out_data, std::vector< tensor_t * > &out_grad, std::vector< tensor_t * > &in_grad) override
return the delta of the previous layer (delta = dE/da, where a = wx in a fully-connected layer)
Definition: concat_layer.h:101
std::vector< shape3d > in_shape() const override
array of input shapes (width x height x depth)
Definition: concat_layer.h:78
std::string layer_type() const override
name of layer, should be unique for each concrete class
Definition: concat_layer.h:74
void forward_propagation(const std::vector< tensor_t * > &in_data, std::vector< tensor_t * > &out_data) override
Definition: concat_layer.h:86
concat_layer(const std::vector< shape3d > &in_shapes)
Definition: concat_layer.h:49
concat_layer(serial_size_t num_args, serial_size_t ndim)
Definition: concat_layer.h:59
std::vector< shape3d > out_shape() const override
array of output shapes (width x height x depth)
Definition: concat_layer.h:82
base class of all kind of NN layers
Definition: layer.h:62