tiny_dnn 1.0.0
A header-only, dependency-free deep learning framework in C++11
File: fully_connected_op.h
1 /*
2  COPYRIGHT
3 
4  All contributions by Taiga Nomi
5  Copyright (c) 2013, Taiga Nomi
6  All rights reserved.
7 
8  All other contributions:
9  Copyright (c) 2013-2016, the respective contributors.
10  All rights reserved.
11 
12  Each contributor holds copyright over their respective contributions.
13  The project versioning (Git) records all such contribution source information.
14 
15  LICENSE
16 
17  The BSD 3-Clause License
18 
19 
20  Redistribution and use in source and binary forms, with or without
21  modification, are permitted provided that the following conditions are met:
22 
23  * Redistributions of source code must retain the above copyright notice, this
24  list of conditions and the following disclaimer.
25 
26  * Redistributions in binary form must reproduce the above copyright notice,
27  this list of conditions and the following disclaimer in the documentation
28  and/or other materials provided with the distribution.
29 
30  * Neither the name of tiny-dnn nor the names of its
31  contributors may be used to endorse or promote products derived from
32  this software without specific prior written permission.
33 
34  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
35  AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
36  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
37  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
38  FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
39  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
40  SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
41  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
42  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
43  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
44 */
45 #pragma once
46 
47 #include "tiny_dnn/core/framework/op_kernel.h"
48 
49 #include "tiny_dnn/core/kernels/fully_connected_op_avx.h"
50 #include "tiny_dnn/core/kernels/fully_connected_op_internal.h"
51 #include "tiny_dnn/core/kernels/fully_connected_op_nnpack.h"
52 
53 namespace tiny_dnn {
54 
56  public:
  /// Constructs the op from kernel-construction info. All configuration is
  /// forwarded to (and stored by) the core::OpKernel base class; this op
  /// keeps no additional state of its own.
  explicit FullyConnectedOp(const core::OpKernelConstruction& context)
    : core::OpKernel(context) {}
59 
60  void compute(const core::OpKernelContext& context) override {
61  auto params = OpKernel::params_->fully();
62 
63  // incomimg/outcoming data
64  const tensor_t& in_data = context.input(0);
65  const tensor_t& W = context.input(1);
66  const tensor_t* bias = params.has_bias_ ? &context.input(2) : nullptr;
67  tensor_t& out_data = context.output(1);
68 
69  // initialize outputs
70  fill_tensor(out_data, float_t(0));
71 
72  // call the algorithm depending on the selected engine type
73 
74  const core::backend_t engine = context.engine();
75 
76  if (engine == core::backend_t::internal) {
77  kernels::fully_connected_op_internal(
78  in_data,
79  W[0],
80  params.has_bias_ ? (*bias)[0] : vec_t(),
81  out_data,
82  params,
83  context.parallelize());
84  }
85  else if (engine == core::backend_t::nnpack) {
86  kernels::fully_connected_op_nnpack(
87  in_data,
88  W[0],
89  params.has_bias_ ? (*bias)[0] : vec_t(),
90  out_data,
91  params,
92  context.parallelize());
93  }
94  else if (engine == core::backend_t::avx) {
95  kernels::fully_connected_op_avx(
96  in_data,
97  W[0],
98  params.has_bias_ ? (*bias)[0] : vec_t(),
99  out_data,
100  params,
101  context.parallelize());
102  }
103  else {
104  throw nn_error("Not supported engine: " + to_string(engine));
105  }
106  }
107 };
108 
109 } // namespace tiny_dnn
Definition: fully_connected_op.h:55
Definition: op_kernel.h:55
Definition: op_kernel.h:72
Definition: op_kernel.h:175
error exception class for tiny-dnn
Definition: nn_error.h:37