tiny_dnn  1.0.0
A header only, dependency-free deep learning framework in C++11
Class Hierarchy

Go to the graphical class hierarchy

This inheritance list is sorted roughly, but not completely, alphabetically:
[detail level 123456]
 Ctiny_dnn::absolute
 Ctiny_dnn::absolute_eps< fraction >
 Ctiny_dnn::aligned_allocator< T, alignment >
 Ctiny_dnn::core::backend
 Ctiny_dnn::core::avx_backend
 Ctiny_dnn::core::dnn_backend
 Ctiny_dnn::core::nnp_backend
 Ctiny_dnn::core::tiny_backend
 Ctiny_dnn::blocked_range
 Ctiny_dnn::detail::caffe_layer_vector
 Ctiny_dnn::core::connection_table
 Ctiny_dnn::core::Conv2dPadding
 Ctiny_dnn::core::conv_layer_worker_specific_storage
 Ctiny_dnn::cross_entropy
 Ctiny_dnn::cross_entropy_multiclass
 Ctiny_dnn::core::deconv_layer_worker_specific_storage
 Ctiny_dnn::core::deconv_params
 Ctiny_dnn::deserialization_helper< InputArchive >
 Ctiny_dnn::Device
 Ctiny_dnn::edgeClass containing input/output data
 Cstd::enable_shared_from_this
 Ctiny_dnn::nodeBase class of all kinds of tiny-dnn data
 Ctiny_dnn::layerBase class of all kinds of NN layers
 Cstd::exception
 Ctiny_dnn::nn_errorError exception class for tiny-dnn
 Ctiny_dnn::nn_not_implemented_error
 Cstd::false_type
 Cfoobar< n >
 Ctiny_dnn::activation::function
 Ctiny_dnn::activation::elu
 Ctiny_dnn::activation::identity
 Ctiny_dnn::activation::leaky_relu
 Ctiny_dnn::activation::relu
 Ctiny_dnn::activation::sigmoid
 Ctiny_dnn::activation::softmax
 Ctiny_dnn::activation::tan_h
 Ctiny_dnn::activation::tan_hp1m2
 Ctiny_dnn::weight_init::function
 Ctiny_dnn::weight_init::scalable
 Ctiny_dnn::weight_init::constant
 Ctiny_dnn::weight_init::gaussian
 Ctiny_dnn::weight_init::he
 Ctiny_dnn::weight_init::lecunUse fan-in (number of input weights for each neuron) for scaling
 Ctiny_dnn::weight_init::xavierUse fan-in and fan-out for scaling
 Cvectorize::detail::generic_vec_type< T >
 Ctiny_dnn::graph_visualizerUtility for graph visualization
 Ctiny_dnn::image< T >Simple image utility class
 Ctiny_dnn::index3d< T >
 Ctiny_dnn::index3d< serial_size_t >
 Ctiny_dnn::detail::layer_node
 Ctiny_dnn::core::max_pooling_layer_worker_specific_storage
 Ctiny_dnn::detail::mnist_header
 Ctiny_dnn::mse
 Cnetwork
 Cmodels::alexnet
 Ctiny_dnn::network< NetType >A model of neural networks in tiny-dnn
 Ctiny_dnn::nn_infoInfo class for tiny-dnn (for debug)
 Ctiny_dnn::nn_warnWarning class for tiny-dnn (for debug)
 Ctiny_dnn::node_tuple< T >
 Ctiny_dnn::nodesBasic class of various network types (sequential, multi-in/multi-out)
 Ctiny_dnn::graphGeneric graph network
 Ctiny_dnn::sequentialSingle-input, single-output feedforward network
 Ctiny_dnn::core::OpKernel
 Ctiny_dnn::Conv2dGradOp
 Ctiny_dnn::Conv2dLibDNNBackwardOp
 Ctiny_dnn::Conv2dLibDNNForwardOp
 Ctiny_dnn::Conv2dOp
 Ctiny_dnn::Conv2dOpenCLBackwardOp
 Ctiny_dnn::Conv2dOpenCLForwardOp
 Ctiny_dnn::FullyConnectedGradOp
 Ctiny_dnn::FullyConnectedOp
 Ctiny_dnn::MaxPoolGradOp
 Ctiny_dnn::MaxPoolOp
 Ctiny_dnn::core::OpKernelConstruction
 Ctiny_dnn::core::OpKernelContext
 Ctiny_dnn::core::OpKernelContext::OpParams
 Ctiny_dnn::optimizerBase class of optimizer; usesHessian is true if an optimizer uses the hessian (2nd order derivative of the loss function)
 Ctiny_dnn::stateful_optimizer< 1 >
 Ctiny_dnn::RMSpropRMSprop
 Ctiny_dnn::adagradAdaptive gradient method
 Ctiny_dnn::momentumSGD with momentum
 Ctiny_dnn::stateful_optimizer< 2 >
 Ctiny_dnn::adam[a new optimizer (2015)]
 Ctiny_dnn::gradient_descentSGD without momentum
 Ctiny_dnn::stateful_optimizer< N >
 Ctiny_dnn::core::Params
 Ctiny_dnn::core::conv_params
 Ctiny_dnn::core::fully_params
 Ctiny_dnn::core::maxpool_params
 Ctiny_dnn::Program
 Ctiny_dnn::ProgramHash
 Ctiny_dnn::ProgramManager
 Ctiny_dnn::progress_display
 Ctiny_dnn::random_generator
 Ctiny_dnn::aligned_allocator< T, alignment >::rebind< U >
 Ctiny_dnn::result
 Ctiny_dnn::serialization_helper< OutputArchive >
 Ctiny_dnn::core::session
 Ctiny_dnn::Tensor< U >
 Ctiny_dnn::timer