| /*! |
| * Copyright (c) 2015 by Contributors |
| * \file leaky_relu.cc |
 * \brief LeakyReLU operator (CPU implementation and registration)
| * \author Bing Xu |
| */ |
| |
| #include "./leaky_relu-inl.h" |
| |
#include <nnvm/op_attr_types.h>

namespace mxnet {
| namespace op { |
// CPU specialization of the operator factory; the GPU counterpart is
// provided in leaky_relu.cu.
template<>
Operator *CreateOp<cpu>(LeakyReLUParam param) {
  return new LeakyReLUOp<cpu>(param);
}
| |
Operator *LeakyReLUProp::CreateOperator(Context ctx) const {
  // Dispatches to CreateOp<cpu> or CreateOp<gpu> depending on the device of ctx.
  DO_BIND_DISPATCH(CreateOp, param_);
}
| |
| DMLC_REGISTER_PARAMETER(LeakyReLUParam); |
| |
| MXNET_REGISTER_OP_PROPERTY(LeakyReLU, LeakyReLUProp) |
| .describe(R"code(Applies Leaky rectified linear unit activation element-wise to the input. |
| |
Leaky ReLUs attempt to fix the "dying ReLU" problem by allowing a small,
non-zero `slope` when the input is negative, while keeping a slope of one
when the input is positive.
| |
The following modified ReLU activation functions are supported:
| |
| - *elu*: Exponential Linear Unit. `y = x > 0 ? x : slope * (exp(x)-1)` |
| - *leaky*: Leaky ReLU. `y = x > 0 ? x : slope * x` |
- *prelu*: Parametric ReLU. This is the same as *leaky* except that `slope` is
  learnt during training.
- *rrelu*: Randomized ReLU. Same as *leaky*, but the `slope` is drawn uniformly
  at random from *[lower_bound, upper_bound)* during training, and fixed to
  *(lower_bound+upper_bound)/2* during inference.
| |
| )code" ADD_FILELINE) |
| .add_argument("data", "NDArray-or-Symbol", "Input data to activation function.") |
| .add_arguments(LeakyReLUParam::__FIELDS__()); |
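
// The variant formulas documented above can be sanity-checked with a tiny
// standalone program. The following is only an illustrative sketch, not part
// of the operator (the actual kernels live in leaky_relu-inl.h and are
// templated over device and data type); it assumes a fixed scalar slope of
// 0.25, the *prelu* default registered below:
//
//   #include <cmath>
//   #include <cstdio>
//
//   int main() {
//     const float slope = 0.25f;
//     const float xs[] = {-2.0f, -0.5f, 0.0f, 1.0f};
//     for (float x : xs) {
//       const float leaky = x > 0.0f ? x : slope * x;                   // *leaky*
//       const float elu = x > 0.0f ? x : slope * (std::exp(x) - 1.0f);  // *elu*
//       std::printf("x=%5.2f  leaky=%7.4f  elu=%7.4f\n", x, leaky, elu);
//     }
//     return 0;
//   }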
| |
NNVM_REGISTER_OP(LeakyReLU)
.set_attr<nnvm::FSetInputVarAttrOnCompose>("FSetInputVarAttrOnCompose",
  [](const nnvm::NodeAttrs& attrs, nnvm::NodePtr var, const int index) {
    // Input 1 is the learnable `gamma` (slope) used by the *prelu* mode.
    // Default-initialize it to a constant 0.25 unless the user has already
    // attached an initializer to the variable.
    if (index == 1 && var->attrs.dict.find("__init__") == var->attrs.dict.end()) {
      var->attrs.dict["__init__"] = "[\"Constant\", {\"value\": 0.25}]";
    }
  });
| |
| } // namespace op |
| } // namespace mxnet |