
Commit 65efde8

fohx13 authored and pavanky committed
add ThresholdedReLU layer
fixed PReLU, ELU forward methods, misc name changes
1 parent 785f0b9 commit 65efde8

File tree

4 files changed (+45, -24 lines)


include/af/autograd/Functions.hpp

Lines changed: 2 additions & 0 deletions
@@ -40,6 +40,8 @@ namespace af {
     Variable operator >=(const Variable &lhs, const double &rhs);
     Variable operator <=(const Variable &lhs, const double &rhs);
 
+    Variable operator !(const Variable &input);
+
     Variable negate(const Variable &input);
     Variable reciprocal(const Variable &input);

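The new operator ! declared here is what the reworked activations in this commit use to invert a boolean mask (see !mask in Activations.cpp below). A minimal sketch of the intended elementwise semantics on raw af::array values; this is illustrative only and assumes the Variable overload simply wraps ArrayFire's elementwise logical NOT:

// Illustrative sketch, not part of the commit.
#include <arrayfire.h>

int main() {
    af::array x    = af::randu(3, 3) - 0.5;
    af::array mask = x > 0.0;   // 1 where x is positive, 0 elsewhere
    af::array inv  = !mask;     // complement: 1 where x is non-positive
    af_print(mask);
    af_print(inv);
    return 0;
}
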
include/af/nn/Modules/Activations.hpp

Lines changed: 11 additions & 0 deletions
@@ -67,6 +67,17 @@ namespace af
 
         autograd::Variable forward(const autograd::Variable &input);
     };
+
+    class ThresholdReLU : public Module
+    {
+    private:
+        double m_threshold;
+    public:
+        ThresholdReLU(double threshold = 1.0);
+
+        autograd::Variable forward(const autograd::Variable &input);
+    };
+
 
 
 }

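A minimal usage sketch of the new module. The header path, the af::nn namespace, and the Variable(af::array, bool) constructor are assumptions based on the file tree and surrounding code, not confirmed by this commit:

// Usage sketch only; names marked "assumed" are not shown in this diff.
#include <arrayfire.h>
#include <af/nn/Modules/Activations.hpp>           // path taken from the file tree above

int main() {
    af::nn::ThresholdReLU act(2.0);                // nn namespace assumed; keeps values >= 2.0
    af::autograd::Variable x(af::randu(5), true);  // assumed ctor: (af::array data, bool calc_grad)
    auto y = act.forward(x);                       // y = x where x >= 2.0, else 0
    af_print(y.array());                           // Variable::array() as used elsewhere in this commit
    return 0;
}
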
src/autograd/Functions.cpp

Lines changed: 16 additions & 16 deletions
@@ -119,26 +119,26 @@ namespace af {
 
     Variable max(const Variable &lhs, const Variable &rhs)
     {
-        auto mask = lhs > rhs;
-        auto result = max(lhs.array(), rhs.array());
-
-        auto grad_func = [](std::vector<Variable> &inputs, const Variable &grad_output) {
-            inputs[0].addGrad( inputs[2] * grad_output);
-            inputs[1].addGrad(!inputs[2] * grad_output);
-        };
-        return Variable(result, {lhs, rhs, mask}, grad_func);
+        auto mask = lhs > rhs;
+        auto result = max(lhs.array(), rhs.array());
+
+        auto grad_func = [](std::vector<Variable> &inputs, const Variable &grad_output) {
+            inputs[0].addGrad( inputs[2] * grad_output);
+            inputs[1].addGrad(!inputs[2] * grad_output);
+        };
+        return Variable(result, {lhs, rhs, mask}, grad_func);
     }
 
     Variable min(const Variable &lhs, const Variable &rhs)
     {
-        auto mask = lhs < rhs;
-        auto result = min(lhs.array(), rhs.array());
-
-        auto grad_func = [](std::vector<Variable> &inputs, const Variable &grad_output) {
-            inputs[0].addGrad( inputs[2] * grad_output);
-            inputs[1].addGrad(!inputs[2] * grad_output);
-        };
-        return Variable(result, {lhs, rhs, mask}, grad_func);
+        auto mask = lhs < rhs;
+        auto result = min(lhs.array(), rhs.array());
+
+        auto grad_func = [](std::vector<Variable> &inputs, const Variable &grad_output) {
+            inputs[0].addGrad( inputs[2] * grad_output);
+            inputs[1].addGrad(!inputs[2] * grad_output);
+        };
+        return Variable(result, {lhs, rhs, mask}, grad_func);
     }
 
     #define INSTANTIATE_FUNCTION(FN) \

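In the grad_func above, inputs[2] is the saved comparison mask, so the lhs input receives mask * grad_output and the rhs input receives the complement via the new operator !. A standalone sketch of that routing on raw af::array values, outside the autograd wrapper (illustrative only):

#include <arrayfire.h>

int main() {
    af::array lhs = af::randu(3, 3);
    af::array rhs = af::randu(3, 3);
    af::array grad_output = af::constant(1.0, 3, 3);  // stand-in for an upstream gradient

    af::array mask = lhs > rhs;                       // 1 where lhs supplies max(lhs, rhs)

    af::array grad_lhs =  mask * grad_output;         // mirrors inputs[0].addGrad(...)
    af::array grad_rhs = !mask * grad_output;         // mirrors inputs[1].addGrad(...)

    af_print(grad_lhs);
    af_print(grad_rhs);
    return 0;
}
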
src/nn/Modules/Activations.cpp

Lines changed: 16 additions & 8 deletions
@@ -60,22 +60,30 @@ namespace af
 
     Variable PReLU::forward(const Variable &input)
     {
-        auto tmp = max(input, 0.0);
-        auto res = expandAs(m_parameters[0],tmp) * tmp;
-        //TODO: Determine if doing the max after the mul is preferable
-        return res;
-
+        auto mask = input >= 0.0;
+        return (input * mask) + (input * !mask * expandAs(m_parameters[0],input));
     }
 
     ELU::ELU(double alpha) :
-        m_alpha(alpha)
+        m_alpha(alpha)
     {
     }
 
     Variable ELU::forward(const Variable &input)
     {
-        auto res = max(input, m_alpha * (exp(input) - 1));
-        return res;
+        auto mask = input >= 0.0;
+        return (mask * input) + (!mask * m_alpha * (exp(input)-1));
+    }
+
+    ThresholdReLU::ThresholdReLU(double threshold) :
+        m_threshold(threshold)
+    {
+    }
+
+    Variable ThresholdReLU::forward(const Variable &input)
+    {
+        auto mask = input >= m_threshold;
+        return input * mask;
     }
 
 }

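All three rewritten forwards follow the same pattern: build a boolean mask once, then blend the two branches with mask and !mask. A sketch of the resulting elementwise behaviour on raw af::array values; slope, alpha, and threshold are made-up constants standing in for the module parameters (illustrative only):

#include <arrayfire.h>

int main() {
    af::array x = af::randn(4, 4);
    double slope = 0.1, alpha = 1.0, threshold = 1.0;  // stand-ins, not values from the commit

    af::array mask = x >= 0.0;

    af::array prelu = (x * mask) + (x * !mask * slope);                 // x, or slope * x for x < 0
    af::array elu   = (mask * x) + (!mask * alpha * (af::exp(x) - 1));  // x, or alpha * (e^x - 1) for x < 0
    af::array trelu = x * (x >= threshold);                             // x where x >= threshold, else 0

    af_print(prelu);
    af_print(elu);
    af_print(trelu);
    return 0;
}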