//
// Activations.cpp
// ConvNet
//
// Created by Márton Szemenyei on 2017. 09. 28..
// Copyright © 2017. Márton Szemenyei. All rights reserved.
//
#include "Activations.h"
#include <math.h>
// Inplace activations:
// N is the total number of elements to activate
// ReLU: clamp negative values to zero
void ReLU( float *inout, int32_t N )
{
    for (int32_t i = 0; i < N; i++) {
        if (inout[i] < 0.f) {
            inout[i] = 0.f;
        }
    }
}

// Logistic sigmoid: 1 / (1 + exp(-x))
void sigmoid( float *inout, int32_t N )
{
    for (int32_t i = 0; i < N; i++) {
        inout[i] = 1.f/(1.f + expf(-inout[i]));
    }
}

// Leaky ReLU with a fixed negative slope of 0.1
void leakyReLU( float *inout, int32_t N )
{
    for (int32_t i = 0; i < N; i++) {
        inout[i] = inout[i] < 0.f ? inout[i] * 0.1f : inout[i];
    }
}

// Hyperbolic tangent (wraps tanhf from math.h)
void tanh( float *inout, int32_t N )
{
    for (int32_t i = 0; i < N; i++) {
        inout[i] = tanhf(inout[i]);
    }
}
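
// Illustrative in-place usage (the buffer values below are assumptions for
// demonstration only, not part of the original file):
//
//   float v[4] = { -1.f, 0.f, 0.5f, 2.f };
//   ReLU(v, 4);        // v becomes { 0.f, 0.f, 0.5f, 2.f }
//   // leakyReLU on the original v would instead give { -0.1f, 0.f, 0.5f, 2.f };
//   // sigmoid maps each element into (0, 1), tanh into (-1, 1)
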
// Non-inplace softmax:
// N is the total number of elements
// channels is the number of channels (classes)
// Elements are laid out channel-major: element i of channel n is at index n*chLimit + i
void softmax(const float *input, int32_t N, int32_t channels, float *output)
{
    int32_t chLimit = N/channels;
    for (int32_t i = 0; i < chLimit; i++) {
        // Subtract the per-position maximum before exponentiating to avoid overflow
        float maxVal = input[i];
        for (int32_t n = 1; n < channels; n++) {
            if (input[ n*chLimit + i ] > maxVal) {
                maxVal = input[ n*chLimit + i ];
            }
        }
        float sum = 0.f;
        for (int32_t n = 0; n < channels; n++) {
            output[ n*chLimit + i ] = expf(input[ n*chLimit + i ] - maxVal);
            sum += output[ n*chLimit + i ];
        }
        for (int32_t n = 0; n < channels; n++) {
            output[ n*chLimit + i ] /= sum;
        }
    }
}
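
#ifdef ACTIVATIONS_USAGE_SKETCH
// Minimal usage sketch, compiled only when ACTIVATIONS_USAGE_SKETCH is
// defined. The macro name, buffer contents, and sizes are assumptions for
// illustration; the channel-major layout (all of channel 0, then all of
// channel 1) matches the indexing used by softmax() above.
static void activationsUsageSketch()
{
    // 2 channels x 3 positions, stored channel-major: the value of channel n
    // at position i lives at index n*3 + i
    float logits[6] = { 1.f, -2.f, 0.5f,    // channel 0
                        3.f, -1.f, 0.25f }; // channel 1
    float probs[6];
    softmax(logits, 6, 2, probs);
    // For every position i, probs[i] + probs[3 + i] == 1.f (up to rounding)
}
#endif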