functions.py

import numpy as np

def sigmoid(x):
    """Logistic sigmoid activation."""
    return 1 / (1 + np.exp(-x))

def derivative_sigmoid(x):
    """Derivative of the sigmoid: sigmoid(x) * (1 - sigmoid(x))."""
    return (1 - sigmoid(x)) * sigmoid(x)

def relu(x):
    """ReLU: x for x > 0, 0 otherwise (written via the sign trick)."""
    return x * (1 + np.sign(x)) / 2

def derivative_relu(x):
    """Derivative of ReLU: 1 for x > 0, 0 for x < 0."""
    return (1 + np.sign(x)) / 2

def leaky_relu(x):
    """Leaky ReLU with negative-side slope 0.2."""
    return x * ((1 + np.sign(x)) / 2 + 0.2 * (1 + np.sign(-x)) / 2)

def derivative_leaky_relu(x):
    """Derivative of the leaky ReLU: 1 for x > 0, 0.2 for x < 0."""
    return (1 + np.sign(x)) / 2 + 0.2 * (1 + np.sign(-x)) / 2

def tanh(x):
    """Hyperbolic tangent written via exponentials."""
    return (np.exp(2 * x) - 1) / (np.exp(2 * x) + 1)

def derivative_tanh(x):
    """Derivative of tanh: 1 - tanh(x)**2."""
    return 1 - tanh(x) ** 2
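
# Illustrative check (not part of the original module): each analytic
# derivative above can be verified against a central finite difference, e.g.
#   x = np.linspace(-3.0, 3.0, 13)
#   numerical = (sigmoid(x + 1e-6) - sigmoid(x - 1e-6)) / 2e-6
#   assert np.allclose(numerical, derivative_sigmoid(x), atol=1e-4)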

def cross_entropy_loss(predicted, true):
    """Per-sample cross-entropy; `true` holds integer labels, 10 classes assumed."""
    true = np.int_(np.arange(0, 10) == true)  # one-hot encode the labels
    return -1 * np.sum(true * np.log(predicted), axis=1)

def cross_entropy_loss_derivative(predicted, true):
    """Gradient of softmax + cross-entropy with respect to the logits."""
    true = np.int_(np.arange(0, 10) == true)  # one-hot encode the labels
    return predicted - true

def softmax(z):
    """Softmax over a single vector, or row-wise over a 2-D batch."""
    if z.ndim == 1:
        return np.exp(z) / np.sum(np.exp(z))
    else:
        return np.exp(z) / np.sum(np.exp(z), axis=1).reshape(-1, 1)
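
# Usage sketch (illustrative, not part of the original file): the loss helpers
# expect a batch of softmax rows and a column vector of integer labels, so the
# comparison against np.arange(0, 10) broadcasts to a one-hot matrix, e.g.
#   probs = softmax(np.random.randn(4, 10))               # 4 samples, 10 classes
#   labels = np.array([[3], [7], [0], [9]])               # class ids, shape (4, 1)
#   loss = cross_entropy_loss(probs, labels)              # shape (4,)
#   grad = cross_entropy_loss_derivative(probs, labels)   # shape (4, 10)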

def get_func(func_):
    """Return the (activation, derivative) pair registered under `func_`."""
    if func_ == 'sigmoid':
        return sigmoid, derivative_sigmoid
    elif func_ == 'relu':
        return relu, derivative_relu
    elif func_ == 'leaky_relu':
        return leaky_relu, derivative_leaky_relu
    elif func_ == 'tanh':
        return tanh, derivative_tanh
    else:
        raise ValueError("Activation function is not specified or unknown")

def get_loss_func(func_):
    """Return the (loss, derivative) pair registered under `func_`."""
    if func_ == 'cross_entropy_loss':
        return cross_entropy_loss, cross_entropy_loss_derivative
    else:
        raise ValueError("Loss function is not specified or unknown")