-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathflowerprob.js
94 lines (73 loc) · 3.11 KB
/
flowerprob.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
// Training set: each point is [length, width, color] (color: 0 = blue, 1 = red).
const dataB1 = [2, 1, 0];
const dataB2 = [3, 1, 0];
const dataB3 = [2, 0.5, 0];
const dataB4 = [1, 1, 0];
const dataR1 = [3, 1.5, 1];
const dataR2 = [3.5, 0.5, 1];
const dataR3 = [4, 1.5, 1];
const dataR4 = [5.5, 1, 1];
// Unknown flower (the point whose color we want to predict): [length, width].
const dataU = [4.5, 1];
// All labeled training points, used by train() for random sampling.
const all_points = [dataB1, dataB2, dataB3, dataB4, dataR1, dataR2, dataR3, dataR4];
/**
 * Logistic sigmoid: squashes any real number into the open interval (0, 1).
 * @param {number} x - raw input value.
 * @returns {number} 1 / (1 + e^(-x))
 */
function sigmoid(x) {
  const expNegX = Math.exp(-x);
  return 1 / (1 + expNegX);
}
// training
/**
 * Trains a single-neuron logistic classifier with stochastic gradient
 * descent on squared error: cost = (sigmoid(w1*x + w2*y + b) - target)^2.
 *
 * All parameters default to the script's original hard-coded values, so
 * existing `train()` calls behave exactly as before.
 *
 * @param {number[][]} points - training points, each [length, width, target(0|1)].
 * @param {number} iterations - number of SGD steps (one random point per step).
 * @param {number} learning_rate - gradient-descent step size.
 * @returns {{w1: number, w2: number, b: number}} learned weights and bias.
 */
function train(points = all_points, iterations = 50000, learning_rate = 0.2) {
  // Small random initial parameters in [-0.1, 0.1).
  let w1 = Math.random() * .2 - .1;
  let w2 = Math.random() * .2 - .1;
  let b = Math.random() * .2 - .1;
  for (let iter = 0; iter < iterations; iter++) {
    // Pick a random training point.
    const random_idx = Math.floor(Math.random() * points.length);
    const point = points[random_idx];
    const target = point[2]; // target stored in 3rd item in points
    // Feed forward: linear combination squashed through the sigmoid.
    const z = w1 * point[0] + w2 * point[1] + b;
    const pred = sigmoid(z);
    // Backpropagation via the chain rule:
    // d(cost)/d(pred) for cost = (pred - target)^2
    const dcost_dpred = 2 * (pred - target);
    // d(pred)/dz: sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)).
    // `pred` already equals sigmoid(z), so reuse it instead of recomputing.
    const dpred_dz = pred * (1 - pred);
    // dz/dw1 = point[0], dz/dw2 = point[1], dz/db = 1, so the shared factor is:
    const dcost_dz = dcost_dpred * dpred_dz;
    // Gradient-descent parameter updates.
    w1 -= learning_rate * dcost_dz * point[0];
    w2 -= learning_rate * dcost_dz * point[1];
    b -= learning_rate * dcost_dz;
  }
  // Learned parameters, consumed by the prediction code below.
  return { w1, w2, b };
}
// Learn w1, w2 and b from the training set; returned as properties of realData.
const realData = train();
// Weighted sum for the unknown flower dataU = [length, width] using the learned parameters.
const realZ = dataU[0] * realData.w1 + dataU[1] * realData.w2 + realData.b;
// Round the sigmoid output to a hard 0/1 class decision (threshold at 0.5).
const realPred = Math.round(sigmoid(realZ));
// Report the predicted color.
if (realPred === 1) {
  console.log("Seems like a red flower!!");
} else {
  console.log("Seems like a blue flower!!");
}