neuron.go

package channn

import (
	"fmt"
	"math/rand"
	"sync"
)

// Neuron represents a neuron in a network that sits between other nodes.
// It receives inputs from a chan and calls Fire to send values to
// other Neurons or Outputs.
type Neuron struct {
	InChan chan float64
	NumIn  *int32
	Bias   float64
	mutex  *sync.Mutex
	// Receives control messages in the Listen goroutine.
	Control chan *ControlMessage
	// OutWeights maps a pointer to the next node's input channel
	// to the weight associated with that connection.
	OutWeights map[*chan float64]float64
	nType      NeuronType
}

// String identifies a Neuron by its address.
func (n *Neuron) String() string {
	return fmt.Sprintf("neuron %p", n)
}

// Fire sends the result of the sigmoid function applied to the weighted
// value to every connected output. The value passed in is the sum of all
// inputs plus the bias, as computed in Listen.
func (sn *Neuron) Fire(val float64) {
	for nextPointer, w := range sn.OutWeights {
		*nextPointer <- Sigmoid(w * val)
	}
}

// addOutput registers a pointer to the next node's input channel
// and sets a random initial weight for the connection.
func (n *Neuron) addOutput(c *chan float64) {
	n.mutex.Lock()
	n.OutWeights[c] = rand.Float64()
	n.mutex.Unlock()
}

// ConnectNeurons wires this neuron's output to the next neuron's input
// and tells the next neuron to expect one more input.
func (ne *Neuron) ConnectNeurons(next ChanNeuron) {
	// Add weight and pointer to the next neuron's input.
	inChanPtr := next.GetInChanPtr()
	ne.addOutput(inChanPtr)
	// Send a message to increment the next neuron's input count.
	msg := &ControlMessage{
		Id: INCREMENT_INPUT,
	}
	next.ReceiveControlMsg(msg)
}

// ResetAllWeights sets every output weight to val.
func (n *Neuron) ResetAllWeights(val float64) {
	n.mutex.Lock()
	for k := range n.OutWeights {
		n.OutWeights[k] = val
	}
	n.mutex.Unlock()
}

//////// Satisfy the ChanNeuron interface.

// GetInChanPtr returns a pointer to the input channel.
func (n *Neuron) GetInChanPtr() *chan float64 {
	return &n.InChan
}

// ReceiveControlMsg forwards a control message to the Listen goroutine.
func (n *Neuron) ReceiveControlMsg(msg *ControlMessage) {
	n.Control <- msg
}

// Listen reads all the inputs and calls the
// sigmoid function on the sum of all inputs
// and a bias weight.
func (n *Neuron) Listen() {
	n.mutex.Lock()
	var counter = *n.NumIn
	n.mutex.Unlock()
	var layerTotal float64
	for {
		select {
		case inVal := <-n.InChan:
			layerTotal += inVal
			counter--
			if counter == 0 {
				// layerTotal is the sum of the weighted inputs (Xi * Wi).
				n.Fire(layerTotal + n.Bias)
				layerTotal = 0
				counter = *n.NumIn
			}
		case ctlMsg := <-n.Control:
			switch ctlMsg.Id {
			case DESTROY:
				return
			case SET_WEIGHTS:
				n.ResetAllWeights(ctlMsg.Value.(float64))
			case SET_WEIGHT:
				key := ctlMsg.Key.(*chan float64)
				value := ctlMsg.Value.(float64)
				n.OutWeights[key] = value
			case INCREMENT_INPUT:
				cur := *n.NumIn + 1
				n.NumIn = &cur
				counter = *n.NumIn
			default:
				continue
			}
		}
	}
}
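
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file).
//
// A minimal example of how two Neurons might be wired together and driven by
// hand, based only on the fields and identifiers used above (Sigmoid,
// ControlMessage, DESTROY, INCREMENT_INPUT, ChanNeuron). The function name
// and the literal field initialisation are hypothetical; the real package may
// provide a constructor that does this instead.
func exampleWireTwoNeurons() float64 {
	// Hypothetical helper: build a Neuron with all channels and the mutex
	// initialised, expecting numIn inputs per firing cycle.
	newNode := func(numIn int32) *Neuron {
		return &Neuron{
			InChan:     make(chan float64),
			NumIn:      &numIn,
			Bias:       0.5,
			mutex:      &sync.Mutex{},
			Control:    make(chan *ControlMessage),
			OutWeights: make(map[*chan float64]float64),
		}
	}

	a := newNode(1) // a expects one input per firing cycle
	b := newNode(0) // b's input count is raised by ConnectNeurons below

	// b must already be listening so it can receive the INCREMENT_INPUT
	// control message that ConnectNeurons sends.
	go b.Listen()
	a.ConnectNeurons(b)

	// Attach a plain buffered channel as b's only output so the fired value
	// can be observed.
	out := make(chan float64, 1)
	b.addOutput(&out)

	go a.Listen()
	a.InChan <- 0.25 // feed a single input into a
	result := <-out  // Sigmoid(wb * (Sigmoid(wa * (0.25 + a.Bias)) + b.Bias))

	// Shut both Listen goroutines down.
	a.ReceiveControlMsg(&ControlMessage{Id: DESTROY})
	b.ReceiveControlMsg(&ControlMessage{Id: DESTROY})
	return result
}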