// Layer.java
// From Classic Computer Science Problems in Java Chapter 7
// Copyright 2020 David Kopec
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package chapter7;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.function.DoubleUnaryOperator;
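
// A single layer of a feed-forward neural network: it owns a list of Neurons
// and caches its most recent outputs so the backpropagation methods below can
// reuse them.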
public class Layer {
    public Optional<Layer> previousLayer; // empty for the input layer
    public List<Neuron> neurons = new ArrayList<>();
    public double[] outputCache; // outputs from the most recent outputs() call
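
    // The input layer has no previous layer and therefore no weights; every other
    // layer gets one random initial weight per neuron in the previous layer.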
    public Layer(Optional<Layer> previousLayer, int numNeurons, double learningRate,
            DoubleUnaryOperator activationFunction, DoubleUnaryOperator derivativeActivationFunction) {
        this.previousLayer = previousLayer;
        Random random = new Random();
        for (int i = 0; i < numNeurons; i++) {
            double[] randomWeights = null;
            if (previousLayer.isPresent()) {
                randomWeights = random.doubles(previousLayer.get().neurons.size()).toArray();
            }
            Neuron neuron = new Neuron(randomWeights, learningRate, activationFunction, derivativeActivationFunction);
            neurons.add(neuron);
        }
        outputCache = new double[numNeurons];
    }
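
    // feed inputs forward through this layer; the input layer just passes its
    // inputs along unchanged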
    public double[] outputs(double[] inputs) {
        if (previousLayer.isPresent()) {
            outputCache = neurons.stream().mapToDouble(n -> n.output(inputs)).toArray();
        } else {
            outputCache = inputs;
        }
        return outputCache;
    }
    // should only be called on the output layer: each neuron's error term (delta)
    // is the activation derivative at its cached net input times (expected - actual)
    public void calculateDeltasForOutputLayer(double[] expected) {
        for (int n = 0; n < neurons.size(); n++) {
            neurons.get(n).delta = neurons.get(n).derivativeActivationFunction.applyAsDouble(neurons.get(n).outputCache)
                    * (expected[n] - outputCache[n]);
        }
    }

    // should not be called on the output layer: a hidden neuron's delta is its
    // activation derivative times the weighted sum of the deltas in the next layer
    public void calculateDeltasForHiddenLayer(Layer nextLayer) {
        for (int i = 0; i < neurons.size(); i++) {
            int index = i; // effectively final copy for use in the lambdas below
            double[] nextWeights = nextLayer.neurons.stream().mapToDouble(n -> n.weights[index]).toArray();
            double[] nextDeltas = nextLayer.neurons.stream().mapToDouble(n -> n.delta).toArray();
            double sumWeightsAndDeltas = Util.dotProduct(nextWeights, nextDeltas);
            neurons.get(i).delta = neurons.get(i).derivativeActivationFunction
                    .applyAsDouble(neurons.get(i).outputCache) * sumWeightsAndDeltas;
        }
    }
}
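
// Usage sketch (illustration only; assumes the chapter's Neuron class and a Util
// class with static sigmoid / derivativeSigmoid helpers as in the book's code;
// in the book, the Network class builds and connects Layers rather than client code):
//
//   Layer inputLayer = new Layer(Optional.empty(), 2, 0.3,
//           Util::sigmoid, Util::derivativeSigmoid);
//   Layer hiddenLayer = new Layer(Optional.of(inputLayer), 3, 0.3,
//           Util::sigmoid, Util::derivativeSigmoid);
//   double[] result = hiddenLayer.outputs(inputLayer.outputs(new double[] { 0.5, 1.0 }));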