我是靠谱客的博主 舒服帅哥,最近开发中收集的这篇文章主要介绍Java实现的简单神经网络(基于Sigmoid激活函数),觉得挺不错的,现在分享给大家,希望可以做个参考。

概述

主体代码

NeutronNetwork.java

package com.rockbb.math.nnetwork;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * A simple fully-connected feed-forward network trained with backpropagation.
 *
 * <p>The network is a chain of {@link NeutronLayer}s. {@code sizes[0]} is the
 * input width; each subsequent entry is the neuron count of a layer. Training
 * loop: fill targets, run {@link #predict}, then {@link #backward}.
 */
public class NeutronNetwork {
    private List<NeutronLayer> layers;

    /**
     * Builds the layer chain.
     *
     * @param sizes     {@code sizes[0]} = input width, remaining entries = layer sizes
     * @param bpFactor  learning rate used during backpropagation
     * @param activator activation function shared by all layers
     */
    public NeutronNetwork(int[] sizes, double bpFactor, Activator activator) {
        layers = new ArrayList<>(sizes.length - 1);
        int inputSize = sizes[0];
        for (int i = 1; i < sizes.length; i++) {
            NeutronLayer layer = new NeutronLayer(inputSize, sizes[i], activator, bpFactor);
            layers.add(layer);
            inputSize = sizes[i];
        }
        // Link each layer to its successor so hidden layers can read downstream deltas.
        for (int i = 0; i < layers.size() - 1; i++) {
            layers.get(i).setNext(layers.get(i + 1));
        }
    }

    public List<NeutronLayer> getLayers() {return layers;}

    public void setLayers(List<NeutronLayer> layers) {this.layers = layers;}

    /** @return the mean squared error of the output layer (targets must be filled first). */
    public double getError() {
        return layers.get(layers.size() - 1).getError();
    }

    /**
     * Forward pass: feeds {@code inputs} through every layer in order.
     *
     * @param inputs input vector of width {@code sizes[0]}
     * @return the output layer's activations
     */
    public List<Double> predict(List<Double> inputs) {
        List<Double> middle = inputs;
        for (int i = 0; i < layers.size(); i++) {
            middle = layers.get(i).forward(middle);
        }
        return middle;
    }

    /** Backward pass: propagates deltas from the output layer to the first layer. */
    public void backward() {
        for (int j = layers.size() - 1; j >= 0; j--) {
            layers.get(j).backward();
        }
    }

    /** Stores the expected outputs on the output layer for the next error/backward step. */
    public void fillTargets(List<Double> targets) {
        layers.get(layers.size() - 1).fillTargets(targets);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int j = 0; j < layers.size(); j++) {
            sb.append(layers.get(j).toString());
        }
        return sb.toString();
    }

    /** Formats a vector as fixed-width decimals, e.g. for debugging dumps. */
    public static String listToString(List<Double> list) {
        StringBuilder sb = new StringBuilder();
        for (Double t : list) {
            sb.append(String.format("% 10.8f ", t));
        }
        return sb.toString();
    }

    /** Demo: learns XOR with a 2-4-1 topology. */
    public static void main(String[] args) {
        int[] sz = new int[]{2, 4, 1};
        double[][] trainData = {{0d, 0d}, {0d, 1d}, {1d, 0d}, {1d, 1d}};
        double[][] targetData = {{0d}, {1d}, {1d}, {0d}};
        NeutronNetwork nn = new NeutronNetwork(sz, 0.5d, new SigmoidActivator());
        for (int kk = 0; kk < 20000; kk++) {
            double totalError = 0d;
            for (int i = 0; i < trainData.length; i++) {
                List<Double> inputs = Arrays.asList(trainData[i][0], trainData[i][1]);
                List<Double> targets = Arrays.asList(targetData[i][0]);
                // Targets must be in place before predict/backward use them.
                nn.fillTargets(targets);
                nn.predict(inputs);
                //System.out.print(nn);
                // Fixed: the original used "%.8fn" (a lost "\n" escape) — use %n for a newline.
                System.out.println(String.format("kk:%5d, i:%d, error: %.8f%n", kk, i, nn.getError()));
                totalError += nn.getError();
                nn.backward();
            }
            System.out.println(String.format("kk:%5d, Total Error: %.8f%n%n", kk, totalError));
            if (totalError < 0.0001) {
                System.out.println(nn);
                break;
            }
        }
        System.out.println(nn);
    }
}

 

NeutronLayer.java

package com.rockbb.math.nnetwork;
import java.util.ArrayList;
import java.util.List;
/**
 * One fully-connected layer: a list of {@link Neutron}s sharing a single bias,
 * an activation function, and a learning rate for backpropagation.
 *
 * <p>The layer caches its last inputs during {@link #forward} because
 * {@link #backward} needs them for the weight-gradient updates.
 */
public class NeutronLayer {
    private int inputSize;
    private List<Neutron> neutrons;
    private double bias;            // single bias shared by all neurons in this layer
    private Activator activator;
    private NeutronLayer next;      // downstream layer; null for the output layer
    private double bpFactor;        // learning rate
    private List<Double> inputs;    // inputs from the last forward pass, used by backward()

    /**
     * @param inputSize  width of the input vector this layer accepts
     * @param size       number of neurons (= output width)
     * @param activator  activation function applied to each neuron's net input
     * @param bpFactor   learning rate used when applying deltas
     */
    public NeutronLayer(int inputSize, int size, Activator activator, double bpFactor) {
        this.inputSize = inputSize;
        this.activator = activator;
        this.bpFactor = bpFactor;
        this.bias = Math.random() - 0.5;  // random bias in [-0.5, 0.5)
        this.neutrons = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            Neutron neutron = new Neutron(this, inputSize);
            neutrons.add(neutron);
        }
    }

    public int getInputSize() {return inputSize;}
    public void setInputSize(int inputSize) {this.inputSize = inputSize;}
    public List<Neutron> getNeutrons() {return neutrons;}
    public void setNeutrons(List<Neutron> neutrons) {this.neutrons = neutrons;}
    public double getBias() {return bias;}
    public void setBias(double bias) {this.bias = bias;}
    public Activator getActivator() {return activator;}
    public void setActivator(Activator activator) {this.activator = activator;}
    public NeutronLayer getNext() {return next;}
    public void setNext(NeutronLayer next) {this.next = next;}

    /**
     * Forward pass: evaluates every neuron against {@code inputs} and caches
     * the inputs for the subsequent backward pass.
     *
     * @return one activation per neuron, in neuron order
     */
    public List<Double> forward(List<Double> inputs) {
        this.inputs = inputs;
        List<Double> outputs = new ArrayList<>(neutrons.size());
        for (int i = 0; i < neutrons.size(); i++) {
            outputs.add(neutrons.get(i).forward(inputs));
        }
        return outputs;
    }

    /**
     * Backward pass. For the output layer the delta comes from the target/output
     * difference; for hidden layers it is accumulated from the downstream layer's
     * deltas and weights. In both cases the same weight and bias updates follow,
     * so the duplicated update code of the original has been merged.
     */
    public void backward() {
        double totalDelta = 0d;
        for (int i = 0; i < neutrons.size(); i++) {
            Neutron n = neutrons.get(i);
            double delta;
            if (this.next == null) {
                // Output layer: delta = -(target - output) * f'(output)
                delta = -(n.getTarget() - n.getOutput()) * activator.backwardDelta(n.getOutput());
            } else {
                // Hidden layer: accumulate downstream deltas weighted by the
                // connections leaving this neuron (weight index i downstream).
                delta = 0;
                List<Neutron> downNeutrons = next.getNeutrons();
                for (int j = 0; j < downNeutrons.size(); j++) {
                    delta += downNeutrons.get(j).getBpDelta() * downNeutrons.get(j).getWeights().get(i);
                }
                delta = delta * activator.backwardDelta(n.getOutput());
            }
            n.setBpDelta(delta);
            totalDelta += delta;
            // Reflect to each weight under this neuron: w -= rate * delta * input
            for (int j = 0; j < n.getWeights().size(); j++) {
                n.getWeights().set(j, n.getWeights().get(j) - bpFactor * delta * inputs.get(j));
            }
        }
        // Reflect to the shared bias using the average delta.
        this.bias = this.bias - bpFactor * totalDelta / neutrons.size();
    }

    /** @return mean squared error over this layer's neurons: sum((t-o)^2) / (2n). */
    public double getError() {
        double totalError = 0d;
        for (int i = 0; i < neutrons.size(); i++) {
            totalError += Math.pow(neutrons.get(i).getError(), 2);
        }
        return totalError / (2 * neutrons.size());
    }

    /** Assigns one expected output per neuron, in neuron order. */
    public void fillTargets(List<Double> targets) {
        for (int i = 0; i < neutrons.size(); i++) {
            neutrons.get(i).setTarget(targets.get(i));
        }
    }

    /** Applies the layer bias and activation to a neuron's raw weighted sum. */
    public double filter(double netInput) {
        return activator.forward(netInput + bias);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        // Fixed: original format strings ended in "n" (a lost "\n" escape) — use %n.
        sb.append(String.format("Input size: %d, bias: %.8f%n", inputSize, bias));
        for (int i = 0; i < neutrons.size(); i++) {
            sb.append(String.format("%3d: %s%n", i, neutrons.get(i).toString()));
        }
        return sb.toString();
    }
}

 

Neutron.java

package com.rockbb.math.nnetwork;
import java.util.ArrayList;
import java.util.List;
/**
 * A single neuron: a weight vector plus the state needed for backpropagation
 * (last output, expected target, and the most recent delta).
 */
public class Neutron {
    private NeutronLayer layer;     // owning layer; supplies bias + activation via filter()
    private List<Double> weights;   // one weight per input connection
    private double output;          // activation from the last forward pass
    private double target;          // expected output, set before error/backward
    private double bpDelta;         // delta from the last backward pass

    /**
     * @param layer     the layer this neuron belongs to
     * @param inputSize number of incoming connections (one weight each)
     */
    public Neutron(NeutronLayer layer, int inputSize) {
        this.layer = layer;
        this.weights = new ArrayList<>(inputSize);
        // Initialize each weight with value [0.1, 1)
        for (int k = 0; k < inputSize; k++) {
            weights.add(Math.random() * 0.9 + 0.1);
        }
        this.bpDelta = 0d;
    }

    public NeutronLayer getLayer() {return layer;}
    public void setLayer(NeutronLayer layer) {this.layer = layer;}
    public List<Double> getWeights() {return weights;}
    public void setWeights(List<Double> weights) {this.weights = weights;}
    public double getOutput() {return output;}
    public void setOutput(double output) {this.output = output;}
    public double getTarget() {return target;}
    public void setTarget(double target) {this.target = target;}
    public double getBpDelta() {return bpDelta;}
    public void setBpDelta(double bpDelta) {this.bpDelta = bpDelta;}

    /** @return the raw weighted sum of {@code inputs} (no bias, no activation). */
    public double calcNetInput(List<Double> inputs) {
        double sum = 0d;
        for (int k = 0; k < weights.size(); k++) {
            sum += weights.get(k) * inputs.get(k);
        }
        return sum;
    }

    /** Runs the forward pass: weighted sum, then the layer's bias + activation. */
    public double forward(List<Double> inputs) {
        this.output = layer.filter(calcNetInput(inputs));
        return this.output;
    }

    /** @return the signed error (target - output) for this neuron. */
    public double getError() {
        return target - output;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("O:% 10.8f T:% 10.8f D:% 10.8f w:{", output, target, bpDelta));
        for (Double w : weights) {
            sb.append(String.format("% 10.8f ", w));
        }
        return sb.append('}').toString();
    }
}

激活函数

Activator.java

package com.rockbb.math.nnetwork;
/** Activation function used by the network's layers. */
public interface Activator {
// Applies the activation to the biased net input; returns the neuron's output.
double forward(double input);
// Returns the derivative term for backpropagation, expressed in terms of the
// forward OUTPUT (not the input) — see SigmoidActivator: f'(x) = o * (1 - o).
double backwardDelta(double output);
}

SigmoidActivator.java

package com.rockbb.math.nnetwork;
public class SigmoidActivator implements Activator {
public double forward(double input) {
return 1 / (1 + Math.exp(-input));
}
public double backwardDelta(double output) {
return output * (1 - output);
}
}

在同样的训练数据和误差目标下, 比 http://www.emergentmind.com/neural-network 使用更少的训练次数.

使用Sigmoid激活函数工作正常.

使用ReLU激活函数时总会使某个Neutron冻结, 不能收敛, 待检查

 

最后

以上就是舒服帅哥为你收集整理的Java实现的简单神经网络(基于Sigmoid激活函数)的全部内容,希望文章能够帮你解决Java实现的简单神经网络(基于Sigmoid激活函数)所遇到的程序开发问题。

如果觉得靠谱客网站的内容还不错,欢迎将靠谱客网站推荐给程序员好友。

本图文内容来源于网友提供,作为学习参考使用,或来自网络收集整理,版权属于原作者所有。
点赞(54)

评论列表共有 0 条评论

立即
投稿
返回
顶部