I've written a toy neural network in Java. I ran it several million times with the same inputs, with only the randomized weights changing from run to run. The average of all of the outputs is not 0.5, as I would have expected. The code is in this GitHub repository: https://github.com/jack-t/petulant-octo-bear.

The Layer class:

import com.google.common.base.Preconditions;

public class Layer extends AbstractLayer {

    private double[][] weights;

    public Layer(int neurons, int prevLayerNeurons) {
        super(neurons);
        // One row per neuron; the extra column holds that neuron's bias.
        weights = new double[neurons][prevLayerNeurons + 1];
        randomize(weights);
    }

    protected void randomize(double[][] x) {
        for (int j = 0; j < x.length; j++) {
            for (int i = 0; i < x[j].length; i++) {
                x[j][i] = Math.random(); // uniform in [0, 1)
            }
        }
    }

    @Override
    public double[] compute(double inputs[]) {
        Preconditions.checkArgument(inputs.length == weights[0].length - 1, "incorrect number of inputs");
        double[] ret = new double[neurons];
        for (int i = 0; i < neurons; i++) {
            // Weighted sum of the inputs, minus the bias, through a sigmoid.
            double acc = 0;
            for (int j = 0; j < inputs.length; j++) {
                acc += inputs[j] * weights[i][j];
            }
            acc -= weights[i][weights[i].length - 1];
            ret[i] = sigmoid(acc);
        }
        return ret;
    }

    private double sigmoid(double x) {
        return 1 / (1 + Math.exp(-x));
    }
}
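
To illustrate how a single layer behaves, here is a minimal usage sketch (hypothetical, not taken from the repository; the layer sizes and inputs are made up, and it assumes AbstractLayer exposes the neurons count as in the code above):

public class LayerDemo {
    public static void main(String[] args) {
        // A layer of 2 neurons fed by 3 inputs; weights and biases are random.
        Layer layer = new Layer(2, 3);
        double[] out = layer.compute(new double[] {0.0, 0.5, 1.0});
        // Each output is a sigmoid activation, so it lies strictly in (0, 1).
        System.out.println(java.util.Arrays.toString(out));
    }
}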

And the Network class:

import java.util.LinkedList;
import java.util.List;

public class Network {

    private List<AbstractLayer> layers;

    public Network(int[] neuronCounts) {
        layers = new LinkedList<AbstractLayer>();
        layers.add(new InputLayer(neuronCounts[0]));
        for (int i = 1; i < neuronCounts.length; i++) {
            layers.add(new Layer(neuronCounts[i], neuronCounts[i - 1]));
        }
    }

    public boolean[] run(double[] inputs) {
        // Feed the inputs forward through every layer in order.
        double[] tmps = inputs;
        for (int i = 0; i < layers.size(); i++) {
            tmps = layers.get(i).compute(tmps);
        }
        // Threshold each final activation at 0.5 to get a boolean output.
        boolean[] rets = new boolean[tmps.length];
        for (int i = 0; i < rets.length; i++) {
            rets[i] = tmps[i] > 0.5;
        }
        return rets;
    }

    public AbstractLayer getLayer(int i) {
        return layers.get(i);
    }
}
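
For context, a minimal sketch of the kind of experiment described at the top (the 2-4-1 topology, the fixed inputs, and the run count are all assumptions; it also assumes InputLayer simply passes its inputs through, as the repository's Network constructor suggests):

public class AverageOutput {
    public static void main(String[] args) {
        int runs = 1_000_000;          // assumed run count; I used several million
        double[] inputs = {0.3, 0.7};  // assumed fixed inputs
        int fired = 0;
        for (int i = 0; i < runs; i++) {
            // Each Network gets fresh random weights in its constructor.
            Network net = new Network(new int[] {2, 4, 1});
            if (net.run(inputs)[0]) {
                fired++;
            }
        }
        // If the thresholded output were unbiased, this would be near 0.5.
        System.out.println("firing rate: " + (double) fired / runs);
    }
}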
