8
\$\begingroup\$

I'm looking for some general tips on code practices — doing things properly, in an idiomatic, Pythonic way. But mostly I want to know whether this was built correctly. I tried making neural nets in the past but failed horribly; this one, however, seems to be working fine.

The backpropagation method is still not developed because I still haven't figured out the math behind it (if anyone has good material on it for a non-engineer, I would be very glad).

#! /usr/bin/env python
"""
 This module is a framework for a Artificial Neural Network.
 :param NeuralNetwork: See this documentation for how to use this module..
 :type NeuralNetwork: NeuralNetwork
 Author: Fernando Rodrigues dos Santos
"""
import random
import math
class NeuralNetwork:
    """A simple feed-forward artificial neural network.

    USAGE:
    - Create a NeuralNetwork object and set its parameters at your will.
    - Use the method start_net(inputs) with the right amount of inputs to
      calculate the first iteration of the net.
    - Use the get_result() method to extract the result of each output node.
    - Use the get_weights() method to extract the weight of each node.
    - Use the set_weights(weights) method to set the new weight for each node
      in the net.
    # NOT IMPLEMENTED
    - Use the back_propagate_error(error) method to propagate back the result
      with the quantified error.
    """

    def __init__(self, n_input, n_hidden_layer, n_hidden_nodes, n_output):
        """Create each layer of the net (input, hidden and output).

        :param n_input: Number of input nodes of the net
        :type n_input: int
        :param n_hidden_layer: Number of hidden layers
        :type n_hidden_layer: int
        :param n_hidden_nodes: Number of nodes per hidden layer
        :type n_hidden_nodes: int
        :param n_output: Number of output nodes of the net
        :type n_output: int
        """
        self.input_layer = Layer(n_input, "Input")
        # range() instead of the Python-2-only xrange(): identical behavior
        # in a loop, and the module now also runs under Python 3.
        self.hidden_layers = [
            Layer(n_hidden_nodes, "Hidden %i" % (n + 1))
            for n in range(n_hidden_layer)
        ]
        self.output_layer = Layer(n_output, "Output")
        self.connect_layers()

    def connect_layers(self):
        """Connect each layer to its previous and next layer."""
        # Input nodes are only connected forward, to the first hidden layer.
        for node in self.input_layer.nodes:
            node.set_next_layer(self.hidden_layers[0])
        # Hidden layers are connected in both directions.  The neighbor
        # layers are the same for every node of a layer, so they are
        # computed once per layer instead of once per node.
        for i, hidden_layer in enumerate(self.hidden_layers):
            # First hidden layer looks back at the input layer,
            # the others at the preceding hidden layer.
            if i == 0:
                previous_layer = self.input_layer
            else:
                previous_layer = self.hidden_layers[i - 1]
            # Last hidden layer feeds the output layer,
            # the others the next hidden layer.
            if i == len(self.hidden_layers) - 1:
                next_layer = self.output_layer
            else:
                next_layer = self.hidden_layers[i + 1]
            for node in hidden_layer.nodes:
                node.set_next_layer(next_layer)
                node.set_previous_layer(previous_layer)
        # Output nodes are only connected backward, to the last hidden layer.
        for node in self.output_layer.nodes:
            node.set_previous_layer(self.hidden_layers[-1])

    def start_net(self, input_values):
        """Pass the initial input values to the net and compute the result.

        :param input_values: Input values, one per input node (extra values
            are ignored, matching the original behavior)
        :type input_values: list
        :raises IndexError: if fewer values than input nodes are given
        """
        # Set input values on the input-layer nodes.
        for i, node in enumerate(self.input_layer.nodes):
            node.set_value(input_values[i])
        # Feed the (weighted) values forward, layer by layer.
        self.input_layer.feed_forward()
        for layer in self.hidden_layers:
            layer.feed_forward()
        self.output_layer.feed_forward()

    def get_result(self):
        """Return the value of each output node after computing the input.

        :rtype: list
        """
        return [node.value for node in self.output_layer.nodes]

    def get_weights(self):
        """Return the weights of every node of the net (to use in a GA).

        :return: a pair (hidden_layer_weights, output_weights) with the same
            nesting that set_weights() expects
        :rtype: tuple
        """
        hidden_layers = [
            [node.weights for node in layer.nodes]
            for layer in self.hidden_layers
        ]
        out_weights = [node.weights for node in self.output_layer.nodes]
        return hidden_layers, out_weights

    def set_weights(self, weights):
        """Set new weights for every node of the net.

        :param weights: a pair (hidden_layer_weights, output_weights), as
            returned by get_weights()
        """
        output_weights = weights[1]
        for i, node in enumerate(self.output_layer.nodes):
            node.set_weights(output_weights[i])
        for i, layer in enumerate(self.hidden_layers):
            hidden_weights = weights[0][i]
            for j, node in enumerate(layer.nodes):
                node.set_weights(hidden_weights[j])

    def back_propagate_error(self, error):  # TODO: to be implemented
        """Propagate the quantified error back through the net (stub)."""
        pass
class Layer:
    """One layer of the net: a plain container of Node objects."""

    def __init__(self, n_nodes, layer_name):
        """Create n_nodes nodes, each named after this layer.

        :param n_nodes: number of nodes in this layer
        :type n_nodes: int
        :param layer_name: human-readable layer name, used in node names
        :type layer_name: str
        """
        # range() instead of the Python-2-only xrange(): identical behavior
        # in a loop, and the module now also runs under Python 3.
        self.nodes = [
            Node(layer_name + " | Node: %i" % (n + 1))
            for n in range(n_nodes)
        ]

    def feed_forward(self):
        """Ask every node in this layer to (re)compute its value."""
        for node in self.nodes:
            node.feed_forward()
class Node:
    """A single neuron: holds a value and the weights of its incoming edges."""

    def __init__(self, layer_name):
        """
        :param layer_name: human-readable name ("<layer> | Node: <i>")
        :type layer_name: str
        """
        self.name = layer_name
        self.value = None            # last computed activation value
        self.previous_layer = None   # Layer feeding into this node (None for input nodes)
        self.next_layer = None       # Layer this node feeds into (None for output nodes)
        self.weights = []            # one weight per node of previous_layer
        self.iteration = 0           # number of forward passes done so far

    def set_previous_layer(self, layer):
        self.previous_layer = layer

    def set_next_layer(self, layer):
        self.next_layer = layer

    def set_value(self, value):
        self.value = value

    def get_values(self):
        """Return the current values of all nodes in the previous layer."""
        return [node.value for node in self.previous_layer.nodes]

    def set_weights(self, weights):
        self.weights = weights

    def get_weights(self):
        """Return this node's weights, randomizing them on the first pass.

        On the very first call the weights are drawn uniformly from [-1, 1],
        one per node of the previous layer; on later calls the stored
        weights are returned unchanged.
        """
        if self.iteration == 0:
            # range() instead of the Python-2-only xrange(): identical
            # behavior in a loop, and the module now also runs under Python 3.
            weights = [
                random.uniform(-1, 1)
                for _ in range(len(self.previous_layer.nodes))
            ]
        else:
            weights = self.weights
        self.iteration += 1
        return weights

    def feed_forward(self):
        """Compute this node's value from the previous layer.

        1) Get the values of the previous layer's nodes
        2) Multiply them by the weights of this node
        3) Sum it all together
        4) Pass the result to the activation function
        5) Store the returned value as this node's value

        Input nodes (no previous layer) keep the value assigned to them.
        """
        if self.previous_layer:
            values = self.get_values()
            self.weights = self.get_weights()
            weighted_sum = sum(
                v * w for v, w in zip(values, self.weights)
            )
            self.value = self.activation(weighted_sum)

    @staticmethod
    def activation(x):
        """Sigmoid function: 1 / (1 + e**-x).

        Returns 0 when e**-x overflows (very negative x), which is the
        limit the sigmoid tends to there anyway.
        """
        try:
            return 1 / (1 + math.e ** -x)
        except OverflowError:
            return 0
if __name__ == "__main__":
    # Smoke test: 2 inputs -> 2 hidden layers of 2 nodes each -> 2 outputs.
    # n_input now matches the number of input values; the original demo
    # built a one-input net and passed two values, silently ignoring the
    # second one.  The __main__ guard keeps the demo from running on import.
    ANN = NeuralNetwork(n_input=2, n_hidden_layer=2, n_hidden_nodes=2,
                        n_output=2)
    inputs = [1, 2]
    ANN.start_net(inputs)
    output = ANN.get_result()
    net_weights = ANN.get_weights()
    # Round-tripping the weights must leave the net unchanged.
    ANN.set_weights(net_weights)
Jamal
35.2k13 gold badges134 silver badges238 bronze badges
asked May 4, 2015 at 23:32
\$\endgroup\$
3
  • 1
    \$\begingroup\$ Back propagation is kind of inherently horrendously complicated. I've never found a resource that explained it well for people whose multivariable calculus is a bit rusty. The third edition of Artificial Intelligence: A Modern Approach has a pretty good discussion of it, and there's also a book by Tom Mitchell that explains it pretty clearly, but I've never found a decent online resource. \$\endgroup\$ Commented May 5, 2015 at 4:10
  • \$\begingroup\$ In my experience, neural networks become much easier to implement when you start thinking of them in terms of linear algebra and matrices. \$\endgroup\$ Commented May 5, 2015 at 4:12
  • \$\begingroup\$ See this question and its answer. \$\endgroup\$ Commented May 5, 2015 at 10:35

1 Answer 1

2
\$\begingroup\$

First off, the below section of code here:

self.hidden_layers = []
for n in xrange(n_hidden_layer):
 self.hidden_layers.append(
 Layer(n_hidden_nodes, "Hidden %i" % (n+1))
 )

Can be shortened to the below, using a list comprehension:

self.hidden_layers = [
 Layer(n_hidden_nodes, "Hidden {0}".format(n + 1)) for n in xrange(n_hidden_layer)
]

You also have other places where list comprehensions could be used, like here, as a simple example:

for node in self.output_layer.nodes:
 out_weights.append(node.weights)

Do note the use of str.format as well. Using % for string formatting is discouraged in Python 2.6 and later — it is not formally deprecated, but str.format is the preferred, more flexible alternative. Here's an example of how str.format is used:

# str.format without positional or named parameters
print "{} {}".format("Hello", "world")
# str.format with positional parameters
print "{1} {0}".format("world", "Hello")
# str.format with named parameters
print "{word1} {word2}".format(word1="Hello", word2="world")

Finally, in any version of Python 2.x, you need to explicitly have all classes inherit from object to get new-style classes, like this: class MyClass(object):. If you're using Python 3.x, it's okay to just write class MyClass:.

answered Jul 10, 2015 at 2:10
\$\endgroup\$

Your Answer

Draft saved
Draft discarded

Sign up or log in

Sign up using Google
Sign up using Email and Password

Post as a guest

Required, but never shown

Post as a guest

Required, but never shown

By clicking "Post Your Answer", you agree to our terms of service and acknowledge you have read our privacy policy.

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.