Commit adaf753

Initial code for Chapter 7
1 parent 8f0c421 commit adaf753

File tree: 5 files changed, +700 -0 lines changed
Lines changed: 370 additions & 0 deletions
@@ -0,0 +1,370 @@
//: [Previous](@previous)

// Classic Computer Science Problems in Swift Chapter 7 Source

// Copyright 2017 David Kopec
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/// Fairly Simple Neural Networks

import Accelerate
import Foundation

// MARK: Randomization & Statistical Helpers

/// Create *number* of random Doubles between 0.0 and 1.0
func randomWeights(number: Int) -> [Double] {
    return (0..<number).map { _ in Math.randomFractional() }
}

/// Create *number* of random Doubles between 0.0 and *limit*
func randomNums(number: Int, limit: Double) -> [Double] {
    return (0..<number).map { _ in Math.randomTo(limit: limit) }
}

/// primitive shuffle - not Fisher-Yates, so not a uniform distribution
extension Sequence where Iterator.Element: Comparable {
    var shuffled: [Self.Iterator.Element] {
        return sorted { _, _ in arc4random() % 2 == 0 }
    }
}
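
// Aside (not part of the original commit): a uniform alternative to the biased
// comparator shuffle above is Fisher-Yates. A minimal sketch for arrays,
// assuming Swift 4's swapAt(_:_:) is available:
extension Array {
    mutating func fisherYatesShuffleInPlace() {
        guard count > 1 else { return }
        for i in stride(from: count - 1, through: 1, by: -1) {
            // pick a uniform index in 0...i and swap it into position i
            let j = Int(arc4random_uniform(UInt32(i + 1)))
            if i != j { swapAt(i, j) }
        }
    }
}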

/// assumes all rows are of equal length
/// and divides each column by its max throughout the data set
/// for that column
func normalizeByColumnMax(dataset: inout [[Double]]) {
    for colNum in 0..<dataset[0].count {
        let column = dataset.map { 0ドル[colNum] }
        let maximum = column.max()!
        for rowNum in 0..<dataset.count {
            dataset[rowNum][colNum] = dataset[rowNum][colNum] / maximum
        }
    }
}

// MARK: Activation Functions and Their Derivatives

/// the classic sigmoid activation function
func sigmoid(_ x: Double) -> Double {
    return 1.0 / (1.0 + exp(-x))
}

// as derived at http://www.ai.mit.edu/courses/6.892/lecture8-html/sld015.htm
func derivativeSigmoid(_ x: Double) -> Double {
    return sigmoid(x) * (1 - sigmoid(x))
}
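
// Aside (not in the original source): the identity used above follows from the
// chain rule. With s(x) = 1 / (1 + e^(-x)):
//   s'(x) = e^(-x) / (1 + e^(-x))^2
//         = s(x) * (e^(-x) / (1 + e^(-x)))
//         = s(x) * (1 - s(x))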

// MARK: SIMD Accelerated Math

// Based on example from Surge project
// https://github.com/mattt/Surge/blob/master/Source/Arithmetic.swift
/// Find the dot product of two vectors
/// assuming that they are of the same length
/// using SIMD instructions to speed computation
func dotProduct(_ xs: [Double], _ ys: [Double]) -> Double {
    var answer: Double = 0.0
    vDSP_dotprD(xs, 1, ys, 1, &answer, vDSP_Length(xs.count))
    return answer
}
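
// Aside (not in the original source): a plain-Swift reference version of the
// same operation, useful for sanity-checking the Accelerate call on small inputs:
//   zip(xs, ys).map(*).reduce(0, +)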

// Based on example from Surge project
// https://github.com/mattt/Surge/blob/master/Source/Arithmetic.swift
/// Subtract one vector from another
/// assuming that they are of the same length
/// using SIMD instructions to speed computation
public func sub(x: [Double], y: [Double]) -> [Double] {
    var results = [Double](y)
    catlas_daxpby(Int32(x.count), 1.0, x, 1, -1, &results, 1)
    return results
}

// Another Surge example, see above citation
public func mul(x: [Double], y: [Double]) -> [Double] {
    var results = [Double](repeating: 0.0, count: x.count)
    vDSP_vmulD(x, 1, y, 1, &results, 1, vDSP_Length(x.count))
    return results
}

// Another Surge example, see above citation
public func sum(x: [Double]) -> Double {
    var result: Double = 0.0
    vDSP_sveD(x, 1, &result, vDSP_Length(x.count))
    return result
}
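
// Aside (not in the original source): expected behavior of the helpers above,
// written as quick checks:
//   sub(x: [3.0, 5.0], y: [1.0, 2.0])  // [2.0, 3.0] (element-wise x - y)
//   mul(x: [2.0, 3.0], y: [4.0, 5.0])  // [8.0, 15.0] (element-wise product)
//   sum(x: [1.0, 2.0, 3.0])            // 6.0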

// MARK: Random Number Generation

// this struct & the randomFractional() function
// based on http://stackoverflow.com/a/35919911/281461
struct Math {
    private static var seeded = false

    static func randomFractional() -> Double {
        if !Math.seeded {
            let time = Int(NSDate().timeIntervalSinceReferenceDate)
            srand48(time)
            Math.seeded = true
        }
        return drand48()
    }

    // like randomFractional(), but multiplies the random number by *limit*
    static func randomTo(limit: Double) -> Double {
        if !Math.seeded {
            let time = Int(NSDate().timeIntervalSinceReferenceDate)
            srand48(time)
            Math.seeded = true
        }
        return drand48() * limit
    }
}
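
// Aside (not in the original source): seeding srand48 with the current time makes
// every run nondeterministic; substituting a fixed constant for `time` inside the
// two methods above would make training runs reproducible.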

/// An individual node in a layer
class Neuron {
    var weights: [Double]
    var activationFunction: (Double) -> Double
    var derivativeActivationFunction: (Double) -> Double
    var inputCache: Double = 0.0
    var delta: Double = 0.0
    var learningRate: Double

    init(weights: [Double], activationFunction: @escaping (Double) -> Double, derivativeActivationFunction: @escaping (Double) -> Double, learningRate: Double = 0.25) {
        self.weights = weights
        self.activationFunction = activationFunction
        self.derivativeActivationFunction = derivativeActivationFunction
        self.learningRate = learningRate
    }

    /// The output that will be going to the next layer
    /// or the final output if this is an output layer
    func output(inputs: [Double]) -> Double {
        inputCache = dotProduct(inputs, weights)
        return activationFunction(inputCache)
    }

}
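
// Aside (not in the original source): a tiny illustrative use of Neuron with
// hand-picked weights (values are arbitrary, for demonstration only):
//   let n = Neuron(weights: [0.5, -0.5], activationFunction: sigmoid, derivativeActivationFunction: derivativeSigmoid)
//   n.output(inputs: [1.0, 1.0])  // dot product is 0.0, so output is sigmoid(0.0) == 0.5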

class Layer {
    let previousLayer: Layer?
    var neurons: [Neuron]
    var outputCache: [Double]

    // for future use in deserializing networks
    init(previousLayer: Layer? = nil, neurons: [Neuron] = [Neuron]()) {
        self.previousLayer = previousLayer
        self.neurons = neurons
        self.outputCache = Array<Double>(repeating: 0.0, count: neurons.count)
    }

    // main init
    init(previousLayer: Layer? = nil, numNeurons: Int, activationFunction: @escaping (Double) -> Double, derivativeActivationFunction: @escaping (Double) -> Double, learningRate: Double) {
        self.previousLayer = previousLayer
        self.neurons = Array<Neuron>()
        for _ in 0..<numNeurons {
            self.neurons.append(Neuron(weights: randomWeights(number: previousLayer?.neurons.count ?? 0), activationFunction: activationFunction, derivativeActivationFunction: derivativeActivationFunction, learningRate: learningRate))
        }
        self.outputCache = Array<Double>(repeating: 0.0, count: neurons.count)
    }

    func outputs(inputs: [Double]) -> [Double] {
        if previousLayer == nil { // input layer (first layer)
            outputCache = inputs
        } else { // hidden layer or output layer
            outputCache = neurons.map { 0ドル.output(inputs: inputs) }
        }
        return outputCache
    }

    // should only be called on an output layer
    func calculateDeltasForOutputLayer(expected: [Double]) {
        for n in 0..<neurons.count {
            neurons[n].delta = neurons[n].derivativeActivationFunction(neurons[n].inputCache) * (expected[n] - outputCache[n])
        }
    }

    // should not be called on output layer
    func calculateDeltasForHiddenLayer(nextLayer: Layer) {
        for (index, neuron) in neurons.enumerated() {
            let nextWeights = nextLayer.neurons.map { 0ドル.weights[index] }
            let nextDeltas = nextLayer.neurons.map { 0ドル.delta }
            let sumOfWeightsXDeltas = dotProduct(nextWeights, nextDeltas)
            neuron.delta = neuron.derivativeActivationFunction(neuron.inputCache) * sumOfWeightsXDeltas
        }
    }

}
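
// Aside (not in the original source): the two delta methods above implement the
// standard backpropagation rules. For an output neuron j:
//   delta_j = f'(in_j) * (expected_j - out_j)
// For a hidden neuron j, summing over neurons k of the next layer:
//   delta_j = f'(in_j) * sum_k(w_kj * delta_k)
// where f' is derivativeActivationFunction and in_j is the cached pre-activation.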

/// Represents an entire neural network. From largest to smallest we go
/// Network -> Layers -> Neurons
class Network {
    var layers: [Layer]

    init(layerStructure: [Int], activationFunction: @escaping (Double) -> Double = sigmoid, derivativeActivationFunction: @escaping (Double) -> Double = derivativeSigmoid, learningRate: Double = 0.25) {
        if layerStructure.count < 3 {
            print("Error: Should be at least 3 layers (1 input, 1 hidden, 1 output)")
        }
        layers = [Layer]()
        // input layer
        layers.append(Layer(numNeurons: layerStructure[0], activationFunction: activationFunction, derivativeActivationFunction: derivativeActivationFunction, learningRate: learningRate))

        // hidden layers and output layer
        for x in layerStructure.enumerated() where x.offset != 0 {
            layers.append(Layer(previousLayer: layers[x.offset - 1], numNeurons: x.element, activationFunction: activationFunction, derivativeActivationFunction: derivativeActivationFunction, learningRate: learningRate))
        }
    }

    /// pushes input data to the first layer
    /// then output from the first as input to the second
    /// second to the third, etc.
    func outputs(input: [Double]) -> [Double] {
        return layers.reduce(input) { 1ドル.outputs(inputs: 0ドル) }
    }

    /// Figure out each neuron's changes based on the errors
    /// of the output versus the expected outcome
    func backPropagate(expected: [Double]) {
        // calculate delta for output layer neurons
        layers.last?.calculateDeltasForOutputLayer(expected: expected)
        // calculate deltas for hidden layers, back to front, since each
        // hidden layer's deltas depend on the deltas of the layer after it
        for l in (1..<layers.count - 1).reversed() {
            layers[l].calculateDeltasForHiddenLayer(nextLayer: layers[l + 1])
        }
    }

    /// backPropagate() doesn't actually change any weights
    /// this function uses the deltas calculated in backPropagate()
    /// to actually make changes to the weights
    func updateWeights() {
        for layer in layers {
            for neuron in layer.neurons {
                for w in 0..<neuron.weights.count {
                    neuron.weights[w] = neuron.weights[w] + (neuron.learningRate * (layer.previousLayer?.outputCache[w])! * neuron.delta)
                }
            }
        }
    }
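
    // Aside (not in the original source): this is the usual per-weight update
    //   w <- w + learningRate * o_w * delta
    // where o_w is the previous layer's cached output feeding weight w and
    // delta was computed for this neuron in backPropagate().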

    /// train() uses the results of outputs() run over
    /// many *inputs* and compared against *expecteds* to feed
    /// backPropagate() and updateWeights()
    func train(inputs: [[Double]], expecteds: [[Double]], printError: Bool = false, threshold: Double? = nil) {
        for (location, xs) in inputs.enumerated() {
            let ys = expecteds[location]
            let outs = outputs(input: xs)
            if printError {
                let diff = sub(x: outs, y: ys)
                let error = sqrt(sum(x: mul(x: diff, y: diff)))
                print("\(error) error in run \(location)")
            }
            backPropagate(expected: ys)
            updateWeights()
        }
    }

    /// for generalized results that require classification
    /// this function will return the correct number of trials
    /// and the percentage correct out of the total
    /// See the unit tests for some examples
    func validate<T: Equatable>(inputs: [[Double]], expecteds: [T], interpretOutput: ([Double]) -> T) -> (correct: Int, total: Int, percentage: Double) {
        var correct = 0
        for (input, expected) in zip(inputs, expecteds) {
            let result = interpretOutput(outputs(input: input))
            if result == expected {
                correct += 1
            }
        }
        let percentage = Double(correct) / Double(inputs.count)
        return (correct, inputs.count, percentage)
    }

    // for when result is a single neuron
    func validate(inputs: [[Double]], expecteds: [Double], accuracy: Double) -> (correct: Int, total: Int, percentage: Double) {
        var correct = 0
        for (input, expected) in zip(inputs, expecteds) {
            let result = outputs(input: input)[0]
            if abs(expected - result) < accuracy {
                correct += 1
            }
        }
        let percentage = Double(correct) / Double(inputs.count)
        return (correct, inputs.count, percentage)
    }
}

var network: Network = Network(layerStructure: [13, 7, 3], learningRate: 7.0)
// for training
var wineParameters: [[Double]] = [[Double]]()
var wineClassifications: [[Double]] = [[Double]]()
// for testing/validation
var wineSamples: [[Double]] = [[Double]]()
var wineCultivars: [Int] = [Int]()

func parseWineCSV() {
    let myBundle = Bundle.main
    let urlpath = myBundle.path(forResource: "wine", ofType: "csv")
    let url = URL(fileURLWithPath: urlpath!)
    let csv = try! String(contentsOf: url)
    let lines = csv.components(separatedBy: "\n")

    let shuffledLines = lines.shuffled
    for line in shuffledLines {
        if line == "" { continue }
        let items = line.components(separatedBy: ",")
        let parameters = items[1...13].map { Double(0ドル)! }
        wineParameters.append(parameters)
        let species = Int(items[0])!
        if species == 1 {
            wineClassifications.append([1.0, 0.0, 0.0])
        } else if species == 2 {
            wineClassifications.append([0.0, 1.0, 0.0])
        } else {
            wineClassifications.append([0.0, 0.0, 1.0])
        }
        wineCultivars.append(species)
    }
    normalizeByColumnMax(dataset: &wineParameters)
    wineSamples = Array(wineParameters.dropFirst(150))
    wineCultivars = Array(wineCultivars.dropFirst(150))
    wineParameters = Array(wineParameters.dropLast(28))
}
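
// Aside (not in the original source): assuming wine.csv is the standard 178-row
// UCI wine data set, the lines above split it into 150 shuffled rows for training
// and the remaining 28 for validation; shuffling first keeps either split from
// simply following the file's cultivar ordering.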

func interpretOutput(output: [Double]) -> Int {
    if output.max()! == output[0] {
        return 1
    } else if output.max()! == output[1] {
        return 2
    } else {
        return 3
    }
}
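
// Aside (not in the original source): an equivalent argmax that generalizes to
// any number of output neurons, shown commented out to avoid redeclaring the name:
//   func interpretOutput(output: [Double]) -> Int {
//       return output.index(of: output.max()!)! + 1
//   }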

parseWineCSV()
// train over entire data set 5 times
for _ in 0..<5 {
    network.train(inputs: wineParameters, expecteds: wineClassifications, printError: false)
}

let results = network.validate(inputs: wineSamples, expecteds: wineCultivars, interpretOutput: interpretOutput)
print("\(results.correct) correct of \(results.total) = \(results.percentage * 100)%")

//: [Next](@next)

Classic Computer Science Problems in Swift.playground/Pages/Chapter 8.xcplaygroundpage/Contents.swift

Lines changed: 1 addition & 0 deletions
@@ -65,3 +65,4 @@ let items = [Item(name: "television", weight: 50, value: 500),
 knapsack(items: items, maxCapacity: 75)
 
 //: [Next](@next)
+

0 commit comments
