luann.lua

--[[
The MIT License (MIT)
Copyright (c) <2013> <Josh Rowe>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
]]--
local luann = {}
local Layer = {}
local Cell = {}
local exp = math.exp

--We start by creating the cells.
--The cell has a structure containing weights that modify the input from the previous layer.
--Each cell also has a signal, or output.
function Cell:new(numInputs)
    local cell = {delta = 0, weights = {}, signal = 0}
    for i = 1, numInputs do
        cell.weights[i] = math.random() * .1
    end
    setmetatable(cell, self)
    self.__index = self
    return cell
end

function Cell:activate(inputs, bias, threshold)
    local signalSum = bias
    local weights = self.weights
    for i = 1, #weights do
        signalSum = signalSum + (weights[i] * inputs[i])
    end
    self.signal = 1 / (1 + exp((signalSum * -1) / threshold))
end
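--[[
Worked example (illustrative numbers only, not taken from the library): for a cell
with weights {0.5, 0.25}, inputs {1, 0}, bias 0.1, and threshold 1, the activation is
    signalSum = 0.1 + 0.5*1 + 0.25*0 = 0.6
    signal = 1 / (1 + exp(-0.6 / 1)), which is about 0.646.
The "threshold" argument acts as a steepness divisor on the logistic curve.
]]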
--Next we create a Layer of cells. The layer is a table of cells.
function Layer:new(numCells, numInputs)
    numCells = numCells or 1
    numInputs = numInputs or 1
    local cells = {}
    for i = 1, numCells do cells[i] = Cell:new(numInputs) end
    local layer = {cells = cells, bias = math.random()}
    setmetatable(layer, self)
    self.__index = self
    return layer
end

--layers = {table of layer sizes from input to output}
function luann:new(layers, learningRate, threshold)
    local network = {learningRate = learningRate, threshold = threshold}
    --initialize the input layer
    network[1] = Layer:new(layers[1], layers[1])
    --initialize the hidden layers and output layer
    for i = 2, #layers do
        network[i] = Layer:new(layers[i], layers[i-1])
    end
    setmetatable(network, self)
    self.__index = self
    return network
end
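--[[
Construction sketch (assumes this file is on the package path as "luann"):
a network with 2 inputs, one hidden layer of 3 cells, and 1 output cell,
using a learning rate of 0.5 and a sigmoid threshold of 1. The sizes and
constants here are arbitrary examples, not recommendations.

    local luann = require("luann")
    local net = luann:new({2, 3, 1}, 0.5, 1)
]]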
function luann:activate(inputs)
    local threshold = self.threshold
    for i = 1, #inputs do
        self[1].cells[i].signal = inputs[i]
    end
    for i = 2, #self do
        local passInputs = {}
        local cells = self[i].cells
        local prevCells = self[i-1].cells
        for m = 1, #prevCells do
            passInputs[m] = prevCells[m].signal
        end
        local passBias = self[i].bias
        for j = 1, #cells do
            --activate each cell
            cells[j]:activate(passInputs, passBias, threshold)
        end
    end
end
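--[[
Forward-pass sketch (assumes a net built as in the construction example above):
activate() feeds the inputs through every layer; the results are read back from
the signals of the last layer's cells.

    net:activate({1, 0})
    local outputLayer = net[#net]
    for i = 1, #outputLayer.cells do
        print(outputLayer.cells[i].signal)
    end
]]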
function luann:decode(hiddenSignal)
    --iterate over the first hidden layer and set its cells' signals to hiddenSignal
    for i = 1, #self[2].cells do
        self[2].cells[i].signal = hiddenSignal[i]
    end
    local threshold = self.threshold
    for i = 3, #self do
        local passInputs = {}
        local cells = self[i].cells
        local prevCells = self[i-1].cells
        for m = 1, #prevCells do
            passInputs[m] = prevCells[m].signal
        end
        local passBias = self[i].bias
        for j = 1, #cells do
            --activate each cell
            cells[j]:activate(passInputs, passBias, threshold)
        end
    end
end
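--[[
decode() sketch (illustrative values only): it bypasses the input layer, writes the
given signals directly into layer 2, and propagates from layer 3 onward. One possible
use, assuming a suitably trained network, is running only the decoding half of an
autoencoder-style model.

    net:decode({0.2, 0.8, 0.5})
    print(net[#net].cells[1].signal)
]]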
function luann:bp(inputs, outputs)
    self:activate(inputs) --forward pass to refresh every cell's signal
    local numSelf = #self
    local learningRate = self.learningRate
    for i = numSelf, 2, -1 do --iterate backwards (nothing to calculate for input layer)
        local numCells = #self[i].cells
        local cells = self[i].cells
        for j = 1, numCells do
            local signal = cells[j].signal
            if i ~= numSelf then --hidden layers: accumulate deltas propagated from the layer above
                local weightDelta = 0
                local layer = self[i+1].cells
                for k = 1, #self[i+1].cells do
                    weightDelta = weightDelta + layer[k].weights[j] * layer[k].delta
                end
                cells[j].delta = signal * (1 - signal) * weightDelta
            else --output layer: delta comes directly from the target error
                cells[j].delta = (outputs[j] - signal) * signal * (1 - signal)
            end
        end
    end
    for i = 2, numSelf do
        --each layer shares a single bias, set here from the last cell's delta
        self[i].bias = self[i].cells[#self[i].cells].delta * learningRate
        for j = 1, #self[i].cells do
            for k = 1, #self[i].cells[j].weights do
                local weights = self[i].cells[j].weights
                weights[k] = weights[k] + self[i].cells[j].delta * learningRate * self[i-1].cells[k].signal
            end
        end
    end
end
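--[[
Training sketch (assumes net = luann:new({2, 3, 1}, 0.5, 1)): each bp() call runs one
forward pass and one weight update for a single input/target pair. The XOR table and
the epoch count below are arbitrary examples.

    for epoch = 1, 10000 do
        net:bp({0, 0}, {0})
        net:bp({0, 1}, {1})
        net:bp({1, 0}, {1})
        net:bp({1, 1}, {0})
    end
    net:activate({0, 1})
    print(net[#net].cells[1].signal) --should move toward 1 as training progresses
]]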
--Reattach methods to a network table that was restored from storage.
function luann:loadNetwork(network)
    local ann = network
    ann.bp = luann.bp
    ann.activate = luann.activate
    ann.decode = luann.decode --reattach decode as well, otherwise it is lost after loading
    for i = 1, #ann do
        for j = 1, #ann[i].cells do
            ann[i].cells[j].activate = Cell.activate
        end
    end
    return ann
end
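--[[
loadNetwork() sketch (assumes "savedNet" is a plain table with the same shape as a
trained network, e.g. recovered with a table serializer of your choice; serialization
itself is outside this module): the call reattaches the methods that plain-table
serialization strips away.

    local net = luann:loadNetwork(savedNet)
    net:activate({1, 0})
]]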
return luann