#include "StdInc.h"
#include "neuralNetwork.h"
#include <cmath>    //sqrt, log, sin, cos, exp
#include <cstdlib>  //rand, RAND_MAX
#include <cstring>  //memset
//using namespace std;

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
static double norm(void) //add desired mean, multiply to get desired SD
{
    static double kept = 0;
    static bool in = false;
    if (!in)
    {
        //draw two uniform deviates in (0,1]; the +1 keeps log() away from zero
        double x = (rand() + 1) / (static_cast<double>(RAND_MAX) + 1.0);
        double f = sqrt( -2.0 * log(x) );
        x = (rand() + 1) / (static_cast<double>(RAND_MAX) + 1.0);
        //generate a pair of normal deviates; return one, keep the other for the next call
        kept = f * cos( 2.0 * M_PI * x );
        in = true;
        return f * sin( 2.0 * M_PI * x );
    }
    else
    {
        in = false;
        return kept;
    }
}
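/*
 * norm() above is the Box-Muller transform: given two independent uniform
 * deviates u1, u2 in (0, 1], the pair
 *     z0 = sqrt(-2 * ln(u1)) * cos(2 * PI * u2)
 *     z1 = sqrt(-2 * ln(u1)) * sin(2 * PI * u2)
 * are independent standard normal deviates; the function returns one of them
 * and caches the other for the next call.
 */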
/*******************************************************************
* Constructors
********************************************************************/
neuralNetwork::neuralNetwork() : nInput(0), nHidden1(0), nHidden2(0), nOutput(0)
{
    inputNeurons = new double[1];
    hiddenNeurons1 = new double[1];
    hiddenNeurons2 = new double[1];
    outputNeurons = new double[1];
    wInputHidden = new double*[1];
    wInputHidden[0] = new double[1];
    wHidden2Hidden = new double*[1];
    wHidden2Hidden[0] = new double[1];
    wHiddenOutput = new double*[1];
    wHiddenOutput[0] = new double[1];
}
neuralNetwork::neuralNetwork(const neuralNetwork& other) : nInput(0), nHidden1(0), nHidden2(0), nOutput(0)
{
    //start from the minimal allocation, then let operator= resize and copy
    inputNeurons = new double[1];
    hiddenNeurons1 = new double[1];
    hiddenNeurons2 = new double[1];
    outputNeurons = new double[1];
    wInputHidden = new double*[1];
    wInputHidden[0] = new double[1];
    wHidden2Hidden = new double*[1];
    wHidden2Hidden[0] = new double[1];
    wHiddenOutput = new double*[1];
    wHiddenOutput[0] = new double[1];
    *this = other;
}
neuralNetwork::neuralNetwork(int nI, int nH1, int nH2, int nO) : nInput(nI), nHidden1(nH1), nHidden2(nH2), nOutput(nO)
{
    //create neuron lists
    //--------------------------------------------------------------------------------------------------------
    inputNeurons = new double[nInput + 1];
    for ( int i=0; i < nInput; i++ ) inputNeurons[i] = 0;
    //create input bias neuron
    inputNeurons[nInput] = -1;

    hiddenNeurons1 = new double[nHidden1 + 1];
    for ( int i=0; i < nHidden1; i++ ) hiddenNeurons1[i] = 0;
    //create hidden bias neuron
    hiddenNeurons1[nHidden1] = -1;

    hiddenNeurons2 = new double[nHidden2 + 1];
    for ( int i=0; i < nHidden2; i++ ) hiddenNeurons2[i] = 0;
    //create hidden bias neuron
    hiddenNeurons2[nHidden2] = -1;

    outputNeurons = new double[nOutput];
    for ( int i=0; i < nOutput; i++ ) outputNeurons[i] = 0;

    //create weight lists (include bias neuron weights)
    //--------------------------------------------------------------------------------------------------------
    wInputHidden = new double*[nInput + 1];
    for ( int i=0; i <= nInput; i++ )
    {
        wInputHidden[i] = new double[nHidden1];
        for ( int j=0; j < nHidden1; j++ ) wInputHidden[i][j] = 0;
    }
    wHidden2Hidden = new double*[nHidden1 + 1];
    for ( int i=0; i <= nHidden1; i++ )
    {
        wHidden2Hidden[i] = new double[nHidden2];
        for ( int j=0; j < nHidden2; j++ ) wHidden2Hidden[i][j] = 0;
    }
    wHiddenOutput = new double*[nHidden2 + 1];
    for ( int i=0; i <= nHidden2; i++ )
    {
        wHiddenOutput[i] = new double[nOutput];
        for ( int j=0; j < nOutput; j++ ) wHiddenOutput[i][j] = 0;
    }

    //initialize weights
    //--------------------------------------------------------------------------------------------------------
    initializeWeights();
}
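/*
 * Storage layout: each input and hidden layer array carries one extra slot at
 * the end holding a constant -1 bias neuron, so the weight matrices are sized
 * (fan-in + 1) x fan-out and the last row of each matrix holds the bias
 * weights.  The output layer has no bias slot of its own.
 */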
//copy assignment: resizes storage if the two networks differ in structure,
//then copies all neuron values and weights
void neuralNetwork::operator = (const neuralNetwork& cpy)
{
    if ( nInput != cpy.nInput || nHidden1 != cpy.nHidden1 || nHidden2 != cpy.nHidden2 || nOutput != cpy.nOutput )
    {
        delete[] inputNeurons;
        delete[] hiddenNeurons1;
        delete[] hiddenNeurons2;
        delete[] outputNeurons;

        //delete weight storage (still using the old dimensions)
        for (int i=0; i <= nInput; i++) delete[] wInputHidden[i];
        delete[] wInputHidden;
        for (int j=0; j <= nHidden2; j++) delete[] wHiddenOutput[j];
        delete[] wHiddenOutput;
        for (int j=0; j <= nHidden1; j++) delete[] wHidden2Hidden[j];
        delete[] wHidden2Hidden;

        nInput = cpy.nInput;
        nHidden1 = cpy.nHidden1;
        nHidden2 = cpy.nHidden2;
        nOutput = cpy.nOutput;

        inputNeurons = new double[nInput + 1];
        inputNeurons[nInput] = -1;
        hiddenNeurons1 = new double[nHidden1 + 1];
        hiddenNeurons1[nHidden1] = -1;
        hiddenNeurons2 = new double[nHidden2 + 1];
        hiddenNeurons2[nHidden2] = -1;
        outputNeurons = new double[nOutput];

        //create weight lists (include bias neuron weights)
        //--------------------------------------------------------------------------------------------------------
        wInputHidden = new double*[nInput + 1];
        for ( int i=0; i <= nInput; i++ )
            wInputHidden[i] = new double[nHidden1];
        wHidden2Hidden = new double*[nHidden1 + 1];
        for ( int i=0; i <= nHidden1; i++ )
            wHidden2Hidden[i] = new double[nHidden2];
        wHiddenOutput = new double*[nHidden2 + 1];
        for ( int i=0; i <= nHidden2; i++ )
            wHiddenOutput[i] = new double[nOutput];
    }

    //copy neuron values and weights
    for ( int i=0; i < nInput; i++ ) inputNeurons[i] = cpy.inputNeurons[i];
    for ( int i=0; i < nHidden1; i++ ) hiddenNeurons1[i] = cpy.hiddenNeurons1[i];
    for ( int i=0; i < nHidden2; i++ ) hiddenNeurons2[i] = cpy.hiddenNeurons2[i];
    for ( int i=0; i < nOutput; i++ ) outputNeurons[i] = cpy.outputNeurons[i];
    for ( int i=0; i <= nInput; i++ )
        for ( int j=0; j < nHidden1; j++ )
            wInputHidden[i][j] = cpy.wInputHidden[i][j];
    for ( int i=0; i <= nHidden1; i++ )
        for ( int j=0; j < nHidden2; j++ )
            wHidden2Hidden[i][j] = cpy.wHidden2Hidden[i][j];
    for ( int i=0; i <= nHidden2; i++ )
        for ( int j=0; j < nOutput; j++ )
            wHiddenOutput[i][j] = cpy.wHiddenOutput[i][j];
}
/*******************************************************************
* Destructor
********************************************************************/
neuralNetwork::~neuralNetwork()
{
    //delete neurons
    delete[] inputNeurons;
    delete[] hiddenNeurons1;
    delete[] hiddenNeurons2;
    delete[] outputNeurons;

    //delete weight storage
    for (int i=0; i <= nInput; i++) delete[] wInputHidden[i];
    delete[] wInputHidden;
    for (int j=0; j <= nHidden2; j++) delete[] wHiddenOutput[j];
    delete[] wHiddenOutput;
    for (int j=0; j <= nHidden1; j++) delete[] wHidden2Hidden[j];
    delete[] wHidden2Hidden;
}
//run one forward pass and return a pointer to the output layer
double* neuralNetwork::feedForwardPattern(double* pattern)
{
    feedForward(pattern);
    return outputNeurons;
}
//uniform crossover: each weight is inherited at random from one of the two
//parent networks (all three networks are assumed to share the same structure)
void neuralNetwork::mate(const neuralNetwork& n1, const neuralNetwork& n2)
{
    for (int i = 0; i <= nInput; i++)
    {
        for (int j = 0; j < nHidden1; j++)
        {
            if (rand() % 2 == 0)
                wInputHidden[i][j] = n1.wInputHidden[i][j];
            else
                wInputHidden[i][j] = n2.wInputHidden[i][j];
        }
    }
    for (int i = 0; i <= nHidden1; i++)
    {
        for (int j = 0; j < nHidden2; j++)
        {
            if (rand() % 2 == 0)
                wHidden2Hidden[i][j] = n1.wHidden2Hidden[i][j];
            else
                wHidden2Hidden[i][j] = n2.wHidden2Hidden[i][j];
        }
    }
    for (int i = 0; i <= nHidden2; i++)
    {
        for (int j = 0; j < nOutput; j++)
        {
            if (rand() % 2 == 0)
                wHiddenOutput[i][j] = n1.wHiddenOutput[i][j];
            else
                wHiddenOutput[i][j] = n2.wHiddenOutput[i][j];
        }
    }
}
//Gaussian mutation: perturb every weight by howMuch * N(0,1)
void neuralNetwork::tweakWeights(double howMuch)
{
    //set range (computed in the original but not used by this function)
    //double rH = 1/sqrt( (double) nInput);
    //double rO = 1/sqrt( (double) nHidden1);
    for (int i = 0; i <= nInput; i++)
    {
        for (int j = 0; j < nHidden1; j++)
        {
            wInputHidden[i][j] += howMuch * norm();
        }
    }
    for (int i = 0; i <= nHidden1; i++)
    {
        for (int j = 0; j < nHidden2; j++)
        {
            wHidden2Hidden[i][j] += howMuch * norm();
        }
    }
    for (int i = 0; i <= nHidden2; i++)
    {
        for (int j = 0; j < nOutput; j++)
        {
            wHiddenOutput[i][j] += howMuch * norm();
        }
    }
    //initializeWeights();
}
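/*
 * Hypothetical sketch (the function name and mutation size are illustrative
 * assumptions, not part of the original code) of how mate() and tweakWeights()
 * can drive an evolutionary search: breed a child from two fitter parents by
 * uniform crossover, then mutate it slightly.  Kept inside #if 0 so it does
 * not affect the build.
 */
#if 0
static void evolveExample(const neuralNetwork& parentA, const neuralNetwork& parentB, neuralNetwork& child)
{
    child.mate(parentA, parentB);   //each weight copied at random from one parent
    child.tweakWeights(0.05);       //perturb every weight by 0.05 * N(0,1)
}
#endif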
void neuralNetwork::initializeWeights()
{
    //set range
    double rH = 2.0 / sqrt( (double) nInput );
    double rO = 2.0 / sqrt( (double) nHidden1 );

    //set weights between input and hidden
    //--------------------------------------------------------------------------------------------------------
    for (int i = 0; i <= nInput; i++)
    {
        for (int j = 0; j < nHidden1; j++)
        {
            //set weights to random values
            wInputHidden[i][j] = norm() * rH;
        }
    }
    for (int i = 0; i <= nHidden1; i++)
    {
        for (int j = 0; j < nHidden2; j++)
        {
            //set weights to random values
            wHidden2Hidden[i][j] = norm() * rO;
        }
    }

    //set weights between hidden and output
    //--------------------------------------------------------------------------------------------------------
    for (int i = 0; i <= nHidden2; i++)
    {
        for (int j = 0; j < nOutput; j++)
        {
            //set weights to random values
            wHiddenOutput[i][j] = norm() * rO;
        }
    }
}
/*******************************************************************
* Activation Function
********************************************************************/
inline double neuralNetwork::activationFunction( double x )
{
    //sigmoid function
    return 1.0 / (1.0 + exp(-x));
}
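/*
 * The logistic sigmoid s(x) = 1 / (1 + e^-x) has the convenient derivative
 * s'(x) = s(x) * (1 - s(x)), which is why backpropigate() below multiplies each
 * hidden error by hiddenNeurons[i] * (1 - hiddenNeurons[i]) instead of
 * evaluating the activation function again.
 */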
/*******************************************************************
* Feed Forward Operation
********************************************************************/
void neuralNetwork::feedForward(double* pattern)
{
    //set input neurons to input values
    for (int i = 0; i < nInput; i++) inputNeurons[i] = pattern[i];

    //Calculate first hidden layer values - include bias neuron
    //--------------------------------------------------------------------------------------------------------
    for (int j = 0; j < nHidden1; j++)
    {
        //clear value
        hiddenNeurons1[j] = 0;
        //get weighted sum of pattern and bias neuron
        for ( int i = 0; i <= nInput; i++ ) hiddenNeurons1[j] += inputNeurons[i] * wInputHidden[i][j];
        //set to result of sigmoid
        hiddenNeurons1[j] = activationFunction( hiddenNeurons1[j] );
    }
    //Calculate second hidden layer values - include bias neuron
    for (int j = 0; j < nHidden2; j++)
    {
        //clear value
        hiddenNeurons2[j] = 0;
        //get weighted sum of first hidden layer and bias neuron
        for ( int i = 0; i <= nHidden1; i++ ) hiddenNeurons2[j] += hiddenNeurons1[i] * wHidden2Hidden[i][j];
        //set to result of sigmoid
        hiddenNeurons2[j] = activationFunction( hiddenNeurons2[j] );
    }

    //Calculate output layer values - include bias neuron
    //--------------------------------------------------------------------------------------------------------
    for (int k = 0; k < nOutput; k++)
    {
        //clear value
        outputNeurons[k] = 0;
        //get weighted sum of second hidden layer and bias neuron
        for ( int j = 0; j <= nHidden2; j++ ) outputNeurons[k] += hiddenNeurons2[j] * wHiddenOutput[j][k];
        //output layer is left linear; the sigmoid is intentionally disabled here
        //outputNeurons[k] = activationFunction( outputNeurons[k] );
    }
}
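/*
 * Forward pass written out, with x the input vector extended by the constant -1 bias:
 *     h1[j] = sigmoid( sum_i x[i]  * wInputHidden[i][j]   )
 *     h2[j] = sigmoid( sum_i h1[i] * wHidden2Hidden[i][j] )
 *     y[k]  =          sum_j h2[j] * wHiddenOutput[j][k]      (linear output)
 */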
//Online backpropagation for one training example.  'pattern' holds the target
//output values; OLR, H2LR and H1LR are the learning rates for the output,
//second hidden and first hidden layer weights.  feedForward() must have been
//called with the corresponding input first.
void neuralNetwork::backpropigate(double* pattern, double OLR, double H2LR, double H1LR )
{
    //inputError = new double[nInput + 1];
    double* hiddenError1 = new double[nHidden1 + 1];
    double* hiddenError2 = new double[nHidden2 + 1];
    double* outputError = new double[nOutput];

    //zero the error accumulators, including the bias slots
    memset(hiddenError1, 0, sizeof(double) * (nHidden1 + 1));
    memset(hiddenError2, 0, sizeof(double) * (nHidden2 + 1));

    //output layer: the error is the plain difference because the output is linear
    for (int i = 0; i < nOutput; i++)
    {
        outputError[i] = (pattern[i] - outputNeurons[i]);//*(outputNeurons[i]*(1-outputNeurons[i]));
        //propagate the error back to the second hidden layer
        for (int ii = 0; ii <= nHidden2; ii++)
            hiddenError2[ii] += outputError[i] * wHiddenOutput[ii][i];
        //update hidden2 -> output weights
        for (int ii = 0; ii <= nHidden2; ii++)
            wHiddenOutput[ii][i] += OLR * hiddenNeurons2[ii] * outputError[i];
    }

    //second hidden layer: apply the sigmoid derivative, propagate back, update weights
    for (int i = 0; i < nHidden2; i++)
    {
        hiddenError2[i] *= (hiddenNeurons2[i] * (1 - hiddenNeurons2[i]));
        for (int ii = 0; ii <= nHidden1; ii++)
            hiddenError1[ii] += hiddenError2[i] * wHidden2Hidden[ii][i];
        for (int ii = 0; ii <= nHidden1; ii++)
            wHidden2Hidden[ii][i] += H2LR * hiddenNeurons1[ii] * hiddenError2[i];
    }

    //first hidden layer: apply the sigmoid derivative and update input weights
    for (int i = 0; i < nHidden1; i++)
    {
        hiddenError1[i] *= (hiddenNeurons1[i] * (1 - hiddenNeurons1[i]));
        for (int ii = 0; ii <= nInput; ii++)
            wInputHidden[ii][i] += H1LR * inputNeurons[ii] * hiddenError1[i];
    }

    delete[] hiddenError1;
    delete[] hiddenError2;
    delete[] outputError;
}
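/*
 * Hypothetical usage sketch (the layer sizes, learning rates and training data
 * below are assumptions for illustration, not part of the original project):
 * build a small network, run repeated forward passes and train it towards a
 * target with backpropigate().  Kept inside #if 0 so it does not affect the build.
 */
#if 0
static void trainingExample()
{
    neuralNetwork nn(2, 4, 4, 1);                   //2 inputs, two hidden layers of 4, 1 output

    double input[2]  = { 0.0, 1.0 };                //one training pattern
    double target[1] = { 1.0 };                     //desired (linear) output for that pattern

    for (int epoch = 0; epoch < 1000; epoch++)
    {
        nn.feedForwardPattern(input);               //forward pass fills the output neurons
        nn.backpropigate(target, 0.01, 0.01, 0.01); //one gradient step (output, hidden2, hidden1 rates)
    }

    double* out = nn.feedForwardPattern(input);     //points at the network's output array
    (void)out;                                      //out[0] is the trained prediction
}
#endif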