项目作者: cuteboydot

项目描述 :
Implementation of MLP
高级语言: C++
项目地址: git://github.com/cuteboydot/MultiLayerPerceptron.git
创建时间: 2017-05-05T05:35:07Z
项目社区: https://github.com/cuteboydot/MultiLayerPerceptron

开源协议:

下载


MultiLayerPerceptron

Implementation of MLP

cuteboydot@gmail.com

  • example : number classification





  • test result




  • usage : train & test
    ```cpp
    // train
    CMultiLayerPerceptron * p = new CMultiLayerPerceptron();
    p->init(SIZE_TRAIN, SIZE_IN_ATTR, SIZE_HIDD, SIZE_OUT_ATTR);
    p->train(input_data, answer_data);

// test
for(int a=0; a<SIZE_TEST; a++) {
    p->classfication(test_data[a], guess_data);
    printf("[%d] guess:%.2f,%.2f,%.2f, answer:%.2f,%.2f,%.2f \n", a,
        guess_data[0], guess_data[1], guess_data[2],
        answer_data[a*2][0], answer_data[a*2][1], answer_data[a*2][2]);
}
```

  • usage details : feed forward
    ```cpp
  3. void CMultiLayerPerceptron::feedforward(double * pInputs)
  4. {
  5. double dSum = 0;
  6. for(int a=0; a<m_nInput; a++) {
  7. m_pInputNeurons[a] = pInputs[a];
  8. }
  9. // Calculate Hidden Layer
  10. for(int b=0; b<m_nHidden; b++) {
  11. dSum = 0;
  12. m_pHiddenNeurons[b] = 0;
  13. // get weighted sum of pattern with bias (Input To Hidden)
  14. for(int a=0; a<=m_nInput; a++) {
  15. dSum += m_pInputNeurons[a] * m_ppInputHiddenW[a][b];
  16. }
  17. m_pHiddenNeurons[b] = activate(dSum);
  18. }
  19. // Calculate Output Layer
  20. for(int b=0; b<m_nOutput; b++) {
  21. dSum = 0;
  22. m_pOutputNeurons[b] = 0;
  23. // get weighted sum of pattern with bias (Hidden To Output)
  24. for(int a=0; a<=m_nHidden; a++) {
  25. dSum += m_pHiddenNeurons[a] * m_ppHiddenOutputW[a][b];
  26. }
  27. m_pOutputNeurons[b] = activate(dSum);
  28. }
  29. }
    ```

  • usage details : back propagation
  1. void CMultiLayerPerceptron::backpropagation(double * pAnswer)
  2. {
  3. // Modify deltas between hidden and output
  4. for(int b=0; b<m_nOutput; b++) {
  5. m_pOutputError[b] = m_pOutputNeurons[b] * (1.0 - m_pOutputNeurons[b]) * (pAnswer[b] - m_pOutputNeurons[b]);
  6. // get weighted sum of pattern with bias (Hidden To Output)
  7. for(int a=0; a<=m_nHidden; a++) {
  8. m_ppHiddenOutputD[a][b] = (LEARNING_RATE * m_pHiddenNeurons[a] * m_pOutputError[b]) + m_dMomentum * m_ppHiddenOutputD[a][b];
  9. }
  10. }
  11. // Modify deltas between input and hidden
  12. for(int b=0; b<m_nHidden; b++) {
  13. double sum = 0.0;
  14. for(int c=0; c<m_nOutput; c++) {
  15. sum += m_ppHiddenOutputW[b][c] * m_pOutputError[c];
  16. }
  17. m_pHiddenError[b] = m_pHiddenNeurons[b] * (1.0 - m_pHiddenNeurons[b]) * sum;
  18. // get weighted sum of pattern with bias (input To hidden)
  19. for(int a=0; a<=m_nInput; a++) {
  20. m_ppInputHiddenD[a][b] = (LEARNING_RATE * m_pInputNeurons[a] * m_pHiddenError[b]) + m_dMomentum * m_ppInputHiddenD[a][b];
  21. }
  22. }
  23. // update weight between input and hidden
  24. for(int a=0; a<=m_nInput; a++) {
  25. for(int b=0; b<m_nHidden; b++) {
  26. m_ppInputHiddenW[a][b] += m_ppInputHiddenD[a][b];
  27. }
  28. }
  29. // update weight between hidden and output
  30. for(int a=0; a<=m_nHidden; a++) {
  31. for(int b=0; b<m_nOutput; b++) {
  32. m_ppHiddenOutputW[a][b] += m_ppHiddenOutputD[a][b];
  33. }
  34. }
  35. }