Notes:
1. The implementation language is C++.
2. I will cover the mathematics behind neural-network backpropagation in a follow-up article.
Now let's get started~
//Backprop.h
#ifndef Backprop_h
#define Backprop_h
class TLU                                  // a single TLU (threshold logic unit)
{
public:
    TLU();
    double F_output();                     // compute, print, and return this TLU's output f
protected:
    double input1, input2, input3,         // the input feature vector
           weight1, weight2, weight3,      // the weights
           f,                              // the TLU's output
           c;                              // the learning rate
};
class FirstLevelTLU : public TLU           // a first-layer TLU
{
public:
    FirstLevelTLU(double x1, double x2, double x3, double w1, double w2, double w3);
    void Modify_weight(double delta, double weight); // gradient-descent step during backpropagation
private:
    double Middle_delta;                   // the sigmoid sensitivity used in gradient descent:
                                           // ζ_i^(j) = f_i^(j) (1 - f_i^(j)) Σ_l ζ_l^(j+1) w_{i,l}^(j+1)
};
class LastLevelTLU : public TLU            // the last-layer TLU
{
public:
    LastLevelTLU(double x1, double x2, double x3, double w1, double w2, double w3, double ep);
    double Get_delta();                    // get the last layer's sensitivity ζ^(k)
    void Modify_weight();                  // the gradient-descent step
    double Get_w1();
    double Get_w2();
private:
    double d, Base_delta;                  // the desired final output d and the base sensitivity ζ^(k)
};
#endif
The code is not hard to follow. It defines three classes: a generic TLU, a first-layer TLU, and a last-layer TLU, each with its own inputs and weights.
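Since the full derivation is deferred to a later article, here for reference are the standard sigmoid delta rules that the header comments abbreviate (d is the desired output, c the learning rate, x_i the i-th input):
Output layer: ζ^(k) = (d - f^(k)) f^(k) (1 - f^(k))
Hidden layer j: ζ_i^(j) = f_i^(j) (1 - f_i^(j)) Σ_l ζ_l^(j+1) w_{i,l}^(j+1)
Weight update: w_i ← w_i + c ζ x_i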
Next is the definition file, Backprop.cpp, which implements the member functions of these classes. I won't paste my full version here; a sketch of what it can look like follows below.
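The sketch below is written against the header above. The delta and update formulas follow the header comments; the sigmoid activation, the printing inside F_output, the constructor bodies, and the learning rate c = 1.0 are my assumptions, not necessarily the original implementation.
//Backprop.cpp -- a minimal sketch, not the original implementation
#include <cmath>
#include <iostream>
#include "Backprop.h"
using namespace std;
TLU::TLU()
{
    input1 = input2 = input3 = 0.0;
    weight1 = weight2 = weight3 = 0.0;
    f = 0.0;
    c = 1.0;                               // assumed learning rate
}
double TLU::F_output()                     // f = sigmoid(w . x)
{
    double s = weight1 * input1 + weight2 * input2 + weight3 * input3;
    f = 1.0 / (1.0 + exp(-s));
    cout << "f = " << f << " ";            // print the output, as the header comment describes
    return f;
}
FirstLevelTLU::FirstLevelTLU(double x1, double x2, double x3,
                             double w1, double w2, double w3)
{
    input1 = x1; input2 = x2; input3 = x3;
    weight1 = w1; weight2 = w2; weight3 = w3;
}
// delta and weight come from the layer above:
// Middle_delta = f (1 - f) * delta * weight, then w_i += c * Middle_delta * x_i
void FirstLevelTLU::Modify_weight(double delta, double weight)
{
    Middle_delta = f * (1.0 - f) * delta * weight;
    weight1 += c * Middle_delta * input1;
    weight2 += c * Middle_delta * input2;
    weight3 += c * Middle_delta * input3;
}
LastLevelTLU::LastLevelTLU(double x1, double x2, double x3,
                           double w1, double w2, double w3, double ep)
{
    input1 = x1; input2 = x2; input3 = x3;
    weight1 = w1; weight2 = w2; weight3 = w3;
    d = ep;                                // desired (target) output
}
double LastLevelTLU::Get_delta() { return Base_delta; }
double LastLevelTLU::Get_w1()    { return weight1; }
double LastLevelTLU::Get_w2()    { return weight2; }
// Base_delta = (d - f) * f * (1 - f), then w_i += c * Base_delta * x_i
void LastLevelTLU::Modify_weight()
{
    Base_delta = (d - f) * f * (1.0 - f);
    weight1 += c * Base_delta * input1;
    weight2 += c * Base_delta * input2;
    weight3 += c * Base_delta * input3;
}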
Finally, the main function, as follows:
#include <iostream>
#include "Backprop.h"
using namespace std;
int main()
{
    int start = 1;
    while (start) {
        FirstLevelTLU first1(1.0, 0.0, 1.0, 2.0, -2.0, 0.0);
        FirstLevelTLU first2(1.0, 0.0, 1.0, 1.0, 3.0, -1.0);  // first build two first-layer TLUs
        double f1 = first1.F_output();
        double f2 = first2.F_output();
        LastLevelTLU last(f1, f2, 1.0, 3.0, -2.0, -1.0, 0.0); // define the last-layer TLU
        double fout = last.F_output();
        last.Modify_weight();
        double delta = last.Get_delta();
        first1.Modify_weight(delta, last.Get_w1());
        first2.Modify_weight(delta, last.Get_w2());           // gradient descent: last layer first, then propagate backwards
        cout << endl;
        start--;
    }
    return 0;
}
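Two details of main are worth noting. First, start is initialised to 1 and decremented, so the loop performs exactly one forward and one backward pass; to actually train the network you would construct the TLUs once, outside the loop, and iterate for many epochs. Second, last.Modify_weight() runs before Get_w1() and Get_w2(), so the first-layer deltas are propagated through the already-updated output weights; a stricter implementation would cache the pre-update weights and propagate those instead.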
Closing remarks:
In follow-up posts I will add a more complex implementation of backpropagation written in Python. Stay tuned~