// AnnBP.cpp: implementation of the CAnnBP class
//
//////////////////////////////////////////////////////////////////////
#include "StdAfx.h"
#include "AnnBP.h"
#include "math.h"
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
CAnnBP::CAnnBP()
{
eta1=0.3;
    momentum1=0.3;
}
CAnnBP::~CAnnBP()
{
}
double CAnnBP::drnd()
{
    return ((double) rand() / (double) BIGRND);
}
/*** Return a random double-precision number between -1.0 and 1.0 ***/
double CAnnBP::dpn1()
{
    return (double) (rand())/(32767/2)-1;
}
/*** Activation function, currently a sigmoid (S-shaped) function ***/
double CAnnBP::squash(double x)
{
return (1.0 / (1.0 + exp( -x)));
}
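/*
 * Note: for the logistic sigmoid used above, the derivative can be written
 * in terms of the output itself: squash'(x) = squash(x) * (1.0 - squash(x)).
 * This is why bpnn_output_error() and bpnn_hidden_error() below use the
 * factor o * (1.0 - o) (resp. h * (1.0 - h)) when forming the deltas.
 */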
/*** Allocate a one-dimensional array of doubles ***/
double* CAnnBP::alloc_1d_dbl(int n)
{
    double *new1;
    new1 = (double *) malloc ((unsigned) (n * sizeof (double)));
    if (new1 == NULL) {
        AfxMessageBox("ALLOC_1D_DBL: couldn't allocate array of doubles\n");
        return (NULL);
    }
    return (new1);
}
/*** Allocate a two-dimensional array of doubles ***/
double** CAnnBP::alloc_2d_dbl(int m, int n)
{
    int i;
    double **new1;
    new1 = (double **) malloc ((unsigned) (m * sizeof (double *)));
    if (new1 == NULL) {
        AfxMessageBox("ALLOC_2D_DBL: couldn't allocate array of dbl pointers\n");
        return (NULL);
    }
    for (i = 0; i < m; i++) {
        new1[i] = alloc_1d_dbl(n);
    }
    return (new1);
}
/*** Randomly initialize the weights ***/
void CAnnBP::bpnn_randomize_weights(double **w, int m, int n)
{
    int i, j;
    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = dpn1();
        }
    }
}
/*** Zero-initialize the weights ***/
void CAnnBP::bpnn_zero_weights(double **w, int m, int n)
{
    int i, j;
    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = 0.0;
        }
    }
}
/*** Set the random number seed ***/
void CAnnBP::bpnn_initialize(int seed)
{
    CString msg,s;
    msg="Random number generator seed: ";
    s.Format("%d",seed);
    AfxMessageBox(msg+s);
srand(seed);
}
/*** Create a BP network ***/
BPNN* CAnnBP::bpnn_internal_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;
    newnet = (BPNN *) malloc (sizeof (BPNN));
    if (newnet == NULL) {
        printf("BPNN_CREATE: couldn't allocate neural network\n");
        return (NULL);
    }

    newnet->input_n = n_in;
    newnet->hidden_n = n_hidden;
    newnet->output_n = n_out;
    newnet->input_units = alloc_1d_dbl(n_in + 1);
    newnet->hidden_units = alloc_1d_dbl(n_hidden + 1);
    newnet->output_units = alloc_1d_dbl(n_out + 1);

    newnet->hidden_delta = alloc_1d_dbl(n_hidden + 1);
    newnet->output_delta = alloc_1d_dbl(n_out + 1);
    newnet->target = alloc_1d_dbl(n_out + 1);

    newnet->input_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);
    newnet->input_prev_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_prev_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);

    return (newnet);
}
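/*
 * Layout note: every unit array is allocated with one extra element because
 * index 0 serves as the bias/threshold unit (bpnn_layerforward() sets
 * l1[0] = 1.0); the real units occupy indices 1..n. The weight matrices are
 * therefore (n_in+1) x (n_hidden+1) and (n_hidden+1) x (n_out+1).
 */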
/* Free the memory occupied by a BP network */
void CAnnBP::bpnn_free(BPNN *net)
{
    int n1, n2, i;

    n1 = net->input_n;
    n2 = net->hidden_n;

    free((char *) net->input_units);
    free((char *) net->hidden_units);
    free((char *) net->output_units);

    free((char *) net->hidden_delta);
    free((char *) net->output_delta);
    free((char *) net->target);

    for (i = 0; i <= n1; i++) {
        free((char *) net->input_weights[i]);
        free((char *) net->input_prev_weights[i]);
    }
    free((char *) net->input_weights);
    free((char *) net->input_prev_weights);

    for (i = 0; i <= n2; i++) {
        free((char *) net->hidden_weights[i]);
        free((char *) net->hidden_prev_weights[i]);
    }
    free((char *) net->hidden_weights);
    free((char *) net->hidden_prev_weights);

    free((char *) net);
}
/*** Create a BP network and initialize its weights ***/
BPNN* CAnnBP::bpnn_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;
    newnet = bpnn_internal_create(n_in, n_hidden, n_out);

#ifdef INITZERO
    bpnn_zero_weights(newnet->input_weights, n_in, n_hidden);
#else
    bpnn_randomize_weights(newnet->input_weights, n_in, n_hidden);
#endif
    bpnn_randomize_weights(newnet->hidden_weights, n_hidden, n_out);
    bpnn_zero_weights(newnet->input_prev_weights, n_in, n_hidden);
    bpnn_zero_weights(newnet->hidden_prev_weights, n_hidden, n_out);

    return (newnet);
}
void CAnnBP::bpnn_layerforward(double *l1, double *l2, double **conn, int n1, int n2)
{
    double sum;
    int j, k;

    /*** Set the threshold (bias) unit ***/
    l1[0] = 1.0;
    /*** For each neuron in the second layer ***/
    for (j = 1; j <= n2; j++) {
        /*** Compute the weighted sum of its inputs ***/
        sum = 0.0;
        for (k = 0; k <= n1; k++) {
            sum += conn[k][j] * l1[k];
        }
        l2[j] = squash(sum);
    }
}
/* Output-layer error */
void CAnnBP::bpnn_output_error(double *delta, double *target, double *output, int nj, double *err)
{
    int j;
    double o, t, errsum;

    errsum = 0.0;
    for (j = 1; j <= nj; j++) {
        o = output[j];
        t = target[j];
        /* delta = sigmoid derivative o*(1-o) times the output error (t-o) */
        delta[j] = o * (1.0 - o) * (t - o);
        errsum += ABS(delta[j]);
    }
    *err = errsum;
}
/* Hidden-layer error */
void CAnnBP::bpnn_hidden_error(double *delta_h, int nh, double *delta_o, int no, double **who, double *hidden, double *err)
{
    int j, k;
    double h, sum, errsum;

    errsum = 0.0;
    for (j = 1; j <= nh; j++) {
        h = hidden[j];
        /* Back-propagate the output deltas through the hidden-to-output weights */
        sum = 0.0;
        for (k = 1; k <= no; k++) {
            sum += delta_o[k] * who[j][k];
        }
        delta_h[j] = h * (1.0 - h) * sum;
        errsum += ABS(delta_h[j]);
    }
    *err = errsum;
}
/* Adjust the weights */
void CAnnBP::bpnn_adjust_weights(double *delta, int ndelta, double *ly, int nly, double **w, double **oldw, double eta, double momentum)
{
    double new_dw;
    int k, j;

    ly[0] = 1.0;
    for (j = 1; j <= ndelta; j++) {
        for (k = 0; k <= nly; k++) {
            /* Weight update: learning-rate term plus momentum term */
            new_dw = ((eta * delta[j] * ly[k]) + (momentum * oldw[k][j]));
            w[k][j] += new_dw;
            oldw[k][j] = new_dw;
        }
    }
}
/* Perform a forward pass */
void CAnnBP::bpnn_feedforward(BPNN *net)
{
    int in, hid, out;

    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;

    /*** Feed the input activations forward ***/
    bpnn_layerforward(net->input_units, net->hidden_units,
        net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units,
        net->hidden_weights, hid, out);
}
/* Train the BP network on the current pattern */
void CAnnBP::bpnn_train(BPNN *net, double eta, double momentum, double *eo, double *eh)
{
    int in, hid, out;
    double out_err, hid_err;

    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;

    /*** Feed the input activations forward ***/
    bpnn_layerforward(net->input_units, net->hidden_units,
        net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units,
        net->hidden_weights, hid, out);

    /*** Compute the output-layer and hidden-layer errors ***/
    bpnn_output_error(net->output_delta, net->target, net->output_units,
        out, &out_err);
    bpnn_hidden_error(net->hidden_delta, hid, net->output_delta, out,
        net->hidden_weights, net->hidden_units, &hid_err);
    *eo = out_err;
    *eh = hid_err;

    /*** Adjust the input-layer and hidden-layer weights ***/
    bpnn_adjust_weights(net->output_delta, out, net->hidden_units, hid,
        net->hidden_weights, net->hidden_prev_weights, eta, momentum);
    bpnn_adjust_weights(net->hidden_delta, hid, net->input_units, in,
        net->input_weights, net->input_prev_weights, eta, momentum);
}
/* Save the BP network to a file */
void CAnnBP::bpnn_save(BPNN *net, char *filename)
{
    CFile file;
    char *mem;
    int n1, n2, n3, i, j, memcnt;
    double dvalue, **w;

    n1 = net->input_n;  n2 = net->hidden_n;  n3 = net->output_n;
    printf("Saving %dx%dx%d network to '%s'\n", n1, n2, n3, filename);
    try
    {
        file.Open(filename,CFile::modeWrite|CFile::modeCreate|CFile::modeNoTruncate);
    }
    catch(CFileException* e)
    {
        e->ReportError();
        e->Delete();
    }

    file.Write(&n1,sizeof(int));
    file.Write(&n2,sizeof(int));
    file.Write(&n3,sizeof(int));

    /* Write the input-to-hidden weights */
    memcnt = 0;
    w = net->input_weights;
    mem = (char *) malloc ((unsigned) ((n1+1) * (n2+1) * sizeof(double)));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem,sizeof(double)*(n1+1)*(n2+1));
    free(mem);

    /* Write the hidden-to-output weights */
    memcnt = 0;
    w = net->hidden_weights;
    mem = (char *) malloc ((unsigned) ((n2+1) * (n3+1) * sizeof(double)));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem, (n2+1) * (n3+1) * sizeof(double));
    free(mem);

    file.Close();
}
/* Read a BP network from a file */
BPNN* CAnnBP::bpnn_read(char *filename)
{
    char *mem;
    BPNN *new1;
    int n1, n2, n3, i, j, memcnt;
    CFile file;

    try
    {
        file.Open(filename,CFile::modeRead|CFile::modeCreate|CFile::modeNoTruncate);
    }
    catch(CFileException* e)
    {
        e->ReportError();
        e->Delete();
    }

    // printf("Reading '%s'\n", filename);  // fflush(stdout);
    file.Read(&n1, sizeof(int));
    file.Read(&n2, sizeof(int));
    file.Read(&n3, sizeof(int));
    new1 = bpnn_internal_create(n1, n2, n3);
    // printf("'%s' contains a %dx%dx%d network\n", filename, n1, n2, n3);

    /* Read the input-to-hidden weights */
    // printf("Reading input weights...");  // fflush(stdout);
    memcnt = 0;
    mem = (char *) malloc (((n1+1) * (n2+1) * sizeof(double)));
    file.Read(mem, ((n1+1)*(n2+1))*sizeof(double));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            fastcopy(&(new1->input_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);

    /* Read the hidden-to-output weights */
    // printf("Done\nReading hidden weights...");  // fflush(stdout);
    memcnt = 0;
    mem = (char *) malloc (((n2+1) * (n3+1) * sizeof(double)));
    file.Read(mem, (n2+1) * (n3+1) * sizeof(double));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            fastcopy(&(new1->hidden_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);
    file.Close();
    printf("Done\n");  // fflush(stdout);

    bpnn_zero_weights(new1->input_prev_weights, n1, n2);
    bpnn_zero_weights(new1->hidden_prev_weights, n2, n3);
    return (new1);
}
void CAnnBP::CreateBP(int n_in, int n_hidden, int n_out)
{
    net=bpnn_create(n_in,n_hidden,n_out);
}

void CAnnBP::FreeBP()
{
    bpnn_free(net);
}
void CAnnBP::Train(double *input_unit,int input_num, double *target,int target_num, double *eo, double *eh)
{
    for(int i=1;i<=input_num;i++)
    {
        net->input_units[i]=input_unit[i-1];
    }
    for(int j=1;j<=target_num;j++)
    {
        net->target[j]=target[j-1];
    }
    bpnn_train(net,eta1,momentum1,eo,eh);
}
void CAnnBP::Identify(double *input_unit,int input_num,double *target,int target_num)
{
    for(int i=1;i<=input_num;i++)
    {
        net->input_units[i]=input_unit[i-1];
    }
    bpnn_feedforward(net);
    for(int j=1;j<=target_num;j++)
    {
        target[j-1]=net->output_units[j];
    }
}
void CAnnBP::Save(char *filename)
{
    bpnn_save(net,filename);
}

void CAnnBP::Read(char *filename)
{
    net=bpnn_read(filename);
}
void CAnnBP::SetBParm(double eta, double momentum)
{
    eta1=eta;
    momentum1=momentum;
}

void CAnnBP::Initialize(int seed)
{
    bpnn_initialize(seed);
}
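
/*
 * Usage sketch (not part of the original file): a minimal example of how the
 * wrapper methods above are typically combined. The 2-3-1 layer sizes, the
 * XOR-style sample data, the epoch count, and the file name are illustrative
 * assumptions, not values taken from this project.
 */
#ifdef ANNBP_USAGE_EXAMPLE
void ExampleTrainAndTest()
{
    // Hypothetical training set: 4 samples, 2 inputs and 1 target each.
    double inputs[4][2]  = { {0,0}, {0,1}, {1,0}, {1,1} };
    double targets[4][1] = { {0},   {1},   {1},   {0}   };

    CAnnBP bp;
    bp.Initialize(1);            // seed the random number generator
    bp.CreateBP(2, 3, 1);        // 2 inputs, 3 hidden units, 1 output
    bp.SetBParm(0.3, 0.3);       // learning rate eta and momentum

    double eo, eh;
    for (int epoch = 0; epoch < 1000; epoch++) {
        for (int s = 0; s < 4; s++) {
            bp.Train(inputs[s], 2, targets[s], 1, &eo, &eh);
        }
    }

    double out[1];
    bp.Identify(inputs[0], 2, out, 1);   // forward pass only
    bp.Save("bpnet.dat");                // persist the trained weights
    bp.FreeBP();
}
#endif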