#include <iostream.h>
#include <conio.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
//-------------------------------------------------------------------------------------------------------------
#define b_const 1.00000000000000000000
#define a_const 1.00000000000000000000
// Neuron activation function: a hyperbolic tangent with slope b_const,
// squashing the weighted input sum into (-1, 1).
// Alternative activations kept below for experimentation.
double func(double val)
{
    return tanh(b_const * val);
    //return (val < 0) ? -1 : 1;      // hard limiter (sign function)
    //return (1/(1+exp(val)));        // logistic-style activation
}
//---------------------------------------------------------------------------------------------------------------------------------------
// A single neuron: it owns two parallel weight arrays, one entry per
// input of its layer (including the bias slot).
class TNeuron
{
public:
    double *W;       // current weights
    double *Wviejo;  // "old" weights; entrenar() updates these and copies into W
    // Null both pointers so that ~TLayer can safely delete[] the weights
    // of a neuron that was never filled in (e.g. when loading fails).
    TNeuron() { W = 0; Wviejo = 0; }
    //int W_len;
};
//-----------------------------------------------------------------------------------------------------------------------------------
// One layer of the network.
// Ownership note: 'entradas' of layer i>0 aliases the previous layer's
// 'salidas' buffer, so this class never deletes 'entradas'; layer 0's
// input buffer is owned and freed by TNetwork_Trainer.
class TLayer
{
public:
    TNeuron * neuronas;   // neurons of this layer
    int neuronas_len;     // number of neurons
    double *entradas;     // inputs; entradas[0] is the bias (fixed at 1)
    double *delta;        // per-neuron backpropagation error terms
    int entradas_len;     // number of inputs, including the bias slot
    double *salidas;      // outputs; salidas[0] is the bias for the next layer
    // Zero everything so the destructor is safe even when
    // TNetwork_Trainer::cargar() never populated this layer.
    TLayer()
    {
        neuronas = 0;
        entradas = 0;
        delta = 0;
        salidas = 0;
        neuronas_len = 0;
        entradas_len = 0;
    }
    ~TLayer()
    {
        // Each neuron owns its two weight arrays.
        if (neuronas)
            for (int i = 0; i < neuronas_len; i++)
            {
                delete [] neuronas[i].W;
                delete [] neuronas[i].Wviejo;
            }
        delete [] neuronas;
        delete [] salidas;
        delete [] delta;
        // 'entradas' intentionally not deleted here (see ownership note).
    }
};
//-------------------------------------------------------------------------------------------------------------------------------------
// The feed-forward network itself: an array of layers plus one text
// label per output neuron.
class TNetwork_Trainer
{
public:
    TLayer *layers;       // layers[0] is the first (input-facing) layer
    int layers_len;       // number of layers
    char **etiquetas;     // output labels, one C string per output neuron
    int etiquetas_len;    // number of labels (= neurons in the last layer)
    void cargar();                 // build the network from "red.txt"
    void inferir(double * vals);   // forward-propagate 'vals' through the net
    // Zero all members so destruction is safe if cargar() was never called.
    TNetwork_Trainer()
    {
        layers = 0;
        layers_len = 0;
        etiquetas = 0;
        etiquetas_len = 0;
    }
    ~TNetwork_Trainer()
    {
        // Only layer 0 owns its input buffer; every other layer's
        // 'entradas' aliases the previous layer's 'salidas', which
        // ~TLayer frees as that layer's output.
        if (layers)
            delete [] layers[0].entradas;
        if (etiquetas)
        {
            for (int i = 0; i < etiquetas_len; i++)
                delete [] etiquetas[i];
            delete [] etiquetas;
        }
        delete [] layers;
    }
};
//-------------------------------------------------------------------------------------------------------------
// Forward pass: copy 'vals' into the input layer, then propagate layer
// by layer.  salidas[0] of every layer is the bias (always 1), so
// neuron j writes its activation into salidas[j+1].
// 'vals' must hold layers[0].entradas_len values (vals[0] = bias = 1).
void TNetwork_Trainer :: inferir(double * vals)
{
    for (int i = 0; i < layers[0].entradas_len; i++)
        layers[0].entradas[i] = vals[i];
    for (int i = 0; i < layers_len; i++)
        for (int j = 0; j < layers[i].neuronas_len; j++)
        {
            // Weighted sum of the layer's inputs (entradas[0] carries the bias).
            double v = 0;
            for (int k = 0; k < layers[i].entradas_len; k++)
                v += layers[i].entradas[k] * layers[i].neuronas[j].W[k];
            layers[i].salidas[j+1] = func(v);
        }
}
//--------------------------------------------------------------------------------------------------------------
// Build the network from the text file "red.txt":
//   layer count, input count, then per layer its neuron count,
//   and finally one label line per output neuron.
// All weights are initialized to 1.
void TNetwork_Trainer::cargar()
{
    FILE *f;
    char line[100];
    f = fopen( "red.txt", "r" );
    if( f == NULL )
    {
        // No description file: leave the trainer empty instead of
        // dereferencing a null FILE*.
        layers = 0; layers_len = 0;
        etiquetas = 0; etiquetas_len = 0;
        return;
    }
    // Read the number of layers.
    fscanf(f,"%d\n",&layers_len);
    layers = new TLayer[layers_len];
    // Read the input count of the first layer (slot 0 is the bias).
    fscanf(f,"%d\n",&layers[0].entradas_len);
    layers[0].entradas = new double[layers[0].entradas_len];
    layers[0].entradas[0] = 1;
    // Read the description of each layer.
    for(int i = 0; i < layers_len; i++)
    {
        // Number of neurons in this layer.
        fscanf(f,"%d\n",&layers[i].neuronas_len);
        layers[i].neuronas = new TNeuron[ layers[i].neuronas_len ];
        // One extra output slot: salidas[0] is the bias fed to the next layer.
        layers[i].salidas = new double[layers[i].neuronas_len + 1];
        layers[i].salidas[0] = 1;
        layers[i].delta = new double[layers[i].neuronas_len];
        // The next layer's input is this layer's output (shared buffer).
        if( i < layers_len - 1 )
        {
            layers[i+1].entradas = layers[i].salidas;
            layers[i+1].entradas_len = layers[i].neuronas_len + 1;
        }
        // Set every weight (and its "old" copy) to 1.
        for(int j = 0; j < layers[i].neuronas_len; j++)
        {
            layers[i].neuronas[j].W = new double[layers[i].entradas_len];
            layers[i].neuronas[j].Wviejo = new double[layers[i].entradas_len];
            for(int k = 0; k < layers[i].entradas_len; k++)
                layers[i].neuronas[j].Wviejo[k] = layers[i].neuronas[j].W[k] = 1;
        }
    }
    // One text label per output neuron.
    etiquetas_len = layers[layers_len-1].neuronas_len;
    etiquetas = new char*[etiquetas_len];
    for(int i = 0; i < etiquetas_len; i++)
    {
        if( fgets( line, 100, f ) == NULL )
            line[0] = 0;   // short file: use an empty label
        // Strip only a trailing newline; the original code chopped the
        // last character unconditionally, which truncated a final label
        // with no newline and was UB on an empty line.
        size_t len = strlen(line);
        if( len > 0 && line[len-1] == '\n' )
            line[len-1] = 0;
        etiquetas[i] = new char[strlen(line)+1];
        strcpy(etiquetas[i], line);
    }
    fclose( f );
}
//---------------------------------------------------------------------------------------------------------------
// One training pattern: an input vector (entradas[0] is the bias, 1)
// and the desired output vector.  Owns both arrays.
struct TPatern
{
    double *entradas,
           *salidas;
    TPatern() { entradas = salidas = 0; }
    ~TPatern()
    {
        // delete[] on a null pointer is a no-op, so no guards are needed.
        delete [] entradas;
        delete [] salidas;
    }
};
//---------------------------------------------------------------------------------------------------------------
// Drives the whole process: owns the network and the training set.
class Trainer
{
    TNetwork_Trainer net;  // the network being trained
    TPatern* paterns;      // training patterns from "patrones.txt"
    int CPaterns;          // number of patterns
public:
    // Null 'paterns' so the destructor is safe even when
    // cargar_paterns() was never called (the old code tested an
    // uninitialized pointer).
    Trainer() { paterns = 0; CPaterns = 0; }
    void cargar() { net.cargar(); }   // load the network description
    ~Trainer() { delete [] paterns; } // delete[] handles null safely
    void cargar_paterns();            // load the training set
    void entrenar(bool encuesta(double Err)); // backpropagation loop
    void salvar();                    // write the trained network out
};
//---------------------------------------------------------------------------------------------------------------
// Write the trained network to "Network_Out.txt": layer count, input
// count, then per layer its neuron count and every weight (one "%f"
// per line), and finally the output labels.
void Trainer :: salvar()
{
    FILE *f;
    f = fopen( "Network_Out.txt", "w" );
    if( f == NULL )
        return;   // cannot create the output file; nothing to write to
    fprintf(f,"%d\n",net.layers_len);
    fprintf(f,"%d\n",net.layers[0].entradas_len);
    for(int i = 0; i < net.layers_len; i++)
    {
        fprintf(f,"%d\n",net.layers[i].neuronas_len );
        for(int j = 0; j < net.layers[i].neuronas_len; j++)
            for(int k = 0; k < net.layers[i].entradas_len; k++)
                fprintf(f,"%f\n",net.layers[i].neuronas[j].W[k] );
    }
    for(int i = 0; i < net.etiquetas_len; i++)
        fprintf(f,"%s\n", net.etiquetas[i]);
    fclose(f);
}
//---------------------------------------------------------------------------------------------------------------
// Load the training set from "patrones.txt": a pattern count followed,
// for each pattern, by CEntradas-1 input values (slot 0 is the bias)
// and CSalidas desired outputs, which are scaled by a_const.
// Sizes come from the already-loaded network, so cargar() must run first.
void Trainer :: cargar_paterns()
{
    FILE *f = fopen( "patrones.txt", "r" );
    if( f == NULL )
    {
        // No pattern file: leave the training set empty.
        paterns = 0;
        CPaterns = 0;
        return;
    }
    float float_aux;
    // Read the number of patterns.
    fscanf(f,"%d\n",&CPaterns);
    paterns = new TPatern[CPaterns];
    int CEntradas = net.layers[0].entradas_len,
        CSalidas = net.layers[net.layers_len - 1].neuronas_len;
    // Read the patterns themselves.
    for(int i = 0; i < CPaterns; i++)
    {
        paterns[i].entradas = new double[CEntradas];
        paterns[i].salidas = new double[CSalidas];
        paterns[i].entradas[0] = 1;   // bias input
        for(int j = 1; j < CEntradas; j++)
        {
            fscanf(f,"%f\n",&float_aux);
            paterns[i].entradas[j] = float_aux;
        }
        for(int j = 0; j < CSalidas; j++)
        {
            fscanf(f,"%f\n",&float_aux);
            paterns[i].salidas[j] = float_aux * a_const;
        }
    }
    fclose(f);
}
//---------------------------------------------------------------------------------------------------------------
#define rate 0.5   // learning rate
#define alfa 0.8   // momentum coefficient (currently unused: the momentum term below is commented out)
// Online backpropagation training loop.  For every pattern: run a
// forward pass, compute output-layer deltas and update those weights,
// then walk the hidden layers backwards doing the same.  After each
// full sweep, 'encuesta' is called with the average squared error and
// decides whether to keep training.
// The delta expressions match the derivative of the scaled-tanh
// activation in func(): d/dv [a*tanh(b*v)] = (b/a)*(a - y)*(a + y).
void Trainer :: entrenar(bool encuesta(double Err))
{
// Eav starts at a large sentinel (0xffffff); EavViejo keeps the previous
// sweep's average error, but the stopping test that used it is commented
// out in the while-condition below.
double E,Eav=0xffffff, EavViejo, e, fact;
TLayer *Current_Layer, *Next_Layer;
int CNeuronas;
do
{
EavViejo = Eav;
Eav = 0;
for(int ipaterns = 0 ; ipaterns < CPaterns ; ipaterns++ )
{
E=0;
// Run the forward pass with the current pattern's input.
net.inferir( paterns[ipaterns].entradas);
// For each neuron of the output layer:
Current_Layer = &net.layers[net.layers_len - 1];
CNeuronas = Current_Layer->neuronas_len ;
for(int j = 0 ; j < CNeuronas ; j++)
{ //e = di - yi  (desired minus actual; salidas[0] is the bias slot)
e = paterns[ipaterns].salidas[j] - Current_Layer->salidas[j+1];
E += e * e ;
Current_Layer->delta[j] =(b_const/a_const)* e * (a_const - Current_Layer->salidas[j+1]) *
(a_const + Current_Layer->salidas[j+1]) ;
// For each input of neuron j: gradient-descent weight update.
// Wviejo is accumulated first and then copied into W.
for(int i = 0 ; i < Current_Layer->entradas_len ; i++ )
{
Current_Layer->neuronas[j].Wviejo[i]+=
//alfa * Current_Layer->neuronas[j].Wviejo[i] +
rate * Current_Layer->delta[j] * Current_Layer->entradas[i];
Current_Layer->neuronas[j].W[i] = Current_Layer->neuronas[j].Wviejo[i];
//Current_Layer->neuronas[j].W[i] = Current_Layer->neuronas[j].Wviejo[i];
asm nop;
}
}
E = E / 2 ;
// For each layer from net.layers_len-2 down to layer 0:
// NOTE(review): the output-layer weights were already updated above, so
// the hidden deltas below are computed with the NEW next-layer weights,
// not the pre-update ones — confirm this is the intended variant.
for(int icapas = net.layers_len - 2 ; icapas >= 0 ; icapas--)
{
Current_Layer = &net.layers[icapas];
CNeuronas = Current_Layer->neuronas_len ;
// For each neuron j of the layer:
for(int j = 0 ; j < CNeuronas ; j++)
{
fact = 0;
Next_Layer = &net.layers[icapas+1];
// For each neuron k of the next layer: back-propagate its delta
// through the weight connecting it to this neuron (W[j+1] because
// W[0] is the bias weight).
for(int k = 0 ; k < Next_Layer->neuronas_len ; k++ )
fact += Next_Layer->delta[k] * Next_Layer->neuronas[k].W[j+1];
Current_Layer->delta[j] = (b_const/a_const) * fact * (a_const - Current_Layer->salidas[j+1]) *
(a_const + Current_Layer->salidas[j+1]) ;
// For each input of neuron j: same update rule as the output layer.
for(int i = 0 ; i < Current_Layer->entradas_len ; i++ )
{
Current_Layer->neuronas[j].Wviejo[i]+=
/*alfa * Current_Layer->neuronas[j].Wviejo[i] +*/
rate * Current_Layer->delta[j] * Current_Layer->entradas[i];
Current_Layer->neuronas[j].W[i] = Current_Layer->neuronas[j].Wviejo[i];
asm nop;
}
}
}
Eav += E ;
}
// Average squared error over the sweep; reported to the callback.
Eav = Eav / CPaterns;
}while(/*Eav<EavViejo &&*/ encuesta(Eav));
}
//---------------------------------------------------------------------------------------------------------------
// Progress callback for Trainer::entrenar: prints the average error
// and asks the user whether to continue.  Returns true on 's'.
bool func_encuesta(double ErrorAV)
{
    cout << "Eav = " << ErrorAV << endl;
    cout << "desea continuar? (s/n)\n";  // "continue? (s/n)" — fixed typo "contiuar"
    return getch() == 's';
}
//---------------------------------------------------------------------------------------------------------------
// Entry point: load the network description, load the training
// patterns, train until the user stops, then save the result.
// Standard C++ requires main to return int (the old code used void main).
int main()
{
    Trainer trainer;
    trainer.cargar();
    trainer.cargar_paterns();
    trainer.entrenar(func_encuesta);
    trainer.salvar();
    getch();   // wait for a key so the console window stays open
    return 0;
}