Witam,
moj problem polega na tym iż program systematycznie powieksza swoj rozmiar zajetej pamieci. Od razu przyznam ze jestem baardzo poczatkujacy i chyba czasami nie rozumiem pelne idei programowania obiektowego... Program juz jest przeniesiony z Dev C do QT 4.5 do trybu graficznego ale w obu przypadkach powiekszana jest ilosc zajmowanej pamieci. W kodzie oczywiscie nie widze zadnych bledow...
#include <iostream.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>
#include <fstream.h>
#define RANDOM_CLAMP (((float)rand()-(float)rand())/RAND_MAX) //zwraca float z przedzialu -1 1
#define RANDOM_NUM ((float)rand()/(RAND_MAX+1)) // zwraca float z przedzialu 0 1
class Dendryt
{
public:
double d_waga; // waga neuronu
unsigned long d_wskazuje; // index neuronu do ktorego odwoluje sie w nastepnej warstwie
Dendryt(double waga=0.0, unsigned long wskazuje=0) // Konstruktor
{
d_waga=waga; //inicjuje wartosci
d_wskazuje=wskazuje;
}
};
class Neuron
{
public:
unsigned long n_ID; //ID poszczegolnego neuronu w sieci uzywane do
//odnalezienia poszczegolnego neuronu w macierzy
double n_wartosc; //wartosc neuronu
double n_bias; //bias
double n_delta; //delta (backpropagation)
Dendryt *Dendryty; //Dendryty
Neuron(unsigned long ID=0,double wartosc=0.0,double bias=0.0) //Konstruktor
{
n_ID=ID; //inicjuje wartosci
n_wartosc=wartosc;
n_bias=bias;
n_delta=0.0;
}
void UstawDendryty(unsigned long dendryt) // ustawia dendryty z neuronu do danego dendrytu
{
Dendryty = new Dendryt[dendryt];
for(int i=0;i<dendryt;i++)
{
Dendryty[i].d_wskazuje=i; //inicjalizuje dendryt do zalaczenia z nastepna warstwa
}
}
};
class Warstwa
{
public:
Neuron *Neurony; // Wskaznik na macierz neuronow
void Inicjalizacja(unsigned long rozmiar)
{
Neurony = new Neuron[rozmiar]; // inicjalizuje macierz warstw rozmiarem
}
~Warstwa()
{
delete Neurony; // destruktor
}
Neuron GetNeuron(unsigned long index)
{
return Neurony[index];
}
void SetNeuron(Neuron neuron,unsigned long index)
{
Neurony[index]=neuron;
}
};
class Siec
{
public:
double siec_wspolczynnik_uczenia; // eta
Warstwa *Warstwy; // laczna ilosc warstw w sieci
unsigned long siec_liczba_warstw; // ilosc warstw
double *siec_wejscia; // macierz wejsc sieci
double *siec_wyjscia; // macierz wyjsc sieci
unsigned long *siec_warstwy; // miacierz mowiaca ile neuronow jes w poszczegolnej warstwie
Siec()
{
// pusty konstruktor
}
int UstawDane(double wspolczynnik_uczenia,unsigned long warstwy[],unsigned long liczba_warstw)
{
if(liczba_warstw<2)
return(-1);
siec_wspolczynnik_uczenia=wspolczynnik_uczenia;
siec_warstwy = new unsigned long [liczba_warstw]; // inicjalizuje macierz warstw
Warstwy= new Warstwa[liczba_warstw];
for(int i=0;i<liczba_warstw;i++)
{
siec_warstwy[i]=warstwy[i];
Warstwy[i].Inicjalizacja(warstwy[i]); // Inicjazlizuje poszczegolne warstwy rozmiarem
}
siec_wejscia= new double[warstwy[0]];
siec_wyjscia= new double[warstwy[liczba_warstw-1]];
siec_liczba_warstw=liczba_warstw;
return 0;
}
int UstawWejscia(double wejscia[]) // metoda ustawiajaca wejscia
{
for(int i=0;i<siec_warstwy[0];i++)
{
Warstwy[0].Neurony[i].n_wartosc=wejscia[i];
}
return 0;
}
void LosujWagi(void)
{
int i,j,k;
for(i=0;i<siec_liczba_warstw;i++)
{
for(j=0;j<siec_warstwy[i];j++)
{
if(i!=(siec_liczba_warstw-1)) //osttania warstwa nie wymaga wag
{
Warstwy[i].Neurony[j].UstawDendryty(siec_warstwy[i+1]); // inicjalizuje dendryty
for(k=0;k<siec_warstwy[i+1];k++)
{
Warstwy[i].Neurony[j].Dendryty[k].d_waga=GetRand(); // wagi sa losowane przez metode GetRand()
//cout << i << " " << j << " " << k << " " << Warstwy[i].Neurony[j].Dendryty[k].d_waga << "\n";
}
//system("pause");
}
if(i!=0) // Pierwsza warstwa nie potrzebuje Biasu
{
Warstwy[i].Neurony[j].n_bias=GetRand(); //tutaj bias przyjmuje watrosc losowa z przedizlu (-1,1) w literaturze spotyka sie wartosc stala biasu rowna 1...
}
}
}
}
double * GetOutput(void)
{
double *wyjscia;
int i,j,k;
wyjscia= new double[siec_warstwy[siec_liczba_warstw-1]]; //tymczasowa macierz wyjsc
for(i=1;i<siec_liczba_warstw;i++)
{
for(j=0;j<siec_warstwy[i];j++)
{
Warstwy[i].Neurony[j].n_wartosc=0;
for(k=0;k<siec_warstwy[i-1];k++)
{
Warstwy[i].Neurony[j].n_wartosc=Warstwy[i].Neurony[j].n_wartosc+Warstwy[i-1].Neurony[k].n_wartosc*Warstwy[i-1].Neurony[k].Dendryty[j].d_waga; // mnozy i doddaje do wyjsc
}
Warstwy[i].Neurony[j].n_wartosc=Warstwy[i].Neurony[j].n_wartosc+Warstwy[i].Neurony[j].n_bias; //dodaje bias
Warstwy[i].Neurony[j].n_wartosc=Funkcja_Aktywacji(Warstwy[i].Neurony[j].n_wartosc); //Funkcja aktywacji
}
}
for(i=0;i<siec_warstwy[siec_liczba_warstw-1];i++)
{
wyjscia[i]=Warstwy[siec_liczba_warstw-1].Neurony[i].n_wartosc;
}
return wyjscia;
}
void Update(void)
{
GetOutput();
}
double Funkcja_Aktywacji(double value) //funkcja aktywacji (wartosci pomiedzy 1 i -1
{
return (1.0/(1+exp(-/*BETA*/value))); // tutaj mozna dodac parametr BETA (nachylenie funkcji)
}
double GetRand(void) // do ograniecia!!!
{
time_t timer;
struct tm *tblock;
timer=time(NULL);
tblock=localtime(&timer);
int seed=int(tblock->tm_sec+100*RANDOM_CLAMP+100*RANDOM_NUM);
//srand(tblock->tm_sec);
srand(seed);
return (RANDOM_CLAMP+RANDOM_NUM);
}
double SigmaDelta(unsigned long warstwa_num, unsigned long neuron_num)
{
double wynik=0.0;
for(int i=0;i<siec_warstwy[warstwa_num+1];i++) //przejdz przez wsztskie nuerony nastepnej wartswy
{
wynik=wynik+Warstwy[warstwa_num].Neurony[neuron_num].Dendryty[i].d_waga*Warstwy[warstwa_num+1].Neurony[i].n_delta; // oblicza sume
}
return wynik;
}
double Ucz(double wejscia[],double wyjscia[])
{
int i,j,k;
double Cel, Aktualny, Delta, RMS=0,ERMS=0;
UstawWejscia(wejscia); //ustawia wejscia
Update(); //aktualizuje wszytskie wartosci
for(i=siec_liczba_warstw-1;i>0;i--) //idz od ostatniej do pocztaktowej warstwy
{
for(j=0;j<siec_warstwy[i];j++) //przejdz przez wszytskie neurony
{
if(i==siec_liczba_warstw-1)
{
Cel=wyjscia[j]; // wartosc docelowa
Aktualny=Warstwy[i].Neurony[j].n_wartosc; // waktualna wartosc
RMS = RMS + (0.5*(Aktualny-Cel)*(Aktualny-Cel));
Delta=(Cel-Aktualny)*Aktualny*(1-Aktualny); // funkcja liczaca blad
Warstwy[i].Neurony[j].n_delta=Delta;
for(k=0;k<siec_warstwy[i-1];k++)
{
Warstwy[i-1].Neurony[k].Dendryty[j].d_waga += Delta*siec_wspolczynnik_uczenia*Warstwy[i-1].Neurony[k].n_wartosc; // Liczy nowe wagi
}
Warstwy[i].Neurony[j].n_bias= Warstwy[i].Neurony[j].n_bias + Delta * siec_wspolczynnik_uczenia*1; //n_wartosc jest zawsze 1 dla biasu
}
else
{
Aktualny=Warstwy[i].Neurony[j].n_wartosc; // wartosc aktualna
Delta= Aktualny * (1 - Aktualny) * SigmaDelta(i,j); // funkcja liczaca blad
for(k=0;k<siec_warstwy[i-1];k++)
{
Warstwy[i-1].Neurony[k].Dendryty[j].d_waga += Delta * siec_wspolczynnik_uczenia * Warstwy[i-1].Neurony[k].n_wartosc; // licz nowe wagi
}
if(i!=0) // wejscie nie ma biasu
{
Warstwy[i].Neurony[j].n_bias= Warstwy[i].Neurony[j].n_bias + Delta*siec_wspolczynnik_uczenia*1; // n_wartosc jest zawsze 1 dla biasu
}
}
}
}
return RMS;
}
~Siec() //destruktor
{
delete Warstwy;
}
};
int main(void)
{
ofstream plik;
plik.open ("plik.txt");
Siec Daisy;
unsigned long inp=70; // ilosc neuronow w warstiwe wejsciowej
unsigned long hid=30; // ilosc neuronow w warstwie ukrytej
//unsigned long hid2=10;
unsigned long outp=26; // ilosc neuronow w warstiwe wyjsciowej
unsigned long warstwy[3]; // ilosc warstw
warstwy[0]=inp;
warstwy[1]=hid;
//warstwy[2]=hid2;
warstwy[2]=outp;
//cout << warstwy[0] << "\n" << warstwy[1] << "\n" << warstwy[2] <<"\n" ;
int i=0,j=0;
unsigned int epoka=0;
//cout << "Podaj ilosc epok: " ;
//cin >> epoka;
Daisy.UstawDane(0.8,warstwy,3); // (wspolczynnik uczenia,warstwy,liczba warstw)
double wejscie[]={1,0};
double *wyjscia;
double blad;
Daisy.LosujWagi();
//WEKTORY WEJSCIOWE I WYJSCIOWE
double tr_inp[26][71]={{0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,
1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0}, //A
{1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,
0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,
0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,
0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,
0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,
0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,
0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,
1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,
0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,
0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,
0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,
0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,
0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,
0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,
0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0,0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,
1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,
0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,
1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,
0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,
1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,
0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0},
{1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,
0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,
1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0} }; //Z
double tr_out[26][26]={{1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 ,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0, 0.0,0.0,0.0,0.0,0.0,0.0 }, //A
{0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 },
{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0 } }; //Z
// NAPISAC METODE WCZYTUJACA Z PLIKU
cout << " \n Ucze..";
double blad_epoki;
do
{
blad_epoki=0;
for(j=0;j<26;j++)
{
//Daisy.Ucz(tr_inp[j],tr_out[j]); //Proces uczenia Backprop
blad = Daisy.Ucz(tr_inp[j],tr_out[j]);
blad_epoki=blad_epoki+blad;
}
blad_epoki=blad_epoki/26;
//cout << "\n Blad sieci: " << blad << " EPOKA : " << epoka << "\n";
cout << blad <<"\n ";
plik << epoka << " " << blad ;
plik << endl;
}while(blad_epoki>0.000001);
cout << "\n Blad sieci: " << blad << " EPOKA : " << epoka << "\n";
system("pause");
cout << " \n Koniec nauki..";
cout<<"\n\n testuje... \n";
for(j=0;j<26;j++)
{
cout<<"\n\nLitery : "<<j+1;
Daisy.UstawWejscia(tr_inp[j]);
wyjscia=Daisy.GetOutput();
for(i=0;i<inp;i++)
{
cout<<"\nIn"<<i+1<<" : "<<tr_inp[j][i];
}
for(i=0;i<outp;i++)
{
cout<<"\nOut"<<i+1<<" : "<<wyjscia[i];
}
delete wyjscia;
double *wyjscia;
cout <<"\n";
system("PAUSE");
}
cout<<"\n\n\n\n";
system("PAUSE");
}
,
- Czy wywolujac metode Ucz w petli zmienne beda ciagle tworzone i beda zabierac dodatkowa pamiec czy beda nadpisywane?
double Ucz(double wejscia[],double wyjscia[])
{
int i,j,k;
double Cel, Aktualny, Delta, RMS=0,ERMS=0;