From 32a619895f72fedd07ec7eeec697ba441e009d8a Mon Sep 17 00:00:00 2001
From: mc10011
Date: Mon, 18 Feb 2019 23:30:11 +0200
Subject: [PATCH 1/3] Update pyrenn.py

The proposed changes help in two ways:
1. Prevent training from running forever when the error improves only slightly per step
2. Stop training early, before k_max is reached, if the error does not improve any more
---
 python/pyrenn.py | 30 +++++++++++++++++++++---------
 1 file changed, 21 insertions(+), 9 deletions(-)

diff --git a/python/pyrenn.py b/python/pyrenn.py
index 7381f4b..c0b3833 100644
--- a/python/pyrenn.py
+++ b/python/pyrenn.py
@@ -674,7 +674,7 @@ def BPTT(net,data):
 
 
 def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
-        verbose = False):
+        verbose = False,min_E_step=1e-09):
     """ Implementation of the Levenberg-Marquardt-Algorithm (LM) based on:
         Levenberg, K.: A Method for the Solution of Certain Problems in Least Squares.
         Quarterly of Applied Mathematics, 2:164-168, 1944.
@@ -691,6 +691,7 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
         E_stop:     Termination Error, Training stops when the Error <= E_stop
         dampconst:  constant to adapt damping factor of LM
         dampfac:    damping factor of LM
+        min_E_step: minimum step for error. When reached 5 times, training terminates.
     Returns:
         net:    trained Neural Network
     """
@@ -705,15 +706,15 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
     if verbose:
         print('Iteration: ',k,' Error: ',E,' scale factor: ',dampfac)
 
+    early=0
+
     while True:
     #run loop until either k_max or E_stop is reached
 
         JJ = np.dot(J.transpose(),J) #J.transp * J
         w = net['w'] #weight vector
-
         while True:
         #repeat until optimizing step is successful
-
             #gradient
             g = np.dot(J.transpose(),e)
@@ -729,15 +730,23 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
 
 
             net['w'] = w + w_delta #new weight vector
-
             Enew = calc_error(net,data) #calculate new Error E
-
-            if Enew<E: #Optimization Step successful!
-                dampfac= dampfac/dampconst #adapt scale factor
+            if Enew<E and abs(E-Enew)>=min_E_step:
+            #Optimization Step successful!
+                #if E-Enew<=1e-09:
+                dampfac= dampfac/dampconst#adapt scale factor
                 break #go to next iteration
             else:
-                #Optimization Step NOT successful!
+                #Optimization Step NOT successful!\
                 dampfac = dampfac*dampconst#adapt scale factor
+                if abs(E-Enew)<=min_E_step:
+                    if verbose:
+                        print('E-Enew<=min_E_step Encountered!!')
+                    early=early+1
+                    if early>=5.0:
+                        if verbose:
+                            print('5 Times * E-Enew<=min_E_step Encountered!!')
+                        break
 
         #Calculate Jacobian, Error and error vector for next iteration
         J,E,e = RTRL(net,data)
@@ -753,7 +762,10 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
         elif E<=E_stop:
             print('Termination Error reached')
             break
-
+        elif early>=5.0:
+            print('Error decreased 5 times by minimum step. Force training exit.')
+            break
+
     net['ErrorHistory'] = ErrorHistory[:k]
     return net
 

From 8e46bf3d5283daa3ec83ee94d3ab1ae49457932a Mon Sep 17 00:00:00 2001
From: mc10011
Date: Sun, 31 Mar 2019 20:06:27 -0500
Subject: [PATCH 2/3] Refine early stopping criterion

Refine the early stopping criterion: reset the counter when an optimization
step is successful, and only print diagnostics when verbose is chosen.
---
 python/pyrenn.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/python/pyrenn.py b/python/pyrenn.py
index c0b3833..e8956d2 100644
--- a/python/pyrenn.py
+++ b/python/pyrenn.py
@@ -735,16 +735,17 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
             #Optimization Step successful!
                 #if E-Enew<=1e-09:
                 dampfac= dampfac/dampconst#adapt scale factor
+                early=0 #reset the early stopping criterium
                 break #go to next iteration
             else:
                 #Optimization Step NOT successful!\
                 dampfac = dampfac*dampconst#adapt scale factor
                 if abs(E-Enew)<=min_E_step:
+                    early=early+1
+                    if verbose:
                         print('E-Enew<=min_E_step Encountered!!')
-                    early=early+1
-                    if early>=5.0:
-                        if verbose:
+                        if early>=5.0:
                             print('5 Times * E-Enew<=min_E_step Encountered!!')
                         break
 

From dbdba59c287d1d387ff68ced5da5c68f80aee12c Mon Sep 17 00:00:00 2001
From: mc10011
Date: Sun, 31 Mar 2019 20:07:59 -0500
Subject: [PATCH 3/3] Minor change

Deleted a redundant comment.
---
 python/pyrenn.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/pyrenn.py b/python/pyrenn.py
index e8956d2..1d7ee86 100644
--- a/python/pyrenn.py
+++ b/python/pyrenn.py
@@ -733,7 +733,6 @@ def train_LM(P,Y,net,k_max=100,E_stop=1e-10,dampfac=3.0,dampconst=10.0,\
             Enew = calc_error(net,data) #calculate new Error E
 
             if Enew<E and abs(E-Enew)>=min_E_step:
             #Optimization Step successful!
-                #if E-Enew<=1e-09:
                 dampfac= dampfac/dampconst#adapt scale factor
                 early=0 #reset the early stopping criterium
                 break #go to next iteration
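
For reviewers who want to try the new parameter, a minimal usage sketch follows; it is not taken from the patches themselves. It assumes pyrenn's existing CreateNN helper, the one-column-per-sample array layout used in pyrenn's examples, and an arbitrary toy network and data set; only the min_E_step keyword comes from this patch series.

import numpy as np
import pyrenn

# Toy regression task: one input row, one target row, one column per sample
# (layout assumed from pyrenn's examples; adapt to your own data).
P = np.linspace(0, 2 * np.pi, 50).reshape(1, -1)
Y = np.sin(P)

# Hypothetical feed-forward net: 1 input, two hidden layers of 5 neurons, 1 output.
net = pyrenn.CreateNN([1, 5, 5, 1])

# With the patched train_LM, training stops on k_max, on E_stop, or after five
# iterations whose error improvement stays below min_E_step (the new early exit).
net = pyrenn.train_LM(P, Y, net,
                      k_max=200,
                      E_stop=1e-6,
                      verbose=True,
                      min_E_step=1e-9)

print('Iterations actually run:', len(net['ErrorHistory']))

Because min_E_step defaults to 1e-09, existing calls to train_LM behave as before unless the error really does stall at that scale.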