! ---------------------------------------------------------------------------
! NOTE(review): line-mangled extract. The SUBROUTINE header, the inner
! "DO i"/"DO j" loops, some assignments (e.g. the setting of nlayers,
! presumably SIZE(a, 1), and the zero-initialisation of forward/backward)
! and the closing END statements lie outside this view — confirm against the
! full file; do not treat this fragment as compilable as-is.
!
! Purpose (grounded in the visible statements): alias the per-kind weight
! tensor a => pao%ml_training_matrices(ikind)%NN, run a tanh forward pass
! seeded with `descriptor`, then back-propagate `outer_deriv` from the
! output layer and return the derivative w.r.t. the input descriptor in
! `gradient`.
! ---------------------------------------------------------------------------
69 INTEGER,
INTENT(IN) :: ikind
70 REAL(dp),
DIMENSION(:),
INTENT(IN),
TARGET :: descriptor
71 REAL(dp),
DIMENSION(:),
INTENT(IN) :: outer_deriv
72 REAL(dp),
DIMENSION(:),
INTENT(OUT) :: gradient
74 INTEGER :: i, ilayer, j, nlayers, width, width_in, &
76 REAL(dp),
ALLOCATABLE,
DIMENSION(:, :) :: backward, forward
77 REAL(dp),
DIMENSION(:, :, :),
POINTER :: a
! Alias this kind's weight tensor: a(layer, out-neuron, in-neuron).
79 a => pao%ml_training_matrices(ikind)%NN
! Layers are square: 2nd and 3rd weight dimensions must agree.
82 width =
SIZE(a, 2); cpassert(
SIZE(a, 2) ==
SIZE(a, 3))
83 width_in =
SIZE(descriptor)
84 width_out =
SIZE(outer_deriv)
! Layer activations; row 0 holds the input layer.
! NOTE(review): nlayers is used here but its assignment is not visible in
! this fragment — confirm it is set upstream.
86 ALLOCATE (forward(0:nlayers, width), backward(0:nlayers, width))
89 forward(0, 1:width_in) = descriptor
! Forward pass: forward(l,i) accumulates a(l,i,j)*tanh(forward(l-1,j)).
! (The enclosing DO i / DO j loops are outside this view.)
91 DO ilayer = 1, nlayers
94 forward(ilayer, i) = forward(ilayer, i) + a(ilayer, i, j)*tanh(forward(ilayer - 1, j))
! Backward pass seeded with the outer derivative at the output layer.
101 backward(nlayers, 1:width_out) = outer_deriv(:)
! Chain rule through each layer; (1 - tanh**2) is the tanh derivative.
103 DO ilayer = nlayers, 1, -1
106 backward(ilayer - 1, j) = backward(ilayer - 1, j) + backward(ilayer, i)*a(ilayer, i, j)*(1.0_dp - tanh(forward(ilayer - 1, j))**2)
! d(error)/d(descriptor): sensitivities accumulated at the input layer.
111 gradient(:) = backward(0, 1:width_in)
113 DEALLOCATE (forward, backward)
! ---------------------------------------------------------------------------
! NOTE(review): line-mangled extract of the NN training routine. The
! SUBROUTINE header, the definitions of nlayers / step_size / eps /
! max_training_cycles / convergence_eps, several loop headers and END DOs,
! and the closing END statements lie outside this view — confirm against the
! full file; do not treat this fragment as compilable as-is.
!
! Visible behaviour: for every atomic kind with training points, allocate
! and randomly initialise a weight tensor NN(nlayers, width, width), train
! it by gradient descent using nn_backpropagate, optionally compare the
! analytic gradient against a central finite difference, and finally verify
! the trained network on each training point via nn_eval.
! ---------------------------------------------------------------------------
123 INTEGER :: i, icycle, ikind, ilayer, ipoint, j, &
124 npoints, width, width_in, width_out
125 REAL(dp) :: bak, eps, error, error1, error2, num_grad
126 REAL(dp),
ALLOCATABLE,
DIMENSION(:) :: prediction
127 REAL(dp),
ALLOCATABLE,
DIMENSION(:, :, :) :: gradient
! One independent network is trained per atomic kind.
132 DO ikind = 1,
SIZE(pao%ml_training_matrices)
133 training_matrix => pao%ml_training_matrices(ikind)
! Inputs and outputs must provide the same number of training points.
135 npoints =
SIZE(training_matrix%inputs, 2)
136 cpassert(
SIZE(training_matrix%outputs, 2) == npoints)
! Nothing to train for this kind.
137 IF (npoints == 0) cycle
140 IF (pao%iw > 0)
WRITE (pao%iw, *)
"PAO|ML| Training neural network for kind: ", &
141 trim(training_matrix%kindname),
" from ", npoints,
"training points."
! Square layers sized to the larger of descriptor and target widths.
144 width_in =
SIZE(training_matrix%inputs, 1)
145 width_out =
SIZE(training_matrix%outputs, 1)
146 width = max(width_in, width_out)
147 ALLOCATE (training_matrix%NN(nlayers, width, width))
! Random weight initialisation, uniform in (-1, 1).
! NOTE(review): the DO i / DO j loops enclosing this assignment, and the
! origin of rng_stream, are outside this view — confirm upstream.
151 DO ilayer = 1, nlayers
154 training_matrix%NN(ilayer, i, j) = -1.0_dp + 2.0_dp*rng_stream%next()
! Plain full-batch gradient descent over all training points.
160 ALLOCATE (gradient(nlayers, width, width))
161 DO icycle = 1, max_training_cycles
164 DO ipoint = 1, npoints
165 CALL nn_backpropagate(training_matrix%NN, &
166 input=training_matrix%inputs(:, ipoint), &
167 goal=training_matrix%outputs(:, ipoint), &
! Steepest-descent update with fixed step size.
171 training_matrix%NN(:, :, :) = training_matrix%NN - step_size*gradient
! Progress report every 100 cycles.
173 IF (pao%iw > 0 .AND. mod(icycle, 100) == 0)
WRITE (pao%iw, *) &
174 "PAO|ML| ", trim(training_matrix%kindname), &
175 " training-cycle:", icycle,
"SQRT(error):", sqrt(error),
"grad:", sum(gradient**2)
! Stop once the accumulated gradient norm drops below threshold.
177 IF (sum(gradient**2) < convergence_eps)
EXIT
! --- Debug section: check the analytic gradient against a central finite
! difference. NOTE(review): the enclosing loops over ipoint/ilayer/i/j and
! the setting of eps are outside this view.
187 CALL nn_backpropagate(training_matrix%NN, &
188 input=training_matrix%inputs(:, ipoint), &
189 goal=training_matrix%outputs(:, ipoint), &
193 ALLOCATE (prediction(width_out))
! Perturb a single weight by +/- eps, keeping a backup to restore later.
196 bak = training_matrix%NN(ilayer, i, j)
198 training_matrix%NN(ilayer, i, j) = bak + eps
199 CALL nn_eval(training_matrix%NN, &
200 input=training_matrix%inputs(:, ipoint), &
201 prediction=prediction)
202 error1 = sum((training_matrix%outputs(:, ipoint) - prediction)**2)
204 training_matrix%NN(ilayer, i, j) = bak - eps
205 CALL nn_eval(training_matrix%NN, &
206 input=training_matrix%inputs(:, ipoint), &
207 prediction=prediction)
208 error2 = sum((training_matrix%outputs(:, ipoint) - prediction)**2)
! Restore the weight; central difference (f(+eps) - f(-eps)) / (2*eps).
210 training_matrix%NN(ilayer, i, j) = bak
211 num_grad = (error1 - error2)/(2.0_dp*eps)
212 IF (pao%iw > 0)
WRITE (pao%iw, *)
"PAO|ML| Numeric gradient:", i, j, gradient(ilayer, i, j), num_grad
216 DEALLOCATE (prediction)
220 DEALLOCATE (gradient)
! --- Final check: evaluate the trained network on every training point and
! report the deviation.
223 ALLOCATE (prediction(width_out))
224 DO ipoint = 1, npoints
225 CALL nn_eval(training_matrix%NN, &
226 input=training_matrix%inputs(:, ipoint), &
227 prediction=prediction)
228 error = maxval(abs(training_matrix%outputs(:, ipoint) - prediction))
! NOTE(review): `error` here is a max-abs deviation, yet the label says
! "SQRT(error)" — the label/metric pairing looks inconsistent; confirm
! intent before changing either.
229 IF (pao%iw > 0)
WRITE (pao%iw, *)
"PAO|ML| ", trim(training_matrix%kindname), &
230 " verify training-point:", ipoint,
"SQRT(error):", sqrt(error)
232 DEALLOCATE (prediction)