17 #include "./base/base_uses.f90"
23 CHARACTER(len=*),
PARAMETER,
PRIVATE :: moduleN =
'pao_ml_neuralnet'
28 REAL(dp),
PARAMETER :: step_size = 0.001_dp
29 INTEGER,
PARAMETER :: nlayers = 3
30 REAL(dp),
PARAMETER :: convergence_eps = 1e-7_dp
31 INTEGER,
PARAMETER :: max_training_cycles = 50000
! **************************************************************************************************
!> \brief Uses the trained neural network to make a prediction
!> \param pao PAO environment holding the trained networks per kind
!> \param ikind atomic kind for which to predict
!> \param descriptor input descriptor vector
!> \param output predicted output vector
!> \param variance always set to zero — a plain neural network provides no variance estimate
! **************************************************************************************************
   SUBROUTINE pao_ml_nn_predict(pao, ikind, descriptor, output, variance)
      TYPE(pao_env_type), POINTER                        :: pao
      INTEGER, INTENT(IN)                                :: ikind
      REAL(dp), DIMENSION(:), INTENT(IN)                 :: descriptor
      REAL(dp), DIMENSION(:), INTENT(OUT)                :: output
      REAL(dp), INTENT(OUT)                              :: variance

      TYPE(training_matrix_type), POINTER                :: training_matrix

      training_matrix => pao%ml_training_matrices(ikind)

      CALL nn_eval(training_matrix%NN, input=descriptor, prediction=output)

      ! INTENT(OUT) argument must be defined: neural networks don't provide a variance.
      variance = 0.0_dp
   END SUBROUTINE pao_ml_nn_predict
! **************************************************************************************************
!> \brief Calculates the gradient of the neural network output wrt its input descriptor
!> \param pao PAO environment holding the trained networks per kind
!> \param ikind atomic kind whose network is differentiated
!> \param descriptor input descriptor at which the derivative is evaluated
!> \param outer_deriv derivative coming from the outer function (chain rule seed)
!> \param gradient resulting d(outer)/d(descriptor)
! **************************************************************************************************
   SUBROUTINE pao_ml_nn_gradient(pao, ikind, descriptor, outer_deriv, gradient)
      TYPE(pao_env_type), POINTER                        :: pao
      INTEGER, INTENT(IN)                                :: ikind
      REAL(dp), DIMENSION(:), INTENT(IN), TARGET         :: descriptor
      REAL(dp), DIMENSION(:), INTENT(IN)                 :: outer_deriv
      REAL(dp), DIMENSION(:), INTENT(OUT)                :: gradient

      INTEGER                                            :: i, ilayer, j, nlayers, width, width_in, &
                                                            width_out
      REAL(dp), ALLOCATABLE, DIMENSION(:, :)             :: backward, forward
      REAL(dp), DIMENSION(:, :, :), POINTER              :: a

      a => pao%ml_training_matrices(ikind)%NN

      ! network geometry is taken from the weight array itself
      nlayers = SIZE(a, 1) ! local nlayers intentionally shadows the module parameter
      width = SIZE(a, 2); CPASSERT(SIZE(a, 2) == SIZE(a, 3))
      width_in = SIZE(descriptor)
      width_out = SIZE(outer_deriv)

      ALLOCATE (forward(0:nlayers, width), backward(0:nlayers, width))

      ! forward pass: layer ilayer accumulates a(ilayer,i,j)*tanh of the previous layer
      forward = 0.0_dp
      forward(0, 1:width_in) = descriptor
      DO ilayer = 1, nlayers
         DO i = 1, width
            DO j = 1, width
               forward(ilayer, i) = forward(ilayer, i) + a(ilayer, i, j)*tanh(forward(ilayer - 1, j))
            END DO
         END DO
      END DO

      ! backward pass: propagate outer_deriv through the layers via the chain rule
      backward = 0.0_dp
      backward(nlayers, 1:width_out) = outer_deriv(:)
      DO ilayer = nlayers, 1, -1
         DO i = 1, width
            DO j = 1, width
               backward(ilayer - 1, j) = backward(ilayer - 1, j) + backward(ilayer, i)*a(ilayer, i, j)* &
                                         (1.0_dp - tanh(forward(ilayer - 1, j))**2)
            END DO
         END DO
      END DO

      gradient(:) = backward(0, 1:width_in)

      DEALLOCATE (forward, backward)
   END SUBROUTINE pao_ml_nn_gradient
! **************************************************************************************************
!> \brief Trains one neural network per atomic kind on the collected training points
!> \param pao PAO environment holding the training data and receiving the trained networks
! **************************************************************************************************
   SUBROUTINE pao_ml_nn_train(pao)
      TYPE(pao_env_type), POINTER                        :: pao

      ! compile-time switch for the numeric-gradient consistency check below
      LOGICAL, PARAMETER                                 :: debug_num_grad = .FALSE.

      INTEGER                                            :: i, icycle, ikind, ilayer, ipoint, j, &
                                                            npoints, width, width_in, width_out
      REAL(dp)                                           :: bak, eps, error, error1, error2, num_grad
      REAL(dp), ALLOCATABLE, DIMENSION(:)                :: prediction
      REAL(dp), ALLOCATABLE, DIMENSION(:, :, :)          :: gradient
      TYPE(rng_stream_type)                              :: rng_stream
      TYPE(training_matrix_type), POINTER                :: training_matrix

      ! TODO: this loop over kinds could be parallelized over ranks
      DO ikind = 1, SIZE(pao%ml_training_matrices)
         training_matrix => pao%ml_training_matrices(ikind)

         npoints = SIZE(training_matrix%inputs, 2) ! number of training points
         CPASSERT(SIZE(training_matrix%outputs, 2) == npoints)
         IF (npoints == 0) CYCLE

         IF (pao%iw > 0) WRITE (pao%iw, *) "PAO|ML| Training neural network for kind: ", &
            trim(training_matrix%kindname), " from ", npoints, "training points."

         ! determine the network width and allocate the weights
         width_in = SIZE(training_matrix%inputs, 1)
         width_out = SIZE(training_matrix%outputs, 1)
         width = max(width_in, width_out)
         ALLOCATE (training_matrix%NN(nlayers, width, width))

         ! initialize the weights with uniform random numbers from -1.0 ... +1.0
         rng_stream = rng_stream_type(name="pao_nn")
         DO ilayer = 1, nlayers
            DO i = 1, width
               DO j = 1, width
                  training_matrix%NN(ilayer, i, j) = -1.0_dp + 2.0_dp*rng_stream%next()
               END DO
            END DO
         END DO

         ! train via plain gradient descent with backpropagation
         ALLOCATE (gradient(nlayers, width, width))
         DO icycle = 1, max_training_cycles
            ! accumulators must be zeroed each cycle, nn_backpropagate adds into them
            error = 0.0_dp
            gradient = 0.0_dp
            DO ipoint = 1, npoints
               CALL nn_backpropagate(training_matrix%NN, &
                                     input=training_matrix%inputs(:, ipoint), &
                                     goal=training_matrix%outputs(:, ipoint), &
                                     gradient=gradient, &
                                     error=error)
            END DO
            training_matrix%NN(:, :, :) = training_matrix%NN - step_size*gradient

            IF (pao%iw > 0 .AND. mod(icycle, 100) == 0) WRITE (pao%iw, *) &
               "PAO|ML| ", trim(training_matrix%kindname), &
               " training-cycle:", icycle, "SQRT(error):", sqrt(error), "grad:", sum(gradient**2)

            IF (sum(gradient**2) < convergence_eps) EXIT
         END DO

         ! optional debugging aid: compare backpropagated against numeric gradients
         IF (debug_num_grad) THEN
            eps = 1e-4_dp ! finite-difference step — TODO confirm against original tuning
            ipoint = 1 ! check only the first training point
            error = 0.0_dp
            gradient = 0.0_dp
            CALL nn_backpropagate(training_matrix%NN, &
                                  input=training_matrix%inputs(:, ipoint), &
                                  goal=training_matrix%outputs(:, ipoint), &
                                  gradient=gradient, &
                                  error=error)

            ALLOCATE (prediction(width_out))
            DO ilayer = 1, nlayers
            DO i = 1, width
            DO j = 1, width
               bak = training_matrix%NN(ilayer, i, j)

               training_matrix%NN(ilayer, i, j) = bak + eps
               CALL nn_eval(training_matrix%NN, &
                            input=training_matrix%inputs(:, ipoint), &
                            prediction=prediction)
               error1 = sum((training_matrix%outputs(:, ipoint) - prediction)**2)

               training_matrix%NN(ilayer, i, j) = bak - eps
               CALL nn_eval(training_matrix%NN, &
                            input=training_matrix%inputs(:, ipoint), &
                            prediction=prediction)
               error2 = sum((training_matrix%outputs(:, ipoint) - prediction)**2)

               training_matrix%NN(ilayer, i, j) = bak ! restore the weight
               num_grad = (error1 - error2)/(2.0_dp*eps) ! central difference
               IF (pao%iw > 0) WRITE (pao%iw, *) "PAO|ML| Numeric gradient:", i, j, gradient(ilayer, i, j), num_grad
            END DO
            END DO
            END DO
            DEALLOCATE (prediction)
         END IF
         DEALLOCATE (gradient)

         ! report the training quality: worst-case error per training point
         ALLOCATE (prediction(width_out))
         DO ipoint = 1, npoints
            CALL nn_eval(training_matrix%NN, &
                         input=training_matrix%inputs(:, ipoint), &
                         prediction=prediction)
            error = maxval(abs(training_matrix%outputs(:, ipoint) - prediction))
            IF (pao%iw > 0) WRITE (pao%iw, *) "PAO|ML| ", trim(training_matrix%kindname), &
               " verify training-point:", ipoint, "SQRT(error):", sqrt(error)
         END DO
         DEALLOCATE (prediction)

      END DO
   END SUBROUTINE pao_ml_nn_train
! **************************************************************************************************
!> \brief Evaluates the neural network for a given input
!> \param A weight array of shape (nlayers, width, width)
!> \param input input vector, its size may be smaller than width (zero-padded internally)
!> \param prediction output vector, filled from the last layer
! **************************************************************************************************
   SUBROUTINE nn_eval(A, input, prediction)
      REAL(dp), DIMENSION(:, :, :), INTENT(IN)           :: a
      REAL(dp), DIMENSION(:), INTENT(IN)                 :: input
      REAL(dp), DIMENSION(:), INTENT(OUT)                :: prediction

      INTEGER                                            :: i, ilayer, j, nlayers, width, width_in, &
                                                            width_out
      REAL(dp), ALLOCATABLE, DIMENSION(:, :)             :: forward

      ! network geometry is taken from the weight array itself
      nlayers = SIZE(a, 1) ! local nlayers intentionally shadows the module parameter
      width = SIZE(a, 2); CPASSERT(SIZE(a, 2) == SIZE(a, 3))
      width_in = SIZE(input)
      width_out = SIZE(prediction)

      ALLOCATE (forward(0:nlayers, width))

      ! zero-pad the input into layer 0, then run the forward pass
      forward = 0.0_dp
      forward(0, 1:width_in) = input(:)

      DO ilayer = 1, nlayers
         DO i = 1, width
            DO j = 1, width
               forward(ilayer, i) = forward(ilayer, i) + a(ilayer, i, j)*tanh(forward(ilayer - 1, j))
            END DO
         END DO
      END DO

      prediction(:) = forward(nlayers, 1:width_out)

      DEALLOCATE (forward)
   END SUBROUTINE nn_eval
! **************************************************************************************************
!> \brief Runs one forward/backward pass, accumulating the squared error and weight gradient
!> \param A weight array of shape (nlayers, width, width)
!> \param input input vector of one training point
!> \param goal desired output for that training point
!> \param error accumulator — SUM((prediction-goal)**2) is ADDED to it (caller zeroes it)
!> \param gradient accumulator — d(error)/d(A) is ADDED to it (caller zeroes it)
! **************************************************************************************************
   SUBROUTINE nn_backpropagate(A, input, goal, error, gradient)
      REAL(dp), DIMENSION(:, :, :), INTENT(IN)           :: a
      REAL(dp), DIMENSION(:), INTENT(IN)                 :: input, goal
      REAL(dp), INTENT(INOUT)                            :: error
      REAL(dp), DIMENSION(:, :, :), INTENT(INOUT)        :: gradient

      INTEGER                                            :: i, ilayer, j, nlayers, width, width_in, &
                                                            width_out
      REAL(dp), ALLOCATABLE, DIMENSION(:)                :: prediction
      REAL(dp), ALLOCATABLE, DIMENSION(:, :)             :: backward, forward

      ! network geometry is taken from the weight array itself
      nlayers = SIZE(a, 1) ! local nlayers intentionally shadows the module parameter
      width = SIZE(a, 2); CPASSERT(SIZE(a, 2) == SIZE(a, 3))
      width_in = SIZE(input)
      width_out = SIZE(goal)

      ALLOCATE (forward(0:nlayers, width), prediction(width_out), backward(0:nlayers, width))

      ! forward pass, same scheme as nn_eval
      forward = 0.0_dp
      forward(0, 1:width_in) = input

      DO ilayer = 1, nlayers
         DO i = 1, width
            DO j = 1, width
               forward(ilayer, i) = forward(ilayer, i) + a(ilayer, i, j)*tanh(forward(ilayer - 1, j))
            END DO
         END DO
      END DO

      prediction(:) = forward(nlayers, 1:width_out)

      error = error + sum((prediction - goal)**2)

      ! turning point: seed the backward pass with the error derivative
      backward = 0.0_dp
      backward(nlayers, 1:width_out) = prediction - goal

      DO ilayer = nlayers, 1, -1
         DO i = 1, width
            DO j = 1, width
               ! gradient of the squared error wrt the weight a(ilayer,i,j)
               gradient(ilayer, i, j) = gradient(ilayer, i, j) + 2.0_dp*backward(ilayer, i)*tanh(forward(ilayer - 1, j))
               ! chain the derivative through the tanh activation to the previous layer
               backward(ilayer - 1, j) = backward(ilayer - 1, j) + backward(ilayer, i)*a(ilayer, i, j)* &
                                         (1.0_dp - tanh(forward(ilayer - 1, j))**2)
            END DO
         END DO
      END DO

      DEALLOCATE (forward, backward, prediction)
   END SUBROUTINE nn_backpropagate
Cross-reference of used modules and public procedures:
- kinds: defines the basic variable types (provides `integer, parameter, public :: dp`).
- This module implements a neural network with the public interface:
  - `subroutine pao_ml_nn_gradient(pao, ikind, descriptor, outer_deriv, gradient)` — calculates the gradient of the neural network.
  - `subroutine pao_ml_nn_train(pao)` — trains the neural network on the given training points.
  - `subroutine pao_ml_nn_predict(pao, ikind, descriptor, output, variance)` — uses the neural network to make a prediction.
- pao_types: types used by the PAO machinery.
- parallel_rng_types: parallel (pseudo-)random number generator (RNG) for multiple streams and substreams of random numbers.