
Commit 55bbbcb
Merge pull request #105 from BerkeleyLab/concurrency
Exploit additional concurrency in the training algorithm
rouson authored Dec 20, 2023
2 parents aa3064e + f34f467 commit 55bbbcb
Showing 1 changed file with 6 additions and 6 deletions.
src/inference_engine/trainable_engine_s.f90
@@ -60,22 +60,22 @@

module procedure infer

-   real(rkind), allocatable :: z(:,:), a(:,:)
+   real(rkind), allocatable :: a(:,:)
integer l

call self%assert_consistent

associate(w => self%w, b => self%b, n => self%n, output_layer => ubound(self%n,1))

-     allocate(z, mold=b)
allocate(a(maxval(n), input_layer:output_layer)) ! Activations

a(1:n(input_layer),input_layer) = inputs%values()

feed_forward: &
do l = 1,output_layer
-       z(1:n(l),l) = matmul(w(1:n(l),1:n(l-1),l), a(1:n(l-1),l-1)) + b(1:n(l),l)
-       a(1:n(l),l) = self%differentiable_activation_strategy_%activation(z(1:n(l),l))
+       a(1:n(l),l) = self%differentiable_activation_strategy_%activation( &
+         matmul(w(1:n(l),1:n(l-1),l), a(1:n(l-1),l-1)) + b(1:n(l),l) &
+       )
end do feed_forward

outputs = tensor_t(a(1:n(output_layer),output_layer))
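
Note: the hunk above removes the intermediate pre-activation array z; the matmul-plus-bias expression now feeds the activation call directly, so z never needs to be allocated or stored per layer. Below is a minimal standalone sketch of the same pattern, with an illustrative sigmoid activation and layer sizes that are not taken from the repository:

    program fused_feed_forward_sketch
      !! Sketch only: folding the affine transform into the activation call's
      !! argument eliminates the temporary z array used before this commit.
      implicit none
      integer, parameter :: rkind = kind(1.0)
      integer, parameter :: n_in = 3, n_out = 2
      real(rkind) :: w(n_out,n_in), b(n_out), a_prev(n_in), a_next(n_out)

      call random_number(w); call random_number(b); call random_number(a_prev)

      ! Before: z = matmul(w, a_prev) + b; a_next = sigmoid(z)
      ! After:  the temporary is folded into the activation's argument.
      a_next = sigmoid(matmul(w, a_prev) + b)

      print *, a_next

    contains

      elemental function sigmoid(x) result(y)
        real(rkind), intent(in) :: x
        real(rkind) :: y
        y = 1._rkind/(1._rkind + exp(-x))
      end function

    end program fused_feed_forward_sketch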
@@ -186,7 +186,7 @@
real, parameter :: epsilon = real(1.D-08,rkind)

adjust_weights_and_biases: &
-     do l = 1,output_layer
+     do concurrent(l = 1:output_layer)
dcdw(1:n(l),1:n(l-1),l) = dcdw(1:n(l),1:n(l-1),l)/(mini_batch_size)
vdw(1:n(l),1:n(l-1),l) = beta(1)*vdw(1:n(l),1:n(l-1),l) + obeta(1)*dcdw(1:n(l),1:n(l-1),l)
sdw (1:n(l),1:n(l-1),l) = beta(2)*sdw(1:n(l),1:n(l-1),l) + obeta(2)*(dcdw(1:n(l),1:n(l-1),l)**2)
@@ -205,7 +205,7 @@
end block
else
adjust_weights_and_biases: &
-     do l = 1,output_layer
+     do concurrent(l = 1:output_layer)
dcdb(1:n(l),l) = dcdb(1:n(l),l)/mini_batch_size
b(1:n(l),l) = b(1:n(l),l) - eta*dcdb(1:n(l),l) ! Adjust biases
dcdw(1:n(l),1:n(l-1),l) = dcdw(1:n(l),1:n(l-1),l)/mini_batch_size
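
Note: both adjust_weights_and_biases loops become do concurrent constructs, asserting that each layer's update reads and writes only that layer's slices of the parameter and gradient arrays, so the iterations are order-independent and eligible for compiler parallelization. Below is a minimal standalone sketch of the same construct, with illustrative sizes, gradients, and learning rate that are not taken from the repository:

    program concurrent_update_sketch
      !! Sketch only: the concurrent form is semantically equivalent to the serial
      !! "do l = 1, layers" loop it replaces, but declares the iterations independent.
      implicit none
      integer, parameter :: rkind = kind(1.0)
      integer, parameter :: layers = 4, nodes = 8, mini_batch_size = 32
      real(rkind), parameter :: eta = 1.5_rkind ! illustrative learning rate
      real(rkind) :: b(nodes,layers), dcdb(nodes,layers)
      integer :: l

      call random_number(b); call random_number(dcdb)

      do concurrent(l = 1:layers)
        dcdb(:,l) = dcdb(:,l)/mini_batch_size ! average over the mini-batch
        b(:,l) = b(:,l) - eta*dcdb(:,l)       ! adjust biases
      end do

      print *, b(:,1)
    end program concurrent_update_sketch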
