Skip to content

Commit

Permalink
Wrap all random_init calls with directives so they don't execute
when compiling with nagfor. These directives should be removed
when, as expected, nagfor provides support for `random_init` in
an upcoming version.
Browse files Browse the repository at this point in the history
  • Loading branch information
ktras committed Feb 2, 2024
1 parent 942a4c2 commit d62f97f
Show file tree
Hide file tree
Showing 8 changed files with 21 additions and 5 deletions.
2 changes: 2 additions & 0 deletions example/learn-addition.f90
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,9 @@ program learn_addition
type(bin_t), allocatable :: bins(:)
real, allocatable :: cost(:), random_numbers(:)

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

trainable_engine = perturbed_identity_network(perturbation_magnitude=0.05)
call output(trainable_engine%to_inference_engine(), string_t("initial-network.json"))
Expand Down
6 changes: 4 additions & 2 deletions example/learn-exponentiation.f90
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,9 @@ program learn_exponentiation
type(bin_t), allocatable :: bins(:)
real, allocatable :: cost(:), random_numbers(:)

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

trainable_engine = perturbed_identity_network(perturbation_magnitude=0.05)
call output(trainable_engine%to_inference_engine(), string_t("initial-network.json"))
Expand All @@ -60,10 +62,10 @@ program learn_exponentiation
inputs = [(tensor_t(real([(j*i, j = 1,num_inputs)])/(num_inputs*num_pairs)), i = 1, num_pairs)]
desired_outputs = y(inputs)
output_sizes = [(size(desired_outputs(i)%values()),i=1,size(desired_outputs))]
call assert(all([num_outputs==output_sizes]), "fit-polynomials: # outputs", intrinsic_array_t([num_outputs,output_sizes]))
call assert(all([num_outputs==output_sizes]), "fit-polynomials: # outputs", intrinsic_array_t([num_outputs,output_sizes]))
end block
input_output_pairs = input_output_pair_t(inputs, desired_outputs)
block
block
integer b
bins = [(bin_t(num_items=num_pairs, num_bins=num_mini_batches, bin_number=b), b = 1, num_mini_batches)]
end block
Expand Down
2 changes: 2 additions & 0 deletions example/learn-microphysics-procedures.f90
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,9 @@ program learn_microphysics_procedures
integer, parameter :: nodes_per_layer(*) = [2, 72, 2]
real, parameter :: cost_tolerance = 1.E-08

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

open(newunit=network_unit, file=network_file%string(), form='formatted', status='old', iostat=io_status, action='read')

Expand Down
2 changes: 2 additions & 0 deletions example/learn-multiplication.f90
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,9 @@ program learn_multiplication
type(bin_t), allocatable :: bins(:)
real, allocatable :: cost(:), random_numbers(:)

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

trainable_engine = perturbed_identity_network(perturbation_magnitude=0.05)
call output(trainable_engine%to_inference_engine(), string_t("initial-network.json"))
Expand Down
2 changes: 2 additions & 0 deletions example/learn-power-series.f90
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,9 @@ program learn_power_series
type(bin_t), allocatable :: bins(:)
real, allocatable :: cost(:), random_numbers(:)

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

trainable_engine = perturbed_identity_network(perturbation_magnitude=0.05)
call output(trainable_engine%to_inference_engine(), string_t("initial-network.json"))
Expand Down
2 changes: 2 additions & 0 deletions example/learn-saturated-mixing-ratio.f90
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,9 @@ program train_saturated_mixture_ratio
integer, parameter :: nodes_per_layer(*) = [2, 72, 1]
real, parameter :: cost_tolerance = 1.E-08

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

open(newunit=network_unit, file=network_file%string(), form='formatted', status='old', iostat=io_status, action='read')

Expand Down
2 changes: 2 additions & 0 deletions example/train-and-write.f90
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,9 @@ program train_and_write
type(bin_t), allocatable :: bins(:)
real, allocatable :: cost(:), random_numbers(:)

#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif

trainable_engine = perturbed_identity_network(perturbation_magnitude=0.2)
call output(trainable_engine%to_inference_engine(), string_t("initial-network.json"))
Expand Down
8 changes: 5 additions & 3 deletions test/main.f90
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
! Copyright (c), The Regents of the University of California
! Terms of use are as specified in LICENSE.txt
program main
use inference_engine_test_m, only : inference_engine_test_t
use asymmetric_engine_test_m, only : asymmetric_engine_test_t
use trainable_engine_test_m, only : trainable_engine_test_t
use inference_engine_test_m, only : inference_engine_test_t
use asymmetric_engine_test_m, only : asymmetric_engine_test_t
use trainable_engine_test_m, only : trainable_engine_test_t
use hyperparameters_test_m, only : hyperparameters_test_t
use network_configuration_test_m, only : network_configuration_test_t
use training_configuration_test_m, only : training_configuration_test_t
Expand All @@ -20,7 +20,9 @@ program main
integer :: passes=0, tests=0

call cpu_time(t_start)
#ifndef NAGFOR
call random_init(image_distinct=.true., repeatable=.true.)
#endif
call inference_engine_test%report(passes, tests)
call asymmetric_engine_test%report(passes, tests)
call trainable_engine_test%report(passes, tests)
Expand Down

0 comments on commit d62f97f

Please sign in to comment.