From d36ff97b98da1409e1406fa8027e18768783019d Mon Sep 17 00:00:00 2001
From: Damian Rouson
Date: Sat, 2 Dec 2023 17:20:50 -0800
Subject: [PATCH 1/2] chore(fpm): update sourcery dependency to v 4.5.0

---
 fpm.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/fpm.toml b/fpm.toml
index 8f01ab153..9cfa64e6e 100644
--- a/fpm.toml
+++ b/fpm.toml
@@ -1,10 +1,10 @@
 name = "inference-engine"
 version = "0.5.0"
 license = "license"
-author = "Damian Rouson, Tan Nguyen, Jordan Welsman, David Torres"
+author = "Damian Rouson, Tan Nguyen, Jordan Welsman, David Torres, Brad Richardson"
 maintainer = "rouson@lbl.gov"
 
 [dependencies]
 assert = {git = "https://github.com/sourceryinstitute/assert", tag = "1.5.0"}
-sourcery = {git = "https://github.com/sourceryinstitute/sourcery", tag = "4.4.4"}
+sourcery = {git = "https://github.com/sourceryinstitute/sourcery", tag = "4.5.0"}
 netcdf-interfaces = {git = "https://github.com/LKedward/netcdf-interfaces.git", rev = "d2bbb71ac52b4e346b62572b1ca1620134481096"}

From ccda53dfd1cb897e53faa09b9325661b95047407 Mon Sep 17 00:00:00 2001
From: Damian Rouson
Date: Sat, 2 Dec 2023 18:39:11 -0800
Subject: [PATCH 2/2] feat(example): add concurrent-inferences.f90

---
 example/concurrent-inferences.f90 | 62 +++++++++++++++++++++++++++++++
 1 file changed, 62 insertions(+)
 create mode 100644 example/concurrent-inferences.f90

diff --git a/example/concurrent-inferences.f90 b/example/concurrent-inferences.f90
new file mode 100644
index 000000000..42bf1213e
--- /dev/null
+++ b/example/concurrent-inferences.f90
@@ -0,0 +1,62 @@
+! Copyright (c), The Regents of the University of California
+! Terms of use are as specified in LICENSE.txt
+program concurrent_inferences
+  !! This program demonstrates how to read a neural network from a JSON file
+  !! and use the network to perform concurrent inferences.
+  use inference_engine_m, only : inference_engine_t, tensor_t
+  use sourcery_m, only : string_t, command_line_t, file_t
+  use assert_m, only : assert
+  use iso_fortran_env, only : int64, real64
+  implicit none
+
+  type(string_t) network_file_name
+  type(command_line_t) command_line
+
+  ! The network file is supplied on the command line via --network <path>.
+  network_file_name = string_t(command_line%flag_value("--network"))
+
+  if (len(network_file_name%string())==0) then
+    error stop new_line('a') // new_line('a') // &
+      'Usage: ./build/run-fpm.sh run --example concurrent-inferences -- --network "<file name>"'
+  end if
+
+  block
+    type(inference_engine_t) inference_engine
+    type(tensor_t), allocatable :: inputs(:,:,:), outputs(:,:,:)
+    real, allocatable :: input_components(:,:,:,:)
+    integer, parameter :: lat=263, lon=317, lev=15 ! latitudes, longitudes, levels (elevations)
+    integer i, j, k
+
+    print *, "Constructing a new inference_engine_t object from the file " // network_file_name%string()
+    inference_engine = inference_engine_t(file_t(network_file_name))
+
+    print *,"Defining an array of tensor_t input objects with random normalized components"
+    allocate(inputs(lat,lon,lev))
+    allocate(input_components(lat,lon,lev,inference_engine%num_inputs()))
+    call random_number(input_components)
+
+    do concurrent(i=1:lat, j=1:lon, k=1:lev)
+      inputs(i,j,k) = tensor_t(input_components(i,j,k,:))
+    end do
+
+    block
+      integer(int64) t_start, t_finish, clock_rate
+
+      print *,"Performing elemental inferences"
+      call system_clock(t_start, clock_rate)
+      outputs = inference_engine%infer(inputs) ! implicit allocation of outputs array
+      call system_clock(t_finish)
+      print *,"Elemental inference time: ", real(t_finish - t_start, real64)/real(clock_rate, real64)
+
+      call assert(all(shape(outputs) == shape(inputs)), "all(shape(outputs) == shape(inputs))")
+
+      print *,"Performing concurrent inference"
+      call system_clock(t_start)
+      do concurrent(i=1:lat, j=1:lon, k=1:lev)
+        outputs(i,j,k) = inference_engine%infer(inputs(i,j,k))
+      end do
+      call system_clock(t_finish)
+      print *,"Concurrent inference time: ", real(t_finish - t_start, real64)/real(clock_rate, real64)
+    end block
+  end block
+
+end program concurrent_inferences