From 38ef111d1cb6c75e2dd4b73a594cf2b636d284f6 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Wed, 1 May 2024 10:28:45 +0100 Subject: [PATCH 1/2] coarse sims done --- docs/data-generation.md | 12 +++-- files/coarse_simulations/coarse_BOUT.inp | 53 +++++++++++++++++++++ files/coarse_simulations/run_coarse_sims.sh | 39 +++++++++++++++ 3 files changed, 101 insertions(+), 3 deletions(-) create mode 100755 files/coarse_simulations/coarse_BOUT.inp create mode 100644 files/coarse_simulations/run_coarse_sims.sh diff --git a/docs/data-generation.md b/docs/data-generation.md index 42cd7ae..c93e2ef 100644 --- a/docs/data-generation.md +++ b/docs/data-generation.md @@ -30,9 +30,9 @@ Following the structure given in the [general data generation](ML_training.md) c cmake --build build --target hasegawa-wakatani ``` - Before simulating the training data, a burn-in run must be conducted at the desired resolution. For an example of this, see [fine_init.sh](files/data-generation/fine_init.sh). Edit `` on line 9 and `x01` in lines containing paths to match your `$WORK` and desired `/scratch` locations and submit via `sbatch fine_init.sh`. + Before simulating the training data, a burn-in run must be conducted at the desired resolution. For an example of this, see [fine_init.sh](../files/data-generation/fine_init.sh). Edit `` on line 9 and `x01` in lines containing paths to match your `$WORK` and desired `/scratch` locations and submit via `sbatch fine_init.sh`. - Following that, we run a number of sequentially trajectories to generate fine-grained ground-truth data. See [fine_trajectories.sh](files/data-generation/fine_trajectories.sh) + Following that, we run a number of sequentially trajectories to generate fine-grained ground-truth data. See [fine_trajectories.sh](../files/data-generation/fine_trajectories.sh) The initial simulation produces "restart files", `/scratch/space1/x01/data/my-scratch-data/initial/data/BOUT.restart.*.nc` from which a simulation can be continued. 
Those, as well as the input file (`/scratch/space1/x01/data/my-scratch-data/initial/data/BOUT.inp` should be placed in `/scratch/space1/x01/data/my-scratch-data/0`. @@ -40,8 +40,14 @@ Following the structure given in the [general data generation](ML_training.md) c 3. Coarsen selected simulation snapshots. - Fine-grained data must be coarsened to match the desired coarse-grained resolution. This can be done via interpolation for a general solution. Files in [files/coarsening](files/coarsening) perform this task. Submit `submit-resize.sh` via `sbatch submit-resize.sh`. + Fine-grained data must be coarsened to match the desired coarse-grained resolution. This can be done via interpolation for a general solution. Files in [files/coarsening](../files/coarsening) perform this task. Submit `submit-resize.sh` via `sbatch submit-resize.sh`. + + _Note: this operates on one trajectory at a time and will therefore need to be repeated for each trajectory run in step 2._ 4. Single-timestep coarse simulations. + With the previous step having extracted fine-grained data for each time step (and each trajectory for which it was repeated), we now need to run a single-timestep coarse-grained simulation. To do this, see [files/coarse_simulations](../files/coarse_simulations/). Submitting [run_coarse_sims.sh](../files/coarse_simulations/run_coarse_sims.sh) will run a single step simulation for each coarsened timestep created in the previous step. + +5. Calculate the correction. 
+ diff --git a/files/coarse_simulations/coarse_BOUT.inp b/files/coarse_simulations/coarse_BOUT.inp new file mode 100755 index 0000000..ee36e86 --- /dev/null +++ b/files/coarse_simulations/coarse_BOUT.inp @@ -0,0 +1,53 @@ +# +# Hasegawa-Wakatani test case +# + +timestep = 0.026 # Output timestep +nout = 1 # Number of output steps +restart = true +append = false + +MYG = 0 # No need for Y communications + +periodicX = true # Domain is periodic in X + +[mesh] + +nx = 260 # Note 4 guard cells in X +ny = 1 +nz = 256 # Periodic, so no guard cells in Z + +dx = 0.1 +dy = 1.0 +dz = 0.1 + +[hw] + +alpha = 1.0 # Adiabaticity (~ conductivity) +kappa = 2.0 # Density gradient drive +Dvort = 0.005 # Vorticity diffusion +Dn = 0.005 # Density diffusion + +modified = true + +bracket = 2 # 0 = std, 1 = simple, 2 = arakawa + +[solver] +type = rk4 +adaptive = false +timestep = 0.026 + +[all] +#scale = 0.0 +bndry_all = dirichlet_o2 + +[n] +#bndry_all = neumann_o2 + +[vort] +#bndry_all = dirichlet_o2 + +#scale = 0.1 # Fluctuation amplitude +#function = mixmode(2*pi*x) * mixmode(z) # Fluctuation function + + diff --git a/files/coarse_simulations/run_coarse_sims.sh b/files/coarse_simulations/run_coarse_sims.sh new file mode 100644 index 0000000..cdac962 --- /dev/null +++ b/files/coarse_simulations/run_coarse_sims.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +#SBATCH --nodes=1 +#SBATCH --ntasks=1 +# #SBATCH --exclusive +#SBATCH --time=08:00:00 +#SBATCH --partition=standard +#SBATCH --qos=standard +#SBATCH --account= + +eval "$(/work/x01/x01/$USER/miniconda3/bin/conda shell.bash hook)" +conda activate boutsmartsim + +module load intel-20.4/mpi +module load intel-20.4/compilers +module load fftw/3.3.10-intel20.4-impi20.4 +module load netcdf-parallel/4.9.2-intel20-impi20 + +ID_TRAJ=1 +BASE_PATH=/scratch/space1/x01/data/my-scratch-data + +executable=/work/x01/x01/$USER/my-hw/build/hasegawa-wakatani + +for i in {0..1000} +do + coarse_TRAJ_PATH=${BASE_PATH}/${i}/coarse + 
coarse_TRAJ_SIM_PATH=${BASE_PATH}/${i}/coarse_sim + mkdir -p $coarse_TRAJ_SIM_PATH + cp ${coarse_TRAJ_PATH}/BOUT.restart.* $coarse_TRAJ_SIM_PATH + cp coarse_BOUT.inp $coarse_TRAJ_SIM_PATH/BOUT.inp + + cd $coarse_TRAJ_SIM_PATH + + srun --nodes=1 --ntasks=1 --job-name=ss_job_0 --distribution=block:block \ + $executable -d . \ + restart=true append=false \ + solver:type=rk4 solver:adaptive=false solver:timestep=0.026 \ + nout=1 timestep=0.026 mesh:nx=260 mesh:nz=256 mesh:dx=0.1 mesh:dz=0.1 -d . +done From 0f259fe4668d9862e5602a459274e07b173fdc33 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Wed, 1 May 2024 10:36:57 +0100 Subject: [PATCH 2/2] data gen done --- docs/data-generation.md | 4 ++-- docs/workflow.md | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/docs/data-generation.md b/docs/data-generation.md index c93e2ef..63a9fd8 100644 --- a/docs/data-generation.md +++ b/docs/data-generation.md @@ -48,6 +48,6 @@ Following the structure given in the [general data generation](ML_training.md) c With the previous step having extracted fine-grained data for each time step (and each trajectory for which it was repeated), we now need to run a single-timestep coarse-grained simulation. To do this, see [files/coarse_simulations](../files/coarse_simulations/). Submitting [run_coarse_sims.sh](../files/coarse_simulations/run_coarse_sims.sh) will run a single step simulation for each coarsened timestep created in the previous step. -5. Calculate the correction. +Subsequent steps: calculating the error; reformatting data for ingestion into TensorFlow; and model training are covered in [ML model training implementation](training_implementation.md). 
+ - diff --git a/docs/workflow.md b/docs/workflow.md index 3c2ec21..a4fd4be 100644 --- a/docs/workflow.md +++ b/docs/workflow.md @@ -5,8 +5,7 @@ The system needs to have all the tools and packages (in suitable versions) insta The example workflow described here does not require a pre-trained ML model, we are using a placeholder model that alwyas returns zeroes to showcase the framework, and the script is provided here. Obviously, any other model can be exported in the desired format and used in the workflow. [< Back](./) - -## Export the ML model + ## Compile Hasegawa Wakatani with SmartRedis