Skip to content

Commit

Permalink
Bugfix: Actually pass the MPI communicator to the openPMD Series
Browse files Browse the repository at this point in the history
  • Loading branch information
franzpoeschel committed Feb 12, 2021
1 parent 325032b commit 8034b4f
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions include/picongpu/plugins/PhaseSpace/DumpHBufferOpenPMD.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ namespace picongpu
MPI_CHECK(MPI_Comm_size(mpiComm, &size));

/** create parallel domain collector ******************************/
::openPMD::Series series(openPMDFilename.str(), ::openPMD::Access::CREATE, jsonConfig);
::openPMD::Series series(openPMDFilename.str(), ::openPMD::Access::CREATE, mpiComm, jsonConfig);
::openPMD::Iteration iteration = series.iterations[currentStep];

const std::string software("PIConGPU");
Expand Down Expand Up @@ -152,9 +152,6 @@ namespace picongpu

/** write local domain ********************************************/

// avoid deadlock between not finished pmacc tasks and mpi calls in HDF5
__getTransactionEvent().waitForFinished();

::openPMD::Mesh mesh = iteration.meshes[dataSetName.str()];
::openPMD::MeshRecordComponent dataset = mesh[::openPMD::RecordComponent::SCALAR];

Expand Down Expand Up @@ -216,6 +213,9 @@ namespace picongpu
*/
dataset.setPosition(std::vector<float>{0.5, 0.5});

// avoid deadlock between not finished pmacc tasks and mpi calls in openPMD
__getTransactionEvent().waitForFinished();

/** close file ****************************************************/
iteration.close();
}
Expand Down

0 comments on commit 8034b4f

Please sign in to comment.