1#ifndef NGEN_PARALLEL_UTILS_H
2#define NGEN_PARALLEL_UTILS_H
7#ifndef MPI_HF_SUB_CODE_GOOD
8#define MPI_HF_SUB_CODE_GOOD 0
11#ifndef MPI_HF_SUB_CODE_BAD
12#define MPI_HF_SUB_CODE_BAD 1
15#ifndef NGEN_MPI_DATA_TAG
16#define NGEN_MPI_DATA_TAG 100
19#ifndef NGEN_MPI_PROTOCOL_TAG
20#define NGEN_MPI_PROTOCOL_TAG 101
/**
 * @brief Synchronize a per-rank status flag across all MPI processes,
 *        combining with logical AND.
 *
 * Presumably each rank contributes its local @p status and every rank receives
 * the collective result (true only if all ranks report success), with
 * @p taskDesc used in any diagnostic output — confirm against the
 * implementation in the corresponding source file.
 *
 * @param status        This rank's local success/failure flag.
 * @param mpi_rank      Rank of the calling MPI process.
 * @param mpi_num_procs Total number of MPI processes.
 * @param taskDesc      Human-readable description of the task being synced.
 * @return The AND-aggregated status across all ranks.
 */
bool mpiSyncStatusAnd(
    bool status,
    int mpi_rank,
    int mpi_num_procs,
    const std::string &taskDesc);
/**
 * @brief Overload of mpiSyncStatusAnd that omits the task description.
 *
 * Behaves like the four-argument overload but without a task label for
 * diagnostics — NOTE(review): confirm in the implementation whether this
 * simply forwards with an empty description.
 *
 * @param status        This rank's local success/failure flag.
 * @param mpi_rank      Rank of the calling MPI process.
 * @param mpi_num_procs Total number of MPI processes.
 * @return The AND-aggregated status across all ranks.
 */
bool mpiSyncStatusAnd(
    bool status,
    int mpi_rank,
    int mpi_num_procs);
/**
 * @brief Check whether the hydrofabric has already been subdivided for this
 *        MPI configuration.
 *
 * Presumably tests for the existence of per-process files derived from
 * @p catchmentDataFile (one per rank) — confirm the exact naming scheme in
 * the implementation.
 *
 * @param catchmentDataFile Path to the base catchment hydrofabric data file.
 * @param mpi_rank          Rank of the calling MPI process.
 * @param mpi_num_procs     Total number of MPI processes.
 * @param printMsg          Whether to emit a message describing the result.
 * @return True if the subdivided hydrofabric files appear to be present.
 */
bool is_hydrofabric_subdivided(
    const std::string &catchmentDataFile,
    int mpi_rank,
    int mpi_num_procs,
    bool printMsg);
/**
 * @brief Populate an array identifying the host associated with each MPI rank.
 *
 * NOTE(review): @p host_array is presumably caller-allocated with at least
 * @p mpi_num_procs elements, receiving one host id per rank so that ranks on
 * the same physical host share a value — confirm against the implementation.
 *
 * @param mpi_rank       Rank of the calling MPI process.
 * @param mpi_num_procs  Total number of MPI processes.
 * @param host_array     Out-parameter: caller-allocated array of host ids,
 *                       indexed by rank (assumed size >= mpi_num_procs).
 */
void get_hosts_array(
    int mpi_rank,
    int mpi_num_procs,
    int *host_array);
/**
 * @brief Send the contents of a local text file to another MPI rank.
 *
 * Counterpart to mpi_recv_text_file; presumably transmits the file's contents
 * point-to-point using the NGEN_MPI_* tags — confirm the chunking/protocol in
 * the implementation.
 *
 * @param fileName Path of the text file to send.
 * @param mpi_rank Rank of the calling (sending) MPI process.
 * @param destRank Rank that should receive the file contents.
 * @return True on success, false otherwise.
 */
bool mpi_send_text_file(
    const char *fileName,
    const int mpi_rank,
    const int destRank);
/**
 * @brief Receive a text file's contents from another MPI rank and write them
 *        to a local file.
 *
 * Counterpart to mpi_send_text_file; presumably writes the received contents
 * to @p fileName on the local filesystem — confirm overwrite semantics in the
 * implementation.
 *
 * @param fileName Path at which to write the received file.
 * @param mpi_rank Rank of the calling (receiving) MPI process.
 * @param srcRank  Rank sending the file contents.
 * @return True on success, false otherwise.
 */
bool mpi_recv_text_file(
    const char *fileName,
    const int mpi_rank,
    const int srcRank);
/**
 * @brief Distribute previously subdivided hydrofabric files from one rank to
 *        the ranks that need them.
 *
 * Presumably @p sendingRank holds the per-rank catchment/nexus files (derived
 * from @p baseCatchmentFile / @p baseNexusFile) and sends each rank its own
 * pieces, with @p hostIdForRank (as produced by get_hosts_array) used to skip
 * transfers between ranks sharing a filesystem — confirm against the
 * implementation.
 *
 * @param baseCatchmentFile Path of the base catchment data file.
 * @param baseNexusFile     Path of the base nexus data file.
 * @param sendingRank       Rank that owns the files to be distributed.
 * @param mpi_rank          Rank of the calling MPI process.
 * @param mpi_num_procs     Total number of MPI processes.
 * @param hostIdForRank     Array mapping each rank to a host id
 *                          (assumed size >= mpi_num_procs — TODO confirm).
 * @param syncReturnStatus  Whether to AND-synchronize the return status
 *                          across ranks before returning.
 * @param blockAll          Whether all ranks should block until distribution
 *                          completes.
 * @return True on success (possibly globally synchronized; see
 *         @p syncReturnStatus), false otherwise.
 */
bool distribute_subdivided_hydrofabric_files(
    const std::string &baseCatchmentFile,
    const std::string &baseNexusFile,
    const int sendingRank,
    const int mpi_rank,
    const int mpi_num_procs,
    const int *hostIdForRank,
    bool syncReturnStatus,
    bool blockAll);
/**
 * @brief Subdivide the hydrofabric data into per-process pieces for parallel
 *        execution.
 *
 * Presumably splits @p catchmentDataFile and @p nexusDataFile according to
 * @p partitionConfigFile into one file per MPI rank — confirm which rank
 * performs the actual subdivision in the implementation.
 *
 * @param mpi_rank            Rank of the calling MPI process.
 * @param mpi_num_procs       Total number of MPI processes.
 * @param catchmentDataFile   Path of the base catchment data file.
 * @param nexusDataFile       Path of the base nexus data file.
 * @param partitionConfigFile Path of the partition configuration file.
 * @return True on success, false otherwise.
 */
bool subdivide_hydrofabric(
    int mpi_rank,
    int mpi_num_procs,
    const std::string &catchmentDataFile,
    const std::string &nexusDataFile,
    const std::string &partitionConfigFile);
/**
 * @brief Gather strings from all MPI ranks.
 *
 * Presumably collects each rank's @p local_strings into one combined vector —
 * NOTE(review): confirm in the implementation whether the full result is
 * returned on every rank or only on a root rank, and in what order.
 *
 * @param local_strings This rank's contribution to the gather.
 * @param mpi_rank      Rank of the calling MPI process.
 * @param mpi_num_procs Total number of MPI processes.
 * @return The gathered strings (see note above regarding which ranks receive
 *         the full set).
 */
std::vector<std::string> gather_strings(
    const std::vector<std::string>& local_strings,
    int mpi_rank,
    int mpi_num_procs);
/**
 * @brief Broadcast a vector of strings from one rank to all MPI ranks.
 *
 * Presumably the root rank's @p strings are distributed so that every rank's
 * return value matches the root's input — NOTE(review): confirm which rank is
 * treated as the broadcast root in the implementation.
 *
 * @param strings       The strings to broadcast (meaningful on the root rank;
 *                      possibly ignored elsewhere — confirm).
 * @param mpi_rank      Rank of the calling MPI process.
 * @param mpi_num_procs Total number of MPI processes.
 * @return The broadcast strings, identical on all ranks.
 */
std::vector<std::string> broadcast_strings(
    const std::vector<std::string>& strings,
    int mpi_rank,
    int mpi_num_procs);