parallel_utils.h
#ifndef NGEN_PARALLEL_UTILS_H
#define NGEN_PARALLEL_UTILS_H

#include <NGenConfig.h>
#if NGEN_WITH_MPI

// Protocol codes signaling whether hydrofabric subdivision succeeded or failed.
#ifndef MPI_HF_SUB_CODE_GOOD
#define MPI_HF_SUB_CODE_GOOD 0
#endif

#ifndef MPI_HF_SUB_CODE_BAD
#define MPI_HF_SUB_CODE_BAD 1
#endif

// MPI message tags separating file-data transfers from protocol/control messages.
#ifndef NGEN_MPI_DATA_TAG
#define NGEN_MPI_DATA_TAG 100
#endif

#ifndef NGEN_MPI_PROTOCOL_TAG
#define NGEN_MPI_PROTOCOL_TAG 101
#endif

#include <string>
#include <vector>

namespace parallel {
/**
 * Sync a boolean status value across all MPI ranks, returning the logical AND of every
 * rank's value; the given task description is included in any printed status message.
 */
bool mpiSyncStatusAnd(bool status, int mpi_rank, int mpi_num_procs, const std::string &taskDesc);

/** Sync a boolean status value across all MPI ranks, returning the logical AND of every rank's value. */
bool mpiSyncStatusAnd(bool status, int mpi_rank, int mpi_num_procs);
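
// Usage sketch (illustrative, not from the original header; initialize_local_formulations
// is a hypothetical helper): each rank reports whether its local setup succeeded, and every
// rank learns whether all ranks succeeded.
//
//     bool local_ok = initialize_local_formulations();
//     bool all_ok = parallel::mpiSyncStatusAnd(local_ok, mpi_rank, mpi_num_procs,
//                                              "formulation initialization");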

/**
 * Check whether the given hydrofabric has already been subdivided into the per-partition
 * files needed for this number of ranks, optionally printing a message with the result.
 */
bool is_hydrofabric_subdivided(const std::string &catchmentDataFile, int mpi_rank, int mpi_num_procs, bool printMsg);

/**
 * Fill the provided array, which must hold mpi_num_procs elements, with a host id for each
 * rank, such that ranks running on the same host receive the same id.
 */
void get_hosts_array(int mpi_rank, int mpi_num_procs, int *host_array);
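
// Usage sketch (illustrative): build the per-rank host id mapping once, then reuse it when
// deciding which ranks need files transferred across hosts.
//
//     std::vector<int> host_ids(mpi_num_procs);
//     parallel::get_hosts_array(mpi_rank, mpi_num_procs, host_ids.data());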

/** Send the contents of a text file to the destination rank, returning whether the transfer succeeded. */
bool mpi_send_text_file(const char *fileName, const int mpi_rank, const int destRank);

/** Receive text file contents from the source rank and write them to the named file, returning whether the transfer succeeded. */
bool mpi_recv_text_file(const char *fileName, const int mpi_rank, const int srcRank);
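
// Usage sketch (illustrative; the file name is hypothetical): the two calls form a matched
// pair, so sender and receiver must agree on which rank transfers which file.
//
//     if (mpi_rank == 0) {
//         parallel::mpi_send_text_file("catchment_data.geojson", mpi_rank, 1);
//     } else if (mpi_rank == 1) {
//         parallel::mpi_recv_text_file("catchment_data.geojson", mpi_rank, 0);
//     }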

/**
 * Distribute subdivided hydrofabric files from the sending rank to the ranks that need them,
 * using the per-rank host ids to account for ranks that already share the sender's
 * filesystem. The return status may optionally be synced across ranks, and all ranks may
 * optionally block until distribution completes.
 */
bool distribute_subdivided_hydrofabric_files(const std::string &baseCatchmentFile, const std::string &baseNexusFile,
                                             const int sendingRank, const int mpi_rank, const int mpi_num_procs,
                                             const int *hostIdForRank, bool syncReturnStatus, bool blockAll);
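
// Usage sketch (illustrative; file names are hypothetical): rank 0 pushes the per-partition
// files derived from the base files to ranks on other hosts, syncing the result and
// blocking until every rank is ready.
//
//     bool distributed = parallel::distribute_subdivided_hydrofabric_files(
//             "catchment_data.geojson", "nexus_data.geojson",
//             0, mpi_rank, mpi_num_procs, host_ids.data(),
//             true /* syncReturnStatus */, true /* blockAll */);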

/**
 * Attempt to subdivide the given catchment and nexus hydrofabric files into a series of
 * per-partition files as described by the partitioning config, returning whether
 * subdivision succeeded.
 */
bool subdivide_hydrofabric(int mpi_rank, int mpi_num_procs, const std::string &catchmentDataFile,
                           const std::string &nexusDataFile, const std::string &partitionConfigFile);
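
// Usage sketch (illustrative; variable names are hypothetical): subdivide only when the
// per-partition files are not already present.
//
//     if (!parallel::is_hydrofabric_subdivided(catchment_file, mpi_rank, mpi_num_procs, true)) {
//         bool ok = parallel::subdivide_hydrofabric(mpi_rank, mpi_num_procs, catchment_file,
//                                                   nexus_file, partition_file);
//     }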

/** Gather each rank's local strings into a single combined vector. */
std::vector<std::string> gather_strings(const std::vector<std::string>& local_strings, int mpi_rank, int mpi_num_procs);

/** Broadcast a vector of strings so that every rank returns the same contents. */
std::vector<std::string> broadcast_strings(const std::vector<std::string>& strings, int mpi_rank, int mpi_num_procs);
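
// Usage sketch (illustrative; assumes gather_strings collects onto a root rank and
// broadcast_strings redistributes from it): collect per-rank messages, then share the
// combined list with every rank.
//
//     std::vector<std::string> local = {"rank " + std::to_string(mpi_rank) + " ready"};
//     std::vector<std::string> all = parallel::gather_strings(local, mpi_rank, mpi_num_procs);
//     all = parallel::broadcast_strings(all, mpi_rank, mpi_num_procs);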
} // namespace parallel

#endif // NGEN_WITH_MPI

#endif // NGEN_PARALLEL_UTILS_H