45 const int nBlocks = 34;
47 MPI_Datatype datatype;
49 MPI_Aint startAddress = 0;
52 int blockLengths[nBlocks];
53 MPI_Aint displacements[nBlocks];
54 MPI_Datatype types[nBlocks];
56 for(
int i = 0; i < nBlocks; i++ ){
67 MPI_Get_address( &
rank, &address )
69 displacements[1] = address - startAddress;
74 displacements[2] = address - startAddress;
79 displacements[3] = address - startAddress;
84 displacements[4] = address - startAddress;
89 displacements[5] = address - startAddress;
94 displacements[6] = address - startAddress;
99 displacements[7] = address - startAddress;
104 displacements[8] = address - startAddress;
109 displacements[9] = address - startAddress;
114 displacements[10] = address - startAddress;
119 displacements[11] = address - startAddress;
123 displacements[12] = address - startAddress;
127 displacements[13] = address - startAddress;
132 displacements[14] = address - startAddress;
136 displacements[15] = address - startAddress;
140 displacements[16] = address - startAddress;
145 displacements[17] = address - startAddress;
149 displacements[18] = address - startAddress;
153 displacements[19] = address - startAddress;
157 displacements[20] = address - startAddress;
161 displacements[21] = address - startAddress;
165 displacements[22] = address - startAddress;
170 displacements[23] = address - startAddress;
171 types[23] = MPI_DOUBLE;
176 displacements[24] = address - startAddress;
177 types[24] = MPI_DOUBLE;
182 displacements[25] = address - startAddress;
183 types[25] = MPI_DOUBLE;
188 displacements[26] = address - startAddress;
189 types[26] = MPI_DOUBLE;
194 displacements[27] = address - startAddress;
195 types[27] = MPI_DOUBLE;
200 displacements[28] = address - startAddress;
201 types[28] = MPI_DOUBLE;
206 displacements[29] = address - startAddress;
207 types[29] = MPI_DOUBLE;
212 displacements[30] = address - startAddress;
213 types[30] = MPI_DOUBLE;
218 displacements[31] = address - startAddress;
219 types[31] = MPI_DOUBLE;
224 displacements[32] = address - startAddress;
225 types[32] = MPI_DOUBLE;
228 MPI_Get_address( &
detTime, &address )
230 displacements[33] = address - startAddress;
231 types[33] = MPI_DOUBLE;
234 MPI_Type_create_struct(nBlocks, blockLengths, displacements, types, &datatype)
250 MPI_Datatype datatype;
253 MPI_Type_commit( &datatype )
256 commMpi->usend(&
interrupted, 1, datatype, destination, tag)
259 MPI_Type_free( &datatype )
272 MPI_Datatype datatype;
275 MPI_Type_commit( &datatype )
278 commMpi->ureceive(&
interrupted, 1, datatype, source, tag)
281 MPI_Type_free( &datatype )
int interrupted
indicate that this solver is interrupted or not. 0: not interrupted, 1: interrupted, 2: checkpoint, 3: racing-ramp up
int nTightened
number of tightened variable bounds during racing stage
double maxRootNodeTime
maximum time consumed by root node processes
int nParaNodesSolvedAtPreCheck
number of ParaNodes solved at pre-checking of root node solvability
static ScipParaCommTh * comm
int nTotalRestarts
number of total restarts
int nTransferredBendersCutsFromSolver
number of benders cuts transferred from this Solver
int totalNImprovedIncumbent
accumulated number of improvements of incumbent value in this ParaSolver
int rank
rank of this solver
int maxRestarts
maximum number of restarts
int maxTransferredLocalCutsFromSolver
maximum number of local cuts transferred from this Solver
int nParaTasksReceived
number of ParaTasks received in this ParaSolver
int nParaTasksSolved
number of ParaTasks solved ( received ) in this ParaSolver
#define PARA_COMM_CALL(paracommcall)
double idleTimeToWaitAckCompletion
idle time to wait ack completion message
double idleTimeToFirstParaTask
idle time to start solving the first ParaTask
void receive(ParaComm *comm, int source, int tag)
receive this object
int nTightenedInt
number of tightened integral variable bounds during racing stage
int minRestarts
minimum number of restarts
int maxTransferredBendersCutsFromSolver
maximum number of benders cuts transferred from this Solver
double idleTimeAfterLastParaTask
idle time after the last ParaTask was solved
void send(ParaComm *comm, int destination, int tag)
send this object
int minTransferredBendersCutsFromSolver
minimum number of benders cuts transferred from this Solver
#define DEF_PARA_COMM(para_comm, comm)
double runningTime
this solver running time
int minTransferredLocalCutsFromSolver
minimum number of local cuts transferred from this Solver
Base class of communicator for UG Framework.
int totalNSent
accumulated number of nodes sent from this ParaSolver
int totalNSolved
accumulated number of nodes solved in this ParaSolver
int maxNSolved
maximum number of subtree nodes rooted from ParaNode
double idleTimeToWaitNotificationId
idle time to wait notification Id messages
int minNSolved
minimum number of subtree nodes rooted from ParaNode
int nTransferredLocalCutsFromSolver
number of local cuts transferred from this Solver
double totalRootNodeTime
total time consumed by root node processes
int calcTerminationState
termination state of a calculation in a Solver
double minRootNodeTime
minimum time consumed by root node processes
int nParaNodesSolvedAtRoot
number of ParaNodes solved at root node before sending
MPI_Datatype createDatatype()
create BbParaSolverTerminationStateMpi datatype
double detTime
deterministic time, -1: should be non-deterministic
double idleTimeToWaitToken
idle time to wait token
#define MPI_CALL(mpicall)
Base class of communicator object.
double idleTimeBetweenParaTasks
idle time between ParaTasks processing