 52        #include "utils/simError.h"
 53
 54        namespace OpenMD {
 55 <        ConstraintWriter::ConstraintWriter(SimInfo* info, const std::string& filename) : info_(info) {
 55 >        ConstraintWriter::ConstraintWriter(SimInfo* info,
 56 >                                           const std::string& filename): info_(info) {
 57            //use master - slave mode, only master node writes to disk
 58        #ifdef IS_MPI
 59            if(worldRank == 0){
 60        #endif
 61            output_.open(filename.c_str());
 62 <
 62 >
 63            if(!output_){
 64              sprintf( painCave.errMsg,
 65 <                     "Could not open %s for Constraint output\n", filename.c_str());
 65 >                     "Could not open %s for Constraint output\n",
 66 >                     filename.c_str());
 67              painCave.isFatal = 1;
 68              simError();
 69            }
 71            output_ << "#time(fs)\t"
 72                    << "Index of atom 1\t"
 73                    << "Index of atom 2\tconstraint force" << std::endl;
 74 <
 74 >
 75        #ifdef IS_MPI
 76            }
 77 <      #endif
 76 <
 77 >      #endif
 78        }
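The constructor hunk above wraps all file handling in the master-slave convention noted in its comment: under MPI only worldRank 0 opens the stream, reports a fatal error through painCave/simError if the open fails, and writes the header line. A minimal, self-contained sketch of that pattern, using plain MPI calls and a std::ofstream in place of the writer's output_ member (the filename, and MPI_Abort standing in for simError, are illustrative assumptions, not OpenMD code):

// Sketch of the master-only output pattern; illustration, not OpenMD code.
#include <mpi.h>
#include <fstream>
#include <iostream>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);

  int worldRank;
  MPI_Comm_rank(MPI_COMM_WORLD, &worldRank);

  std::ofstream output;
  if (worldRank == 0) {                  // only the master node touches the disk
    output.open("constraints.dat");      // hypothetical filename
    if (!output) {
      std::cerr << "Could not open constraints.dat for Constraint output\n";
      MPI_Abort(MPI_COMM_WORLD, 1);      // stand-in for painCave/simError fatal path
    }
    output << "#time(fs)\t"
           << "Index of atom 1\t"
           << "Index of atom 2\tconstraint force" << std::endl;
  }

  MPI_Finalize();
  return 0;
}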
 79 <
 80 <        ConstraintWriter::~ConstraintWriter() {
 80 <
 79 >
 80 >        ConstraintWriter::~ConstraintWriter() {
 81        #ifdef IS_MPI
 82            if(worldRank == 0 ){
 83        #endif
 86            }
 87        #endif
 88        }
 89 <
 89 >
 90        void ConstraintWriter::writeConstraintForces(const std::list<ConstraintPair*>& constraints){
 91        #ifndef IS_MPI
 92            std::list<ConstraintPair*>::const_iterator i;
100            }
101          }
102        #else
103 <
103 >
104            const int masterNode = 0;
105            int nproc;
106            int myNode;
108            MPI_Comm_rank( MPI_COMM_WORLD, &myNode);
109
110            std::vector<int> nConstraints(nproc, 0);
111 <
111            nConstraints[myNode] = constraints.size();
112
113            //do MPI_ALLREDUCE to exchange the total number of constraints:
114 <          MPI_Allreduce(MPI_IN_PLACE, &nConstraints[0], nproc, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
114 >          MPI_Allreduce(MPI_IN_PLACE, &nConstraints[0], nproc, MPI_INT, MPI_SUM,
115 >                        MPI_COMM_WORLD);
116
117            MPI_Status ierr;
118            int atom1, atom2, doPrint;
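The hunk ending here sets up the bookkeeping for the parallel path: every rank stores its own constraint count in its slot of nConstraints, and the in-place MPI_Allreduce sums the vector so that all ranks, the master in particular, end up holding the per-rank counts. A self-contained sketch of that exchange, with a made-up count standing in for constraints.size():

// Sketch of the count-exchange step; illustration only.
#include <mpi.h>
#include <vector>
#include <iostream>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);

  int nproc, myNode;
  MPI_Comm_size(MPI_COMM_WORLD, &nproc);
  MPI_Comm_rank(MPI_COMM_WORLD, &myNode);

  // Each rank fills in only its own slot ...
  std::vector<int> nConstraints(nproc, 0);
  nConstraints[myNode] = 10 + myNode;    // stand-in for constraints.size()

  // ... and after the in-place sum every rank holds the full per-rank table.
  MPI_Allreduce(MPI_IN_PLACE, &nConstraints[0], nproc, MPI_INT, MPI_SUM,
                MPI_COMM_WORLD);

  if (myNode == 0) {
    for (int p = 0; p < nproc; ++p)
      std::cout << "rank " << p << ": " << nConstraints[p] << " constraints\n";
  }

  MPI_Finalize();
  return 0;
}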
147                }
148              }
149            }
150 <
151 <          output_ << info_->getSnapshotManager()->getCurrentSnapshot()->getTime() << std::endl;
152 <          output_ << constraintData.size() << std::endl;
153 <
150 >
151            std::vector<ConstraintData>::iterator l;
152            for (l = constraintData.begin(); l != constraintData.end(); ++l) {
153              if (l->printForce) {
168
169                MPI_Send(&atom1, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
170                MPI_Send(&atom2, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
171 <              MPI_Send(&constraintForce, 1, MPI_REALTYPE, masterNode, 0, MPI_COMM_WORLD);
171 >              MPI_Send(&constraintForce, 1, MPI_REALTYPE, masterNode, 0,
172 >                       MPI_COMM_WORLD);
173                MPI_Send(&printForce, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
174              }
175            }
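In this last hunk each non-master rank sends one record per printable constraint to the master as four tag-0 messages: the two atom indices, the constraint force, and the print flag. The matching receive loop on the master falls in the lines elided from the diff, so the sketch below pairs the sends with plain MPI_Recv calls only as an assumed illustration of the pattern; MPI_DOUBLE stands in for OpenMD's MPI_REALTYPE, and one fake record per worker replaces the per-constraint loop.

// Sketch of the point-to-point gather implied by the sends above;
// the master-side receive loop is an assumption, not OpenMD's code.
#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);

  const int masterNode = 0;
  int nproc, myNode;
  MPI_Comm_size(MPI_COMM_WORLD, &nproc);
  MPI_Comm_rank(MPI_COMM_WORLD, &myNode);

  if (myNode != masterNode) {
    // Each worker reports one fake constraint record (tag 0, as in the diff).
    int atom1 = 2 * myNode, atom2 = 2 * myNode + 1, printForce = 1;
    double constraintForce = 0.5 * myNode;
    MPI_Send(&atom1, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
    MPI_Send(&atom2, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
    MPI_Send(&constraintForce, 1, MPI_DOUBLE, masterNode, 0, MPI_COMM_WORLD);
    MPI_Send(&printForce, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
  } else {
    // The master receives the four fields from every other rank in order.
    for (int src = 1; src < nproc; ++src) {
      int atom1, atom2, printForce;
      double constraintForce;
      MPI_Status status;
      MPI_Recv(&atom1, 1, MPI_INT, src, 0, MPI_COMM_WORLD, &status);
      MPI_Recv(&atom2, 1, MPI_INT, src, 0, MPI_COMM_WORLD, &status);
      MPI_Recv(&constraintForce, 1, MPI_DOUBLE, src, 0, MPI_COMM_WORLD, &status);
      MPI_Recv(&printForce, 1, MPI_INT, src, 0, MPI_COMM_WORLD, &status);
      if (printForce)
        std::printf("%d\t%d\t%g\n", atom1, atom2, constraintForce);
    }
  }

  MPI_Finalize();
  return 0;
}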