root/group/trunk/mdtools/md_code/DumpWriter.cpp

Comparing trunk/mdtools/md_code/DumpWriter.cpp (file contents):
Revision 249 by chuckv, Mon Jan 27 21:28:19 2003 UTC vs.
Revision 256 by chuckv, Thu Jan 30 20:32:45 2003 UTC

# Line 130 | Line 130 | void DumpWriter::writeDump( double currentTime ){
130    // write out header and node 0's coordinates
131  
132    if( worldRank == 0 ){
133 <    outFile << entry_plug->mpiSim->getTotAtoms() << "\n";
133 >    outFile << mpiSim->getTotAtoms() << "\n";
134        
135      outFile << currentTime << "\t"
136              << entry_plug->box_x << "\t"
# Line 170 | Line 170 | void DumpWriter::writeDump( double currentTime ){
170        else
171          strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
172          
173 <      outfile << writeLine;
173 >      outFile << writeLine;
174        masterIndex++;
175      }
176      outFile.flush();
177    }
178      
179 <  for (procIndex = 1; procIndex < entry_plug->mpiSim->getNumberProcessors();
179 >  for (procIndex = 1; procIndex < mpiSim->getNumberProcessors();
180         procIndex++){
181  
182      if( worldRank == 0 ){
# Line 208 | Line 208 | void DumpWriter::writeDump( double currentTime ){
208  
209        // recieve the nodes writeLines
210  
211 <      for ( i = nodeAtomStart; i <= nodeAtomEnd, i++){
211 >      for ( i = nodeAtomsStart; i <= nodeAtomsEnd; i++){
212            
213 <        mpiErr = MPI_Recv(&read_buffer,BUFFERSIZE,MPI_CHAR,procIndex,
213 >        mpiErr = MPI_Recv(writeLine,BUFFERSIZE,MPI_CHAR,procIndex,
214                            MPI_ANY_TAG,MPI_COMM_WORLD,istatus );
215  
216          outFile << writeLine;
# Line 220 | Line 220 | void DumpWriter::writeDump( double currentTime ){
220  
221      else if( worldRank == procIndex ){
222  
223 <      nodeAtomStart = entry_plug->mpiSim->getMyAtomStart();
224 <      nodeAtomEnd = entry_plug->mpiSim->getMyAtomEnd();
223 >      nodeAtomsStart = mpiSim->getMyAtomStart();
224 >      nodeAtomsEnd = mpiSim->getMyAtomEnd();
225          
226        mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,MPI_ANY_TAG,
227                          MPI_COMM_WORLD);
# Line 230 | Line 230 | void DumpWriter::writeDump( double currentTime ){
230          
231        mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,MPI_ANY_TAG,
232                          MPI_COMM_WORLD, istatus);
233 <      if (sendError) mpiCheckpoint();
233 >      if (sendError) MPIcheckPoint();
234  
235        // send current node's configuration line by line.
236  
# Line 273 | Line 273 | void DumpWriter::writeDump( double currentTime ){
273        
274      sprintf(checkPointMsg,"Node %d sent dump configuration.",
275              procIndex);
276 <    mpiCheckPoint();
276 >    MPIcheckPoint();
277    }
278      
279   #endif // is_mpi
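
The writeDump() hunks above all close the same gaps: the nonexistent `outfile` stream becomes `outFile`, `entry_plug->mpiSim` becomes the `mpiSim` member, the receive loop gets its missing semicolon (the old `i <= nodeAtomEnd, i++` is not even a valid for statement) along with the consistent `nodeAtomsStart`/`nodeAtomsEnd` names, the receive lands in the `writeLine` buffer that is actually written out, and the checkpoint call is spelled `MPIcheckPoint()`. The overall shape is a rank-0 gather: node 0 writes its own atoms, then for each other rank learns that rank's atom range and receives one formatted line per atom. Below is a minimal, self-contained sketch of that pattern; BUFFERSIZE, the tag values, and the function name are illustrative assumptions, not the actual DumpWriter members.

  // Sketch of the rank-0 gather loop fixed in writeDump(); names and sizes
  // here are assumptions for illustration, not the real DumpWriter API.
  #include <mpi.h>
  #include <cstdio>
  #include <cstring>
  #include <fstream>

  static const int BUFFERSIZE = 2000;   // assumed line-buffer size

  void gatherLines( std::ofstream& outFile, int worldRank, int numProcs,
                    int myAtomStart, int myAtomEnd ){

    char writeLine[BUFFERSIZE];
    MPI_Status istatus;

    for( int procIndex = 1; procIndex < numProcs; procIndex++ ){

      if( worldRank == 0 ){

        int nodeAtomsStart, nodeAtomsEnd;

        // learn which atoms this worker owns
        MPI_Recv( &nodeAtomsStart, 1, MPI_INT, procIndex, 0,
                  MPI_COMM_WORLD, &istatus );
        MPI_Recv( &nodeAtomsEnd,   1, MPI_INT, procIndex, 0,
                  MPI_COMM_WORLD, &istatus );

        // receive one formatted line per atom into the same buffer that is
        // streamed to the file (the old code received into a different buffer)
        for( int i = nodeAtomsStart; i <= nodeAtomsEnd; i++ ){
          MPI_Recv( writeLine, BUFFERSIZE, MPI_CHAR, procIndex,
                    MPI_ANY_TAG, MPI_COMM_WORLD, &istatus );
          outFile << writeLine;
        }
        outFile.flush();
      }
      else if( worldRank == procIndex ){

        // worker: announce the atom range, then send each line with a fixed
        // tag (MPI_ANY_TAG is only meaningful on the receiving side)
        MPI_Send( &myAtomStart, 1, MPI_INT, 0, 0, MPI_COMM_WORLD );
        MPI_Send( &myAtomEnd,   1, MPI_INT, 0, 0, MPI_COMM_WORLD );

        for( int i = myAtomStart; i <= myAtomEnd; i++ ){
          std::snprintf( writeLine, BUFFERSIZE, "atom%d\t0.0\t0.0\t0.0\n", i );
          MPI_Send( writeLine, (int)std::strlen( writeLine ) + 1, MPI_CHAR,
                    0, 0, MPI_COMM_WORLD );
        }
      }
    }
  }
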
# Line 318 | Line 318 | void DumpWriter::writeFinal(){
318   #ifdef IS_MPI
319    }
320    
321 <  sprintf(checkPointMsg,"Opened file for final configuration\n",procIndex);
322 <  mpiCheckPoint();  
321 >  sprintf(checkPointMsg,"Opened file for final configuration\n");
322 >  MPIcheckPoint();  
323    
324   #endif //is_mpi
325  
# Line 384 | Line 384 | void DumpWriter::writeFinal(){
384    // write out header and node 0's coordinates
385  
386    if( worldRank == 0 ){
387 <    finalOut << entry_plug->mpiSim->getTotAtoms() << "\n";
387 >    finalOut << mpiSim->getTotAtoms() << "\n";
388        
389      finalOut << entry_plug->box_x << "\t"
390               << entry_plug->box_y << "\t"
391               << entry_plug->box_z << "\n";
392      
393      masterIndex = 0;
394 +    
395 +    std::cerr << "about to write node 0 aztoms. nAtoms = " << nAtoms << "\n";
396 +    
397      for( i=0; i<nAtoms; i++ ){
398        
399        sprintf( tempBuffer,
# Line 423 | Line 426 | void DumpWriter::writeFinal(){
426        else
427          strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
428          
429 <      outfile << writeLine;
429 >      finalOut << writeLine;
430        masterIndex++;
431      }
432      finalOut.flush();
433    }
434      
435 <  for (procIndex = 1; procIndex < entry_plug->mpiSim->getNumberProcessors();
435 >  for (procIndex = 1; procIndex < mpiSim->getNumberProcessors();
436         procIndex++){
437  
438      if( worldRank == 0 ){
# Line 461 | Line 464 | void DumpWriter::writeFinal(){
464  
465        // recieve the nodes writeLines
466  
467 <      for ( i = nodeAtomStart; i <= nodeAtomEnd, i++){
467 >      for ( i = nodeAtomsStart; i <= nodeAtomsEnd; i++){
468            
469 <        mpiErr = MPI_Recv(&read_buffer,BUFFERSIZE,MPI_CHAR,procIndex,
469 >        mpiErr = MPI_Recv(writeLine,BUFFERSIZE,MPI_CHAR,procIndex,
470                            MPI_ANY_TAG,MPI_COMM_WORLD,istatus );
471  
472          finalOut << writeLine;
# Line 475 | Line 478 | void DumpWriter::writeFinal(){
478  
479      else if( worldRank == procIndex ){
480  
481 <      nodeAtomStart = entry_plug->mpiSim->getMyAtomStart();
482 <      nodeAtomEnd = entry_plug->mpiSim->getMyAtomEnd();
481 >      nodeAtomsStart = mpiSim->getMyAtomStart();
482 >      nodeAtomsEnd = mpiSim->getMyAtomEnd();
483          
484        mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,MPI_ANY_TAG,
485                          MPI_COMM_WORLD);
# Line 485 | Line 488 | void DumpWriter::writeFinal(){
488          
489        mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,MPI_ANY_TAG,
490                          MPI_COMM_WORLD, istatus);
491 <      if (sendError) mpiCheckpoint();
491 >      if (sendError) MPIcheckPoint();
492  
493        // send current node's configuration line by line.
494  
# Line 528 | Line 531 | void DumpWriter::writeFinal(){
531        
532      sprintf(checkPointMsg,"Node %d sent dump configuration.",
533              procIndex);
534 <    mpiCheckPoint();
534 >    MPIcheckPoint();
535    }
536  
537    if( worldRank == 0 ) finalOut.close();
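
writeFinal() picks up the same corrections against its own stream: `finalOut` replaces the stray `outfile`, the gather loop and the `MPI_Recv` buffer match the writeDump() fixes, the extra `procIndex` argument is dropped from a format string with no matching specifier, and the checkpoint calls become `MPIcheckPoint()`. Both writers also share the per-atom line construction that the `strcat` of zero placeholders hints at: format the common fields into a temporary buffer, append either the directional-atom fields or the zero columns, and stream the finished line. A rough sketch of that step follows; the field layout and parameter names are assumptions for illustration only.

  // Sketch of the line-building step shared by writeDump()/writeFinal();
  // the field layout and parameter names are assumptions, not the real format.
  #include <cstdio>
  #include <cstring>
  #include <fstream>

  static const int BUFFERSIZE = 2000;

  void writeAtomLine( std::ofstream& finalOut, const char* name,
                      double pos[3], double vel[3], bool isDirectional ){

    char tempBuffer[BUFFERSIZE];
    char writeLine[BUFFERSIZE];

    // common fields: name, position, velocity
    std::snprintf( tempBuffer, BUFFERSIZE, "%s\t%f\t%f\t%f\t%f\t%f\t%f\t",
                   name, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2] );
    std::strcpy( writeLine, tempBuffer );

    if( isDirectional ){
      // a directional atom would append its orientation and angular momentum
      // here (those fields come from the Atom class); this sketch just
      // terminates the line
      std::strcat( writeLine, "\n" );
    }
    else
      std::strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );

    finalOut << writeLine;
  }
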

Diff Legend

- Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)