EpetraExt_MultiMpiComm.cpp

//@HEADER
/*
************************************************************************

              EpetraExt: Extended Linear Algebra Services Package
                Copyright (2001) Sandia Corporation

Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
license for use of this work by or on behalf of the U.S. Government.

This library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation; either version 2.1 of the
License, or (at your option) any later version.

This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
USA
Questions? Contact Michael A. Heroux (maherou@sandia.gov)

************************************************************************
*/
//@HEADER

#include "EpetraExt_MultiMpiComm.h"

#include <cstdlib>   // std::exit
#include <iostream>  // std::cout, std::endl

namespace EpetraExt {

MultiMpiComm::MultiMpiComm(MPI_Comm globalMpiComm, int subDomainProcs, int numTimeSteps_) :
        EpetraExt::MultiComm(Teuchos::rcp(new Epetra_MpiComm(globalMpiComm))),
        subComm(0)
{
  // Need to construct subComm for each sub domain, compute subDomainRank,
  // and check that all the integer arithmetic works out correctly.

  int ierrmpi, size, rank;
  ierrmpi = MPI_Comm_size(globalMpiComm, &size);
  ierrmpi = MPI_Comm_rank(globalMpiComm, &rank);

  if (size % subDomainProcs != 0) {
    std::cout << "ERROR: num subDomainProcs " << subDomainProcs
              << " does not divide evenly into num total procs " << size << std::endl;
    std::exit(-1);
  }

  numSubDomains = size / subDomainProcs;

  // Create split communicators, each of size subDomainProcs: processes with
  // the same color (subDomainRank) join the same sub-communicator, ordered
  // within it by their global rank.
  MPI_Comm split_MPI_Comm;
  subDomainRank = rank / subDomainProcs;
  ierrmpi = MPI_Comm_split(globalMpiComm, subDomainRank, rank, &split_MPI_Comm);

  // Construct a second Epetra communicator on top of the split MPI communicator
  subComm = new Epetra_MpiComm(split_MPI_Comm);

  // Compute number of time steps on this sub domain
  ResetNumTimeSteps(numTimeSteps_);

  if (numTimeSteps_ > 0)
    std::cout << "Processor " << rank << " is on subdomain " << subDomainRank
              << " and owns " << numTimeStepsOnDomain << " time steps, starting with "
              << firstTimeStepOnDomain << std::endl;
  else
    std::cout << "Processor " << rank << " is on subdomain " << subDomainRank << std::endl;
}
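
// A minimal usage sketch (added for illustration, not part of the original
// file): split MPI_COMM_WORLD into spatial subdomains of 4 processes each
// and distribute 100 time steps across them. It assumes the total number of
// processes is a multiple of 4; error handling is omitted.
//
//   MPI_Init(&argc, &argv);
//   {
//     EpetraExt::MultiMpiComm multiComm(MPI_COMM_WORLD, 4, 100);
//     // Each process now belongs to a 4-process sub-communicator (subComm),
//     // and numTimeStepsOnDomain / firstTimeStepOnDomain describe the block
//     // of time steps assigned to its subdomain (see ResetNumTimeSteps).
//   }
//   MPI_Finalize();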

// This constructor is for just one subdomain, so it only adds the info
// for multiple time steps on the domain. No two-level parallelism.
MultiMpiComm::MultiMpiComm(const Epetra_MpiComm& EpetraMpiComm_, int numTimeSteps_) :
        EpetraExt::MultiComm(Teuchos::rcp(new Epetra_MpiComm(EpetraMpiComm_))),
        subComm(0)
{
  numSubDomains = 1;
  subDomainRank = 0;
  numTimeSteps = numTimeSteps_;
  numTimeStepsOnDomain = numTimeSteps_;
  firstTimeStepOnDomain = 0;

  subComm = new Epetra_MpiComm(EpetraMpiComm_);
}

// Copy constructor
MultiMpiComm::MultiMpiComm(const MultiMpiComm &MMC) :
        EpetraExt::MultiComm(Teuchos::rcp(new Epetra_MpiComm(dynamic_cast<const Epetra_MpiComm&>(MMC)))),
        subComm(new Epetra_MpiComm(*(MMC.subComm)))
{
  numSubDomains = MMC.numSubDomains;
  subDomainRank = MMC.subDomainRank;
  numTimeSteps = MMC.numTimeSteps;
  numTimeStepsOnDomain = MMC.numTimeStepsOnDomain;
  firstTimeStepOnDomain = MMC.firstTimeStepOnDomain;
}

MultiMpiComm::~MultiMpiComm()
{
  delete subComm;
}

void MultiMpiComm::ResetNumTimeSteps(int numTimeSteps_)
{
  numTimeSteps = numTimeSteps_;

  // Compute number of time steps on this sub domain
  if (numTimeSteps > 0) {
    // Compute part for number of domains dividing evenly into number of steps
    numTimeStepsOnDomain = numTimeSteps / numSubDomains;
    firstTimeStepOnDomain = numTimeStepsOnDomain * subDomainRank;

    // Dole out remainder
    int remainder = numTimeSteps % numSubDomains;
    if (subDomainRank < remainder) {
      numTimeStepsOnDomain++;
      firstTimeStepOnDomain += subDomainRank;
    }
    else firstTimeStepOnDomain += remainder;
  }
  else {
    numTimeStepsOnDomain = -1;
    firstTimeStepOnDomain = -1;
  }
}
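
// Worked example of the distribution above (added for illustration): with
// numTimeSteps = 10 and numSubDomains = 4, numTimeSteps / numSubDomains = 2
// with a remainder of 2, so subdomains 0 and 1 each own 3 steps starting at
// steps 0 and 3, while subdomains 2 and 3 each own 2 steps starting at
// steps 6 and 8 (3 + 3 + 2 + 2 = 10).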

} //namespace EpetraExt
