EpetraExt_MultiMpiComm.cpp
//@HEADER
// ***********************************************************************
//
//     EpetraExt: Epetra Extended - Linear Algebra Services Package
//                 Copyright (2011) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ***********************************************************************
//@HEADER

#include "EpetraExt_MultiMpiComm.h"
#include "Teuchos_Assert.hpp"
#include "Teuchos_VerbosityLevel.hpp"

namespace EpetraExt {

MultiMpiComm::MultiMpiComm(MPI_Comm globalMpiComm, int subDomainProcs, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
        Epetra_MpiComm(globalMpiComm),
        Teuchos::VerboseObject<MultiMpiComm>(verbLevel),
        myComm(Teuchos::rcp(new Epetra_MpiComm(globalMpiComm))),
        subComm(0)
{
  Teuchos::RCP<Teuchos::FancyOStream> out = this->getOStream();

  // The default output stream only outputs to proc 0, which is not what
  // we generally want.  Manually override this if necessary so we get output
  // to all processors.
  int outputRootRank = out->getOutputToRootOnly();
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(-1);
  }

  // Need to construct subComm for each sub-domain, compute subDomainRank,
  // and check that all integer arithmetic works out correctly.

  int ierrmpi, size, rank;
  ierrmpi = MPI_Comm_size(globalMpiComm, &size);
  ierrmpi = MPI_Comm_rank(globalMpiComm, &rank);

  TEUCHOS_TEST_FOR_EXCEPTION(
    subDomainProcs <= 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " must be strictly positive." << std::endl);

  TEUCHOS_TEST_FOR_EXCEPTION(
    size % subDomainProcs != 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " does not divide into num total procs " << size << std::endl);

  numSubDomains = size / subDomainProcs;
  numTimeDomains = subDomainProcs;

  // Create split communicators
  MPI_Comm split_MPI_Comm;
  MPI_Comm time_split_MPI_Comm;
  subDomainRank = rank / subDomainProcs;
  timeDomainRank = rank % subDomainProcs;
  ierrmpi =  MPI_Comm_split(globalMpiComm, subDomainRank, rank,
                            &split_MPI_Comm);
  ierrmpi =  MPI_Comm_split(globalMpiComm, timeDomainRank, rank,
                            &time_split_MPI_Comm);
  (void) ierrmpi; // Silence "unused variable" compiler warning.
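
  // Illustration of the two splits above: with size = 6 and subDomainProcs = 2,
  // the ranks are grouped as
  //
  //   sub-domain  (color = rank / 2):  {0,1} -> 0,  {2,3} -> 1,  {4,5} -> 2
  //   time domain (color = rank % 2):  {0,2,4} -> 0,  {1,3,5} -> 1
  //
  // so each rank belongs to exactly one spatial sub-domain communicator and
  // one time-domain communicator, and the two decompositions are orthogonal.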

  // Construct Epetra communicators for the sub-domain and time-domain splits
  subComm = new Epetra_MpiComm(split_MPI_Comm);
  timeComm = new Epetra_MpiComm(time_split_MPI_Comm);

  // Compute number of time steps on this sub-domain
  ResetNumTimeSteps(numTimeSteps_);

  if (verbLevel != Teuchos::VERB_NONE) {
    if (numTimeSteps_ > 0)
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << " and owns " << numTimeStepsOnDomain
           << " time steps, starting with "
           << firstTimeStepOnDomain << std::endl;
    else
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << std::endl;
  }

  // Reset output flag if we changed it
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(outputRootRank);
  }
}
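
// Example usage (a minimal sketch; assumes MPI_Init has already been called,
// that the number of ranks is divisible by the requested sub-domain size, and
// that ResetNumTimeSteps() is publicly accessible as declared in
// EpetraExt_MultiMpiComm.h):
//
//   // Split the global communicator into spatial sub-domains of 2 ranks each
//   // and spread 10 time steps across those sub-domains.
//   EpetraExt::MultiMpiComm multiComm(MPI_COMM_WORLD, 2, 10,
//                                     Teuchos::VERB_NONE);
//
//   // The time-step distribution can be recomputed later without rebuilding
//   // any of the underlying communicators.
//   multiComm.ResetNumTimeSteps(20);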

// This constructor is for a single sub-domain, so it only adds the bookkeeping
// for multiple time steps on that domain.  No two-level parallelism.
MultiMpiComm::MultiMpiComm(const Epetra_MpiComm& EpetraMpiComm_, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
        Epetra_MpiComm(EpetraMpiComm_),
        Teuchos::VerboseObject<MultiMpiComm>(verbLevel),
        myComm(Teuchos::rcp(new Epetra_MpiComm(EpetraMpiComm_))),
        subComm(0)
{

  numSubDomains = 1;
  subDomainRank = 0;
  numTimeSteps = numTimeSteps_;
  numTimeStepsOnDomain = numTimeSteps_;
  firstTimeStepOnDomain = 0;

  subComm = new Epetra_MpiComm(EpetraMpiComm_);

  // Create the split communicator for the time domain
  MPI_Comm time_split_MPI_Comm;
  int rank = EpetraMpiComm_.MyPID();
  int ierrmpi =  MPI_Comm_split(EpetraMpiComm_.Comm(), rank, rank,
                                &time_split_MPI_Comm);
  (void) ierrmpi; // Silence "unused variable" compiler warning.
  timeComm = new Epetra_MpiComm(time_split_MPI_Comm);
  numTimeDomains = EpetraMpiComm_.NumProc();
  timeDomainRank = rank;
}
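
// Example usage of the single-sub-domain form (a sketch; assumes an
// MPI-initialized program): every rank ends up in the one spatial sub-domain,
// and each rank forms its own single-process time domain.
//
//   Epetra_MpiComm world(MPI_COMM_WORLD);
//   EpetraExt::MultiMpiComm multiComm(world, 10, Teuchos::VERB_NONE);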

// Copy constructor
MultiMpiComm::MultiMpiComm(const MultiMpiComm &MMC) :
        Epetra_MpiComm(MMC),
        myComm(Teuchos::rcp(new Epetra_MpiComm(dynamic_cast<const Epetra_MpiComm&>(MMC)))),
        subComm(new Epetra_MpiComm(*MMC.subComm)),
        timeComm(new Epetra_MpiComm(*MMC.timeComm))
{
  numSubDomains = MMC.numSubDomains;
  numTimeDomains = MMC.numTimeDomains;
  subDomainRank = MMC.subDomainRank;
  timeDomainRank = MMC.timeDomainRank;

  numTimeSteps = MMC.numTimeSteps;
  numTimeStepsOnDomain = MMC.numTimeStepsOnDomain;
  firstTimeStepOnDomain = MMC.firstTimeStepOnDomain;
}

MultiMpiComm::~MultiMpiComm()
{
  delete subComm;
  delete timeComm;
}

void MultiMpiComm::ResetNumTimeSteps(int numTimeSteps_)
{
  numTimeSteps = numTimeSteps_;

  // Compute number of time steps on this sub-domain
  if (numTimeSteps > 0) {
    // First assign the part that divides evenly among the sub-domains
    numTimeStepsOnDomain = numTimeSteps / numSubDomains;
    firstTimeStepOnDomain = numTimeStepsOnDomain * subDomainRank;

    // Dole out the remainder: the lowest-ranked sub-domains each get one extra step
    int remainder = numTimeSteps % numSubDomains;
    if (subDomainRank < remainder) {
      numTimeStepsOnDomain++;
      firstTimeStepOnDomain += subDomainRank;
    }
    else firstTimeStepOnDomain += remainder;
  }
  else {
    numTimeStepsOnDomain = -1;
    firstTimeStepOnDomain = -1;
  }
}
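
// Worked example of the distribution above: with numTimeSteps = 10 and
// numSubDomains = 4, the base count is 10 / 4 = 2 with remainder 2, so the
// sub-domains own 3, 3, 2, 2 steps starting at steps 0, 3, 6, 8 respectively.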

} //namespace EpetraExt