Amesos Package Browser (Single Doxygen Collection) Development
Test_Singular/cxx_main.cpp
#include "Amesos_ConfigDefs.h"

#ifdef HAVE_MPI
#include "mpi.h"
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Util.h"
#include "Epetra_Map.h"
#include "Epetra_Vector.h"
#include "Epetra_CrsMatrix.h"
#include "Epetra_LinearProblem.h"
#include "Epetra_Export.h"
#include "Amesos.h"
#include "Teuchos_ParameterList.hpp"
#include "Teuchos_RefCountPtr.hpp"
#include "Galeri_ReadHB.h"

// =========== //
// main driver //
// =========== //

int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
  MPI_Init(&argc, &argv);
  Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
  Epetra_SerialComm Comm;
#endif

  // ================= //
  // reading HB matrix //
  // ================= //

  // HB files store serial matrices; hence, only process 0 reads
  // this matrix (and, if present, the solution and RHS). The matrix
  // is then redistributed using Epetra capabilities.
  // All variables that begin with "read" refer to the HB data
  // read by process 0.
  Epetra_Map* readMap;
  Epetra_CrsMatrix* readA;
  Epetra_Vector* readx;
  Epetra_Vector* readb;
  Epetra_Vector* readxexact;

  // Name of the input file, which contains a numerical singularity
  std::string matrix_file = "bcsstm05_ns.rua";
  try
  {
    Galeri::ReadHB(matrix_file.c_str(), Comm, readMap,
                   readA, readx, readb, readxexact);
  }
  catch (...)
  {
    std::cout << "Caught exception, maybe file name is incorrect" << std::endl;
#ifdef HAVE_MPI
    MPI_Finalize();
#endif
    // exit with success, not to break the test harness
    exit(EXIT_SUCCESS);
  }

  // Create a uniformly distributed map.
  // Note that a linear map is used for simplicity only!
  // Amesos (through Epetra) can support *any* map.

  Epetra_Map* mapPtr = 0;
  if (readMap->GlobalIndicesInt())
    mapPtr = new Epetra_Map((int) readMap->NumGlobalElements(), 0, Comm);
  else if (readMap->GlobalIndicesLongLong())
    mapPtr = new Epetra_Map(readMap->NumGlobalElements(), 0, Comm);
  else
    assert(false);

  Epetra_Map& map = *mapPtr;

  // Create the distributed matrix, based on map.
  Epetra_CrsMatrix A(Copy, map, 0);

  const Epetra_Map& OriginalMap = readA->RowMatrixRowMap();
  assert (OriginalMap.SameAs(*readMap));
  Epetra_Export exporter(OriginalMap, map);

  Epetra_Vector x(map);          // distributed solution
  Epetra_Vector b(map);          // distributed rhs
  Epetra_Vector xexact(map);     // distributed exact solution

  // Export the data defined on process 0 to the distributed objects.
  x.Export(*readx, exporter, Add);
  b.Export(*readb, exporter, Add);
  xexact.Export(*readxexact, exporter, Add);
  A.Export(*readA, exporter, Add);
  A.FillComplete();
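  // At this point every row of A is owned by exactly one process, as
  // prescribed by "map", and FillComplete() has built the final
  // distributed structure that the solver will use.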

  // Delete the memory allocated by ReadHB().
  delete readMap;
  delete readA;
  delete readx;
  delete readb;
  delete readxexact;

  // Create an Epetra linear problem, containing the matrix A,
  // the solution x and the rhs b.
  Epetra_LinearProblem problem(&A, &x, &b);

  // =========== //
  // AMESOS PART //
  // =========== //

  // Create the factory
  Amesos factory;

  // Create the solver
  std::string solverType = "Klu";
  Teuchos::RCP<Amesos_BaseSolver> solver = Teuchos::rcp(factory.Create(solverType, problem));
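  // (factory.Create() returns a null pointer if the requested solver
  //  type is not enabled in this build; Klu is enabled by default.)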

  // Perform symbolic factorization
  int symRet = solver->SymbolicFactorization();
  if (symRet != 0) {
    std::cout << "Processor " << map.Comm().MyPID() << " : Symbolic factorization did not complete!" << std::endl;
  }

  // Perform numeric factorization
  int numRet = solver->NumericFactorization();
  if (numRet != 0) {
    std::cout << "Processor " << map.Comm().MyPID() << " : Numeric factorization did not complete!" << std::endl;
  }

  // Check that all processes returned error -22 (NumericallySingularMatrixError)
  int minRet = 0;
  Comm.MinAll(&numRet, &minRet, 1);

  if (minRet != NumericallySingularMatrixError) {
    if (Comm.MyPID() == 0)
      std::cout << std::endl << "End Result: TEST FAILED" << std::endl;
  }
  else {
    if (Comm.MyPID() == 0)
      std::cout << std::endl << "End Result: TEST PASSED" << std::endl;
  }

  delete mapPtr;

#ifdef HAVE_MPI
  MPI_Finalize();
#endif

  return(0);
}
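
For reference, the same Amesos calling sequence can be exercised without a Harwell-Boeing input file. The sketch below is a minimal, self-contained illustration and is not part of the test above: it builds a small tridiagonal matrix directly, assumes the Klu solver is enabled in the build (it is by default), and uses "PrintTiming" only as one example of an Amesos option.

#include "Amesos_ConfigDefs.h"
#ifdef HAVE_MPI
#include "mpi.h"
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "Epetra_Vector.h"
#include "Epetra_CrsMatrix.h"
#include "Epetra_LinearProblem.h"
#include "Amesos.h"
#include "Amesos_BaseSolver.h"
#include "Teuchos_ParameterList.hpp"
#include <iostream>
#include <string>

int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
  MPI_Init(&argc, &argv);
  Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
  Epetra_SerialComm Comm;
#endif

  // Small 1D Laplacian, linearly distributed over the processes.
  const int NumGlobalRows = 100;
  Epetra_Map map(NumGlobalRows, 0, Comm);
  Epetra_CrsMatrix A(Copy, map, 3);

  for (int i = 0; i < map.NumMyElements(); ++i) {
    int row = map.GID(i);
    double diag = 2.0, off = -1.0;
    A.InsertGlobalValues(row, 1, &diag, &row);
    if (row > 0) {
      int col = row - 1;
      A.InsertGlobalValues(row, 1, &off, &col);
    }
    if (row < NumGlobalRows - 1) {
      int col = row + 1;
      A.InsertGlobalValues(row, 1, &off, &col);
    }
  }
  A.FillComplete();

  Epetra_Vector x(map), b(map);
  b.PutScalar(1.0);
  Epetra_LinearProblem problem(&A, &x, &b);

  // Ask the factory whether the requested solver is available
  // before creating it.
  Amesos factory;
  std::string solverType = "Klu";   // assumed to be enabled in this build
  if (!factory.Query(solverType)) {
    if (Comm.MyPID() == 0)
      std::cout << "Solver " << solverType << " not available" << std::endl;
#ifdef HAVE_MPI
    MPI_Finalize();
#endif
    return 0;
  }

  Amesos_BaseSolver* solver = factory.Create(solverType, problem);

  // Options are passed through a Teuchos::ParameterList.
  Teuchos::ParameterList params;
  params.set("PrintTiming", false);
  solver->SetParameters(params);

  solver->SymbolicFactorization();
  solver->NumericFactorization();
  solver->Solve();

  delete solver;

#ifdef HAVE_MPI
  MPI_Finalize();
#endif
  return 0;
}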