vert_weights.cpp

This example shows how to use graph partitioning with vertex weights: per-row weights are attached through an Isorropia::Epetra::CostDescriber and used to influence how Isorropia repartitions and redistributes a copy of an Epetra matrix.
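At its core the program attaches per-row weights to a CostDescriber, builds a Partitioner from the matrix, the costs, and a parameter list, and then rebalances through a Redistributor. The sketch below condenses that call sequence; 'A' and 'w' are placeholder names for the matrix (a Teuchos::RCP<const Epetra_RowMatrix>) and the weight vector built on its row map, both of which are constructed in the full listing that follows.

  // Condensed sketch of the calls demonstrated below.
  // 'A' and 'w' are hypothetical stand-ins for the objects the
  // full example builds (matrix and per-row weight vector).
  Teuchos::ParameterList params;
  params.sublist("Zoltan").set("LB_METHOD", "GRAPH");

  Teuchos::RCP<Isorropia::Epetra::CostDescriber> costs =
    Teuchos::rcp(new Isorropia::Epetra::CostDescriber);
  costs->setVertexWeights(w);

  Teuchos::RCP<Isorropia::Epetra::Partitioner> partitioner =
    Teuchos::rcp(new Isorropia::Epetra::Partitioner(A, costs, params));

  Isorropia::Epetra::Redistributor rd(partitioner);
  Teuchos::RCP<Epetra_CrsMatrix> balanced = rd.redistribute(*A);

Because the weights enter the balance objective, rows with larger weights count for more, so the partitioner ends up assigning an unequal number of rows to each process.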

//@HEADER
// ************************************************************************
//
//               Isorropia: Partitioning and Load Balancing Package
//                 Copyright (2006) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as
// published by the Free Software Foundation; either version 2.1 of the
// License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
//
// ************************************************************************
//@HEADER

//--------------------------------------------------------------------
//This file is a self-contained example of creating an Epetra_RowMatrix
//object, and using Isorropia to repartition/redistribute a copy of it.
//Vertex weights are used to influence the repartitioning.
//--------------------------------------------------------------------

//Include Isorropia_Exception.hpp only because the helper function at
//the bottom of this file (which creates the Epetra objects) can
//potentially throw an exception.
#include <Isorropia_Exception.hpp>

//The Isorropia symbols being demonstrated are declared
//in these headers:
#include <Isorropia_Epetra.hpp>
#include <Isorropia_EpetraCostDescriber.hpp>
#include <Isorropia_EpetraRedistributor.hpp>
#include <Isorropia_EpetraPartitioner.hpp>

//Teuchos headers for the reference-counted pointers and parameter
//lists used throughout this example.
#include <Teuchos_RCP.hpp>
#include <Teuchos_ParameterList.hpp>

#ifdef HAVE_MPI
#include <mpi.h>
#endif

#ifdef HAVE_EPETRA
#ifdef HAVE_MPI
#include <Epetra_MpiComm.h>
#else
#include <Epetra_SerialComm.h>
#endif
#include <Epetra_Map.h>
#include <Epetra_Vector.h>
#include <Epetra_CrsMatrix.h>
#include <Epetra_LinearProblem.h>
#endif

#include "ispatest_lbeval_utils.hpp"

//Declaration of the helper function that creates the Epetra row-matrix
//object. This function is implemented at the bottom of this file.
#ifdef HAVE_EPETRA
Teuchos::RCP<const Epetra_RowMatrix>
  create_epetra_rowmatrix(int numProcs,
                          int localProc,
                          int local_n);
#endif

int main(int argc, char** argv) {
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)

  int numProcs = 1;
  int localProc = 0;

  //First, set up our MPI environment...
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &localProc);
  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);

  //Number of matrix rows owned by each processor.
  int local_n = 4000;

  //Create an Epetra_RowMatrix object.

  Teuchos::RCP<const Epetra_RowMatrix> rowmatrix;
  try {
    rowmatrix = create_epetra_rowmatrix(numProcs, localProc, local_n);
  }
  catch(std::exception& exc) {
    std::cout << "vert_weights example: create_epetra_rowmatrix threw"
         << " exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }

  //We'll need a Teuchos::ParameterList object to pass to the
  //Isorropia::Epetra::Partitioner class.
  Teuchos::ParameterList paramlist;

  // If Zoltan is available, we'll specify that the Zoltan package be
  // used for the partitioning operation, by creating a parameter
  // sublist named "Zoltan".
  // In the sublist, we'll set parameters that we want to send to Zoltan.
#ifdef HAVE_ISORROPIA_ZOLTAN
  Teuchos::ParameterList& sublist = paramlist.sublist("Zoltan");
  sublist.set("LB_METHOD", "GRAPH");
#else
  // If Zoltan is not available, a simple linear partitioner will be
  // used.
  // No parameter is necessary to specify this.
#endif

  //Now we're going to create an Epetra_Vector with vertex weights to
  //be used in the repartitioning operation.
  Teuchos::RCP<Epetra_Vector> vweights =
    Teuchos::rcp(new Epetra_Vector(rowmatrix->RowMatrixRowMap()));

  double* vals = vweights->Values();
  const Epetra_BlockMap& map = rowmatrix->RowMatrixRowMap();
  int num = map.NumMyElements();

  //For this demo, we'll assign the weights to be i+1, where 'i' is
  //the global-id of the corresponding matrix row. (If we don't use +1,
  //Zoltan complains that the first vertex has a zero weight.)

  //Using these linearly-increasing weights should cause the partitioner
  //to put an UN-EQUAL number of rows on each processor...
  for(int i=0; i<num; ++i) {
    vals[i] = 1.0*(map.GID(i)+1);
  }

  Teuchos::RCP<Isorropia::Epetra::CostDescriber> costs =
    Teuchos::rcp(new Isorropia::Epetra::CostDescriber);

  costs->setVertexWeights(vweights);

  if (localProc == 0) {
    std::cout <<"\n Repartitioning with linearly-increasing vertex weights, \n"
    << " which should cause the partitioner to put an UN-EQUAL \n"
    << " portion of the matrix on each processor...\n" << std::endl;
  }

  //Now create the partitioner object, passing the matrix, the vertex
  //weights (via the CostDescriber) and the parameter list to its
  //constructor...
  Teuchos::RCP<Isorropia::Epetra::Partitioner> partitioner =
    Teuchos::rcp(new Isorropia::Epetra::Partitioner(rowmatrix, costs, paramlist));

  //Next create a Redistributor object and use it to create a repartitioned
  //copy of the matrix.

  Isorropia::Epetra::Redistributor rd(partitioner);

  Teuchos::RCP<Epetra_CrsMatrix> bal_matrix;

  //Use a try-catch block because Isorropia will throw an exception
  //if it encounters an error.

  if (localProc == 0) {
    std::cout << " calling Isorropia::Epetra::Redistributor::redistribute..."
        << std::endl;
  }

  try {
    bal_matrix = rd.redistribute(*rowmatrix);
  }
  catch(std::exception& exc) {
    std::cout << "vert_weights example: Isorropia::Epetra::Redistributor threw "
         << "exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }

  //Compute and report load-balance and cut metrics before and after
  //the repartitioning.

  double goalWeight = 1.0 / (double)numProcs;
  double bal0, bal1, cutn0, cutn1, cutl0, cutl1, cutWgt0, cutWgt1;
  int numCuts0, numCuts1;

  // Balance and cut quality before partitioning

  ispatest::compute_graph_metrics(*rowmatrix, *costs, goalWeight,
                     bal0, numCuts0, cutWgt0, cutn0, cutl0);

  // Balance and cut quality after partitioning

  Teuchos::RCP<Epetra_Vector> new_weights = rd.redistribute(*vweights);
  Isorropia::Epetra::CostDescriber new_costs;
  new_costs.setVertexWeights(new_weights);

  ispatest::compute_graph_metrics(*bal_matrix, new_costs, goalWeight,
                     bal1, numCuts1, cutWgt1, cutn1, cutl1);

  if (localProc == 0){
    std::cout << "Before partitioning: Number of cuts " << numCuts0 << " Cut weight " << cutWgt0 << std::endl;
    std::cout << "                     Balance " << bal0 << " cutN " << cutn0 << " cutL " << cutl0;
    std::cout << std::endl;

    std::cout << "After partitioning:  Number of cuts " << numCuts1 << " Cut weight " << cutWgt1 << std::endl;
    std::cout << "                     Balance " << bal1 << " cutN " << cutn1 << " cutL " << cutl1;
    std::cout << std::endl;
    std::cout << std::endl;
  }

  MPI_Finalize();

#else
  std::cout << "vert_weights: must have both MPI and EPETRA. Make sure "
    << "Trilinos is configured with --enable-mpi and --enable-epetra."
    << std::endl;
#endif

  return(0);
}


//Below is the implementation of the helper function that creates the
//Epetra row-matrix used in the above example program.

#if defined(HAVE_MPI) && defined(HAVE_EPETRA)

Teuchos::RCP<const Epetra_RowMatrix>
 create_epetra_rowmatrix(int numProcs,
                         int localProc,
                         int local_n)
{
  if (localProc == 0) {
    std::cout << " creating Epetra_CrsMatrix with even row-distribution..."
            << std::endl;
  }

  //Create an Epetra_CrsMatrix with rows spread evenly over
  //processors.

  Epetra_MpiComm comm(MPI_COMM_WORLD);
  int global_num_rows = numProcs*local_n;

  Epetra_Map rowmap(global_num_rows, local_n, 0, comm);

  //Each row holds a diagonal entry plus up to four off-diagonal
  //entries on each side, i.e. at most 9 nonzeros per row.
  int nnz_per_row = 9;
  Teuchos::RCP<Epetra_CrsMatrix> matrix =
    Teuchos::rcp(new Epetra_CrsMatrix(Copy, rowmap, nnz_per_row));

  // Add rows one-at-a-time
  double negOne = -1.0;
  double posFour = 4.0;
  for (int i=0; i<local_n; i++) {
    int GlobalRow = matrix->GRID(i);
    int RowLess1 = GlobalRow - 1;
    int RowPlus1 = GlobalRow + 1;
    int RowLess2 = GlobalRow - 2;
    int RowPlus2 = GlobalRow + 2;
    int RowLess3 = GlobalRow - 3;
    int RowPlus3 = GlobalRow + 3;
    int RowLess4 = GlobalRow - 4;
    int RowPlus4 = GlobalRow + 4;

    if (RowLess4>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess4);
    }
    if (RowLess3>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess3);
    }
    if (RowLess2>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess2);
    }
    if (RowLess1>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess1);
    }
    if (RowPlus1<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus1);
    }
    if (RowPlus2<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus2);
    }
    if (RowPlus3<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus3);
    }
    if (RowPlus4<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus4);
    }

    matrix->InsertGlobalValues(GlobalRow, 1, &posFour, &GlobalRow);
  }

  int err = matrix->FillComplete();
  if (err != 0) {
    throw Isorropia::Exception("create_epetra_rowmatrix: error in matrix->FillComplete()");
  }

  return(matrix);
}

#endif //HAVE_MPI && HAVE_EPETRA