hgedge_weights.cpp

This example shows how to use Zoltan's hypergraph partitioning through Isorropia, with hyperedge weights used to influence the repartitioning.
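In outline, the calls being demonstrated are: describe the hyperedge (column) weights with an Isorropia::Epetra::CostDescriber, hand the matrix, the costs and a parameter list to an Isorropia::Epetra::Partitioner, then use an Isorropia::Epetra::Redistributor to build the rebalanced matrix. A minimal sketch of that flow, using the same identifiers as the full listing below:

  Teuchos::RCP<Isorropia::Epetra::CostDescriber> costs =
    Teuchos::rcp(new Isorropia::Epetra::CostDescriber);
  costs->setHypergraphEdgeWeights(hge_weights);   // one weight per hyperedge (column)

  Teuchos::RCP<Isorropia::Epetra::Partitioner> partitioner =
    Teuchos::rcp(new Isorropia::Epetra::Partitioner(rowmatrix, costs, paramlist));

  Isorropia::Epetra::Redistributor rd(partitioner);
  Teuchos::RCP<Epetra_CrsMatrix> bal_matrix = rd.redistribute(*rowmatrix);

The complete, self-contained program follows.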

//@HEADER
// ************************************************************************
//
//               Isorropia: Partitioning and Load Balancing Package
//                 Copyright (2006) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as
// published by the Free Software Foundation; either version 2.1 of the
// License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
//
// ************************************************************************
//@HEADER

//--------------------------------------------------------------------
//This file is a self-contained example of creating an Epetra_RowMatrix
//object, and using Isorropia to create a rebalanced copy of it using
//Zoltan's hypergraph partitioning.
//Hypergraph edge weights are used to influence the repartitioning.
//--------------------------------------------------------------------

//Include Isorropia_Exception.hpp only because the helper function at
//the bottom of this file (which creates the Epetra objects) can
//potentially throw exceptions.
#include <Isorropia_Exception.hpp>

//The Isorropia symbols being demonstrated are declared
//in these headers:
#include <Isorropia_Epetra.hpp>
#include <Isorropia_EpetraCostDescriber.hpp>
#include <Isorropia_EpetraRedistributor.hpp>
#include <Isorropia_EpetraPartitioner.hpp>

#ifdef HAVE_MPI
#include <mpi.h>
#endif

#ifdef HAVE_EPETRA
#ifdef HAVE_MPI
#include <Epetra_MpiComm.h>
#else
#include <Epetra_SerialComm.h>
#endif
#include <Epetra_Map.h>
#include <Epetra_Vector.h>
#include <Epetra_CrsMatrix.h>
#include <Epetra_LinearProblem.h>
#endif

#include "ispatest_lbeval_utils.hpp"

//Declaration for the helper function that creates the Epetra row-matrix
//object. This function is implemented at the bottom of this file.
#ifdef HAVE_EPETRA
Teuchos::RCP<const Epetra_RowMatrix>
  create_epetra_rowmatrix(int numProcs,
                          int localProc,
                          int local_n);
#endif
int main(int argc, char** argv) {
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)

  int numProcs = 1;
  int localProc = 0;

  //First, set up our MPI environment...
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &localProc);
  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);

  int local_n = 1200;

  //Create an Epetra_RowMatrix object.

  Teuchos::RCP<const Epetra_RowMatrix> rowmatrix;
  try {
    rowmatrix = create_epetra_rowmatrix(numProcs, localProc, local_n);
  }
  catch(std::exception& exc) {
    std::cout << "hgedge_weights example: create_epetra_rowmatrix threw"
         << " exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }

  //We'll need a Teuchos::ParameterList object to pass to the
  //Isorropia::Epetra::Partitioner class.
  Teuchos::ParameterList paramlist;

#ifdef HAVE_ISORROPIA_ZOLTAN
  // If Zoltan is available, we'll specify that the Zoltan package be
  // used for the partitioning operation, by creating a parameter
  // sublist named "Zoltan".
  // In the sublist, we'll set parameters that we want sent to Zoltan.
  // (As it turns out, Isorropia selects Zoltan's hypergraph partitioner
  //  by default, so we don't actually need to specify it. But it's
  //  useful for illustration...)

  Teuchos::ParameterList& sublist = paramlist.sublist("Zoltan");
  sublist.set("LB_METHOD", "HYPERGRAPH");
#else
  // If Zoltan is not available, a simple linear partitioner will be
  // used to partition such that the number of nonzeros is equal (or
  // close to equal) on each processor. No parameter is necessary to
  // specify this.
#endif
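  // (If finer control is wanted, other Zoltan parameters can be passed the
  //  same way through the "Zoltan" sublist above -- for example an imbalance
  //  tolerance such as sublist.set("IMBALANCE_TOL", "1.03"). The parameter
  //  shown here is only an illustration; see the Zoltan User's Guide for the
  //  parameters that are actually recognized.)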


  //Now we're going to create an Epetra_Vector with weights to
  //be used as hypergraph edge weights in the repartitioning operation.
  //
  // We think of the rows as vertices of the hypergraph, and the columns
  // as hyperedges.  Our row matrix is square, so we can use the
  // row map to indicate how the column weights should be
  // distributed.

  Teuchos::RCP<Epetra_Vector> hge_weights =
    Teuchos::rcp(new Epetra_Vector(rowmatrix->RowMatrixRowMap()));

  double* vals = hge_weights->Values();
  const Epetra_BlockMap& map = rowmatrix->RowMatrixRowMap();
  int num = map.NumMyElements();

  //For this demo, we'll assign the weights to be elem+1, where 'elem' is
  //the global-id of the corresponding row. (If we don't use +1, Zoltan
  //complains that the first one has a zero weight.)

  //Using these linearly-increasing weights should cause the partitioner
  //to put an UN-EQUAL number of rows on each processor...
  for(int i=0; i<num; ++i) {
    vals[i] = 1.0*(map.GID(i)+1);
  }

  Teuchos::RCP<Isorropia::Epetra::CostDescriber> costs =
    Teuchos::rcp(new Isorropia::Epetra::CostDescriber);

  costs->setHypergraphEdgeWeights(hge_weights);

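  // (A CostDescriber can also carry vertex weights and graph edge weights --
  //  for example via setVertexWeights() -- but this example supplies only
  //  hypergraph edge weights.)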
  //Now create the partitioner.

  Teuchos::RCP<Isorropia::Epetra::Partitioner> partitioner =
    Teuchos::rcp(new Isorropia::Epetra::Partitioner(rowmatrix, costs, paramlist));

  //Next create a Redistributor object and use it to create a repartitioned
  //copy of the matrix.

  Isorropia::Epetra::Redistributor rd(partitioner);

  Teuchos::RCP<Epetra_CrsMatrix> bal_matrix;

  //Use a try-catch block because Isorropia will throw an exception
  //if it encounters an error.

  if (localProc == 0) {
    std::cout << " calling Isorropia::Epetra::Redistributor::redistribute..."
        << std::endl;
  }

  try {
    bal_matrix = rd.redistribute(*rowmatrix);
  }
  catch(std::exception& exc) {
    std::cout << "hgedge_weights example: Isorropia::Epetra::Redistributor threw "
         << "exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }

  // Results

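  // A rough summary of what compute_hypergraph_metrics reports (see the
  // ispatest utilities for the precise definitions):
  //   balance : largest part weight relative to the average (1.0 is perfect)
  //   cutn    : total weight of hyperedges that span more than one part
  //   cutl    : "connectivity" metric -- each hyperedge contributes
  //             weight * (number of parts it touches - 1)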
  double goalWeight = 1.0 / (double)numProcs;
  double bal0, bal1, cutn0, cutn1, cutl0, cutl1;

  // Balance and cut quality before partitioning

  ispatest::compute_hypergraph_metrics(*rowmatrix, *costs, goalWeight,
                     bal0, cutn0, cutl0);

  // Balance and cut quality after partitioning

  Teuchos::RCP<Epetra_Vector> new_weights = rd.redistribute(*hge_weights);
  Isorropia::Epetra::CostDescriber new_costs;
  new_costs.setHypergraphEdgeWeights(new_weights);

  ispatest::compute_hypergraph_metrics(*bal_matrix, new_costs, goalWeight,
                     bal1, cutn1, cutl1);

  if (localProc == 0) {
    std::cout << "Before partitioning: ";
    std::cout << "Balance " << bal0 << " cutN " << cutn0 << " cutL " << cutl0;
    std::cout << std::endl;

    std::cout << "After partitioning:  ";
    std::cout << "Balance " << bal1 << " cutN " << cutn1 << " cutL " << cutl1;
    std::cout << std::endl;
  }

  MPI_Finalize();

#else
  std::cout << "hgedge_weights: must have both MPI and EPETRA. Make sure "
            << "Trilinos is configured with --enable-mpi and --enable-epetra."
            << std::endl;
#endif

  return(0);
}

//Below is the implementation of the helper function that creates the
//Epetra row-matrix used by the example program above.

#if defined(HAVE_MPI) && defined(HAVE_EPETRA)

Teuchos::RCP<const Epetra_RowMatrix>
 create_epetra_rowmatrix(int numProcs,
                         int localProc,
                         int local_n)
{
  if (localProc == 0) {
    std::cout << " creating Epetra_CrsMatrix with even row-distribution..."
            << std::endl;
  }

  //Create an Epetra_CrsMatrix with rows spread evenly over
  //processors.

  Epetra_MpiComm comm(MPI_COMM_WORLD);
  int global_num_rows = numProcs*local_n;

  Epetra_Map rowmap(global_num_rows, local_n, 0, comm);

  int nnz_per_row = 9;
  Teuchos::RCP<Epetra_CrsMatrix> matrix =
    Teuchos::rcp(new Epetra_CrsMatrix(Copy, rowmap, nnz_per_row));

  // Add rows one at a time, producing a banded matrix: -1.0 on the four
  // nearest sub- and super-diagonals, 4.0 on the diagonal (at most 9
  // nonzeros per row).
  double negOne = -1.0;
  double posFour = 4.0;
  for (int i=0; i<local_n; i++) {
    int GlobalRow = matrix->GRID(i);
    int RowLess1 = GlobalRow - 1;
    int RowPlus1 = GlobalRow + 1;
    int RowLess2 = GlobalRow - 2;
    int RowPlus2 = GlobalRow + 2;
    int RowLess3 = GlobalRow - 3;
    int RowPlus3 = GlobalRow + 3;
    int RowLess4 = GlobalRow - 4;
    int RowPlus4 = GlobalRow + 4;

    if (RowLess4>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess4);
    }
    if (RowLess3>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess3);
    }
    if (RowLess2>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess2);
    }
    if (RowLess1>=0) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowLess1);
    }
    if (RowPlus1<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus1);
    }
    if (RowPlus2<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus2);
    }
    if (RowPlus3<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus3);
    }
    if (RowPlus4<global_num_rows) {
      matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &RowPlus4);
    }

    matrix->InsertGlobalValues(GlobalRow, 1, &posFour, &GlobalRow);
  }

  int err = matrix->FillComplete();
  if (err != 0) {
    throw Isorropia::Exception("create_epetra_rowmatrix: error in matrix->FillComplete()");
  }

  return(matrix);
}

#endif //HAVE_MPI && HAVE_EPETRA