[Scipy-svn] r3437 - trunk/scipy/sandbox/multigrid

scipy-svn at scipy.org
Mon Oct 15 16:33:26 EDT 2007


Author: wnbell
Date: 2007-10-15 15:33:20 -0500 (Mon, 15 Oct 2007)
New Revision: 3437

Modified:
   trunk/scipy/sandbox/multigrid/adaptive.py
   trunk/scipy/sandbox/multigrid/multilevel.py
   trunk/scipy/sandbox/multigrid/sa.py
Log:
Make smoothed aggregation (SA) work properly with blocks: fix the 0-based block count, build the block strength-of-connection matrix from entrywise 1-norms instead of squared Frobenius norms, and add an optional user-defined aggregation keyword to smoothed_aggregation_solver.
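
Roughly, the intended usage after this change looks like the following sketch, which mirrors the __main__ driver in multilevel.py below (the elasticity test files and the two-DOFs-per-node block layout are just the example used there, and the sandbox-era API such as scipy.linalg.cg and matrix.psolve is assumed):

    from scipy import *
    from scipy.sandbox.multigrid.multilevel import smoothed_aggregation_solver

    A = io.mmread('tests/sample_data/elas30_A.mtx').tocsr()        # 2D elasticity operator
    candidates = io.mmread('tests/sample_data/elas30_nullspace.mtx')
    candidates = [ array(candidates[:,i]) for i in range(candidates.shape[1]) ]

    # one block id per degree of freedom; here both DOFs of each node share a block
    blocks = arange(A.shape[0]/2).repeat(2)

    ml = smoothed_aggregation_solver(A, candidates, blocks=blocks,
                                     max_coarse=10, max_levels=10)

    # use the multilevel cycle as a preconditioner for CG, as in __main__ below
    b = A*rand(A.shape[0])
    A.psolve = ml.psolve
    x = linalg.cg(A, b, maxiter=12, tol=1e-8)[0]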



Modified: trunk/scipy/sandbox/multigrid/adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/adaptive.py	2007-10-15 19:23:07 UTC (rev 3436)
+++ trunk/scipy/sandbox/multigrid/adaptive.py	2007-10-15 20:33:20 UTC (rev 3437)
@@ -16,14 +16,14 @@
      
     """
 
-    #candidate prolongator (assumes every value from x is used)
+    #candidate prolongator (assumes every value from x is used)  #TODO permit gaps
     X = csr_matrix((x_l,W_l.indices,W_l.indptr),dims=W_l.shape,check=False)  
     
     R = (P_l.T.tocsr() * X)  # R has at most 1 nz per row
     X = X - P_l*R            # orthogonalize X against P_l
     
-    #DROP REDUNDANT COLUMNS FROM P (AND R?) HERE (NULL OUT R ACCORDINGLY?)    
-    #REMOVE CORRESPONDING COLUMNS FROM W_l AND ROWS FROM A_m ALSO
+    #TODO DROP REDUNDANT COLUMNS FROM P (AND R?) HERE (NULL OUT R ACCORDINGLY?)    
+    #TODO REMOVE CORRESPONDING COLUMNS FROM W_l AND ROWS FROM A_m ALSO
     W_l_new = W_l
     W_m_new = W_m
 
@@ -59,7 +59,7 @@
     D = diag_sparse(A)
     D_inv_A = diag_sparse(1.0/D)*A
     omega = 4.0/(3.0*approximate_spectral_radius(D_inv_A))
-    print "spectral radius",approximate_spectral_radius(D_inv_A)
+    print "spectral radius",approximate_spectral_radius(D_inv_A) #TODO remove this
     D_inv_A *= omega
 
     return P - D_inv_A*P
@@ -124,8 +124,9 @@
             x = self.__develop_candidate(A,As,Is,Ps,Ws,AggOps,mu=mu)    
             
             self.candidates.append(x)
-           
-            #As,Is,Ps,Ws = self.__augment_cycle(A,As,Ps,Ws,AggOps,x)             
+            
+            #TODO which is faster?
+            #As,Is,Ps,Ws = self.__augment_cycle(A,As,Ps,Ws,AggOps,x)  
             As,Is,Ps = sa_hierarchy(A,AggOps,self.candidates)
 
         self.Ps = Ps 
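
For context, the hunk around the spectral-radius print above is SA's damped-Jacobi prolongator smoothing: the tentative prolongator P is replaced by (I - omega*D^-1*A)*P with omega = 4/(3*rho(D^-1*A)). A standalone sketch of that step, assuming the diag_sparse and approximate_spectral_radius helpers from the package's utils module (the function name here is illustrative, not part of this commit):

    from scipy.sandbox.multigrid.utils import diag_sparse, approximate_spectral_radius

    def jacobi_smooth_prolongator(A, P):
        # D^-1 * A, scaled by the damped Jacobi weight omega = 4/(3*rho(D^-1*A))
        D_inv_A = diag_sparse(1.0/diag_sparse(A))*A
        omega   = 4.0/(3.0*approximate_spectral_radius(D_inv_A))
        D_inv_A = D_inv_A*omega
        return P - D_inv_A*P     # (I - omega*D^-1*A)*P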

Modified: trunk/scipy/sandbox/multigrid/multilevel.py
===================================================================
--- trunk/scipy/sandbox/multigrid/multilevel.py	2007-10-15 19:23:07 UTC (rev 3436)
+++ trunk/scipy/sandbox/multigrid/multilevel.py	2007-10-15 20:33:20 UTC (rev 3437)
@@ -22,7 +22,7 @@
     """
     D = 2*ones(N)
     O =  -ones(N)
-    return scipy.sparse.spdiags([D,O,O],[0,-1,1],N,N).tocoo().tocsr() #eliminate zeros
+    return scipy.sparse.spdiags([D,O,O],[0,-1,1],N,N).tocoo().tocsr() #eliminate explicit zeros
 
 def poisson_problem2D(N):
     """
@@ -34,7 +34,7 @@
     T =  -ones(N*N)
     O =  -ones(N*N)
     T[N-1::N] = 0
-    return scipy.sparse.spdiags([D,O,T,T,O],[0,-N,-1,1,N],N*N,N*N).tocoo().tocsr() #eliminate zeros
+    return scipy.sparse.spdiags([D,O,T,T,O],[0,-N,-1,1,N],N*N,N*N).tocoo().tocsr() #eliminate explicit zeros
     
 
 def ruge_stuben_solver(A,max_levels=10,max_coarse=500):
@@ -60,7 +60,7 @@
         
     return multilevel_solver(As,Ps)
 
-def smoothed_aggregation_solver(A,candidates=None,blocks=None,max_levels=10,max_coarse=500,epsilon=0.08,omega=4.0/3.0):
+def smoothed_aggregation_solver(A,candidates=None,blocks=None,aggregation=None,max_levels=10,max_coarse=500,epsilon=0.08,omega=4.0/3.0):
     """
     Create a multilevel solver using Smoothed Aggregation (SA)
 
@@ -75,15 +75,25 @@
     
     if candidates is None:
         candidates = [ ones(A.shape[0]) ] # use constant vector
-        
-    while len(As) < max_levels  and A.shape[0] > max_coarse:
-        P,candidates = sa_interpolation(A,candidates,epsilon*0.5**(len(As)-1),omega=omega,blocks=blocks)
-        #P = sa_interpolation(A,epsilon=0.0)
 
-        A = (P.T.tocsr() * A) * P     #galerkin operator
+    if aggregation is None:
+        while len(As) < max_levels  and A.shape[0] > max_coarse:
+            P,candidates = sa_interpolation(A,candidates,epsilon*0.5**(len(As)-1),omega=omega,blocks=blocks)
+            blocks = None #only used for 1st level
 
-        As.append(A)
-        Ps.append(P)
+            A = (P.T.tocsr() * A) * P     #galerkin operator
+
+            As.append(A)
+            Ps.append(P)
+    else:
+        #use user-defined aggregation
+        for AggOp in aggregation:
+            P,candidates = sa_interpolation(A,candidates,omega=omega,AggOp=AggOp)
+
+            A = (P.T.tocsr() * A) * P     #galerkin operator
+
+            As.append(A)
+            Ps.append(P)
         
     return multilevel_solver(As,Ps)
 
@@ -163,7 +173,7 @@
         
         if lvl == len(self.As) - 2:
             #use direct solver on coarsest level
-            coarse_x[:] = spsolve(self.As[-1],coarse_b)
+            coarse_x[:] = spsolve(self.As[-1],coarse_b)  #TODO reuse factors for efficiency?
             #coarse_x[:] = scipy.linalg.cg(self.As[-1],coarse_b,tol=1e-12)[0]
             #print "coarse residual norm",scipy.linalg.norm(coarse_b - self.As[-1]*coarse_x)
         else:   
@@ -185,17 +195,19 @@
 if __name__ == '__main__':
     from scipy import *
     candidates = None
-    A = poisson_problem2D(100)
+    blocks = None
+    #A = poisson_problem2D(100)
     #A = io.mmread("rocker_arm_surface.mtx").tocsr()
     #A = io.mmread("9pt-100x100.mtx").tocsr()
     #A = io.mmread("/home/nathan/Desktop/9pt/9pt-100x100.mtx").tocsr()
     #A = io.mmread("/home/nathan/Desktop/BasisShift_W_EnergyMin_Luke/9pt-5x5.mtx").tocsr()
     
-    #A = io.mmread('tests/sample_data/elas30_A.mtx').tocsr()
-    #candidates = io.mmread('tests/sample_data/elas30_nullspace.mtx')
-    #candidates = [ array(candidates[:,x]) for x in range(candidates.shape[1]) ]
-    
-    ml = smoothed_aggregation_solver(A,candidates,max_coarse=10,max_levels=10)
+    A = io.mmread('tests/sample_data/elas30_A.mtx').tocsr()
+    candidates = io.mmread('tests/sample_data/elas30_nullspace.mtx')
+    candidates = [ array(candidates[:,x]) for x in range(candidates.shape[1]) ]
+    blocks = arange(A.shape[0]/2).repeat(2)
+     
+    ml = smoothed_aggregation_solver(A,candidates,blocks=blocks,max_coarse=10,max_levels=10)
     #ml = ruge_stuben_solver(A)
 
     x = rand(A.shape[0])
@@ -210,8 +222,12 @@
             residuals.append(linalg.norm(b - A*x))
         A.psolve = ml.psolve
         x_sol = linalg.cg(A,b,x0=x,maxiter=12,tol=1e-100,callback=add_resid)[0]
+            
 
     residuals = array(residuals)/residuals[0]
+    avg_convergence_ratio = residuals[-1]**(1.0/len(residuals))
+    print "average convergence ratio",avg_convergence_ratio
+    print "last convergence ratio",residuals[-1]/residuals[-2]
 
     print residuals
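
The new aggregation keyword added above takes a user-supplied list of csr_matrix aggregation operators, one per coarsening step, each with as many rows as the current operator has columns. A hedged sketch of how it might be driven, where pairwise_aggregation is an illustrative helper (not part of this commit) that lumps consecutive pairs of unknowns into aggregates:

    from scipy import ones, arange
    from scipy.sparse import csr_matrix

    def pairwise_aggregation(n):
        # AggOp[i,j] = 1 when fine unknown i belongs to aggregate j = i/2
        # (assumes an even number of unknowns)
        return csr_matrix((ones(n), arange(n)/2, arange(n+1)), dims=(n, n/2))

    AggOps = [ pairwise_aggregation(A.shape[0]) ]   # one operator per coarse level
    ml = smoothed_aggregation_solver(A, aggregation=AggOps)

With aggregation given, coarsening proceeds once per supplied operator instead of until max_coarse/max_levels is reached.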
 

Modified: trunk/scipy/sandbox/multigrid/sa.py
===================================================================
--- trunk/scipy/sandbox/multigrid/sa.py	2007-10-15 19:23:07 UTC (rev 3436)
+++ trunk/scipy/sandbox/multigrid/sa.py	2007-10-15 20:33:20 UTC (rev 3437)
@@ -1,6 +1,7 @@
 import scipy
 import numpy
-from numpy import array,arange,ones,zeros,sqrt,isinf,asarray,empty,diff
+from numpy import array,arange,ones,zeros,sqrt,isinf,asarray,empty,diff,\
+                  ascontiguousarray
 from scipy.sparse import csr_matrix,isspmatrix_csr
 
 from utils import diag_sparse,approximate_spectral_radius
@@ -23,7 +24,7 @@
 
         else:
             num_dofs   = A.shape[0]
-            num_blocks = blocks.max()
+            num_blocks = blocks.max() + 1
             
             if num_dofs != len(blocks):
                 raise ValueError,'improper block specification'
@@ -34,11 +35,10 @@
             B  = csr_matrix((ones(num_dofs),blocks,arange(num_dofs + 1)),dims=(num_dofs,num_blocks))
             Bt = B.T.tocsr()
             
-            #Frobenius norms of blocks entries of A
-            #TODO change to 1-norm ?
-            Block_Frob = Bt * csr_matrix((A.data**2,A.indices,A.indptr),dims=A.shape) * B 
+            #1-norms of block entries of A
+            Block_A = Bt * csr_matrix((abs(A.data),A.indices,A.indptr),dims=A.shape) * B 
     
-            S = sa_strong_connections(Block_Frob,epsilon)       
+            S = sa_strong_connections(Block_A,epsilon)       
             S.data[:] = 1
 
             Mask = B * S * Bt
@@ -61,20 +61,20 @@
     
     if blocks is not None:
         num_dofs   = A.shape[0]
-        num_blocks = blocks.max()
+        num_blocks = blocks.max() + 1
         
         if num_dofs != len(blocks):
             raise ValueError,'improper block specification'
-        
+       
+        print "SA has blocks"
         # for non-scalar problems, use pre-defined blocks in aggregation
         # the strength of connection matrix is based on the Frobenius norms of the blocks
        
-        #TODO change this to matrix 1 norm?
         B  = csr_matrix((ones(num_dofs),blocks,arange(num_dofs + 1)),dims=(num_dofs,num_blocks))
-        #Frobenius norms of blocks entries of A
-        Block_Frob = B.T.tocsr() * csr_matrix((A.data**2,A.indices,A.indptr),dims=A.shape) * B 
+        #1-norms of block entries of A
+        Block_A = B.T.tocsr() * csr_matrix((abs(A.data),A.indices,A.indptr),dims=A.shape) * B 
 
-        S = sa_strong_connections(Block_Frob,epsilon)
+        S = sa_strong_connections(Block_A,epsilon)
     
         Pj = multigridtools.sa_get_aggregates(S.shape[0],S.indptr,S.indices)
         Pj = Pj[blocks] #expand block aggregates into constituent dofs
@@ -108,7 +108,6 @@
         c = c[diff(AggOp.indptr) == 1]  #eliminate DOFs that aggregation misses
         X = csr_matrix((c,AggOp.indices,AggOp.indptr),dims=AggOp.shape)
        
-
         #orthogonalize X against previous
         for j,A in enumerate(candidate_matrices):
             D_AtX = csr_matrix((A.data*X.data,X.indices,X.indptr),dims=X.shape).sum(axis=0).A.flatten() #same as diagonal of A.T * X            
@@ -134,15 +133,22 @@
         Q_data[i::K] = X.data
     Q = csr_matrix((Q_data,Q_indices,Q_indptr),dims=(N_fine,K*N_coarse))
 
-    coarse_candidates = [array(R[:,i]) for i in range(K)]
+    coarse_candidates = [ascontiguousarray(R[:,i]) for i in range(K)]
 
     return Q,coarse_candidates
     
     
-def sa_interpolation(A,candidates,epsilon,omega=4.0/3.0,blocks=None):
+def sa_interpolation(A,candidates,epsilon=0.0,omega=4.0/3.0,blocks=None,AggOp=None):
     if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
 
-    AggOp  = sa_constant_interpolation(A,epsilon=epsilon,blocks=blocks)
+    if AggOp is None:
+        AggOp = sa_constant_interpolation(A,epsilon=epsilon,blocks=blocks)
+    else:
+        if not isspmatrix_csr(AggOp):
+            raise TypeError,'aggregation is specified by a list of csr_matrix objects'
+        if A.shape[1] != AggOp.shape[0]:
+            raise ValueError,'incompatible aggregation operator'
+
     T,coarse_candidates = sa_fit_candidates(AggOp,candidates)  
 
     A_filtered = sa_filtered_matrix(A,epsilon,blocks) #use filtered matrix for anisotropic problems
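
To summarize the sa.py changes above: block ids are 0-based (hence num_blocks = blocks.max() + 1), and the block strength-of-connection matrix is now formed from the sum of absolute values of the entries in each block (an entrywise 1-norm) rather than from squared Frobenius norms. A condensed sketch of that collapse step, with block_1_norms an illustrative name:

    from scipy import ones, arange
    from scipy.sparse import csr_matrix

    def block_1_norms(A, blocks):
        num_dofs   = A.shape[0]
        num_blocks = blocks.max() + 1                  # block ids are 0-based
        # B[i,j] = 1 when DOF i belongs to block j
        B = csr_matrix((ones(num_dofs), blocks, arange(num_dofs + 1)),
                       dims=(num_dofs, num_blocks))
        # sum of |A[i,j]| over each block pair -> num_blocks x num_blocks matrix
        return B.T.tocsr() * csr_matrix((abs(A.data), A.indices, A.indptr), dims=A.shape) * B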



