[Scipy-svn] r3394 - trunk/scipy/sandbox/multigrid

Wed Oct 3 13:42:27 CDT 2007


Author: wnbell
Date: 2007-10-03 13:42:23 -0500 (Wed, 03 Oct 2007)
New Revision: 3394

Modified:
   trunk/scipy/sandbox/multigrid/adaptive.py
   trunk/scipy/sandbox/multigrid/coarsen.py
   trunk/scipy/sandbox/multigrid/multilevel.py
Log:
Added support for multiple near-nullspace candidates in the SA code
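
For context: the near-nullspace candidates are the fine-grid vectors that the smoothed aggregation (SA) setup reproduces exactly in the range of its tentative prolongator. A minimal sketch of what a candidate list looks like with the new interface (the elasticity remark is an illustrative aside, not data from this commit):

    import numpy as np

    n = 100 * 100                 # fine-grid size of poisson_problem2D(100) used below
    candidates = [np.ones(n)]     # scalar problems: the constant vector, the new default

    # For 2D elasticity one would instead pass the rigid body modes
    # (two translations and one rotation evaluated at the node coordinates);
    # each entry of candidates is a length-n fine-grid vector.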


Modified: trunk/scipy/sandbox/multigrid/adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/adaptive.py	2007-10-03 06:19:08 UTC (rev 3393)
+++ trunk/scipy/sandbox/multigrid/adaptive.py	2007-10-03 18:42:23 UTC (rev 3394)
@@ -32,60 +32,8 @@
     return Q,R
 
 
-def fit_candidates(AggOp,candidates):
-    K = len(candidates)
 
-    N_fine,N_coarse = AggOp.shape
 
-    if len(candidates[0]) == K*N_fine:
-        #see if fine space has been expanded (all levels except for first)
-        AggOp = csr_matrix((AggOp.data.repeat(K),AggOp.indices.repeat(K),arange(K*N_fine + 1)),dims=(K*N_fine,N_coarse))
-        N_fine = K*N_fine
-
-    R = zeros((K*N_coarse,K))
-
-    candidate_matrices = []
-    for i,c in enumerate(candidates):
-        X = csr_matrix((c.copy(),AggOp.indices,AggOp.indptr),dims=AggOp.shape)
-       
-        #TODO optimize this  
-
-        #orthogonalize X against previous
-        for j,A in enumerate(candidate_matrices):
-            D_AtX = csr_matrix((A.data*X.data,X.indices,X.indptr),dims=X.shape).sum(axis=0).A.flatten() #same as diagonal of A.T * X            
-            R[j::K,i] = D_AtX
-            X.data -= D_AtX[X.indices] * A.data
-
-            #AtX = csr_matrix(A.T.tocsr() * X
-            #R[j::K,i] = AtX.data
-            #X = X - A * AtX 
-    
-        #normalize X
-        XtX = X.T.tocsr() * X
-        col_norms = sqrt(XtX.sum(axis=0)).flatten()
-        R[i::K,i] = col_norms
-        col_norms = 1.0/col_norms
-        col_norms[isinf(col_norms)] = 0
-        X.data *= col_norms[X.indices]
-
-        candidate_matrices.append(X)
-
-
-    Q_indptr  = K*AggOp.indptr
-    Q_indices = (K*AggOp.indices).repeat(K)
-    for i in range(K):
-        Q_indices[i::K] += i
-    Q_data = empty(N_fine * K)
-    for i,X in enumerate(candidate_matrices):
-        Q_data[i::K] = X.data
-    Q = csr_matrix((Q_data,Q_indices,Q_indptr),dims=(N_fine,K*N_coarse))
-
-    coarse_candidates = [R[:,i] for i in range(K)]
-
-    return Q,coarse_candidates
-
-
-
 ##def orthonormalize_candidate(I,x,basis):
 ##    Px = csr_matrix((x,I.indices,I.indptr),dims=I.shape,check=False) 
 ##    Rs = []
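
The fit_candidates routine removed above is not dropped; the identical code is re-added to coarsen.py below, next to the aggregation routines that build AggOp. Conceptually it orthonormalizes the candidates aggregate by aggregate: the orthonormal factor becomes the tentative prolongator Q and the coefficients become the coarse-level candidates. A rough dense equivalent for a single aggregate, purely illustrative:

    import numpy as np

    # rows = the fine-grid dofs of one aggregate, columns = the K candidates
    C = np.random.rand(5, 2)             # placeholder local block: 5 dofs, K = 2
    Q_local, R_local = np.linalg.qr(C)   # thin QR of the local block
    # Q_local -> this aggregate's columns of the tentative prolongator Q
    # R_local -> this aggregate's rows of the K coarse candidate vectors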

Modified: trunk/scipy/sandbox/multigrid/coarsen.py
===================================================================
--- trunk/scipy/sandbox/multigrid/coarsen.py	2007-10-03 06:19:08 UTC (rev 3393)
+++ trunk/scipy/sandbox/multigrid/coarsen.py	2007-10-03 18:42:23 UTC (rev 3394)
@@ -1,8 +1,11 @@
-import multigridtools
-import scipy,numpy,scipy.sparse
+import scipy
+import numpy
+
+from numpy import arange,ones,zeros,sqrt,isinf,asarray,empty
 from scipy.sparse import csr_matrix,isspmatrix_csr
 
 from utils import diag_sparse,approximate_spectral_radius
+import multigridtools
 
 
 def rs_strong_connections(A,theta):
@@ -14,10 +17,9 @@
                 -A[i,j] >= theta * max( -A[i,k] )   where k != i
     """
     if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
-    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
 
     Sp,Sj,Sx = multigridtools.rs_strong_connections(A.shape[0],theta,A.indptr,A.indices,A.data)
-    return scipy.sparse.csr_matrix((Sx,Sj,Sp),A.shape)
+    return csr_matrix((Sx,Sj,Sp),A.shape)
 
 
 def rs_interpolation(A,theta=0.25):
@@ -32,7 +34,7 @@
                                                S.indptr,S.indices,S.data,\
                                                T.indptr,T.indices,T.data)
 
-    return scipy.sparse.csr_matrix((Ix,Ij,Ip))
+    return csr_matrix((Ix,Ij,Ip))
 
 
 def sa_strong_connections(A,epsilon):
@@ -40,11 +42,13 @@
 
     Sp,Sj,Sx = multigridtools.sa_strong_connections(A.shape[0],epsilon,A.indptr,A.indices,A.data)
 
-    return scipy.sparse.csr_matrix((Sx,Sj,Sp),A.shape)
+    return csr_matrix((Sx,Sj,Sp),A.shape)
 
 def sa_constant_interpolation(A,epsilon,blocks=None):
     if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
     
+    #handle epsilon = 0 case without creating strength of connection matrix?
+    
     if blocks is not None:
         num_dofs   = A.shape[0]
         num_blocks = blocks.max()
@@ -71,9 +75,61 @@
         Pp = numpy.arange(len(Pj)+1)
         Px = numpy.ones(len(Pj))
     
-    return scipy.sparse.csr_matrix((Px,Pj,Pp))
+    return csr_matrix((Px,Pj,Pp))
 
+
+def fit_candidates(AggOp,candidates):
+    K = len(candidates)
+
+    N_fine,N_coarse = AggOp.shape
+
+    if K > 1 and len(candidates[0]) == K*N_fine:
+        #see if fine space has been expanded (all levels except for first)
+        AggOp = csr_matrix((AggOp.data.repeat(K),AggOp.indices.repeat(K),arange(K*N_fine + 1)),dims=(K*N_fine,N_coarse))
+        N_fine = K*N_fine
+
+    R = zeros((K*N_coarse,K))
+
+    candidate_matrices = []
+    for i,c in enumerate(candidates):
+        X = csr_matrix((c.copy(),AggOp.indices,AggOp.indptr),dims=AggOp.shape)
+       
+        #TODO optimize this  
+
+        #orthogonalize X against previous
+        for j,A in enumerate(candidate_matrices):
+            D_AtX = csr_matrix((A.data*X.data,X.indices,X.indptr),dims=X.shape).sum(axis=0).A.flatten() #same as diagonal of A.T * X            
+            R[j::K,i] = D_AtX
+            X.data -= D_AtX[X.indices] * A.data
+
+            #AtX = csr_matrix(A.T.tocsr() * X
+            #R[j::K,i] = AtX.data
+            #X = X - A * AtX 
     
+        #normalize X
+        XtX = X.T.tocsr() * X
+        col_norms = sqrt(asarray(XtX.sum(axis=0)).flatten())
+        R[i::K,i] = col_norms
+        col_norms = 1.0/col_norms
+        col_norms[isinf(col_norms)] = 0
+        X.data *= col_norms[X.indices]
+
+        candidate_matrices.append(X)
+
+
+    Q_indptr  = K*AggOp.indptr
+    Q_indices = (K*AggOp.indices).repeat(K)
+    for i in range(K):
+        Q_indices[i::K] += i
+    Q_data = empty(N_fine * K)
+    for i,X in enumerate(candidate_matrices):
+        Q_data[i::K] = X.data
+    Q = csr_matrix((Q_data,Q_indices,Q_indptr),dims=(N_fine,K*N_coarse))
+
+    coarse_candidates = [R[:,i] for i in range(K)]
+
+    return Q,coarse_candidates
+    
 ##    S = sa_strong_connections(A,epsilon)
 ##
 ##    #tentative (non-smooth) interpolation operator I
@@ -88,19 +144,20 @@
 ##
 ##    return csr_matrix((Bx,Bj,Bp),dims=A.shape)
     
-def sa_interpolation(A,epsilon,omega=4.0/3.0,blocks=None):
+def sa_interpolation(A,candidates,epsilon,omega=4.0/3.0,blocks=None):
     if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
    
-    P  = sa_constant_interpolation(A,epsilon=epsilon,blocks=blocks)
+    AggOp  = sa_constant_interpolation(A,epsilon=epsilon,blocks=blocks)
+    T,coarse_candidates = fit_candidates(AggOp,candidates)
 
     D_inv = diag_sparse(1.0/diag_sparse(A))       
     
     D_inv_A  = D_inv * A
     D_inv_A *= omega/approximate_spectral_radius(D_inv_A)
 
-    I = P - (D_inv_A*P)  #same as I=S*P, (faster?)
+    P = T - (D_inv_A*T)  #same as I=S*P, (faster?)
            
-    return I
+    return P,coarse_candidates
 
 
 

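With this change sa_interpolation accepts the candidate list and returns both the smoothed prolongator P and the coarse-level candidates, so the caller can feed them into the next level. A minimal usage sketch, assuming the sandbox modules are importable as plain modules (poisson_problem2D is defined in multilevel.py):

    from numpy import ones
    from multilevel import poisson_problem2D
    from coarsen import sa_interpolation

    A = poisson_problem2D(50)            # CSR Poisson matrix
    candidates = [ones(A.shape[0])]      # constant near-nullspace vector
    P, coarse_candidates = sa_interpolation(A, candidates, epsilon=0.08)
    A_coarse = (P.T.tocsr() * A) * P     # Galerkin coarse operator, as in multilevel.py
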
Modified: trunk/scipy/sandbox/multigrid/multilevel.py
===================================================================
--- trunk/scipy/sandbox/multigrid/multilevel.py	2007-10-03 06:19:08 UTC (rev 3393)
+++ trunk/scipy/sandbox/multigrid/multilevel.py	2007-10-03 18:42:23 UTC (rev 3394)
@@ -58,7 +58,7 @@
         
     return multilevel_solver(As,Ps)
 
-def smoothed_aggregation_solver(A,blocks=None,max_levels=10,max_coarse=500,epsilon=0.08,omega=4.0/3.0):
+def smoothed_aggregation_solver(A,candidates=None,blocks=None,max_levels=10,max_coarse=500,epsilon=0.08,omega=4.0/3.0):
     """
     Create a multilevel solver using Smoothed Aggregation (SA)
 
@@ -71,8 +71,11 @@
     As = [A]
     Ps = []
     
+    if candidates is None:
+        candidates = [ ones(A.shape[0]) ] # use constant vector
+        
     while len(As) < max_levels  and A.shape[0] > max_coarse:
-        P = sa_interpolation(A,blocks=blocks,epsilon=epsilon*0.5**(len(As)-1),omega=omega)
+        P,candidates = sa_interpolation(A,candidates,epsilon*0.5**(len(As)-1),omega=omega,blocks=blocks)
         #P = sa_interpolation(A,epsilon=0.0)
 
         A = (P.T.tocsr() * A) * P     #galerkin operator
@@ -157,7 +160,7 @@
         coarse_b = self.Ps[lvl].T * residual
         
         if lvl == len(self.As) - 2:
-            #direct solver on coarsest level
+            #use direct solver on coarsest level
             coarse_x[:] = scipy.linsolve.spsolve(self.As[-1],coarse_b)
             #coarse_x[:] = scipy.linalg.cg(self.As[-1],coarse_b,tol=1e-12)[0]
             #print "coarse residual norm",scipy.linalg.norm(coarse_b - self.As[-1]*coarse_x)
@@ -170,29 +173,26 @@
 
 
     def presmoother(self,A,x,b):
-        gauss_seidel(A,x,b,iterations=1,sweep="forward")
-        gauss_seidel(A,x,b,iterations=1,sweep="backward")
-        #sor(A,x,b,omega=1.85,iterations=1,sweep="backward")
-
-        #x += 4.0/(3.0*infinity_norm(A))*(b - A*x)
+        gauss_seidel(A,x,b,iterations=1,sweep="symmetric")
     
     def postsmoother(self,A,x,b):
-        #sor(A,x,b,omega=1.85,iterations=1,sweep="forward")
-        gauss_seidel(A,x,b,iterations=1,sweep="forward")
-        gauss_seidel(A,x,b,iterations=1,sweep="backward")
-        #x += 4.0/(3.0*infinity_norm(A))*(b - A*x)
+        gauss_seidel(A,x,b,iterations=1,sweep="symmetric")
 
 
 
 if __name__ == '__main__':
     from scipy import *
-    #A = poisson_problem2D(100)
+    candidates = None
+    A = poisson_problem2D(100)
     #A = io.mmread("rocker_arm_surface.mtx").tocsr()
     #A = io.mmread("9pt-100x100.mtx").tocsr()
-    A = io.mmread("/home/nathan/Desktop/9pt/9pt-100x100.mtx").tocsr()
+    #A = io.mmread("/home/nathan/Desktop/9pt/9pt-100x100.mtx").tocsr()
     #A = io.mmread("/home/nathan/Desktop/BasisShift_W_EnergyMin_Luke/9pt-5x5.mtx").tocsr()
-
-    ml = smoothed_aggregation_solver(A,max_coarse=100,max_levels=3)
+    #A = io.mmread('tests/sample_data/elas30_A.mtx').tocsr()
+    #candidates = io.mmread('tests/sample_data/elas30_nullspace.mtx')
+    #candidates = [ array(candidates[:,x]) for x in range(candidates.shape[1]) ]
+    
+    ml = smoothed_aggregation_solver(A,candidates,max_coarse=100,max_levels=3)
     #ml = ruge_stuben_solver(A)
 
     x = rand(A.shape[0])


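The new candidates argument of smoothed_aggregation_solver defaults to the constant vector, so existing callers keep working; multiple candidates are passed as a list of fine-grid vectors, as the commented-out elasticity null-space example above suggests. A hedged sketch of the two call styles, again assuming the sandbox modules import as plain modules (solver usage beyond construction is not shown in this diff):

    from numpy import ones
    from multilevel import smoothed_aggregation_solver, poisson_problem2D

    A = poisson_problem2D(100)

    # default: a single constant candidate
    ml = smoothed_aggregation_solver(A, max_coarse=100, max_levels=3)

    # explicit candidate list (here just the constant vector, as a placeholder)
    candidates = [ones(A.shape[0])]
    ml = smoothed_aggregation_solver(A, candidates, max_coarse=100, max_levels=3)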
