[Scipy-svn] r3378 - in trunk/scipy/sandbox/multigrid: . tests tests/sample_data

scipy-svn@scip... scipy-svn@scip...
Thu Sep 27 21:43:16 CDT 2007


Author: wnbell
Date: 2007-09-27 21:43:06 -0500 (Thu, 27 Sep 2007)
New Revision: 3378

Added:
   trunk/scipy/sandbox/multigrid/tests/examples.py
   trunk/scipy/sandbox/multigrid/tests/sample_data/
   trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_A.mtx.gz
   trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_B.mtx.gz
   trunk/scipy/sandbox/multigrid/tests/sample_data/rocker_arm_surface.mtx.gz
   trunk/scipy/sandbox/multigrid/tests/sample_data/torus.mtx.gz
   trunk/scipy/sandbox/multigrid/tests/test_adaptive.py
Modified:
   trunk/scipy/sandbox/multigrid/adaptive.py
   trunk/scipy/sandbox/multigrid/coarsen.py
   trunk/scipy/sandbox/multigrid/multilevel.py
   trunk/scipy/sandbox/multigrid/relaxation.py
   trunk/scipy/sandbox/multigrid/tests/test_coarsen.py
   trunk/scipy/sandbox/multigrid/tests/test_relaxation.py
   trunk/scipy/sandbox/multigrid/tests/test_utils.py
   trunk/scipy/sandbox/multigrid/utils.py
Log:
some improvements to smoothed aggregation
continued work towards adaptive SA



Modified: trunk/scipy/sandbox/multigrid/adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/adaptive.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/adaptive.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -1,13 +1,13 @@
 import numpy,scipy,scipy.sparse
-from numpy import sqrt,ravel,diff,zeros,zeros_like,inner,concatenate
+from numpy import sqrt,ravel,diff,zeros,zeros_like,inner,concatenate,asarray
 from scipy.sparse import csr_matrix,coo_matrix
 
 from relaxation import gauss_seidel
 from multilevel import multilevel_solver
 from coarsen import sa_constant_interpolation
-from utils import infinity_norm
+#from utils import infinity_norm
+from utils import approximate_spectral_radius
 
-
 def fit_candidate(I,x):
     """
     For each aggregate in I (i.e. each column of I) compute vector R and 
@@ -18,9 +18,11 @@
     In otherwords, find a prolongator Q with orthonormal columns so that
     x is represented exactly on the coarser level by R.
     """
+    x = asarray(x)
     Q = csr_matrix((x.copy(),I.indices,I.indptr),dims=I.shape,check=False)
     R = sqrt(ravel(csr_matrix((x*x,I.indices,I.indptr),dims=I.shape,check=False).sum(axis=0)))  #column 2-norms  
-    Q.data *= (1.0/R)[Q.indices]
+
+    Q.data *= (1.0/R)[Q.indices]  #normalize columns of Q
    
     #print "norm(R)",scipy.linalg.norm(R)
     #print "min(R),max(R)",min(R),max(R)
@@ -30,6 +32,60 @@
     return Q,R
 
 
+def fit_candidates(AggOp,candidates):
+    K = len(candidates)
+
+    N_fine,N_coarse = AggOp.shape
+
+    if len(candidates[0]) == K*N_fine:
+        #see if fine space has been expanded (all levels except for first)
+        AggOp = csr_matrix((AggOp.data.repeat(K),AggOp.indices.repeat(K),arange(K*N_fine + 1)),dims=(K*N_fine,N_coarse))
+        N_fine = K*N_fine
+
+    R = zeros((K*N_coarse,K))
+
+    candidate_matrices = []
+    for i,c in enumerate(candidates):
+        X = csr_matrix((c.copy(),AggOp.indices,AggOp.indptr),dims=AggOp.shape)
+       
+        #TODO optimize this  
+
+        #orthogonalize X against previous
+        for j,A in enumerate(candidate_matrices):
+            D_AtX = csr_matrix((A.data*X.data,X.indices,X.indptr),dims=X.shape).sum(axis=0).A.flatten() #same as diagonal of A.T * X            
+            R[j::K,i] = D_AtX
+            X.data -= D_AtX[X.indices] * A.data
+
+            #AtX = csr_matrix(A.T.tocsr() * X
+            #R[j::K,i] = AtX.data
+            #X = X - A * AtX 
+    
+        #normalize X
+        XtX = X.T.tocsr() * X
+        col_norms = sqrt(XtX.sum(axis=0)).flatten()
+        R[i::K,i] = col_norms
+        col_norms = 1.0/col_norms
+        col_norms[isinf(col_norms)] = 0
+        X.data *= col_norms[X.indices]
+
+        candidate_matrices.append(X)
+
+
+    Q_indptr  = K*AggOp.indptr
+    Q_indices = (K*AggOp.indices).repeat(K)
+    for i in range(K):
+        Q_indices[i::K] += i
+    Q_data = empty(N_fine * K)
+    for i,X in enumerate(candidate_matrices):
+        Q_data[i::K] = X.data
+    Q = csr_matrix((Q_data,Q_indices,Q_indptr),dims=(N_fine,K*N_coarse))
+
+    coarse_candidates = [R[:,i] for i in range(K)]
+
+    return Q,coarse_candidates
+
+
+
 ##def orthonormalize_candidate(I,x,basis):
 ##    Px = csr_matrix((x,I.indices,I.indptr),dims=I.shape,check=False) 
 ##    Rs = []
@@ -110,16 +166,19 @@
 
 def smoothed_prolongator(P,A):
     #just use Richardson for now
-    #omega = 4.0/(3.0*infinity_norm(A))
+    #omega = 4.0/(3.0*approximate_spectral_radius(A))
     #return P - omega*(A*P)
-    #return P
+    #return P  #TEST
+    
     D = diag_sparse(A)
     D_inv_A = diag_sparse(1.0/D)*A
-    omega = 4.0/(3.0*infinity_norm(D_inv_A))
+    omega = 4.0/(3.0*approximate_spectral_radius(D_inv_A))
+    print "spectral radius",approximate_spectral_radius(D_inv_A)
     D_inv_A *= omega
     return P - D_inv_A*P
  
 
+
 def sa_hierarchy(A,Ws,x):
     """
     Construct multilevel hierarchy using Smoothed Aggregation
@@ -138,7 +197,8 @@
     Ps = []
 
     for W in Ws:
-        P,x = fit_candidate(W,x)
+        #P,x = fit_candidate(W,x)
+        P,x = fit_candidates(W,x)
         I   = smoothed_prolongator(P,A)  
         A   = I.T.tocsr() * A * I
         As.append(A)
@@ -152,59 +212,57 @@
     return csr_matrix((I.data,I.indices,ptr),dims=(N,I.shape[1]),check=False)
 
 class adaptive_sa_solver:
-    def __init__(self,A,options=None):
+    def __init__(self,A,options=None,max_levels=10,max_coarse=100,max_candidates=1,mu=5,epsilon=0.1):
         self.A = A
 
         self.Rs = [] 
-        self.__construct_hierarchy(A)
-
-    def __construct_hierarchy(self,A):
+        
         #if self.A.shape[0] <= self.opts['coarse: max size']:
         #    raise ValueError,'small matrices not handled yet'
         
-        x,AggOps = self.__initialization_stage(A) #first candidate
+        x,AggOps = self.__initialization_stage(A,max_levels=max_levels,max_coarse=max_coarse,mu=mu,epsilon=epsilon) #first candidate
+        
         Ws = AggOps
 
-        #x[:] = 1  #TEST
-    
         self.candidates = [x]
-        #self.candidates = [1.0/D.data]
 
         #create SA using x here
-        As,Is,Ps = sa_hierarchy(A,Ws,x)
+        As,Is,Ps = sa_hierarchy(A,Ws,self.candidates)
 
-        for i in range(0):
-            x           = self.__develop_candidate(A,As,Is,Ps,Ws,AggOps)
+        for i in range(max_candidates - 1):
+            x = self.__develop_candidate(A,As,Is,Ps,Ws,AggOps,mu=mu)    
+            
+            self.candidates.append(x)
+
             #if i == 0:
-            #    x = arange(20).repeat(20).astype(float)
+            #    x = arange(50).repeat(50).astype(float)
             #elif i == 1:
-            #    x = arange(20).repeat(20).astype(float)
-            #    x = numpy.ravel(transpose(x.reshape((20,20))))
+            #    x = arange(50).repeat(50).astype(float)
+            #    x = numpy.ravel(transpose(x.reshape((50,50))))
+           
+            #As,Is,Ps,Ws = self.__augment_cycle(A,As,Ps,Ws,AggOps,x)             
+            As,Is,Ps = sa_hierarchy(A,AggOps,self.candidates)
+   
+            #random.seed(0)
+            #solver = multilevel_solver(As,Is)
+            #x = solver.solve(zeros(A.shape[0]), x0=rand(A.shape[0]), tol=1e-12, maxiter=30)
+            #self.candidates.append(x)
 
-            As,Is,Ps,Ws = self.__augment_cycle(A,As,Ps,Ws,AggOps,x)
-             
-            self.candidates.append(x)
-        
         self.Ps = Ps 
         self.solver = multilevel_solver(As,Is)
         self.AggOps = AggOps
                 
 
 
-    def __develop_candidate(self,A,As,Is,Ps,Ws,AggOps):
+    def __develop_candidate(self,A,As,Is,Ps,Ws,AggOps,mu):
+        #scipy.random.seed(0)  #TEST
         x = scipy.rand(A.shape[0])
         b = zeros_like(x)
 
-        
-        #x = arange(200).repeat(200).astype(float)
-        #x[:] = 1 #TEST
-
-        mu = 5
- 
         solver = multilevel_solver(As,Is)
 
-        for n in range(mu):
-            x = solver.solve(b, x0=x, tol=1e-8, maxiter=1)
+        x = solver.solve(b, x0=x, tol=1e-10, maxiter=mu)
+    
         #TEST FOR CONVERGENCE HERE
  
         A_l,P_l,W_l,x_l = As[0],Ps[0],Ws[0],x
@@ -271,18 +329,15 @@
         return new_As,new_Is,new_Ps,new_Ws
 
 
-    def __initialization_stage(self,A):
-        max_levels = 10
-        max_coarse = 50
-
+    def __initialization_stage(self,A,max_levels,max_coarse,mu,epsilon):
         AggOps = []
         Is     = []
 
         # aSA parameters 
-        mu      = 5    # number of test relaxation iterations
-        epsilon = 0.1  # minimum acceptable relaxation convergence factor 
+        # mu      - number of test relaxation iterations
+        # epsilon - minimum acceptable relaxation convergence factor 
         
-        scipy.random.seed(0) 
+        #scipy.random.seed(0) #TEST
 
         #step 1
         A_l = A
@@ -291,15 +346,15 @@
         
         #step 2
         b = zeros_like(x)
-        gauss_seidel(A_l,x,b,iterations=mu)
+        gauss_seidel(A_l,x,b,iterations=mu,sweep='symmetric')
         #step 3
         #test convergence rate here 
       
         As = [A]
 
         while len(AggOps) + 1 < max_levels  and A_l.shape[0] > max_coarse:
-            W_l   = sa_constant_interpolation(A_l,epsilon=0.08*0.5**(len(AggOps)-1))           #step 4b  #TEST
-            #W_l   = sa_constant_interpolation(A_l,epsilon=0)           #step 4b  #TEST
+            #W_l   = sa_constant_interpolation(A_l,epsilon=0.08*0.5**(len(AggOps)-1))           #step 4b  #TEST
+            W_l   = sa_constant_interpolation(A_l,epsilon=0)              #step 4b
             P_l,x = fit_candidate(W_l,x)                                  #step 4c  
             I_l   = smoothed_prolongator(P_l,A_l)                         #step 4d
             A_l   = I_l.T.tocsr() * A_l * I_l                             #step 4e
@@ -312,10 +367,10 @@
 
             if not skip_f_to_i:
                 print "."
-                x_hat = x.copy()                                        #step 4g
-                gauss_seidel(A_l,x,zeros_like(x),iterations=mu)         #step 4h
+                x_hat = x.copy()                                                   #step 4g
+                gauss_seidel(A_l,x,zeros_like(x),iterations=mu,sweep='symmetric')  #step 4h
                 x_A_x = inner(x,A_l*x) 
-                if (x_A_x/inner(x_hat,A_l*x_hat))**(1.0/mu) < epsilon:  #step 4i
+                if (x_A_x/inner(x_hat,A_l*x_hat))**(1.0/mu) < epsilon:             #step 4i
                     print "sufficient convergence, skipping"
                     skip_f_to_i = True
                     if x_A_x == 0:       
@@ -323,7 +378,7 @@
         
         #update fine-level candidate
         for A_l,I in reversed(zip(As[1:],Is)):
-            gauss_seidel(A_l,x,zeros_like(x),iterations=mu)         #TEST
+            gauss_seidel(A_l,x,zeros_like(x),iterations=mu,sweep='symmetric')         #TEST
             x = I * x
         
         gauss_seidel(A,x,b,iterations=mu)         #TEST
@@ -336,23 +391,39 @@
 from scipy import *
 from utils import diag_sparse
 from multilevel import poisson_problem1D,poisson_problem2D
-#A = poisson_problem2D(100)
-A = io.mmread("tests/sample_data/laplacian_40_3dcube.mtx").tocsr()
+A = poisson_problem2D(50)
+#A = io.mmread("tests/sample_data/laplacian_41_3dcube.mtx").tocsr()
+#A = io.mmread("laplacian_40_3dcube.mtx").tocsr()
+#A = io.mmread("/home/nathan/Desktop/9pt/9pt-100x100.mtx").tocsr()
+#A = io.mmread("/home/nathan/Desktop/BasisShift_W_EnergyMin_Luke/9pt-5x5.mtx").tocsr()
 
 #A = A*A
 #D = diag_sparse(1.0/sqrt(10**(12*rand(A.shape[0])-6))).tocsr()
 #A = D * A * D
 #A = io.mmread("nos2.mtx").tocsr()
-asa = adaptive_sa_solver(A)
+asa = adaptive_sa_solver(A,max_candidates=1)
+#x = arange(A.shape[0]).astype('d') + 1
+scipy.random.seed(0)  #TEST
 x = rand(A.shape[0])
 b = zeros_like(x)
 
 
 print "solving"
-x_sol,residuals = asa.solver.solve(b,x,tol=1e-12,maxiter=30,return_residuals=True)
+#x_sol,residuals = asa.solver.solve(b,x,tol=1e-8,maxiter=30,return_residuals=True)
+if True:
+    x_sol,residuals = asa.solver.solve(b,x0=x,maxiter=10,tol=1e-12,return_residuals=True)
+else:
+    residuals = []
+    def add_resid(x):
+        residuals.append(linalg.norm(b - A*x))
+    A.psolve = asa.solver.psolve
+    x_sol = linalg.cg(A,b,x0=x,maxiter=20,tol=1e-100,callback=add_resid)[0]
 residuals = array(residuals)/residuals[0]
 print "residuals ",residuals
+print "mean convergence factor",(residuals[-1]/residuals[0])**(1.0/len(residuals))
+print "last convergence factor",residuals[-1]/residuals[-2]
 
+print
 print asa.solver
 
 print "constant Rayleigh quotient",dot(ones(A.shape[0]),A*ones(A.shape[0]))/float(A.shape[0])

Modified: trunk/scipy/sandbox/multigrid/coarsen.py
===================================================================
--- trunk/scipy/sandbox/multigrid/coarsen.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/coarsen.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -1,24 +1,31 @@
-
 import multigridtools
-import scipy
-import numpy
-    
-from utils import diag_sparse,infinity_norm
+import scipy,numpy,scipy.sparse
+from scipy.sparse import csr_matrix,isspmatrix_csr
 
+from utils import diag_sparse,approximate_spectral_radius
 
+
 def rs_strong_connections(A,theta):
-    if not scipy.sparse.isspmatrix_csr(A): raise TypeError('expected sparse.csr_matrix')
+    """
+    Return a strength of connection matrix using the method of Ruge and Stuben
 
+        An off-diagonal entry A[i,j] is a strong connection iff
+
+                -A[i,j] >= theta * max( -A[i,k] )   where k != i
+    """
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
+
     Sp,Sj,Sx = multigridtools.rs_strong_connections(A.shape[0],theta,A.indptr,A.indices,A.data)
     return scipy.sparse.csr_matrix((Sx,Sj,Sp),A.shape)
 
 
 def rs_interpolation(A,theta=0.25):
-    if not scipy.sparse.isspmatrix_csr(A): raise TypeError('expected sparse.csr_matrix')
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
     
     S = rs_strong_connections(A,theta)
 
-    T = S.T.tocsr()
+    T = S.T.tocsr()  #transpose S for efficient column access
 
     Ip,Ij,Ix = multigridtools.rs_interpolation(A.shape[0],\
                                                A.indptr,A.indices,A.data,\
@@ -29,44 +36,67 @@
 
 
 def sa_strong_connections(A,epsilon):
-    if not scipy.sparse.isspmatrix_csr(A): raise TypeError('expected sparse.csr_matrix')
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
 
     Sp,Sj,Sx = multigridtools.sa_strong_connections(A.shape[0],epsilon,A.indptr,A.indices,A.data)
 
     return scipy.sparse.csr_matrix((Sx,Sj,Sp),A.shape)
 
-def sa_constant_interpolation(A,epsilon):
-    if not scipy.sparse.isspmatrix_csr(A): raise TypeError('expected sparse.csr_matrix')
+def sa_constant_interpolation(A,epsilon,blocks=None):
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
     
-    S = sa_strong_connections(A,epsilon)
+    if blocks is not None:
+        num_dofs   = A.shape[0]
+        num_blocks = blocks.max()
+        
+        if num_dofs != len(blocks):
+            raise ValueError,'improper block specification'
+        
+        # for non-scalar problems, use pre-defined blocks in aggregation
+        # the strength of connection matrix is based on the Frobenius norms of the blocks
+        
+        B  = csr_matrix((ones(num_dofs),blocks,arange(num_dofs + 1)),dims=(num_dofs,num_blocks))
+        Block_Frob = B.T.tocsr() * csr_matrix((A.data**2,A.indices,A.indptr),dims=A.shape) * B #Frobenius norms of block entries of A
 
-    #S.ensure_sorted_indices()
-
-    #tentative (non-smooth) interpolation operator I
-    Pj = multigridtools.sa_get_aggregates(S.shape[0],S.indptr,S.indices)
-    Pp = numpy.arange(len(Pj)+1)
-    Px = numpy.ones(len(Pj))
+        S = sa_strong_connections(Block_Frob,epsilon)
     
+        Pj = multigridtools.sa_get_aggregates(S.shape[0],S.indptr,S.indices)
+        Pj = Pj[blocks] #expand block aggregates into constituent dofs
+        Pp = B.indptr
+        Px = B.data
+    else:
+        S = sa_strong_connections(A,epsilon)
+        
+        Pj = multigridtools.sa_get_aggregates(S.shape[0],S.indptr,S.indices)
+        Pp = numpy.arange(len(Pj)+1)
+        Px = numpy.ones(len(Pj))
+    
     return scipy.sparse.csr_matrix((Px,Pj,Pp))
 
+    
+##    S = sa_strong_connections(A,epsilon)
+##
+##    #tentative (non-smooth) interpolation operator I
+##    Pj = multigridtools.sa_get_aggregates(S.shape[0],S.indptr,S.indices)
+##    Pp = numpy.arange(len(Pj)+1)
+##    Px = numpy.ones(len(Pj))
+##    
+##    return scipy.sparse.csr_matrix((Px,Pj,Pp))
+
 ##def sa_smoother(A,S,omega):
 ##    Bp,Bj,Bx = multigridtools.sa_smoother(A.shape[0],omega,A.indptr,A.indices,A.data,S.indptr,S.indices,S.data)
 ##
 ##    return csr_matrix((Bx,Bj,Bp),dims=A.shape)
     
-def sa_interpolation(A,epsilon,omega=4.0/3.0):
-    if not scipy.sparse.isspmatrix_csr(A): raise TypeError('expected sparse.csr_matrix')
+def sa_interpolation(A,epsilon,omega=4.0/3.0,blocks=None):
+    if not isspmatrix_csr(A): raise TypeError('expected csr_matrix')
    
-    P  = sa_constant_interpolation(A,epsilon)
+    P  = sa_constant_interpolation(A,epsilon=epsilon,blocks=blocks)
 
-##    As = sa_strong_connections(A,epsilon)
-##    S  = sa_smoother(A,S,omega)
-    
-
     D_inv = diag_sparse(1.0/diag_sparse(A))       
     
     D_inv_A  = D_inv * A
-    D_inv_A *= omega/infinity_norm(D_inv_A)
+    D_inv_A *= omega/approximate_spectral_radius(D_inv_A)
 
     I = P - (D_inv_A*P)  #same as I=S*P, (faster?)
            

Modified: trunk/scipy/sandbox/multigrid/multilevel.py
===================================================================
--- trunk/scipy/sandbox/multigrid/multilevel.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/multilevel.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -2,14 +2,13 @@
            'ruge_stuben_solver','smoothed_aggregation_solver',
            'multilevel_solver']
 
-
 from numpy.linalg import norm
 from numpy import zeros,zeros_like,array
 import scipy
 import numpy
 
 from coarsen import sa_interpolation,rs_interpolation
-from relaxation import gauss_seidel,jacobi 
+from relaxation import gauss_seidel,jacobi,sor
 from utils import infinity_norm
 
 
@@ -59,7 +58,7 @@
         
     return multilevel_solver(As,Ps)
 
-def smoothed_aggregation_solver(A,max_levels=10,max_coarse=500,epsilon=0.08):
+def smoothed_aggregation_solver(A,blocks=None,max_levels=10,max_coarse=500,epsilon=0.08,omega=4.0/3.0):
     """
     Create a multilevel solver using Smoothed Aggregation (SA)
 
@@ -73,7 +72,7 @@
     Ps = []
     
     while len(As) < max_levels  and A.shape[0] > max_coarse:
-        P = sa_interpolation(A,epsilon=epsilon*0.5**(len(As)-1))
+        P = sa_interpolation(A,blocks=blocks,epsilon=epsilon*0.5**(len(As)-1),omega=omega)
         #P = sa_interpolation(A,epsilon=0.0)
 
         A = (P.T.tocsr() * A) * P     #galerkin operator
@@ -172,28 +171,42 @@
 
     def presmoother(self,A,x,b):
         gauss_seidel(A,x,b,iterations=1,sweep="forward")
+        gauss_seidel(A,x,b,iterations=1,sweep="backward")
+        #sor(A,x,b,omega=1.85,iterations=1,sweep="backward")
+
         #x += 4.0/(3.0*infinity_norm(A))*(b - A*x)
     
     def postsmoother(self,A,x,b):
+        #sor(A,x,b,omega=1.85,iterations=1,sweep="forward")
         gauss_seidel(A,x,b,iterations=1,sweep="forward")
-        #gauss_seidel(A,x,b,iterations=1,sweep="backward")
+        gauss_seidel(A,x,b,iterations=1,sweep="backward")
         #x += 4.0/(3.0*infinity_norm(A))*(b - A*x)
 
 
 
 if __name__ == '__main__':
     from scipy import *
-    A = poisson_problem2D(200)
+    #A = poisson_problem2D(100)
     #A = io.mmread("rocker_arm_surface.mtx").tocsr()
+    #A = io.mmread("9pt-100x100.mtx").tocsr()
+    A = io.mmread("/home/nathan/Desktop/9pt/9pt-100x100.mtx").tocsr()
+    #A = io.mmread("/home/nathan/Desktop/BasisShift_W_EnergyMin_Luke/9pt-5x5.mtx").tocsr()
 
-    ml = smoothed_aggregation_solver(A)
+    ml = smoothed_aggregation_solver(A,max_coarse=100,max_levels=3)
     #ml = ruge_stuben_solver(A)
 
     x = rand(A.shape[0])
     b = zeros_like(x)
     #b = rand(A.shape[0])
     
-    x_sol,residuals = ml.solve(b,x0=x,maxiter=40,tol=1e-10,return_residuals=True)
+    if True:
+        x_sol,residuals = ml.solve(b,x0=x,maxiter=30,tol=1e-12,return_residuals=True)
+    else:
+        residuals = []
+        def add_resid(x):
+            residuals.append(linalg.norm(b - A*x))
+        A.psolve = ml.psolve
+        x_sol = linalg.cg(A,b,x0=x,maxiter=12,tol=1e-100,callback=add_resid)[0]
 
     residuals = array(residuals)/residuals[0]
 

Modified: trunk/scipy/sandbox/multigrid/relaxation.py
===================================================================
--- trunk/scipy/sandbox/multigrid/relaxation.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/relaxation.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -1,6 +1,22 @@
 import multigridtools
-import numpy
+from numpy import empty_like
 
+
+def sor(A,x,b,omega,iterations=1,sweep='forward'):
+    """
+    Perform SOR iteration on the linear system Ax=b
+    """
+    x_old = empty_like(x)
+        
+    for i in range(iterations):
+        x_old[:] = x
+        gauss_seidel(A,x,b,iterations=1,sweep=sweep)
+        
+        x     *= omega
+        x_old *= (1-omega)
+        x     += x_old
+        
+        
 def gauss_seidel(A,x,b,iterations=1,sweep='forward'):
     """
     Perform Gauss-Seidel iteration on the linear system Ax=b
@@ -11,7 +27,8 @@
          b - rank 1 ndarray of length N
      Optional:
          iterations - number of iterations to perform (default: 1)
-         sweep      - slice of unknowns to relax (default: all in forward direction)
+         sweep      - direction of sweep:
+                        'forward' (default), 'backward', or 'symmetric'
     """ 
     if A.shape[0] != A.shape[1]:
         raise ValueError,'expected symmetric matrix'
@@ -21,16 +38,25 @@
 
     if sweep == 'forward':
         row_start,row_stop,row_step = 0,len(x),1
+        for iter in xrange(iterations):
+            multigridtools.gauss_seidel(A.shape[0],
+                                        A.indptr, A.indices, A.data,
+                                        x, b,
+                                        row_start, row_stop, row_step)
     elif sweep == 'backward':
         row_start,row_stop,row_step = len(x)-1,-1,-1
+        for iter in xrange(iterations):
+            multigridtools.gauss_seidel(A.shape[0],
+                                        A.indptr, A.indices, A.data,
+                                        x, b,
+                                        row_start, row_stop, row_step)
+    elif sweep == 'symmetric':
+        for iter in xrange(iterations):
+            gauss_seidel(A,x,b,iterations=1,sweep='forward')
+            gauss_seidel(A,x,b,iterations=1,sweep='backward')
     else:
-       raise ValueError,'valid sweep directions are \'forward\' and \'backward\''
+       raise ValueError,'valid sweep directions are \'forward\', \'backward\', and \'symmetric\''
 
-    for iter in xrange(iterations):
-        multigridtools.gauss_seidel(A.shape[0],
-                                    A.indptr, A.indices, A.data,
-                                    x, b,
-                                    row_start, row_stop, row_step)
 
 def jacobi(A,x,b,iterations=1,omega=1.0):
     """
@@ -54,7 +80,7 @@
     if (row_stop - row_start) * row_step <= 0:  #no work to do
         return
 
-    temp = numpy.empty_like(x)
+    temp = empty_like(x)
     
     for iter in xrange(iterations):
         multigridtools.jacobi(A.shape[0],
@@ -88,6 +114,8 @@
     Note: Horner's Rule is applied to avoid computing A^k directly.
     """
 
+    #TODO skip first matvec if x is all zero
+    
     residual = (b - A*x)
     h = coeffs[0]*residual
     

Added: trunk/scipy/sandbox/multigrid/tests/examples.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/examples.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/tests/examples.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -0,0 +1,27 @@
+import gzip
+from scipy.io import mmread
+
+
+def read_matrix(filename):
+    filename = "sample_data/" + filename
+    if filename.endswith(".gz"):
+        fid = gzip.open(filename)
+    else:
+        fid = open(filename)
+
+    return mmread(fid).tocsr()
+
+
+mesh2d_laplacians = ['torus.mtx.gz','rocker_arm_surface.mtx.gz',
+                     '336_triangle_A.mtx.gz','336_triangle_B.mtx.gz']
+
+
+all_examples = mesh2d_laplacians
+
+if __name__ == '__main__':
+    print "All Available Examples Are Listed Below\n"
+    for filename in all_examples:
+        print filename
+        print repr(read_matrix(filename))
+        print "\n"
+

Added: trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_A.mtx.gz
===================================================================
(Binary files differ)


Property changes on: trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_A.mtx.gz
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_B.mtx.gz
===================================================================
(Binary files differ)


Property changes on: trunk/scipy/sandbox/multigrid/tests/sample_data/336_triangle_B.mtx.gz
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: trunk/scipy/sandbox/multigrid/tests/sample_data/rocker_arm_surface.mtx.gz
===================================================================
(Binary files differ)


Property changes on: trunk/scipy/sandbox/multigrid/tests/sample_data/rocker_arm_surface.mtx.gz
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: trunk/scipy/sandbox/multigrid/tests/sample_data/torus.mtx.gz
===================================================================
(Binary files differ)


Property changes on: trunk/scipy/sandbox/multigrid/tests/sample_data/torus.mtx.gz
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: trunk/scipy/sandbox/multigrid/tests/test_adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_adaptive.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/tests/test_adaptive.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -0,0 +1,53 @@
+from numpy.testing import *
+
+from scipy.sparse import csr_matrix
+from scipy import arange,ones,zeros,array,eye
+
+set_package_path()
+from scipy.sandbox.multigrid.adaptive import fit_candidates
+restore_path()
+
+
+class test_fit_candidates(NumpyTestCase):
+    def setUp(self):
+        self.cases = []
+
+        #one candidate
+        self.cases.append((csr_matrix((ones(5),array([0,0,0,1,1]),arange(6)),dims=(5,2)),[ones(5)]))
+        self.cases.append((csr_matrix((ones(5),array([1,1,0,0,0]),arange(6)),dims=(5,2)),[ones(5)]))
+        self.cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),dims=(9,3)),[ones(9)]))
+        self.cases.append((csr_matrix((ones(9),array([2,1,0,0,1,2,1,0,2]),arange(10)),dims=(9,3)),[arange(9)]))
+
+        #two candidates
+        self.cases.append((csr_matrix((ones(4),array([0,0,1,1]),arange(5)),dims=(4,2)),[ones(4),arange(4)]))
+        self.cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),dims=(9,3)),[ones(9),arange(9)]))
+        self.cases.append((csr_matrix((ones(9),array([0,0,1,1,2,2,3,3,3]),arange(10)),dims=(9,4)),[ones(9),arange(9)]))
+
+    def check_all(self):
+        for AggOp,fine_candidates in self.cases:
+            Q,coarse_candidates = fit_candidates(AggOp,fine_candidates)
+
+            assert_equal(len(coarse_candidates),len(fine_candidates))
+            assert_almost_equal((Q.T*Q).todense(),eye(Q.shape[1]))
+
+            for fine,coarse in zip(fine_candidates,coarse_candidates):
+                assert_almost_equal(fine,Q*coarse)
+
+            #aggregate one more level (to a single aggregate)
+            K = len(coarse_candidates)
+            N = K*AggOp.shape[1]
+            AggOp = csr_matrix((ones(N),zeros(N),arange(N + 1)),dims=(N,1))
+            fine_candidates = coarse_candidates
+            
+            Q,coarse_candidates = fit_candidates(AggOp,fine_candidates)
+
+            assert_equal(len(coarse_candidates),len(fine_candidates))
+            assert_almost_equal((Q.T*Q).todense(),eye(Q.shape[1]))
+
+            for fine,coarse in zip(fine_candidates,coarse_candidates):
+                assert_almost_equal(fine,Q*coarse)
+
+if __name__ == '__main__':
+    NumpyTest().run()
+      
+

Modified: trunk/scipy/sandbox/multigrid/tests/test_coarsen.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_coarsen.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/tests/test_coarsen.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -6,9 +6,9 @@
 import numpy
 
 set_package_path()
-import scipy.multigrid
-from scipy.multigrid.coarsen import sa_strong_connections,sa_constant_interpolation
-from scipy.multigrid.multilevel import poisson_problem1D,poisson_problem2D
+import scipy.sandbox.multigrid
+from scipy.sandbox.multigrid.coarsen import sa_strong_connections,sa_constant_interpolation
+from scipy.sandbox.multigrid.multilevel import poisson_problem1D,poisson_problem2D
 restore_path()
 
 
@@ -39,7 +39,6 @@
     aggregates = empty(n,dtype=A.indices.dtype)
     aggregates[:] = -1
 
-
     # Pass #1
     for i,row in enumerate(S):
         Ni = set(row) | set([i])
@@ -120,17 +119,10 @@
                 S_expected = reference_sa_strong_connections(A,epsilon)
                 assert_array_equal(S_result.todense(),S_expected.todense())
 
-##    def check_sample_data(self):
-##        for filename in all_matrices:
-##            A = open_matrix(filename)
 
-
-S_result = None
-S_expected = None
 class test_sa_constant_interpolation(NumpyTestCase):
     def check_random(self):
         numpy.random.seed(0)
-
         for N in [2,3,5,10]:
             A = csr_matrix(rand(N,N))
             for epsilon in [0.0,0.1,0.5,0.8,1.0]:
@@ -154,7 +146,16 @@
                 S_expected = reference_sa_constant_interpolation(A,epsilon)
                 assert_array_equal(S_result.todense(),S_expected.todense())
 
+    def check_sample_data(self):
+        from examples import all_examples,read_matrix
 
+        for filename in all_examples:
+            A = read_matrix(filename)            
+            for epsilon in [0.0,0.08,0.51,1.0]:
+                S_result   = sa_constant_interpolation(A,epsilon)
+                S_expected = reference_sa_constant_interpolation(A,epsilon)
+                assert_array_equal((S_result - S_expected).nnz,0)
+
 if __name__ == '__main__':
     NumpyTest().run()
 

Modified: trunk/scipy/sandbox/multigrid/tests/test_relaxation.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_relaxation.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/tests/test_relaxation.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -7,8 +7,8 @@
 
 
 set_package_path()
-import scipy.multigrid
-from scipy.multigrid.relaxation import polynomial_smoother,gauss_seidel,jacobi
+import scipy.sandbox.multigrid
+from scipy.sandbox.multigrid.relaxation import polynomial_smoother,gauss_seidel,jacobi
 restore_path()
 
 

Modified: trunk/scipy/sandbox/multigrid/tests/test_utils.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_utils.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/tests/test_utils.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -7,7 +7,7 @@
 
 
 set_package_path()
-from scipy.multigrid.utils import infinity_norm,diag_sparse
+from scipy.sandbox.multigrid.utils import infinity_norm,diag_sparse
 restore_path()
 
 

Modified: trunk/scipy/sandbox/multigrid/utils.py
===================================================================
--- trunk/scipy/sandbox/multigrid/utils.py	2007-09-27 18:37:13 UTC (rev 3377)
+++ trunk/scipy/sandbox/multigrid/utils.py	2007-09-28 02:43:06 UTC (rev 3378)
@@ -1,11 +1,20 @@
-__all__ =['inf_norm','diag_sparse']
+__all__ =['approximate_spectral_radius','infinity_norm','diag_sparse']
 
-import numpy,scipy,scipy.sparse,scipy.weave
+import numpy,scipy,scipy.sparse
 from numpy import ravel,arange
 from scipy.sparse import isspmatrix,isspmatrix_csr,isspmatrix_csc, \
                         csr_matrix,csc_matrix,extract_diagonal
 
 
+def approximate_spectral_radius(A,tol=0.1,maxiter=20):
+    """
+    Approximate the spectral radius of a symmetric matrix using ARPACK
+    """
+    from scipy.sandbox.arpack import eigen_symmetric
+    return eigen_symmetric(A, k=1, ncv=10, which='LM', maxiter=maxiter, tol=tol, return_eigenvectors=False)[0]
+
+
+
 def infinity_norm(A):
     """
     Infinity norm of a sparse matrix (maximum absolute row sum).  This serves 



More information about the Scipy-svn mailing list