[Scipy-svn] r3831 - in trunk/scipy/sandbox/multigrid: . gallery gallery/tests tests

scipy-svn@scip...
Mon Jan 14 13:54:52 CST 2008


Author: wnbell
Date: 2008-01-14 13:54:37 -0600 (Mon, 14 Jan 2008)
New Revision: 3831

Removed:
   trunk/scipy/sandbox/multigrid/dec_example.py
   trunk/scipy/sandbox/multigrid/simple_example.py
Modified:
   trunk/scipy/sandbox/multigrid/adaptive.py
   trunk/scipy/sandbox/multigrid/gallery/poisson.py
   trunk/scipy/sandbox/multigrid/gallery/tests/test_poisson.py
   trunk/scipy/sandbox/multigrid/sa.py
   trunk/scipy/sandbox/multigrid/tests/test_adaptive.py
   trunk/scipy/sandbox/multigrid/tests/test_sa.py
   trunk/scipy/sandbox/multigrid/tests/test_utils.py
   trunk/scipy/sandbox/multigrid/utils.py
Log:
updated poisson() gallery function: it now takes a grid shape tuple (e.g. (100,100)) instead of a grid size N plus a stencil name, and builds the operator for a regular grid in any dimension; also removed dec_example.py, simple_example.py, and the unused expand_into_blocks() utility
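
The interface change is visible in the test updates below; a minimal before/after sketch of the call, based on those tests (the old form no longer exists after this revision):

    from scipy.sandbox.multigrid.gallery.poisson import poisson

    # old interface (removed in this revision):
    #   A = poisson(100, stencil='5pt', format='csr')   # 2D, 100x100 grid

    # new interface: pass the grid shape as a tuple
    A = poisson((100, 100), format='csr')   # 2D, 5-point stencil
    B = poisson((100,),     format='csr')   # 1D, 3-point stencil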


Modified: trunk/scipy/sandbox/multigrid/adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/adaptive.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/adaptive.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -7,7 +7,7 @@
 from relaxation import gauss_seidel
 from multilevel import multilevel_solver
 from sa import sa_constant_interpolation,sa_fit_candidates
-from utils import approximate_spectral_radius,hstack_csr,vstack_csr,expand_into_blocks,diag_sparse
+from utils import approximate_spectral_radius,hstack_csr,vstack_csr,diag_sparse
 
 
 

Deleted: trunk/scipy/sandbox/multigrid/dec_example.py
===================================================================
--- trunk/scipy/sandbox/multigrid/dec_example.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/dec_example.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -1,241 +0,0 @@
-
-from scipy import *
-from scipy.sparse import *
-from pydec import *
-from pydec.multigrid.discrete_laplacian import boundary_hierarchy, discrete_laplacian_solver, hodge_solver
-
-from scipy.sandbox.multigrid import smoothed_aggregation_solver,multigridtools,multilevel_solver
-from scipy.sandbox.multigrid.adaptive import adaptive_sa_solver
-from scipy.sandbox.multigrid.sa import sa_smoothed_prolongator
-from scipy.sandbox.multigrid.utils import expand_into_blocks
-
-
-## Load mesh from file
-mesh_path = '../../../../../hirani_group/wnbell/meshes/'
-#mesh = read_mesh(mesh_path + 'rocket/rocket.xml')
-#mesh = read_mesh(mesh_path + 'genus3/genus3_168k.xml')
-#mesh = read_mesh(mesh_path + 'genus3/genus3_455k.xml')
-#mesh = read_mesh(mesh_path + '/torus/torus.xml')
-mesh = read_mesh(mesh_path + '/sq14tri/sq14tri.xml')
-for i in range(5):
-    mesh['vertices'],mesh['elements'] = loop_subdivision(mesh['vertices'],mesh['elements'])
-cmplx = simplicial_complex(mesh['vertices'],mesh['elements'])
-
-## Construct mesh manually
-#bitmap = ones((60,60),dtype='bool')
-#bitmap[1::10,1::10] = False
-#bitmap[100:150,100:400] = False
-#cmplx = regular_cube_complex(regular_cube_mesh(bitmap))
-
-def curl_curl_prolongator(D_nodal,vertices):
-    if not isspmatrix_csr(D_nodal):
-        raise TypeError('expected csr_matrix')
-
-    A = D_nodal.T.tocsr() * D_nodal
-    aggs = multigridtools.sa_get_aggregates(A.shape[0],A.indptr,A.indices)
-
-    num_edges = D_nodal.shape[0]
-    num_basis = vertices.shape[1]
-    num_aggs  = aggs.max() + 1
-
-    # replace with CSR + eliminate duplicates
-    #indptr  = (2*num_basis) * arange(num_edges+1)
-    ## same same
-    #csr_matrix((data,indices,indptr),shape=(num_edges,num_aggs))
-
-    row  = arange(num_edges).repeat(2*num_basis)
-    col  = (num_basis*aggs[D_nodal.indices]).repeat(num_basis)
-    col = col.reshape(-1,num_basis) + arange(num_basis)
-    col = col.reshape(-1)
-    data = tile(0.5 * (D_nodal*vertices),(1,2)).reshape(-1)
-
-    return coo_matrix((data,(row,col)),shape=(num_edges,num_basis*num_aggs)).tocsr()
-
-
-
-
-
-def whitney_innerproduct_cache(cmplx,k):
-    h = hash(cmplx.vertices.tostring()) ^ hash(cmplx.simplices.tostring()) ^ hash(k)
-
-    filename = "/home/nathan/.pydec/cache/whitney_" + str(h) + ".mtx"
-
-    try:
-        import pydec
-        M = pydec.io.read_array(filename)
-    except:
-        import pydec
-        M = whitney_innerproduct(cmplx,k)
-        pydec.io.write_array(filename,M)
-
-    return M
-
-
-
-def cube_innerproduct_cache(cmplx,k):
-    h = hash(cmplx.mesh.bitmap.tostring()) ^ hash(cmplx.mesh.bitmap.shape) ^ hash(k)
-
-    filename = "/home/nathan/.pydec/cache/cube_" + str(h) + ".mtx"
-
-    try:
-        import pydec
-        M = pydec.io.read_array(filename)
-    except:
-        import pydec
-        M = regular_cube_innerproduct(cmplx,k)
-        pydec.io.write_array(filename,M)
-
-    return M
-
-
-
-#solve d_k d_k problem for all reasonable k
-#from pylab import semilogy,show,xlabel,ylabel,legend,ylim,xlim
-#from matplotlib.font_manager import fontManager, FontProperties
-
-cochain_complex = cmplx.cochain_complex()
-
-for i in [1]: #range(len(cochain_complex)-1):
-    print "computing mass matrix"
-
-    if isinstance(cmplx,simplicial_complex):
-        Mi = whitney_innerproduct_cache(cmplx,i+1)
-    else:
-        Mi = regular_cube_innerproduct(cmplx,i+1)
-
-
-    dimension = mesh['vertices'].shape[1]
-
-    if True:
-
-        d0 = cmplx[0].d
-        d1 = cmplx[1].d
-
-        #A = (d1.T.tocsr() * d1 + d0 * d0.T.tocsr()).astype('d')
-        A = (d1.T.tocsr()  * d1).astype('d')
-
-        P = curl_curl_prolongator(d0,mesh['vertices'])
-
-        num_blocks = P.shape[1]/dimension
-        blocks = arange(num_blocks).repeat(dimension)
-
-        P = sa_smoothed_prolongator(A,P,epsilon=0,omega=4.0/3.0)
-
-        PAP = P.T.tocsr() * A * P
-
-        candidates = None
-        candidates = zeros((num_blocks,dimension,dimension))
-        for n in range(dimension):
-            candidates[:,n,n] = 1.0
-        candidates = candidates.reshape(-1,dimension)
-
-        ml = smoothed_aggregation_solver(PAP,epsilon=0.0,candidates=candidates,blocks=blocks)
-        #A = PAP
-        ml = multilevel_solver([A] + ml.As, [P] + ml.Ps)
-    else:
-
-        bh = boundary_hierarchy(cochain_complex)
-        while len(bh) < 3:
-            bh.coarsen()
-        print repr(bh)
-
-        N = len(cochain_complex) - 1
-
-        B =  bh[0][N - i].B
-
-        A = (B.T.tocsr() * B).astype('d')
-        #A = B.T.tocsr() * Mi * B
-
-        constant_prolongators = [lvl[N - i].I for lvl in bh[:-1]]
-
-        method = 'aSA'
-
-        if method == 'RS':
-            As = [A]
-            Ps = []
-            for T in constant_prolongators:
-                Ps.append( sa_smoothed_prolongator(As[-1],T,epsilon=0.0,omega=4.0/3.0) )
-                As.append(Ps[-1].T.tocsr() * As[-1] * Ps[-1])
-            ml = multilevel_solver(As,Ps)
-
-        else:
-            if method == 'BSA':
-                if i == 0:
-                    candidates = None
-                else:
-                    candidates = cmplx[0].d * mesh['vertices']
-                    K = candidates.shape[1]
-
-                    constant_prolongators = [constant_prolongators[0]] + \
-                            [expand_into_blocks(T,K,1).tocsr() for T in constant_prolongators[1:] ]
-
-                    ml = smoothed_aggregation_solver(A,candidates,aggregation=constant_prolongators)
-            elif method == 'aSA':
-                asa = adaptive_sa_solver(A,aggregation=constant_prolongators,max_candidates=dimension,epsilon=0.0)
-                ml = asa.solver
-            else:
-                raise ValuerError,'unknown method'
-
-        #ml = smoothed_aggregation_solver(A,candidates)
-
-    #x = d0 * mesh['vertices'][:,0]
-    x = rand(A.shape[0])
-    b = zeros_like(x)
-    #b = A*rand(A.shape[0])
-
-    if True:
-        x_sol,residuals = ml.solve(b,x0=x,maxiter=50,tol=1e-12,return_residuals=True)
-    else:
-        residuals = []
-        def add_resid(x):
-            residuals.append(linalg.norm(b - A*x))
-        A.psolve = ml.psolve
-
-        from pydec import cg
-        x_sol = cg(A,b,x0=x,maxiter=40,tol=1e-8,callback=add_resid)[0]
-
-
-    residuals = array(residuals)/residuals[0]
-    avg_convergence_ratio = residuals[-1]**(1.0/len(residuals))
-    print "average convergence ratio",avg_convergence_ratio
-    print "last convergence ratio",residuals[-1]/residuals[-2]
-
-    print residuals
-
-
-
-
-##candidates = None
-##blocks = None
-##
-##
-##
-##A = io.mmread('tests/sample_data/elas30_A.mtx').tocsr()
-##candidates = io.mmread('tests/sample_data/elas30_nullspace.mtx')
-##candidates = [ array(candidates[:,x]) for x in range(candidates.shape[1]) ]
-##blocks = arange(A.shape[0]/2).repeat(2)
-##
-##ml = smoothed_aggregation_solver(A,candidates,blocks=blocks,epsilon=0,max_coarse=10,max_levels=10)
-###ml = ruge_stuben_solver(A)
-##
-##x = rand(A.shape[0])
-###b = zeros_like(x)
-##b = A*rand(A.shape[0])
-##
-##if True:
-##    x_sol,residuals = ml.solve(b,x0=x,maxiter=30,tol=1e-12,return_residuals=True)
-##else:
-##    residuals = []
-##    def add_resid(x):
-##        residuals.append(linalg.norm(b - A*x))
-##    A.psolve = ml.psolve
-##    x_sol = linalg.cg(A,b,x0=x,maxiter=25,tol=1e-12,callback=add_resid)[0]
-##
-##
-##residuals = array(residuals)/residuals[0]
-##avg_convergence_ratio = residuals[-1]**(1.0/len(residuals))
-##print "average convergence ratio",avg_convergence_ratio
-##print "last convergence ratio",residuals[-1]/residuals[-2]
-##
-##print residuals
-##

Modified: trunk/scipy/sandbox/multigrid/gallery/poisson.py
===================================================================
--- trunk/scipy/sandbox/multigrid/gallery/poisson.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/gallery/poisson.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -1,55 +1,84 @@
 __all__ = ['poisson']
 
-from scipy import array, empty 
-from scipy.sparse import dia_matrix
+from scipy import arange, empty, intc, ravel, prod
+from scipy.sparse import coo_matrix
 
-def poisson(N, stencil='5pt', dtype=float, format=None):
-    """Finite Difference approximations to the Poisson problem
 
-    TheDirichlet boundary conditions are
+def poisson( grid, spacing=None, dtype=float, format=None):
+    """Finite Difference approximation to the Poisson problem on a 
+    regular n-dimensional grid with Dirichlet boundary conditions.
    
     
     Parameters
     ==========
-        - N : integer 
-            - grid size
-        - stencil : one of the following strings
-            - '3pt' : 3-point Finite Difference stencil in 1 dimension
-            - '5pt' : 5-point Finite Difference stencil in 2 dimensions
-            - '8pt' : NotImplemented
-            - '27pt' : NotImplemented
+        - grid : tuple
+            - grid dimensions e.g. (100,100)
 
+
+    Examples
+    ========
+
+    >>> # 4 nodes in one dimension
+    >>> poisson( (4,) ).todense()
+    matrix([[ 2., -1.,  0.,  0.],
+            [-1.,  2., -1.,  0.],
+            [ 0., -1.,  2., -1.],
+            [ 0.,  0., -1.,  2.]])
+
+    >>> # rectangular two dimensional grid 
+    >>> poisson( (2,3) ).todense()
+    matrix([[ 4., -1.,  0., -1.,  0.,  0.],
+            [-1.,  4., -1.,  0., -1.,  0.],
+            [ 0., -1.,  4.,  0.,  0., -1.],
+            [-1.,  0.,  0.,  4., -1.,  0.],
+            [ 0., -1.,  0., -1.,  4., -1.],
+            [ 0.,  0., -1.,  0., -1.,  4.]])
+
     """
-    if N < 1:
-        raise ValueError,'invalid grid size %s' % N
+    grid = tuple(grid)
 
-    if stencil == '3pt':
-        if N == 1:
-            diags   = array( [[2]], dtype=dtype)
-            return dia_matrix((diags,[0]), shape=(1,1)).asformat(format)
-        else:
-            data = empty((3,N),dtype=dtype)
-            data[0,:] = 2 #main diagonal
-            data[1,:] = -1 
-            data[2,:] = -1
-    
-            return dia_matrix((data,[0,-1,1]),shape=(N,N)).asformat(format)
-    elif stencil == '5pt':
-        if N == 1:
-            data = array( [[4]], dtype=dtype)
-            return dia_matrix((diags,[0]), shape=(1,1)).asformat(format)
-        else:
-            diags = array([0,-N,N,-1,1])
+    D = len(grid) # grid dimension
 
-            data = empty((5,N**2),dtype=dtype)
+    if D < 1 or min(grid) < 1:
+        raise ValueError,'invalid grid shape: %s' % str(grid)
 
-            data[0]  =  4 #main diagonal
-            data[1::,:] = -1
-            data[3,N-1::N] = 0 
-            data[4,N::N]   = 0    
-            
-            return dia_matrix((data,diags),shape=(N**2,N**2)).asformat(format)
-    else:
-        raise NotImplementedError,'unsupported stencil=%s' % stencil
+    nodes = arange(prod(grid)).reshape(*grid)
 
+    nnz = nodes.size 
+    for i in range(D):
+        nnz += 2 * prod( grid[:i] + grid[i+1:] ) * (grid[i] - 1)
+    
+    row  = empty(nnz, dtype=intc)
+    col  = empty(nnz, dtype=intc)
+    data = empty(nnz, dtype=dtype)
+    
+    row[:nodes.size]  = ravel(nodes)
+    col[:nodes.size]  = ravel(nodes)
+    data[:nodes.size] = 2*D
+    data[nodes.size:] = -1
+    
+    ptr = nodes.size
+    
+    for i in range(D):
+        s0 = [slice(None)] * i + [slice(0,-1)  ] + [slice(None)] * (D - i - 1)
+        s1 = [slice(None)] * i + [slice(1,None)] + [slice(None)] * (D - i - 1)
+    
+        n0 = nodes[s0]
+        n1 = nodes[s1]
+    
+        row0 = row[ ptr:ptr + n0.size].reshape(n0.shape)
+        col0 = col[ ptr:ptr + n0.size].reshape(n0.shape)
+        ptr += n0.size
+    
+        row1 = row[ ptr:ptr + n0.size].reshape(n0.shape)
+        col1 = col[ ptr:ptr + n0.size].reshape(n0.shape)
+        ptr += n0.size
+    
+        row0[:] = n0
+        col0[:] = n1
+    
+        row1[:] = n1
+        col1[:] = n0
+    
+    return coo_matrix((data,(row,col)),shape=(nodes.size,nodes.size)).asformat(format)
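
Since the assembly above loops over all D grid dimensions, the same routine should also produce higher-dimensional operators (e.g. the 7-point stencil in 3D, where the diagonal entries are 2*D = 6); a small sketch in the docstring's doctest style, not part of this revision:

    >>> A = poisson((4, 4, 4), format='csr')   # 7-point Laplacian on a 4x4x4 grid
    >>> A.shape
    (64, 64)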
 

Modified: trunk/scipy/sandbox/multigrid/gallery/tests/test_poisson.py
===================================================================
--- trunk/scipy/sandbox/multigrid/gallery/tests/test_poisson.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/gallery/tests/test_poisson.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -5,22 +5,21 @@
 from scipy.sandbox.multigrid.gallery.poisson import *
 
 class TestPoisson(TestCase):
-    def test_3pt(self):
+    def test_poisson(self):
         cases = []
 
-        cases.append( (1,matrix([[2]])) )
-        cases.append( (2,matrix([[ 2,-1],
-                                 [-1, 2]])) )
-        cases.append( (4,matrix([[ 2,-1, 0, 0],
-                                 [-1, 2,-1, 0],
-                                 [ 0,-1, 2,-1],
-                                 [ 0, 0,-1, 2]])) )
+        cases.append( ((1,),matrix([[2]])) )
+        cases.append( ((2,),matrix([[ 2,-1],
+                                    [-1, 2]])) )
+        cases.append( ((4,),matrix([[ 2,-1, 0, 0],
+                                    [-1, 2,-1, 0],
+                                    [ 0,-1, 2,-1],
+                                    [ 0, 0,-1, 2]])) )
+        
+        for grid,expected in cases:
+            assert_equal( poisson(grid).todense(), expected )
 
 
-        for N,expected in cases:
-            assert_equal( poisson(N,stencil='3pt').todense(), expected )
 
-
-
 if __name__ == '__main__':
     unittest.main()
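
The committed cases only exercise 1D grids; a 2D case matching the docstring example in poisson.py could be appended in the same style (shown here as a sketch, not part of this revision):

        cases.append( ((2,3),matrix([[ 4,-1, 0,-1, 0, 0],
                                     [-1, 4,-1, 0,-1, 0],
                                     [ 0,-1, 4, 0, 0,-1],
                                     [-1, 0, 0, 4,-1, 0],
                                     [ 0,-1, 0,-1, 4,-1],
                                     [ 0, 0,-1, 0,-1, 4]])) )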

Modified: trunk/scipy/sandbox/multigrid/sa.py
===================================================================
--- trunk/scipy/sandbox/multigrid/sa.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/sa.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -5,7 +5,7 @@
 from scipy.sparse import csr_matrix, isspmatrix_csr, bsr_matrix, isspmatrix_bsr
 
 from utils import diag_sparse, approximate_spectral_radius, \
-                  symmetric_rescaling, expand_into_blocks, scale_columns
+                  symmetric_rescaling, scale_columns
 import multigridtools
 
 __all__ = ['sa_filtered_matrix','sa_strong_connections','sa_constant_interpolation',
@@ -105,10 +105,6 @@
 
     N_fine,N_coarse = AggOp.shape
 
-    #if blocksize > 1:
-    #    #see if fine space has been expanded (all levels except for first)
-    #    AggOp = expand_into_blocks(AggOp,blocksize,1).tocsr()
-
     R = zeros((N_coarse,K,K),dtype=candidates.dtype) #storage for coarse candidates
 
     candidate_matrices = []
@@ -124,7 +120,6 @@
 
         #orthogonalize X against previous
         for j,A in enumerate(candidate_matrices):
-            #import pdb; pdb.set_trace()
             D_AtX = bsr_matrix((A.data*X.data,X.indices,X.indptr),shape=X.shape).sum(axis=0).A.flatten() #same as diagonal of A.T * X
             R[:,j,i] = D_AtX
             X.data -= scale_columns(A,D_AtX).data
@@ -141,7 +136,6 @@
 
         candidate_matrices.append(X)
 
-    # expand AggOp blocks horizontally
     Q_indptr  = AggOp.indptr
     Q_indices = AggOp.indices
     Q_data = empty((AggOp.nnz,blocksize,K)) #if AggOp includes all nodes, then this is (N_fine * K)

Deleted: trunk/scipy/sandbox/multigrid/simple_example.py
===================================================================
--- trunk/scipy/sandbox/multigrid/simple_example.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/simple_example.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -1,28 +0,0 @@
-from scipy import *
-from scipy.sandbox.multigrid.sa import *
-from scipy.sandbox.multigrid import *
-from scipy.sandbox.multigrid.utils import *
-from time import clock
-
-mats = io.loadmat('/home/nathan/Work/ogroup/matrices/elasticity/simple_grid_2d/elasticity_500x500.mat')
-A = mats['A'].tobsr(blocksize=(2,2))
-B = mats['B']
-
-#A = poisson_problem2D(50)
-#B = None
-
-start = clock()
-sa = smoothed_aggregation_solver(A,B=B)
-print "constructed solver in %s seconds" % (clock() - start)
-
-x0 = rand(A.shape[0])
-b = zeros_like(x0)
-start = clock()
-x,residuals = sa.solve(b,x0=x0,return_residuals=True)
-print "solved in %s seconds" % (clock() - start)
-    
-residuals = array(residuals)/residuals[0]
-avg_convergence_ratio = residuals[-1]**(1.0/len(residuals))
-print "average convergence ratio",avg_convergence_ratio
-print "last convergence ratio",residuals[-1]/residuals[-2]
-print 'residuals',residuals

Modified: trunk/scipy/sandbox/multigrid/tests/test_adaptive.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_adaptive.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/tests/test_adaptive.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -8,62 +8,59 @@
 from scipy.sandbox.multigrid.adaptive import augment_candidates
 
 
-#import pdb; pdb.set_trace()
 
 class TestAdaptiveSA(TestCase):
     def setUp(self):
         pass
 
-class TestAugmentCandidates(TestCase):
-    def setUp(self):
-        self.cases = []
+#class TestAugmentCandidates(TestCase):
+#    def setUp(self):
+#        self.cases = []
+#
+#        #two candidates
+#
+#        ##block candidates
+#        ##self.cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),shape=(9,3)), vstack((array([1]*9 + [0]*9),arange(2*9))).T ))
+#
+#    def test_first_level(self):
+#        cases = []
+#
+#        ## tests where AggOp includes all DOFs
+#        cases.append((csr_matrix((ones(4),array([0,0,1,1]),arange(5)),shape=(4,2)), vstack((ones(4),arange(4))).T ))
+#        cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),shape=(9,3)), vstack((ones(9),arange(9))).T ))
+#        cases.append((csr_matrix((ones(9),array([0,0,1,1,2,2,3,3,3]),arange(10)),shape=(9,4)), vstack((ones(9),arange(9))).T ))
+#
+#        ## tests where AggOp excludes some DOFs
+#        cases.append((csr_matrix((ones(4),array([0,0,1,1]),array([0,1,2,2,3,4])),shape=(5,2)), vstack((ones(5),arange(5))).T ))
+#
+#        # overdetermined blocks
+#        cases.append((csr_matrix((ones(4),array([0,0,1,1]),array([0,1,2,2,3,4])),shape=(5,2)), vstack((ones(5),arange(5),arange(5)**2)).T  ))
+#        cases.append((csr_matrix((ones(6),array([1,3,0,2,1,0]),array([0,0,1,2,2,3,4,5,5,6])),shape=(9,4)), vstack((ones(9),arange(9),arange(9)**2)).T ))
+#        cases.append((csr_matrix((ones(6),array([1,3,0,2,1,0]),array([0,0,1,2,2,3,4,5,5,6])),shape=(9,4)), vstack((ones(9),arange(9))).T ))
+#
+#        def mask_candidate(AggOp,candidates):
+#            #mask out all DOFs that are not included in the aggregation
+#            candidates[diff(AggOp.indptr) == 0,:] = 0
+#
+#        for AggOp,fine_candidates in cases:
+#
+#            mask_candidate(AggOp,fine_candidates)
+#
+#            for i in range(1,fine_candidates.shape[1]):
+#                Q_expected,R_expected = sa_fit_candidates(AggOp,fine_candidates[:,:i+1])
+#
+#                old_Q, old_R = sa_fit_candidates(AggOp,fine_candidates[:,:i])
+#
+#                Q_result,R_result = augment_candidates(AggOp, old_Q, old_R, fine_candidates[:,[i]])
+#
+#                # compare against SA method (which is assumed to be correct)
+#                assert_almost_equal(Q_expected.todense(),Q_result.todense())
+#                assert_almost_equal(R_expected,R_result)
+#
+#                #each fine level candidate should be fit exactly
+#                assert_almost_equal(fine_candidates[:,:i+1],Q_result*R_result)
+#                assert_almost_equal(Q_result*(Q_result.T*fine_candidates[:,:i+1]),fine_candidates[:,:i+1])
+#
 
-        #two candidates
-
-        ##block candidates
-        ##self.cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),shape=(9,3)), vstack((array([1]*9 + [0]*9),arange(2*9))).T ))
-
-
-
-    def test_first_level(self):
-        cases = []
-
-        ## tests where AggOp includes all DOFs
-        cases.append((csr_matrix((ones(4),array([0,0,1,1]),arange(5)),shape=(4,2)), vstack((ones(4),arange(4))).T ))
-        cases.append((csr_matrix((ones(9),array([0,0,0,1,1,1,2,2,2]),arange(10)),shape=(9,3)), vstack((ones(9),arange(9))).T ))
-        cases.append((csr_matrix((ones(9),array([0,0,1,1,2,2,3,3,3]),arange(10)),shape=(9,4)), vstack((ones(9),arange(9))).T ))
-
-        ## tests where AggOp excludes some DOFs
-        cases.append((csr_matrix((ones(4),array([0,0,1,1]),array([0,1,2,2,3,4])),shape=(5,2)), vstack((ones(5),arange(5))).T ))
-
-        # overdetermined blocks
-        cases.append((csr_matrix((ones(4),array([0,0,1,1]),array([0,1,2,2,3,4])),shape=(5,2)), vstack((ones(5),arange(5),arange(5)**2)).T  ))
-        cases.append((csr_matrix((ones(6),array([1,3,0,2,1,0]),array([0,0,1,2,2,3,4,5,5,6])),shape=(9,4)), vstack((ones(9),arange(9),arange(9)**2)).T ))
-        cases.append((csr_matrix((ones(6),array([1,3,0,2,1,0]),array([0,0,1,2,2,3,4,5,5,6])),shape=(9,4)), vstack((ones(9),arange(9))).T ))
-
-        def mask_candidate(AggOp,candidates):
-            #mask out all DOFs that are not included in the aggregation
-            candidates[diff(AggOp.indptr) == 0,:] = 0
-
-        for AggOp,fine_candidates in cases:
-
-            mask_candidate(AggOp,fine_candidates)
-
-            for i in range(1,fine_candidates.shape[1]):
-                Q_expected,R_expected = sa_fit_candidates(AggOp,fine_candidates[:,:i+1])
-
-                old_Q, old_R = sa_fit_candidates(AggOp,fine_candidates[:,:i])
-
-                Q_result,R_result = augment_candidates(AggOp, old_Q, old_R, fine_candidates[:,[i]])
-
-                # compare against SA method (which is assumed to be correct)
-                assert_almost_equal(Q_expected.todense(),Q_result.todense())
-                assert_almost_equal(R_expected,R_result)
-
-                #each fine level candidate should be fit exactly
-                assert_almost_equal(fine_candidates[:,:i+1],Q_result*R_result)
-                assert_almost_equal(Q_result*(Q_result.T*fine_candidates[:,:i+1]),fine_candidates[:,:i+1])
-
-
 if __name__ == '__main__':
     unittest.main()

Modified: trunk/scipy/sandbox/multigrid/tests/test_sa.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_sa.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/tests/test_sa.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -22,7 +22,7 @@
 from scipy.sandbox.multigrid.multilevel import smoothed_aggregation_solver
 from scipy.sandbox.multigrid.utils import diag_sparse
 
-from scipy.sandbox.multigrid.gallery import poisson
+from scipy.sandbox.multigrid.gallery.poisson import poisson
 
 #def sparsity(A):
 #    A = A.copy()
@@ -48,9 +48,9 @@
 
         # poisson problems in 1D and 2D
         for N in [2,3,5,7,10,11,19]:
-            self.cases.append( poisson(N,stencil='3pt',format='csr') )
+            self.cases.append( poisson( (N,), format='csr') )
         for N in [2,3,5,7,10,11]:
-            self.cases.append( poisson(N,stencil='5pt',format='csr') )
+            self.cases.append( poisson( (N,N), format='csr') )
 
 
     def test_sa_strong_connections(self):
@@ -74,7 +74,7 @@
                 #assert_array_equal(S_result.todense(),S_expected.todense())
 
         # two aggregates in 1D
-        A = poisson(6,stencil='3pt')
+        A = poisson( (6,), format='csr')
         AggOp = csr_matrix((ones(6),array([0,0,0,1,1,1]),arange(7)),shape=(6,2))
         candidates = ones((6,1))
 
@@ -106,13 +106,13 @@
         user_cases = []
 
         #simple 1d example w/ two aggregates
-        A = poisson(6, stencil='3pt', format='csr')
+        A = poisson( (6,), format='csr')
         AggOp = csr_matrix((ones(6),array([0,0,0,1,1,1]),arange(7)),shape=(6,2))
         candidates = ones((6,1))
         user_cases.append((A,AggOp,candidates))
 
         #simple 1d example w/ two aggregates (not all nodes are aggregated)
-        A = poisson(6, stencil='3pt', format='csr')
+        A = poisson( (6,), format='csr')
         AggOp = csr_matrix((ones(4),array([0,0,1,1]),array([0,1,1,2,3,3,4])),shape=(6,2))
         candidates = ones((6,1))
         user_cases.append((A,AggOp,candidates))
@@ -183,8 +183,8 @@
     def setUp(self):
         self.cases = []
 
-        self.cases.append(( poisson(100, stencil='3pt', format='csr'), None))
-        self.cases.append(( poisson(100, stencil='5pt', format='csr'), None))
+        self.cases.append(( poisson( (100,),    format='csr'), None))
+        self.cases.append(( poisson( (100,100), format='csr'), None))
         # TODO add unstructured tests
 
 

Modified: trunk/scipy/sandbox/multigrid/tests/test_utils.py
===================================================================
--- trunk/scipy/sandbox/multigrid/tests/test_utils.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/tests/test_utils.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -8,13 +8,9 @@
 from scipy.linalg import norm
 
 
-from scipy.sandbox.multigrid.utils import approximate_spectral_radius, \
-                                          infinity_norm, diag_sparse, \
-                                          symmetric_rescaling, \
-                                          expand_into_blocks
+from scipy.sandbox.multigrid.utils import *
+from scipy.sandbox.multigrid.utils import symmetric_rescaling
 
-
-
 class TestUtils(TestCase):
     def test_approximate_spectral_radius(self):
         cases = []
@@ -105,28 +101,7 @@
             D_sqrt,D_sqrt_inv = diag_sparse(D_sqrt),diag_sparse(D_sqrt_inv)
             assert_almost_equal((D_sqrt_inv*A*D_sqrt_inv).todense(), DAD.todense())
 
-    def test_expand_into_blocks(self):
-        cases = []
-        cases.append( ( matrix([[1]]), (1,2) ) )
-        cases.append( ( matrix([[1]]), (2,1) ) )
-        cases.append( ( matrix([[1]]), (2,2) ) )
-        cases.append( ( matrix([[1,2]]), (1,2) ) )
-        cases.append( ( matrix([[1,2],[3,4]]), (2,2) ) )
-        cases.append( ( matrix([[0,0],[0,0]]), (3,1) ) )
-        cases.append( ( matrix([[0,1,0],[0,2,3]]), (3,2) ) )
-        cases.append( ( matrix([[1,0,0],[2,0,3]]), (2,5) ) )
 
-        for A,shape in cases:
-            m,n = shape
-            result = expand_into_blocks(csr_matrix(A),m,n).todense()
 
-            expected = zeros((m*A.shape[0],n*A.shape[1]))
-            for i in range(m):
-                for j in range(n):
-                    expected[i::m,j::n] = A
-
-            assert_equal(expected,result)
-
-
 if __name__ == '__main__':
     unittest.main()

Modified: trunk/scipy/sandbox/multigrid/utils.py
===================================================================
--- trunk/scipy/sandbox/multigrid/utils.py	2008-01-14 19:29:12 UTC (rev 3830)
+++ trunk/scipy/sandbox/multigrid/utils.py	2008-01-14 19:54:37 UTC (rev 3831)
@@ -1,5 +1,5 @@
 __all__ =['approximate_spectral_radius','infinity_norm','diag_sparse',
-          'hstack_csr','vstack_csr','expand_into_blocks']
+          'hstack_csr','vstack_csr']
 
 import numpy
 import scipy
@@ -238,41 +238,41 @@
     return coo_matrix((V,(I,J)),shape=(A.shape[0]+B.shape[0],A.shape[1])).tocsr()
 
 
-def expand_into_blocks(A,m,n):
-    """Expand each element in a sparse matrix A into an m-by-n block.
 
-          Example:
-          >>> A.todense()
-          matrix([[ 1.,  2.],
-                  [ 4.,  5.]])
 
-          >>> expand_into_blocks(A,2,2).todense()
-          matrix([[ 1.,  1.,  2.,  2.],
-                  [ 1.,  1.,  2.,  2.],
-                  [ 4.,  4.,  5.,  5.],
-                  [ 4.,  4.,  5.,  5.]])
 
-    """
-    #TODO EXPLAIN MORE
-    #TODO use spkron instead, time for compairson
 
-    if m == 1 and n == 1:
-        return A #nothing to do
 
-    A = A.tocoo()
 
-    # expand 1x1 -> mxn
-    row  = ( m*A.row ).repeat(m*n).reshape(-1,m,n)
-    col  = ( n*A.col ).repeat(m*n).reshape(-1,m,n)
 
-    # increment indices
-    row += tile(arange(m).reshape(-1,1),(1,n))
-    col += tile(arange(n).reshape(1,-1),(m,1))
 
-    # flatten
-    row = row.reshape(-1)
-    col = col.reshape(-1)
 
-    data = A.data.repeat(m*n)
 
-    return coo_matrix((data,(row,col)),shape=(m*A.shape[0],n*A.shape[1]))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
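
The removed expand_into_blocks() helper's own TODO comment pointed to the sparse Kronecker product as a replacement; a minimal sketch of that equivalence, assuming the routine is available as scipy.sparse.kron (named spkron in scipy of this vintage):

    from numpy import ones
    from scipy.sparse import csr_matrix, kron

    A = csr_matrix([[1., 2.],
                    [4., 5.]])
    # replicate each entry of A into a 2x2 block,
    # reproducing expand_into_blocks(A, 2, 2) from the removed code
    B = kron(A, ones((2, 2)))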


