[Scipy-svn] r3278 - trunk/scipy/sandbox/models

scipy-svn@scip...
Wed Aug 29 04:56:10 CDT 2007


Author: matthew.brett@gmail.com
Date: 2007-08-29 04:55:33 -0500 (Wed, 29 Aug 2007)
New Revision: 3278

Modified:
   trunk/scipy/sandbox/models/glm.py
   trunk/scipy/sandbox/models/model.py
   trunk/scipy/sandbox/models/regression.py
Log:
Allow wls weights to be 1 or 2D, use super() for called methods

Modified: trunk/scipy/sandbox/models/glm.py
===================================================================
--- trunk/scipy/sandbox/models/glm.py	2007-08-29 07:20:42 UTC (rev 3277)
+++ trunk/scipy/sandbox/models/glm.py	2007-08-29 09:55:33 UTC (rev 3278)
@@ -12,8 +12,7 @@
     
     def __init__(self, design, family=family.Gaussian()):
         self.family = family
-        self.weights = 1
-        self.initialize(design)
+        super(model, self).__init__(design, weights=1)
 
     def __iter__(self):                                                   
         self.iter = 0
@@ -39,7 +38,7 @@
         self.weights = self.family.weights(results.mu)
         self.initialize(self.design)
         Z = results.predict + self.family.link.deriv(results.mu) * (Y - results.mu)
-        newresults = wls_model.fit(self, Z)
+        newresults = super(model, self).fit(Z)
         newresults.Y = Y
         newresults.mu = self.family.link.inverse(newresults.predict)
         self.iter += 1
@@ -70,12 +69,14 @@
         if Y is None:
             Y = self.Y
         resid = Y - results.mu
-        return (N.power(resid, 2) / self.family.variance(results.mu)).sum() / results.df_resid
-    
+        return ((N.power(resid, 2) / self.family.variance(results.mu)).sum()
+                / results.df_resid)
+
     def fit(self, Y):
         self.Y = N.asarray(Y, N.float64)
         iter(self)
-        self.results = wls_model.fit(self, self.family.link.initialize(Y))
+        self.results = super(model, self).fit(
+            self.family.link.initialize(Y))
         self.results.mu = self.family.link.inverse(self.results.predict)
         self.scale = self.results.scale = self.estimate_scale()
         

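The glm.py hunks above replace explicit unbound calls on the wls_model base class with cooperative super() calls. A minimal sketch of the two call styles, using throwaway classes rather than the scipy ones; note that super() binds self, so it must not be passed a second time:

    import numpy as N

    class wls(object):
        def fit(self, Z):
            return N.asarray(Z).mean()

    class glm(wls):
        def fit(self, Y):
            Z = N.asarray(Y, N.float64)
            # old style: unbound call on the base class, self passed explicitly
            #   return wls.fit(self, Z)
            # new style in this revision: super() already binds self
            return super(glm, self).fit(Z)

    print glm().fit([1.0, 2.0, 3.0])    # 2.0 either way
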
Modified: trunk/scipy/sandbox/models/model.py
===================================================================
--- trunk/scipy/sandbox/models/model.py	2007-08-29 07:20:42 UTC (rev 3277)
+++ trunk/scipy/sandbox/models/model.py	2007-08-29 09:55:33 UTC (rev 3278)
@@ -5,7 +5,7 @@
 from scipy.sandbox.models.contrast import ContrastResults
 from scipy.sandbox.models.utils import recipr
 
-class Model:
+class Model(object):
     """
     A (predictive) statistical model. The class Model itself does nothing
     but lays out the methods expected of any subclass.

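The one-line change in model.py makes Model a new-style class; super() only accepts new-style classes (those that ultimately derive from object), so this is what makes the super() calls elsewhere in the commit legal. A quick illustration with throwaway class names:

    class classic:                      # old-style (classic) class
        pass

    class modern(object):               # new-style class
        pass

    super(modern, modern())             # fine: modern is a type
    try:
        super(classic, classic())       # classic classes are not types
    except TypeError, err:
        print err                       # super() argument 1 must be type, ...
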
Modified: trunk/scipy/sandbox/models/regression.py
===================================================================
--- trunk/scipy/sandbox/models/regression.py	2007-08-29 07:20:42 UTC (rev 3277)
+++ trunk/scipy/sandbox/models/regression.py	2007-08-29 09:55:33 UTC (rev 3278)
@@ -22,7 +22,8 @@
 import numpy.linalg as L
 from scipy.linalg import norm, toeplitz
 
-from scipy.sandbox.models.model import likelihood_model, likelihood_model_results
+from scipy.sandbox.models.model import likelihood_model, \
+     likelihood_model_results
 from scipy.sandbox.models import utils
 
 class ols_model(likelihood_model):
@@ -65,7 +66,7 @@
             design : TODO
                 TODO
         """
-        likelihood_model.__init__(self)
+        super(ols_model, self).__init__()
         self.initialize(design)
 
     def initialize(self, design):
@@ -89,7 +90,6 @@
         """
         OLS model whitener does nothing: returns Y.
         """
-
         return Y
     
     def est_coef(self, Y):
@@ -98,7 +98,6 @@
         and coefficients, but initialize is not called so no
         pseudo-inverse is calculated.
         """
-            
         Z = self.whiten(Y)
 
         lfit = regression_results(L.lstsq(self.wdesign, Z)[0], Y)
@@ -111,7 +110,6 @@
         (whitened) residuals and scale. 
 
         """
-    
         Z = self.whiten(Y)
 
         lfit = regression_results(N.dot(self.calc_beta, Z), Y,
@@ -173,7 +171,6 @@
     >>> print model.rho
     [-0.61887622 -0.88137957]
     """
-
     def __init__(self, design, rho):
         if type(rho) is type(1):
             self.order = rho
@@ -185,7 +182,7 @@
             if self.rho.shape == ():
                 self.rho.shape = (1,)
             self.order = self.rho.shape[0]
-        ols_model.__init__(self, design)
+        super(ar_model, self).__init__(design)
 
     def iterative_fit(self, Y, niter=3):
         """
@@ -203,7 +200,6 @@
             results = self.fit(Y)
             self.rho, _ = self.yule_walker(Y - results.predict)
 
-
     def whiten(self, X):
         """
         Whiten a series of columns according to an AR(p)
@@ -297,16 +293,23 @@
     >>> print results.Fcontrast(N.identity(2))
     <F contrast: F=26.9986072423, df_denom=5, df_num=2>
     """
-
     def __init__(self, design, weights=1):
-        self.weights = weights
-        ols_model.__init__(self, design)
+        weights = N.array(weights)
+        if weights.shape == (): # scalar
+            self.weights = weights
+        else: 
+            design_rows = design.shape[0]
+            if not (weights.shape[0] == design_rows and
+                    weights.size == design_rows):
+                raise ValueError(
+                    'Weights must be scalar or same length as design')
+            self.weights = weights.reshape(design_rows)
+        super(wls_model, self).__init__(design)
 
     def whiten(self, X):
         """
         Whitener for WLS model, multiplies by sqrt(self.weights)
         """
-
         X = N.asarray(X, N.float64)
 
         if X.ndim == 1:
@@ -326,7 +329,9 @@
     """
 
     def __init__(self, beta, Y, normalized_cov_beta=None, scale=1.):
-        likelihood_model_results.__init__(self, beta, normalized_cov_beta, scale)
+        super(regression_results, self).__init__(beta,
+                                                 normalized_cov_beta,
+                                                 scale)
         self.Y = Y
 
     def norm_resid(self):
@@ -335,7 +340,6 @@
 
         Note: residuals are whitened residuals.
         """
-
         if not hasattr(self, 'resid'):
             raise ValueError, 'need normalized residuals to estimate standard deviation'
 
@@ -364,7 +368,6 @@
     if the contrast C is estimable by looking at the rank of vstack([C,D]) and
     verifying it is the same as the rank of D.
     """
-
     if C.ndim == 1:
         C.shape = (C.shape[0], 1)
     new = N.vstack([C, D])

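The reworked wls_model constructor above accepts a scalar weight, a length-n weight vector, or an (n, 1) column, and rejects anything whose length does not match the number of rows in the design. A hedged sketch of the accepted shapes, with made-up numbers (a 7-row design) rather than the scipy test data:

    import numpy as N

    design = N.ones((7, 2))             # illustrative design matrix
    n = design.shape[0]

    for w in (2.0, N.ones(n), N.ones((n, 1))):
        w = N.array(w)
        if w.shape == ():               # scalar weight: kept as-is
            print 'scalar', float(w)
        else:                           # 1D vector or (n, 1) column
            assert w.shape[0] == n and w.size == n
            print 'vector ->', w.reshape(n).shape

    # N.ones(5) would fail the check and make wls_model raise ValueError

So a weight column read straight from a file can be passed without flattening it first; the constructor reshapes it to (n,) before whiten() uses it.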

