
matplotlib-checkins

From: <js...@us...> - 2008-07-22 02:17:11
Revision: 5806
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5806&view=rev
Author: jswhit
Date: 2008-07-22 02:17:09 +0000 (22 Jul 2008)
Log Message:
-----------
added griddata function (left out in previous commit)
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 01:52:12 UTC (rev 5805)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 02:17:09 UTC (rev 5806)
@@ -90,6 +90,12 @@
 
 import matplotlib.nxutils as nxutils
 import matplotlib.cbook as cbook
+try:
+ import mpl_tookits._natgrid as _natgrid
+ _use_natgrid = True
+except ImportError:
+ import matplotlib.delaunay as delaunay
+ _use_natgrid = False
 
 # set is a new builtin function in 2.4; delete the following when
 # support for 2.3 is dropped.
@@ -2691,3 +2697,55 @@
 in zip(funcs, row, rowmask, mvals)])
 if opened:
 fh.close()
+
+def griddata(x,y,z,xi,yi):
+ """
+ zi = griddata(x,y,z,xi,yi) fits a surface of the form z = f(x,y)
+ to the data in the (usually) nonuniformly spaced vectors (x,y,z).
+ griddata interpolates this surface at the points specified by (xi,yi)
+ to produce zi. xi and yi must describe a regular grid, can be
+ either 1D or 2D, but must be monotonically increasing.
+ 
+ A masked array is returned if any grid points are outside convex 
+ hull defined by input data (no extrapolation is done).
+
+ Uses natural neighbor interpolation based on Delaunay triangulation.
+ """
+ if xi.ndim != yi.ndim:
+ raise TypeError("inputs xi and yi must have same number of dimensions (1 or 2)")
+ if xi.ndim != 1 and xi.ndim != 2:
+ raise TypeError("inputs xi and yi must be 1D or 2D.")
+ if _use_natgrid: # use natgrid toolkit if available.
+ if xi.ndim == 2:
+ xi = xi[0,:]
+ yi = yi[:,0]
+ # override default natgrid internal parameters.
+ _natgrid.seti('ext',0)
+ _natgrid.setr('nul',np.nan)
+ # cast input arrays to doubles (this makes a copy)
+ x = x.astype(np.float)
+ y = y.astype(np.float)
+ z = z.astype(np.float)
+ xo = xi.astype(np.float)
+ yo = yi.astype(np.float)
+ if min(xo[1:]-xo[0:-1]) < 0 or min(yo[1:]-yo[0:-1]) < 0:
+ raise ValueError, 'output grid defined by xi,yi must be monotone increasing'
+ # allocate array for output (buffer will be overwritten by nagridd)
+ zo = np.empty((yo.shape[0],xo.shape[0]), np.float)
+ _natgrid.natgridd(x,y,z,xo,yo,zo)
+ else: # use Robert Kern's delaunay package from scikits (default)
+ if xi.ndim != yi.ndim:
+ raise TypeError("inputs xi and yi must have same number of dimensions (1 or 2)")
+ if xi.ndim != 1 and xi.ndim != 2:
+ raise TypeError("inputs xi and yi must be 1D or 2D.")
+ if xi.ndim == 1:
+ xi,yi = np.meshgrid(xi,yi)
+ # triangulate data
+ tri = delaunay.Triangulation(x,y)
+ # interpolate data
+ interp = tri.nn_interpolator(z)
+ zo = interp(xi,yi)
+ # mask points on grid outside convex hull of input data.
+ if np.any(np.isnan(zo)):
+ zo = np.ma.masked_where(np.isnan(zo),zo)
+ return zo
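A minimal usage sketch of the new function (not part of the commit), assuming a matplotlib build that includes this revision; the scattered sample data are invented for illustration::

    import numpy as np
    from matplotlib import mlab

    # scattered, nonuniformly spaced sample points
    x = np.random.uniform(-2, 2, 200)
    y = np.random.uniform(-2, 2, 200)
    z = x * np.exp(-x**2 - y**2)

    # regular, monotonically increasing output grid
    xi = np.linspace(-2, 2, 50)
    yi = np.linspace(-2, 2, 50)

    # zi is masked wherever (xi, yi) falls outside the convex hull of (x, y)
    zi = mlab.griddata(x, y, z, xi, yi)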
From: <js...@us...> - 2008-07-22 02:47:04
Revision: 5807
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5807&view=rev
Author: jswhit
Date: 2008-07-22 02:47:02 +0000 (22 Jul 2008)
Log Message:
-----------
fix typo in as yet nonexistent natgrid import.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 02:17:09 UTC (rev 5806)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 02:47:02 UTC (rev 5807)
@@ -91,7 +91,7 @@
 import matplotlib.nxutils as nxutils
 import matplotlib.cbook as cbook
 try:
- import mpl_tookits._natgrid as _natgrid
+ from mpl_toolkits.natgrid import _natgrid
 _use_natgrid = True
 except ImportError:
 import matplotlib.delaunay as delaunay
From: <js...@us...> - 2008-07-22 11:23:51
Revision: 5809
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5809&view=rev
Author: jswhit
Date: 2008-07-22 11:23:49 +0000 (22 Jul 2008)
Log Message:
-----------
update docstring for griddata to reflect the fact that
mpl_toolkits.natgrid will be used if installed.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 11:12:50 UTC (rev 5808)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月22日 11:23:49 UTC (rev 5809)
@@ -2710,6 +2710,15 @@
 hull defined by input data (no extrapolation is done).
 
 Uses natural neighbor interpolation based on Delaunay triangulation.
+ By default, this algorithm is provided by the matplotlib.delaunay
+ package, written by Robert Kern. The triangulation algorithm in this
+ package is known to fail on some nearly pathological cases. For
+ this reason, a separate toolkit (mpl_tookits.natgrid) has been created
+ that provides a more robust algorithm fof triangulation and interpolation.
+ This toolkit is based on the NCAR natgrid library, which contains code 
+ that is not redistributable under a BSD-compatible license. When installed,
+ this function will use the mpl_toolkits.natgrid algorithm, otherwise it
+ will use the built-in matplotlib.delaunay package.
 """
 if xi.ndim != yi.ndim:
 raise TypeError("inputs xi and yi must have same number of dimensions (1 or 2)")
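A quick way to check which backend the updated docstring refers to on a given installation; a sketch, since the optional mpl_toolkits.natgrid package may or may not be present::

    try:
        from mpl_toolkits.natgrid import _natgrid
        print('griddata will use the natgrid toolkit')
    except ImportError:
        print('griddata will fall back to matplotlib.delaunay')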
From: <dmk...@us...> - 2008-07-24 07:59:20
Revision: 5828
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5828&view=rev
Author: dmkaplan
Date: 2008-07-24 07:59:18 +0000 (24 Jul 2008)
Log Message:
-----------
Changes to documentation of norm and orth functions and deprecation
warning for norm in favor of numpy version.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月24日 02:28:20 UTC (rev 5827)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月24日 07:59:18 UTC (rev 5828)
@@ -1816,25 +1816,25 @@
 
 def norm(x,y=2):
 """
- Norm of a matrix or a vector according to Matlab.
- The description is taken from Matlab:
+ This function is deprecated - use numpy.linalg.norm instead.
 
- For matrices...
- NORM(X) is the largest singular value of X, max(svd(X)).
- NORM(X,2) is the same as NORM(X).
- NORM(X,1) is the 1-norm of X, the largest column sum,
- = max(sum(abs((X)))).
- NORM(X,inf) is the infinity norm of X, the largest row sum,
- = max(sum(abs((X')))).
- NORM(X,'fro') is the Frobenius norm, sqrt(sum(diag(X'*X))).
- NORM(X,P) is available for matrix X only if P is 1, 2, inf or 'fro'.
+ Norm of a matrix or a vector. Functions similar to the Matlab (TM)
+ function of the same name.
 
- For vectors...
- NORM(V,P) = sum(abs(V).^P)^(1/P).
- NORM(V) = norm(V,2).
- NORM(V,inf) = max(abs(V)).
- NORM(V,-inf) = min(abs(V)).
+ Call signature::
+
+ norm(x,y=2)
+
+ This function behaves differently for vectors and matrices. For vectors,
+ it returns the y'th norm of x (i.e. (sum(abs(x)**y))**(1.0/y).
+
+ For matrices, if y=2, then it returns the largest singular value
+ of X, namely max(linalg.svd(x)). If y=1, returns the largest
+ column sum of x (i.e., max(sum(abs(x),axis=0)) ). If y=inf,
+ returns the largest row sum. If y='fro', returns the Frobenius
+ norm, sqrt(sum(diag(dot(x.transpose(),x)))).
 """
+ warnings.warn( "Use numpy.linalg.norm instead", DeprecationWarning )
 
 x = np.asarray(x)
 if x.ndim == 2:
@@ -1862,13 +1862,16 @@
 
 def orth(A):
 """
- Orthogonalization procedure by Matlab.
- The description is taken from its help:
+ Orthogonalization procedure similar to Matlab (TM) function of the same
+ name.
 
- Q = ORTH(A) is an orthonormal basis for the range of A.
- That is, Q'*Q = I, the columns of Q span the same space as
- the columns of A, and the number of columns of Q is the
- rank of A.
+ Call signature::
+
+ Q = orth(A)
+
+ Returns an orthonormal basis with the range of A. Q is an orthonormal
+ matrix (i.e., dot( Q.transpose(), Q ) is an identity matrix) and the
+ columns of Q span the same space as the columns of A.
 """
 
 A = np.asarray(A)
@@ -2086,9 +2089,9 @@
 join record arrays r1 and r2 on key; key is a tuple of field
 names. If r1 and r2 have equal values on all the keys in the key
 tuple, then their fields will be merged into a new record array
- containing the intersection of the fields of r1 and r2. 
+ containing the intersection of the fields of r1 and r2.
 
- r1 (also r2) must not have any duplicate keys. 
+ r1 (also r2) must not have any duplicate keys.
 
 The jointype keyword can be 'inner', 'outer', 'leftouter'.
 To do a rightouter join just reverse r1 and r2.
@@ -2702,8 +2705,8 @@
 griddata interpolates this surface at the points specified by (xi,yi)
 to produce zi. xi and yi must describe a regular grid, can be
 either 1D or 2D, but must be monotonically increasing.
- 
- A masked array is returned if any grid points are outside convex 
+
+ A masked array is returned if any grid points are outside convex
 hull defined by input data (no extrapolation is done).
 
 Uses natural neighbor interpolation based on Delaunay triangulation.
@@ -2712,7 +2715,7 @@
 package is known to fail on some nearly pathological cases. For
 this reason, a separate toolkit (mpl_tookits.natgrid) has been created
 that provides a more robust algorithm fof triangulation and interpolation.
- This toolkit is based on the NCAR natgrid library, which contains code 
+ This toolkit is based on the NCAR natgrid library, which contains code
 that is not redistributable under a BSD-compatible license. When installed,
 this function will use the mpl_toolkits.natgrid algorithm, otherwise it
 will use the built-in matplotlib.delaunay package.
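For reference, a short sketch of the numpy.linalg.norm calls the deprecation points to, covering the cases listed in the rewritten docstring::

    import numpy as np

    v = np.array([3.0, -4.0])
    np.linalg.norm(v)           # vector 2-norm: 5.0
    np.linalg.norm(v, np.inf)   # max(abs(v)): 4.0

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    np.linalg.norm(A, 2)        # largest singular value
    np.linalg.norm(A, 1)        # largest column sum
    np.linalg.norm(A, 'fro')    # Frobenius norm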
From: <sa...@us...> - 2008-07-24 21:56:09
Revision: 5851
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5851&view=rev
Author: sameerd
Date: 2008-07-24 21:56:08 +0000 (24 Jul 2008)
Log Message:
-----------
Fixing edge cases in rec_join
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月24日 21:56:06 UTC (rev 5850)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月24日 21:56:08 UTC (rev 5851)
@@ -2032,12 +2032,13 @@
 newrec[k] = v
 
 for field in r1.dtype.names:
- newrec[field][:common_len] = r1[field][r1ind]
+ if common_len:
+ newrec[field][:common_len] = r1[field][r1ind]
 if (jointype == "outer" or jointype == "leftouter") and left_len:
 newrec[field][common_len:(common_len+left_len)] = r1[field][left_ind]
 
 for field in r2.dtype.names:
- if field not in key:
+ if field not in key and common_len:
 newrec[field][:common_len] = r2[field][r2ind]
 if jointype == "outer" and right_len:
 newrec[field][-right_len:] = r2[field][right_ind]
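A sketch of the edge case the new guards address: an outer join in which the two arrays share no key values, so common_len is zero and the guarded assignments are skipped (field names and data are illustrative)::

    import numpy as np
    from matplotlib import mlab

    r1 = np.rec.fromrecords([(1, 10.0), (2, 20.0)], names='id,a')
    r2 = np.rec.fromrecords([(3, 0.5), (4, 0.25)], names='id,b')

    # no 'id' values in common; only the left-only and right-only rows are filled
    joined = mlab.rec_join(('id',), r1, r2, jointype='outer',
                           defaults={'a': 0.0, 'b': 0.0})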
From: <js...@us...> - 2008-07-28 16:58:20
Revision: 5913
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5913&view=rev
Author: jswhit
Date: 2008-07-28 16:58:16 +0000 (28 Jul 2008)
Log Message:
-----------
make sure griddata issues a verbose report only the first time it is called.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月28日 16:42:10 UTC (rev 5912)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月28日 16:58:16 UTC (rev 5913)
@@ -2603,10 +2603,12 @@
 import matplotlib.delaunay as delaunay
 from matplotlib.delaunay import __version__
 _use_natgrid = False
- if _use_natgrid:
- verbose.report('using natgrid version %s' % __version__)
- else:
- verbose.report('using delaunay version %s' % __version__)
+ if not griddata._reported:
+ if _use_natgrid:
+ verbose.report('using natgrid version %s' % __version__)
+ else:
+ verbose.report('using delaunay version %s' % __version__)
+ griddata._reported = True
 if xi.ndim != yi.ndim:
 raise TypeError("inputs xi and yi must have same number of dimensions (1 or 2)")
 if xi.ndim != 1 and xi.ndim != 2:
@@ -2645,3 +2647,4 @@
 if np.any(np.isnan(zo)):
 zo = np.ma.masked_where(np.isnan(zo),zo)
 return zo
+griddata._reported = False
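The report-once behaviour uses a function attribute as a persistent flag; a generic sketch of the same idiom::

    def compute(x):
        # emit the report only on the first call (stand-in for verbose.report)
        if not compute._reported:
            print('compute: first call, reporting backend details')
            compute._reported = True
        return 2 * x

    compute._reported = False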
From: <js...@us...> - 2008-07-30 16:32:55
Revision: 5931
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=5931&view=rev
Author: jswhit
Date: 2008-07-30 16:32:50 +0000 (30 Jul 2008)
Log Message:
-----------
updated download instructions.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月30日 16:19:40 UTC (rev 5930)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年07月30日 16:32:50 UTC (rev 5931)
@@ -2591,10 +2591,8 @@
 this function will use the mpl_toolkits.natgrid algorithm, otherwise it
 will use the built-in matplotlib.delaunay package.
 
- The natgrid matplotlib toolkit can be checked out through SVN with the
- following command:
-
- svn co https://matplotlib.svn.sourceforge.net/svnroot/matplotlib/trunk/toolkits/natgrid natgrid
+ The natgrid matplotlib toolkit can be downloaded from
+ http://sourceforge.net/project/showfiles.php?group_id=80706&package_id=142792
 """
 try:
 from mpl_toolkits.natgrid import _natgrid, __version__
From: <sa...@us...> - 2008-10-08 14:38:34
Revision: 6170
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6170&view=rev
Author: sameerd
Date: 2008-10-08 14:38:26 +0000 (08 Oct 2008)
Log Message:
-----------
rec_join now handles two record arrays with the same column names with "*fixes"
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年10月08日 14:09:55 UTC (rev 6169)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年10月08日 14:38:26 UTC (rev 6170)
@@ -1665,6 +1665,14 @@
 except TypeError: return False
 else: return b
 
+def rec_view(rec):
+ """ Return a view of an ndarray as a recarray
+ http://projects.scipy.org/pipermail/numpy-discussion/2008-August/036429.html
+ Reverting Travis' fix because it doesn't work for object arrays
+ """
+ return rec.view(np.recarray)
+ #return rec.view(dtype=(np.record, rec.dtype), type=np.recarray)
+
 def rec_append_field(rec, name, arr, dtype=None):
 """
 return a new record array with field name populated with data from array arr.
@@ -1703,7 +1711,7 @@
 newrec[field] = rec[field]
 for name, arr in zip(names, arrs):
 newrec[name] = arr
- return newrec.view(np.recarray)
+ return rec_view(newrec)
 
 
 def rec_drop_fields(rec, names):
@@ -1719,7 +1727,7 @@
 for field in newdtype.names:
 newrec[field] = rec[field]
 
- return newrec.view(np.recarray)
+ return rec_view(newrec)
 
 
 
@@ -1789,7 +1797,7 @@
 return np.rec.fromarrays(arrays, names=names)
 
 
-def rec_join(key, r1, r2, jointype='inner', defaults=None):
+def rec_join(key, r1, r2, jointype='inner', defaults=None, r1postfix='1', r2postfix='2'):
 """
 join record arrays r1 and r2 on key; key is a tuple of field
 names. If r1 and r2 have equal values on all the keys in the key
@@ -1803,6 +1811,9 @@
 
 The defaults keyword is a dictionary filled with
 {column_name:default_value} pairs.
+
+ The keywords r1postfix and r2postfix are postfixed to column names 
+ (other than keys) that are both in r1 and r2.
 """
 
 for name in key:
@@ -1850,13 +1861,21 @@
 return (name, dt2.descr[0][1])
 
 
-
 keydesc = [key_desc(name) for name in key]
+ 
+ def mapped_r1field(name):
+ """ the column name in newrec that corresponds to the colmn in r1 """
+ if name in key or name not in r2.dtype.names: return name
+ else: return name + r1postfix
 
- newdtype = np.dtype(keydesc +
- [desc for desc in r1.dtype.descr if desc[0] not in key ] +
- [desc for desc in r2.dtype.descr if desc[0] not in key ] )
+ def mapped_r2field(name):
+ """ the column name in newrec that corresponds to the colmn in r2 """
+ if name in key or name not in r1.dtype.names: return name
+ else: return name + r2postfix
 
+ r1desc = [(mapped_r1field(desc[0]), desc[1]) for desc in r1.dtype.descr if desc[0] not in key]
+ r2desc = [(mapped_r2field(desc[0]), desc[1]) for desc in r2.dtype.descr if desc[0] not in key]
+ newdtype = np.dtype(keydesc + r1desc + r2desc)
 
 newrec = np.empty(common_len + left_len + right_len, dtype=newdtype)
 
@@ -1867,20 +1886,22 @@
 newrec[k] = v
 
 for field in r1.dtype.names:
+ newfield = mapped_r1field(field)
 if common_len:
- newrec[field][:common_len] = r1[field][r1ind]
+ newrec[newfield][:common_len] = r1[field][r1ind]
 if (jointype == "outer" or jointype == "leftouter") and left_len:
- newrec[field][common_len:(common_len+left_len)] = r1[field][left_ind]
+ newrec[newfield][common_len:(common_len+left_len)] = r1[field][left_ind]
 
 for field in r2.dtype.names:
+ newfield = mapped_r2field(field)
 if field not in key and common_len:
- newrec[field][:common_len] = r2[field][r2ind]
+ newrec[newfield][:common_len] = r2[field][r2ind]
 if jointype == "outer" and right_len:
- newrec[field][-right_len:] = r2[field][right_ind]
+ newrec[newfield][-right_len:] = r2[field][right_ind]
 
 newrec.sort(order=key)
 
- return newrec.view(np.recarray)
+ return rec_view(newrec)
 
 
 def csv2rec(fname, comments='#', skiprows=0, checkrows=0, delimiter=',',
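A sketch of the new postfix behaviour: a non-key column present in both arrays is kept under postfixed names in the result (data invented for illustration)::

    import numpy as np
    from matplotlib import mlab

    r1 = np.rec.fromrecords([(1, 1.0), (2, 2.0)], names='id,value')
    r2 = np.rec.fromrecords([(1, 10.0), (3, 30.0)], names='id,value')

    # 'value' occurs in both arrays, so it becomes 'value1' and 'value2'
    joined = mlab.rec_join(('id',), r1, r2, jointype='inner')
    print(joined.dtype.names)   # ('id', 'value1', 'value2')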
From: <mme...@us...> - 2008-11-04 13:38:22
Revision: 6361
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6361&view=rev
Author: mmetz_bn
Date: 2008-11-04 13:38:15 +0000 (04 Nov 2008)
Log Message:
-----------
sqrtm implemented in scipy
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年10月31日 14:54:42 UTC (rev 6360)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月04日 13:38:15 UTC (rev 6361)
@@ -1811,7 +1811,7 @@
 """
 Deprecated - needs clean room implementation
 """
- raise NotImplementedError('Deprecated - needs clean room implementation')
+ raise NotImplementedError('Deprecated - see scipy.linalg.sqrtm')
 
 
 def mfuncC(f, x):
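The replacement the new message points to, assuming scipy is installed::

    import numpy as np
    from scipy.linalg import sqrtm

    A = np.array([[4.0, 0.0], [0.0, 9.0]])
    print(sqrtm(A))   # [[ 2.  0.]  [ 0.  3.]]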
From: <ry...@us...> - 2008-11-06 22:53:06
Revision: 6368
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6368&view=rev
Author: ryanmay
Date: 2008-11-06 22:53:02 +0000 (06 Nov 2008)
Log Message:
-----------
Improve the docstrings for mlab.psd and mlab.csd.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月05日 17:12:03 UTC (rev 6367)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月06日 22:53:02 UTC (rev 6368)
@@ -238,39 +238,52 @@
 a = y.mean() - b*x.mean()
 return y - (b*x + a)
 
-
-
 def psd(x, NFFT=256, Fs=2, detrend=detrend_none,
 window=window_hanning, noverlap=0):
 """
- The power spectral density by Welches average periodogram method.
- The vector x is divided into NFFT length segments. Each segment
- is detrended by function detrend and windowed by function window.
- noperlap gives the length of the overlap between segments. The
- absolute(fft(segment))**2 of each segment are averaged to compute Pxx,
- with a scaling to correct for power loss due to windowing.
+ The power spectral density by Welch's average periodogram method.
+ The vector *x* is divided into *NFFT* length blocks. Each block
+ is detrended by the function *detrend* and windowed by the function
+ *window*. *noverlap* gives the length of the overlap between blocks.
+ The absolute(fft(block))**2 of each segment are averaged to compute
+ *Pxx*, with a scaling to correct for power loss due to windowing.
 
- Fs is the sampling frequency (samples per time unit). It is used
- to calculate the Fourier frequencies, freqs, in cycles per time
- unit.
+ If len(*x*) < *NFFT*, it will be zero padded to *NFFT*.
 
+ *x*
+ Array or sequence containing the data
+
 *NFFT*
- The length of the FFT window. Must be even; a power 2 is most efficient.
+ The number of data points used in each block for the FFT.
+ Must be even; a power 2 is most efficient. The default value is 256.
 
+ *Fs*
+ The sampling frequency (samples per time unit). It is used
+ to calculate the Fourier frequencies, freqs, in cycles per time
+ unit. The default value is 2.
+
 *detrend*
- is a function, unlike in matlab where it is a vector.
+ Any callable function (unlike in matlab where it is a vector).
+ For examples, see :func:`detrend`, :func:`detrend_none`, and
+ :func:`detrend_mean`. The default is :func:`detrend_none`.
 
 *window*
- can be a function or a vector of length NFFT. To create window
- vectors see numpy.blackman, numpy.hamming, numpy.bartlett,
- scipy.signal, scipy.signal.get_window etc.
+ A function or a vector of length *NFFT*. To create window
+ vectors see :func:`window_hanning`, :func:`window_none`,
+ :func:`numpy.blackman`, :func:`numpy.hamming`,
+ :func:`numpy.bartlett`, :func:`scipy.signal`,
+ :func:`scipy.signal.get_window`, etc. The default is
+ :func:`window_hanning`.
 
- If len(*x*) < *NFFT*, it will be zero padded to *NFFT*.
+ *noverlap*
+ The number of points of overlap between blocks. The default value
+ is 0 (no overlap).
 
 Returns the tuple (*Pxx*, *freqs*).
 
- Refs: Bendat & Piersol -- Random Data: Analysis and Measurement Procedures, John Wiley & Sons (1986)
-
+ Refs:
+ Bendat & Piersol -- Random Data: Analysis and Measurement
+ Procedures, John Wiley & Sons (1986)
 """
 # I think we could remove this condition without hurting anything.
 if NFFT % 2:
@@ -317,26 +330,50 @@
 def csd(x, y, NFFT=256, Fs=2, detrend=detrend_none,
 window=window_hanning, noverlap=0):
 """
- The cross spectral density Pxy by Welches average periodogram
+ The cross power spectral density by Welch's average periodogram
 method. The vectors *x* and *y* are divided into *NFFT* length
- segments. Each segment is detrended by function *detrend* and
- windowed by function *window*. *noverlap* gives the length of the
- overlap between segments. The product of the direct FFTs of *x*
- and *y* are averaged over each segment to compute *Pxy*, with a
- scaling to correct for power loss due to windowing. *Fs* is the
- sampling frequency.
+ blocks. Each block is detrended by the function *detrend* and
+ windowed by the function *window*. *noverlap* gives the length
+ of the overlap between blocks. The product of the direct FFTs
+ of *x* and *y* are averaged over each segment to compute *Pxy*,
+ with a scaling to correct for power loss due to windowing.
 
- *NFFT* must be even; a power of 2 is most efficient
+ If len(*x*) < *NFFT* or len(*y*) < *NFFT*, they will be zero
+ padded to *NFFT*.
 
- *window* can be a function or a vector of length *NFFT*. To create
- window vectors see :func:`numpy.blackman`, :func:`numpy.hamming`,
- :func:`numpy.bartlett`, :func:`scipy.signal`,
- :func:`scipy.signal.get_window` etc.
+ *x*, *y*
+ Array or sequence containing the data
 
- Returns the tuple (*Pxy*, *freqs*)
+ *NFFT*
+ The number of data points used in each block for the FFT.
+ Must be even; a power 2 is most efficient. The default value is 256.
 
+ *Fs*
+ The sampling frequency (samples per time unit). It is used
+ to calculate the Fourier frequencies, freqs, in cycles per time
+ unit. The default value is 2.
+
+ *detrend*
+ Any callable function (unlike in matlab where it is a vector).
+ For examples, see :func:`detrend`, :func:`detrend_none`, and
+ :func:`detrend_mean`. The default is :func:`detrend_none`.
+
+ *window*
+ A function or a vector of length *NFFT*. To create window
+ vectors see :func:`window_hanning`, :func:`window_none`,
+ :func:`numpy.blackman`, :func:`numpy.hamming`,
+ :func:`numpy.bartlett`, :func:`scipy.signal`,
+ :func:`scipy.signal.get_window`, etc. The default is
+ :func:`window_hanning`.
+
+ *noverlap*
+ The number of points of overlap between blocks. The default value
+ is 0 (no overlap).
+
+ Returns the tuple (*Pxy*, *freqs*).
+
 Refs:
- Bendat & Piersol -- Random Data: Analysis and Measurement
+ Bendat & Piersol -- Random Data: Analysis and Measurement
 Procedures, John Wiley & Sons (1986)
 """
 
From: <ry...@us...> - 2008-11-11 20:20:30
Revision: 6394
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6394&view=rev
Author: ryanmay
Date: 2008-11-11 20:20:27 +0000 (11 Nov 2008)
Log Message:
-----------
Make mlab.psd() call mlab.csd() instead of duplicating 95% of the code. Tweak csd() to check if x and y are the same and avoid duplicating the work if so.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 19:28:38 UTC (rev 6393)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 20:20:27 UTC (rev 6394)
@@ -259,54 +259,8 @@
 Bendat & Piersol -- Random Data: Analysis and Measurement
 Procedures, John Wiley & Sons (1986)
 """
- x = np.asarray(x) # make sure we're dealing with a numpy array
+ return csd(x, x, NFFT, Fs, detrend, window, noverlap, pad_to, sides)
 
- # zero pad x up to NFFT if it is shorter than NFFT
- if len(x)<NFFT:
- n = len(x)
- x = np.resize(x, (NFFT,)) # Can't use resize method.
- x[n:] = 0
-
- if pad_to is None:
- pad_to = NFFT
-
- # For real x, ignore the negative frequencies unless told otherwise
- if (sides == 'default' and np.iscomplexobj(x)) or sides == 'twosided':
- numFreqs = pad_to
- elif sides in ('default', 'onesided'):
- numFreqs = pad_to//2 + 1
- else:
- raise ValueError("sides must be one of: 'default', 'onesided', or "
- "'twosided'")
-
- if cbook.iterable(window):
- assert(len(window) == NFFT)
- windowVals = window
- else:
- windowVals = window(np.ones((NFFT,), x.dtype))
-
- step = NFFT - noverlap
- ind = range(0, len(x) - NFFT + 1, step)
- n = len(ind)
- Pxx = np.zeros((numFreqs,n), np.float_)
-
- # do the FFTs of the slices
- for i in range(n):
- thisX = x[ind[i]:ind[i]+NFFT]
- thisX = windowVals * detrend(thisX)
- fx = np.absolute(np.fft.fft(thisX, n=pad_to))**2
- Pxx[:,i] = fx[:numFreqs]
-
- if n>1:
- Pxx = Pxx.mean(axis=1)
- # Scale the spectrum by the norm of the window to compensate for
- # windowing loss; see Bendat & Piersol Sec 11.5.2
- Pxx /= (np.abs(windowVals)**2).sum()
-
- freqs = float(Fs) / pad_to * np.arange(numFreqs)
-
- return Pxx, freqs
-
 #Split out these keyword docs so that they can be used elsewhere
 kwdocd = dict()
 kwdocd['PSD'] ="""
@@ -387,15 +341,24 @@
 Procedures, John Wiley & Sons (1986)
 """
 
- x = np.asarray(x) # make sure we're dealing with a numpy array
- y = np.asarray(y) # make sure we're dealing with a numpy array
+ #The checks for if y is x are so that we can use csd() to implement
+ #psd() without doing extra work.
+ 
+ #Make sure we're dealing with a numpy array. If y and x were the same
+ #object to start with, keep them that way
+ do_psd = y is x
 
+ x = np.asarray(x)
+ if not do_psd:
+ y = np.asarray(y)
+
 # zero pad x and y up to NFFT if they are shorter than NFFT
 if len(x)<NFFT:
 n = len(x)
 x = np.resize(x, (NFFT,))
 x[n:] = 0
- if len(y)<NFFT:
+
+ if not do_psd and len(y)<NFFT:
 n = len(y)
 y = np.resize(y, (NFFT,))
 y[n:] = 0
@@ -427,10 +390,14 @@
 for i in range(n):
 thisX = x[ind[i]:ind[i]+NFFT]
 thisX = windowVals * detrend(thisX)
- thisY = y[ind[i]:ind[i]+NFFT]
- thisY = windowVals * detrend(thisY)
 fx = np.fft.fft(thisX, n=pad_to)
- fy = np.fft.fft(thisY, n=pad_to)
+
+ if do_psd:
+ fy = fx
+ else:
+ thisY = y[ind[i]:ind[i]+NFFT]
+ thisY = windowVals * detrend(thisY)
+ fy = np.fft.fft(thisY, n=pad_to)
 Pxy[:,i] = np.conjugate(fx[:numFreqs]) * fy[:numFreqs]
 
 # Scale the spectrum by the norm of the window to compensate for
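A sketch of the relationship this change establishes: psd() now forwards to csd() with y set to x, so the two calls below produce the same spectrum::

    import numpy as np
    from matplotlib import mlab

    x = np.random.randn(1024)

    Pxx, freqs = mlab.psd(x, NFFT=256, Fs=2)
    Pxy, freqs2 = mlab.csd(x, x, NFFT=256, Fs=2)

    # the cross spectrum of a signal with itself is its power spectrum
    print(np.allclose(Pxx, Pxy))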
From: <ry...@us...> - 2008-11-11 22:02:38
Revision: 6398
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6398&view=rev
Author: ryanmay
Date: 2008-11-11 22:02:34 +0000 (11 Nov 2008)
Log Message:
-----------
Update module docstring to include specgram().
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 21:45:15 UTC (rev 6397)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 22:02:34 UTC (rev 6398)
@@ -35,6 +35,8 @@
 :func:`rk4`
 A 4th order runge kutta integrator for 1D or ND systems
 
+:func:`specgram`
+ Spectrogram (power spectral density over segments of time)
 
 Miscellaneous functions
 -------------------------
From: <ry...@us...> - 2008-11-11 22:22:08
Revision: 6396
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6396&view=rev
Author: ryanmay
Date: 2008-11-11 21:32:29 +0000 (11 Nov 2008)
Log Message:
-----------
Factor out common core of psd(), csd(), and specgram() into _spectral_helper() function. This allows all of them to have the same calling signature and capabilities and to have the code in a single location.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 20:34:25 UTC (rev 6395)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月11日 21:32:29 UTC (rev 6396)
@@ -238,29 +238,78 @@
 a = y.mean() - b*x.mean()
 return y - (b*x + a)
 
-def psd(x, NFFT=256, Fs=2, detrend=detrend_none,
+#This is a helper function that implements the commonality between the
+#psd, csd, and spectrogram. It is *NOT* meant to be used outside of mlab
+def _spectral_helper(x, y, NFFT=256, Fs=2, detrend=detrend_none,
 window=window_hanning, noverlap=0, pad_to=None, sides='default'):
- """
- The power spectral density by Welch's average periodogram method.
- The vector *x* is divided into *NFFT* length blocks. Each block
- is detrended by the function *detrend* and windowed by the function
- *window*. *noverlap* gives the length of the overlap between blocks.
- The absolute(fft(block))**2 of each segment are averaged to compute
- *Pxx*, with a scaling to correct for power loss due to windowing.
+ #The checks for if y is x are so that we can use the same function to
+ #implement the core of psd(), csd(), and spectrogram() without doing
+ #extra calculations. We return the unaveraged Pxy, freqs, and t.
+ 
+ #Make sure we're dealing with a numpy array. If y and x were the same
+ #object to start with, keep them that way
+ same_data = y is x
 
- If len(*x*) < *NFFT*, it will be zero padded to *NFFT*.
+ x = np.asarray(x)
+ if not same_data:
+ y = np.asarray(y)
 
- *x*
- Array or sequence containing the data
- %(PSD)s
- Returns the tuple (*Pxx*, *freqs*).
+ # zero pad x and y up to NFFT if they are shorter than NFFT
+ if len(x)<NFFT:
+ n = len(x)
+ x = np.resize(x, (NFFT,))
+ x[n:] = 0
 
- Refs:
- Bendat & Piersol -- Random Data: Analysis and Measurement
- Procedures, John Wiley & Sons (1986)
- """
- return csd(x, x, NFFT, Fs, detrend, window, noverlap, pad_to, sides)
+ if not same_data and len(y)<NFFT:
+ n = len(y)
+ y = np.resize(y, (NFFT,))
+ y[n:] = 0
 
+ if pad_to is None:
+ pad_to = NFFT
+
+ # For real x, ignore the negative frequencies unless told otherwise
+ if (sides == 'default' and np.iscomplexobj(x)) or sides == 'twosided':
+ numFreqs = pad_to
+ elif sides in ('default', 'onesided'):
+ numFreqs = pad_to//2 + 1
+ else:
+ raise ValueError("sides must be one of: 'default', 'onesided', or "
+ "'twosided'")
+
+ if cbook.iterable(window):
+ assert(len(window) == NFFT)
+ windowVals = window
+ else:
+ windowVals = window(np.ones((NFFT,), x.dtype))
+
+ step = NFFT - noverlap
+ ind = np.arange(0, len(x) - NFFT + 1, step)
+ n = len(ind)
+ Pxy = np.zeros((numFreqs,n), np.complex_)
+
+ # do the ffts of the slices
+ for i in range(n):
+ thisX = x[ind[i]:ind[i]+NFFT]
+ thisX = windowVals * detrend(thisX)
+ fx = np.fft.fft(thisX, n=pad_to)
+
+ if same_data:
+ fy = fx
+ else:
+ thisY = y[ind[i]:ind[i]+NFFT]
+ thisY = windowVals * detrend(thisY)
+ fy = np.fft.fft(thisY, n=pad_to)
+ Pxy[:,i] = np.conjugate(fx[:numFreqs]) * fy[:numFreqs]
+
+ # Scale the spectrum by the norm of the window to compensate for
+ # windowing loss; see Bendat & Piersol Sec 11.5.2
+ Pxy /= (np.abs(windowVals)**2).sum()
+ t = 1./Fs * (ind + NFFT / 2.)
+ freqs = float(Fs) / pad_to * np.arange(numFreqs)
+ 
+ return Pxy, freqs, t
+
 #Split out these keyword docs so that they can be used elsewhere
 kwdocd = dict()
 kwdocd['PSD'] ="""
@@ -315,6 +364,31 @@
 for complex data. 'one' forces the return of a one-sided PSD, while
 'both' forces two-sided.
 """
+
+def psd(x, NFFT=256, Fs=2, detrend=detrend_none,
+ window=window_hanning, noverlap=0, pad_to=None, sides='default'):
+ """
+ The power spectral density by Welch's average periodogram method.
+ The vector *x* is divided into *NFFT* length blocks. Each block
+ is detrended by the function *detrend* and windowed by the function
+ *window*. *noverlap* gives the length of the overlap between blocks.
+ The absolute(fft(block))**2 of each segment are averaged to compute
+ *Pxx*, with a scaling to correct for power loss due to windowing.
+
+ If len(*x*) < *NFFT*, it will be zero padded to *NFFT*.
+
+ *x*
+ Array or sequence containing the data
+ %(PSD)s
+ Returns the tuple (*Pxx*, *freqs*).
+
+ Refs:
+ Bendat & Piersol -- Random Data: Analysis and Measurement
+ Procedures, John Wiley & Sons (1986)
+ """
+ Pxx,freqs = csd(x, x, NFFT, Fs, detrend, window, noverlap, pad_to, sides)
+ return Pxx.real,freqs
+
 psd.__doc__ = psd.__doc__ % kwdocd
 
 def csd(x, y, NFFT=256, Fs=2, detrend=detrend_none,
@@ -340,93 +414,28 @@
 Bendat & Piersol -- Random Data: Analysis and Measurement
 Procedures, John Wiley & Sons (1986)
 """
+ Pxy, freqs, t = _spectral_helper(x, y, NFFT, Fs, detrend, window,
+ noverlap, pad_to, sides)
 
- #The checks for if y is x are so that we can use csd() to implement
- #psd() without doing extra work.
- 
- #Make sure we're dealing with a numpy array. If y and x were the same
- #object to start with, keep them that way
- do_psd = y is x
-
- x = np.asarray(x)
- if not do_psd:
- y = np.asarray(y)
-
- # zero pad x and y up to NFFT if they are shorter than NFFT
- if len(x)<NFFT:
- n = len(x)
- x = np.resize(x, (NFFT,))
- x[n:] = 0
-
- if not do_psd and len(y)<NFFT:
- n = len(y)
- y = np.resize(y, (NFFT,))
- y[n:] = 0
-
- if pad_to is None:
- pad_to = NFFT
-
- # For real x, ignore the negative frequencies unless told otherwise
- if (sides == 'default' and np.iscomplexobj(x)) or sides == 'twosided':
- numFreqs = pad_to
- elif sides in ('default', 'onesided'):
- numFreqs = pad_to//2 + 1
- else:
- raise ValueError("sides must be one of: 'default', 'onesided', or "
- "'twosided'")
-
- if cbook.iterable(window):
- assert(len(window) == NFFT)
- windowVals = window
- else:
- windowVals = window(np.ones((NFFT,), x.dtype))
-
- step = NFFT - noverlap
- ind = range(0, len(x) - NFFT + 1, step)
- n = len(ind)
- Pxy = np.zeros((numFreqs,n), np.complex_)
-
- # do the ffts of the slices
- for i in range(n):
- thisX = x[ind[i]:ind[i]+NFFT]
- thisX = windowVals * detrend(thisX)
- fx = np.fft.fft(thisX, n=pad_to)
-
- if do_psd:
- fy = fx
- else:
- thisY = y[ind[i]:ind[i]+NFFT]
- thisY = windowVals * detrend(thisY)
- fy = np.fft.fft(thisY, n=pad_to)
- Pxy[:,i] = np.conjugate(fx[:numFreqs]) * fy[:numFreqs]
-
- # Scale the spectrum by the norm of the window to compensate for
- # windowing loss; see Bendat & Piersol Sec 11.5.2
- if n>1:
+ if len(Pxy.shape) == 2 and Pxy.shape[1]>1:
 Pxy = Pxy.mean(axis=1)
-
- Pxy /= (np.abs(windowVals)**2).sum()
- freqs = float(Fs) / pad_to * np.arange(numFreqs)
 return Pxy, freqs
 
 csd.__doc__ = csd.__doc__ % kwdocd
 
 def specgram(x, NFFT=256, Fs=2, detrend=detrend_none,
- window=window_hanning, noverlap=128):
+ window=window_hanning, noverlap=128, pad_to=None,
+ sides='default'):
 """
 Compute a spectrogram of data in *x*. Data are split into *NFFT*
 length segements and the PSD of each section is computed. The
 windowing function *window* is applied to each segment, and the
 amount of overlap of each segment is specified with *noverlap*.
 
- *window* can be a function or a vector of length *NFFT*. To create
- window vectors see :func:`numpy.blackman`, :func:`numpy.hamming`,
- :func:`numpy.bartlett`, :func:`scipy.signal`,
- :func:`scipy.signal.get_window` etc.
-
- If *x* is real (i.e. non-complex) only the positive spectrum is
- given. If *x* is complex then the complete spectrum is given.
-
+ If *x* is real (i.e. non-complex) only the spectrum of the positive
+ frequencie is returned. If *x* is complex then the complete
+ spectrum is returned.
+ %(PSD)s
 Returns a tuple (*Pxx*, *freqs*, *t*):
 
 - *Pxx*: 2-D array, columns are the periodograms of
@@ -444,56 +453,21 @@
 the mean of the segment periodograms; and in not returning
 times.
 """
- x = np.asarray(x)
- assert(NFFT>noverlap)
- #if np.log(NFFT)/np.log(2) != int(np.log(NFFT)/np.log(2)):
- # raise ValueError, 'NFFT must be a power of 2'
- if NFFT % 2:
- raise ValueError('NFFT must be even')
+ assert(NFFT > noverlap)
 
+ Pxx, freqs, t = _spectral_helper(x, x, NFFT, Fs, detrend, window,
+ noverlap, pad_to, sides)
+ Pxx = Pxx.real #Needed since helper implements generically
 
- # zero pad x up to NFFT if it is shorter than NFFT
- if len(x)<NFFT:
- n = len(x)
- x = np.resize(x, (NFFT,))
- x[n:] = 0
-
-
- # for real x, ignore the negative frequencies
- if np.iscomplexobj(x):
- numFreqs=NFFT
- else:
- numFreqs = NFFT//2+1
-
- if cbook.iterable(window):
- assert(len(window) == NFFT)
- windowVals = np.asarray(window)
- else:
- windowVals = window(np.ones((NFFT,),x.dtype))
- step = NFFT-noverlap
- ind = np.arange(0,len(x)-NFFT+1,step)
- n = len(ind)
- Pxx = np.zeros((numFreqs,n), np.float_)
- # do the ffts of the slices
-
- for i in range(n):
- thisX = x[ind[i]:ind[i]+NFFT]
- thisX = windowVals*detrend(thisX)
- fx = np.absolute(np.fft.fft(thisX))**2
- Pxx[:,i] = fx[:numFreqs]
- # Scale the spectrum by the norm of the window to compensate for
- # windowing loss; see Bendat & Piersol Sec 11.5.2
- Pxx /= (np.abs(windowVals)**2).sum()
- t = 1/Fs*(ind+NFFT/2)
- freqs = Fs/NFFT*np.arange(numFreqs)
-
- if np.iscomplexobj(x):
+ if (np.iscomplexobj(x) and sides == 'default') or sides == 'twosided':
 # center the frequency range at zero
 freqs = np.concatenate((freqs[NFFT/2:]-Fs,freqs[:NFFT/2]))
 Pxx = np.concatenate((Pxx[NFFT/2:,:],Pxx[:NFFT/2,:]),0)
 
 return Pxx, freqs, t
 
+specgram.__doc__ = specgram.__doc__ % kwdocd
+
 _coh_error = """Coherence is calculated by averaging over *NFFT*
 length segments. Your signal is too short for your choice of *NFFT*.
 """
From: <jd...@us...> - 2008-11-25 19:56:44
Revision: 6450
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6450&view=rev
Author: jdh2358
Date: 2008-11-25 19:56:39 +0000 (25 Nov 2008)
Log Message:
-----------
removed comment from invalid shared axis merge
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月25日 19:33:05 UTC (rev 6449)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年11月25日 19:56:39 UTC (rev 6450)
@@ -2460,8 +2460,10 @@
 
 # Get header and remove invalid characters
 needheader = names is None
+
 if needheader:
 for row in reader:
+ #print 'csv2rec', row
 if len(row) and row[0].startswith(comments):
 continue
 headers = row
From: <ry...@us...> - 2008-12-01 19:07:12
Revision: 6464
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6464&view=rev
Author: ryanmay
Date: 2008-12-01 19:07:08 +0000 (01 Dec 2008)
Log Message:
-----------
Typo in docstring.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年12月01日 16:27:15 UTC (rev 6463)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年12月01日 19:07:08 UTC (rev 6464)
@@ -351,7 +351,7 @@
 is 0 (no overlap).
 
 *pad_to*: integer
- The number of points to which the data segment is padd when
+ The number of points to which the data segment is padded when
 performing the FFT. This can be different from *NFFT*, which
 specifies the number of data points used. While not increasing
 the actual resolution of the psd (the minimum distance between
From: <jd...@us...> - 2008-12-17 14:57:32
Revision: 6648
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=6648&view=rev
Author: jdh2358
Date: 2008-12-17 14:57:28 +0000 (17 Dec 2008)
Log Message:
-----------
added some threshold crossing helper funcs to mlab
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2008年12月17日 14:55:42 UTC (rev 6647)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2008年12月17日 14:57:28 UTC (rev 6648)
@@ -54,6 +54,16 @@
 yourself stranded without scipy (and the far superior
 scipy.integrate tools)
 
+:meth:`contiguous_regions`
+ return the indices of the regions spanned by some logical mask
+
+:meth:`cross_from_below`
+ return the indices where a 1D array crosses a threshold from below
+
+:meth:`cross_from_above`
+ return the indices where a 1D array crosses a threshold from above
+
+
 record array helper functions
 -------------------------------
 
@@ -3236,6 +3246,63 @@
 boundaries.append((in_region, i+1))
 return boundaries
 
+
+def cross_from_below(x, threshold):
+ """
+ return the indices into *x* where *x* crosses some threshold from
+ below, eg the i's where::
+
+ x[i-1]<threshold and x[i]>=threshold
+
+ Example code::
+
+ import matplotlib.pyplot as plt
+
+ t = np.arange(0.0, 2.0, 0.1)
+ s = np.sin(2*np.pi*t)
+
+ fig = plt.figure()
+ ax = fig.add_subplot(111)
+ ax.plot(t, s, '-o')
+ ax.axhline(0.5)
+ ax.axhline(-0.5)
+
+ ind = cross_from_below(s, 0.5)
+ ax.vlines(t[ind], -1, 1)
+
+ ind = cross_from_above(s, -0.5)
+ ax.vlines(t[ind], -1, 1)
+
+ plt.show()
+
+ .. seealso::
+
+ :func:`cross_from_above` and :func:`contiguous_regions`
+
+ """
+ x = np.asarray(x)
+ threshold = threshold
+ ind = np.nonzero( (x[:-1]<threshold) & (x[1:]>=threshold))[0]
+ if len(ind): return ind+1
+ else: return ind
+
+def cross_from_above(x, threshold):
+ """
+ return the indices into *x* where *x* crosses some threshold from
+ below, eg the i's where::
+
+ x[i-1]>threshold and x[i]<=threshold
+
+ .. seealso::
+
+ :func:`cross_from_below` and :func:`contiguous_regions`
+
+ """
+ x = np.asarray(x)
+ ind = np.nonzero( (x[:-1]>=threshold) & (x[1:]<threshold))[0]
+ if len(ind): return ind+1
+ else: return ind
+
 ##################################################
 # Vector and path length geometry calculations
 ##################################################
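A tiny numeric check of the two new helpers, with values chosen so the crossings are easy to verify by hand::

    import numpy as np
    from matplotlib import mlab

    s = np.array([0.0, 0.4, 0.8, 0.6, 0.2])

    print(mlab.cross_from_below(s, 0.5))   # [2]: s[1] < 0.5 <= s[2]
    print(mlab.cross_from_above(s, 0.5))   # [4]: s[3] >= 0.5 > s[4]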
From: <ef...@us...> - 2009-05-06 23:03:06
Revision: 7088
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7088&view=rev
Author: efiring
Date: 2009-05-06 23:02:57 +0000 (06 May 2009)
Log Message:
-----------
Spelling correction and other minor cleanups in mlab
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年05月06日 20:52:55 UTC (rev 7087)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年05月06日 23:02:57 UTC (rev 7088)
@@ -175,14 +175,7 @@
 import matplotlib.nxutils as nxutils
 import matplotlib.cbook as cbook
 
-# set is a new builtin function in 2.4; delete the following when
-# support for 2.3 is dropped.
-try:
- set
-except NameError:
- from sets import Set as set
 
-
 def linspace(*args, **kw):
 warnings.warn("use numpy.linspace", DeprecationWarning)
 return np.linspace(*args, **kw)
@@ -617,12 +610,10 @@
 :func:`polyval`
 polyval function
 """
- warnings.warn("use numpy.poyfit", DeprecationWarning)
+ warnings.warn("use numpy.polyfit", DeprecationWarning)
 return np.polyfit(*args, **kwargs)
 
 
-
-
 def polyval(*args, **kwargs):
 """
 *y* = polyval(*p*, *x*)
@@ -899,14 +890,8 @@
 """
 warnings.warn("Use numpy.trapz(y,x) instead of trapz(x,y)", DeprecationWarning)
 return np.trapz(y, x)
- #if len(x)!=len(y):
- # raise ValueError, 'x and y must have the same length'
- #if len(x)<2:
- # raise ValueError, 'x and y must have > 1 element'
- #return np.sum(0.5*np.diff(x)*(y[1:]+y[:-1]))
 
 
-
 def longest_contiguous_ones(x):
 """
 Return the indices of the longest stretch of contiguous ones in *x*,
From: <sa...@us...> - 2009-05-28 18:02:54
Revision: 7159
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7159&view=rev
Author: sameerd
Date: 2009-05-28 18:02:49 +0000 (28 May 2009)
Log Message:
-----------
Updated the record array helper functions to create an empty record array where necessary
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年05月28日 17:50:38 UTC (rev 7158)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年05月28日 18:02:49 UTC (rev 7159)
@@ -2047,18 +2047,6 @@
 except TypeError: return False
 else: return b
 
-def rec_view(rec):
- """
- Return a view of an ndarray as a recarray
-
- .. seealso::
-
- http://projects.scipy.org/pipermail/numpy-discussion/2008-August/036429.html
- Motivation for this function
- """
- return rec.view(np.recarray)
- #return rec.view(dtype=(np.record, rec.dtype), type=np.recarray)
-
 def rec_append_field(rec, name, arr, dtype=None):
 """
 Return a new record array with field name populated with data from
@@ -2094,12 +2082,12 @@
 raise ValueError, "dtypes must be None, a single dtype or a list"
 
 newdtype = np.dtype(rec.dtype.descr + zip(names, dtypes))
- newrec = np.empty(rec.shape, dtype=newdtype)
+ newrec = np.recarray(rec.shape, dtype=newdtype)
 for field in rec.dtype.fields:
 newrec[field] = rec[field]
 for name, arr in zip(names, arrs):
 newrec[name] = arr
- return rec_view(newrec)
+ return newrec
 
 
 def rec_drop_fields(rec, names):
@@ -2113,11 +2101,11 @@
 newdtype = np.dtype([(name, rec.dtype[name]) for name in rec.dtype.names
 if name not in names])
 
- newrec = np.empty(Nr, dtype=newdtype)
+ newrec = np.recarray(rec.shape, dtype=newdtype)
 for field in newdtype.names:
 newrec[field] = rec[field]
 
- return rec_view(newrec)
+ return newrec
 
 
 
@@ -2279,7 +2267,7 @@
 r2desc = [(mapped_r2field(desc[0]), desc[1]) for desc in r2.dtype.descr if desc[0] not in key]
 newdtype = np.dtype(keydesc + r1desc + r2desc)
 
- newrec = np.empty(common_len + left_len + right_len, dtype=newdtype)
+ newrec = np.recarray((common_len + left_len + right_len,), dtype=newdtype)
 
 if defaults is not None:
 for thiskey in defaults:
@@ -2314,7 +2302,7 @@
 
 newrec.sort(order=key)
 
- return rec_view(newrec)
+ return newrec
 
 
 def csv2rec(fname, comments='#', skiprows=0, checkrows=0, delimiter=',',
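A minimal sketch of the allocation change: the helpers now build a numpy.recarray directly rather than viewing an empty ndarray (field names are illustrative)::

    import numpy as np

    dt = np.dtype([('name', '|S3'), ('value', float)])
    newrec = np.recarray((2,), dtype=dt)   # replaces np.empty(...) + rec_view(...)
    newrec['name'] = ['abc', 'xyz']
    newrec['value'] = [1.0, 2.0]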
From: <jd...@us...> - 2009-06-09 16:47:51
Revision: 7205
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7205&view=rev
Author: jdh2358
Date: 2009-06-09 16:47:46 +0000 (09 Jun 2009)
Log Message:
-----------
add rec_keep_fields w/ support for rec2txt
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年06月08日 20:49:29 UTC (rev 7204)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年06月09日 16:47:46 UTC (rev 7205)
@@ -2107,8 +2107,22 @@
 
 return newrec
 
+def rec_keep_fields(rec, names):
+ """
+ Return a new numpy record array with only fields listed in names
+ """
 
+ if cbook.is_string_like(names):
+ names = names.split(',')
+ 
+ arrays = []
+ for name in names:
+ arrays.append(rec[name])
 
+ return np.rec.fromarrays(arrays, names=names)
+
+
+
 def rec_groupby(r, groupby, stats):
 """
 *r* is a numpy record array
@@ -2699,7 +2713,7 @@
 format.fmt = '%r'
 return format
 
-def rec2txt(r, header=None, padding=3, precision=3):
+def rec2txt(r, header=None, padding=3, precision=3, fields=None):
 """
 Returns a textual representation of a record array.
 
@@ -2714,6 +2728,10 @@
 list of integers to apply precision individually.
 Precision for non-floats is simply ignored.
 
+ *fields* : if not None, a list of field names to print. fields
+ can be a list of strings like ['field1', 'field2'] or a single
+ comma separated string like 'field1,field2'
+
 Example::
 
 precision=[0,2,3]
@@ -2725,6 +2743,9 @@
 XYZ 6.32 -0.076
 """
 
+ if fields is not None:
+ r = rec_keep_fields(r, fields)
+ 
 if cbook.is_numlike(precision):
 precision = [precision]*len(r.dtype)
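A short sketch of the new fields keyword (record contents are illustrative)::

    import numpy as np
    from matplotlib import mlab

    r = np.rec.fromrecords([('ABC', 12.54, 10), ('XYZ', 6.32, 20)],
                           names='name,price,count')

    # print only two of the three columns; a comma separated string works too
    print(mlab.rec2txt(r, fields=['name', 'price']))
    print(mlab.rec2txt(r, fields='name,price'))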
 
From: <sa...@us...> - 2009-09-14 19:32:40
Revision: 7760
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7760&view=rev
Author: sameerd
Date: 2009-09-14 19:32:27 +0000 (14 Sep 2009)
Log Message:
-----------
added jointype == "inner" to mlab.recs_join
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年09月14日 19:16:49 UTC (rev 7759)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年09月14日 19:32:27 UTC (rev 7760)
@@ -1893,23 +1893,29 @@
 
 return newrec
 
-def recs_join(key, name, recs,missing=0.):
+def recs_join(key, name, recs, jointype='outer', missing=0.):
 """
- Join a sequence of record arrays on key
+ Join a sequence of record arrays on single column key.
 
+ This function only joins a single column of the multiple record arrays
+
 *key*
 is the column name that acts as a key
 
 *name*
- is the name that we want to join
+ is the name of the column that we want to join
 
+ *recs*
+ is a list of record arrays to join
+
+ *jointype*
+ is a string 'inner' or 'outer'
+
 *missing"
- is what the missing fields are replaced by
+ is what any missing field is replaced by
 
- *recarrays*
- is a list of record arrays to join
 
- returns a record array with columns [rowkey, name1, name2, ... namen]
+ returns a record array with columns [rowkey, name1, name2, ... namen].
 
 Example::
 
@@ -1917,12 +1923,21 @@
 
 """
 results = []
+ aligned_iters = cbook.align_iterators(operator.attrgetter(key), *[iter(r) for r in recs])
+
 def extract(r):
 if r is None: return missing
 else: return r[name]
 
- for rowkey, row in cbook.align_iterators(operator.attrgetter(key), *[iter(r) for r in recs]):
- results.append([rowkey] + map(extract, row))
+
+ if jointype == "outer":
+ for rowkey, row in aligned_iters:
+ results.append([rowkey] + map(extract, row))
+ elif jointype == "inner":
+ for rowkey, row in aligned_iters:
+ if None not in row: # throw out any Nones
+ results.append([rowkey] + map(extract, row))
+
 names = ",".join([key] + ["%s%d" % (name, d) for d in range(len(recs))])
 return np.rec.fromrecords(results, names=names)
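A sketch of the two join types on a single shared column (data invented for illustration)::

    import numpy as np
    from matplotlib import mlab

    r1 = np.rec.fromrecords([(1, 10.), (2, 20.), (3, 30.)], names='date,price')
    r2 = np.rec.fromrecords([(2, 21.), (3, 31.), (4, 41.)], names='date,price')

    # 'outer' keeps every key and fills gaps with *missing*;
    # 'inner' keeps only keys present in every input array
    outer = mlab.recs_join('date', 'price', [r1, r2], jointype='outer', missing=0.)
    inner = mlab.recs_join('date', 'price', [r1, r2], jointype='inner')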
 
From: <jd...@us...> - 2009-11-03 16:00:25
Revision: 7920
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7920&view=rev
Author: jdh2358
Date: 2009-11-03 16:00:13 +0000 (03 Nov 2009)
Log Message:
-----------
support postfixes in recs_join
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月03日 15:46:14 UTC (rev 7919)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月03日 16:00:13 UTC (rev 7920)
@@ -1893,7 +1893,7 @@
 
 return newrec
 
-def recs_join(key, name, recs, jointype='outer', missing=0.):
+def recs_join(key, name, recs, jointype='outer', missing=0., postfixes=None):
 """
 Join a sequence of record arrays on single column key.
 
@@ -1911,11 +1911,15 @@
 *jointype*
 is a string 'inner' or 'outer'
 
- *missing"
+ *missing*
 is what any missing field is replaced by
 
+ *postfixes*
+ if not None, a len recs sequence of postfixes
 
- returns a record array with columns [rowkey, name1, name2, ... namen].
+ returns a record array with columns [rowkey, name0, name1, ... namen-1].
+ or if postfixes [PF0, PF1, ..., PFN-1] are supplied,
+ [rowkey, namePF0, namePF1, ... namePFN-1].
 
 Example::
 
@@ -1938,7 +1942,9 @@
 if None not in row: # throw out any Nones
 results.append([rowkey] + map(extract, row))
 
- names = ",".join([key] + ["%s%d" % (name, d) for d in range(len(recs))])
+ if postfixes is None:
+ postfixes = ['%d'%i for i in range(len(recs))]
+ names = ",".join([key] + ["%s%s" % (name, postfix) for postfix in postfixes])
 return np.rec.fromrecords(results, names=names)
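A sketch of the new keyword: the postfixes (here '_bid' and '_ask', purely illustrative) replace the default numeric suffixes in the joined column names::

    import numpy as np
    from matplotlib import mlab

    r1 = np.rec.fromrecords([(1, 10.), (2, 20.)], names='date,price')
    r2 = np.rec.fromrecords([(1, 11.), (2, 21.)], names='date,price')

    joined = mlab.recs_join('date', 'price', [r1, r2],
                            postfixes=['_bid', '_ask'])
    print(joined.dtype.names)   # ('date', 'price_bid', 'price_ask')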
 
 
From: <jd...@us...> - 2009-11-03 20:27:31
Revision: 7926
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7926&view=rev
Author: jdh2358
Date: 2009-11-03 20:27:23 +0000 (03 Nov 2009)
Log Message:
-----------
added PCA helper class to mlab and deprecated prepca
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月03日 17:57:52 UTC (rev 7925)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月03日 20:27:23 UTC (rev 7926)
@@ -759,6 +759,9 @@
 
 def prepca(P, frac=0):
 """
+
+ WARNING: this function is deprecated -- please see class PCA instead
+ 
 Compute the principal components of *P*. *P* is a (*numVars*,
 *numObs*) array. *frac* is the minimum fraction of variance that a
 component must contain to be included.
@@ -778,6 +781,7 @@
 R13 Neural Network Toolbox but is not found in later versions;
 its successor seems to be called "processpcs".
 """
+ warnings.warn('This function is deprecated -- see class PCA instead')
 U,s,v = np.linalg.svd(P)
 varEach = s**2/P.shape[1]
 totVar = varEach.sum()
@@ -789,6 +793,83 @@
 Pcomponents = np.dot(Trans,P)
 return Pcomponents, Trans, fracVar[ind]
 
+
+class PCA:
+ def __init__(self, a):
+ """
+ compute the SVD of a and store data for PCA. Use project to
+ project the data onto a reduced set of dimensions
+
+ Inputs:
+
+ *a*: a numobservations x numdims array
+
+ Attrs:
+
+ *a* : a centered, unit-sigma version of the input a
+
+ *numrows*, *numcols*: the dimensions of a
+
+ *mu* : a numdims array of means of a
+
+ *sigma* : a numdims array of standard deviations of a
+
+ *fracs* : the proportion of variance of each of the principal components
+
+ *Wt* : the weight vector for projecting a numdims point or array into PCA space
+
+ *Y* : a projected into PCA space
+
+ """
+ n, m = a.shape
+ if n<m:
+ raise RuntimeError('we assume data in a is organized with numrows>numcols')
+
+ self.numrows, self.numcols = n, m
+ self.mu = a.mean(axis=0)
+ self.sigma = a.std(axis=0)
+
+ a = self.center(a)
+
+ self.a = a
+
+ U, s, Vh = np.linalg.svd(a, full_matrices=False)
+
+
+ Y = np.dot(Vh, a.T).T
+
+ vars = s**2/float(len(s))
+ self.fracs = vars/vars.sum()
+
+
+ self.Wt = Vh
+ self.Y = Y
+
+
+ def project(self, x, minfrac=0.):
+ 'project x onto the principal axes, dropping any axes where fraction of variance<minfrac'
+ x = np.asarray(x)
+
+ ndims = len(x.shape)
+
+ if (x.shape[-1]!=self.numcols):
+ raise ValueError('Expected an array with dims[-1]==%d'%self.numcols)
+
+
+ Y = np.dot(self.Wt, self.center(x).T).T
+ mask = self.fracs>=minfrac
+ if ndims==2:
+ Yreduced = Y[:,mask]
+ else:
+ Yreduced = Y[mask]
+ return Yreduced
+
+
+
+ def center(self, x):
+ 'center the data using the mean and sigma from training set a'
+ return (x - self.mu)/self.sigma
+
 def prctile(x, p = (0.0, 25.0, 50.0, 75.0, 100.0)):
 """
 Return the percentiles of *x*. *p* can either be a sequence of
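
A small usage sketch of the new PCA helper, assuming it is importable as matplotlib.mlab.PCA in a matplotlib of this era (the class was removed from mlab much later); the synthetic two-column data set is purely illustrative.

    import numpy as np
    import matplotlib.mlab as mlab

    np.random.seed(0)
    x = np.random.randn(100)
    # 100 observations, 2 dims; the second column is mostly a scaled copy of the first
    a = np.column_stack([x, 2.0 * x + 0.1 * np.random.randn(100)])

    p = mlab.PCA(a)
    print(p.fracs)            # fraction of variance carried by each component
    print(p.Y.shape)          # (100, 2): `a` projected into PCA space
    print(p.project(a[0]))    # project a single observation

    # keep only the components that explain at least 5% of the variance
    reduced = p.project(a, minfrac=0.05)
    print(reduced.shape)      # one column here, since the second component carries almost no variance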
From: <js...@us...> - 2009年11月14日 15:57:56
Revision: 7965
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=7965&view=rev
Author: jswhit
Date: 2009年11月14日 15:57:46 +0000 (2009年11月14日)
Log Message:
-----------
fix bug in griddata that occurs when mask is a scalar boolean (found by James Conners)
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月13日 19:22:52 UTC (rev 7964)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年11月14日 15:57:46 UTC (rev 7965)
@@ -2687,9 +2687,11 @@
 raise TypeError("inputs x,y,z must all be 1D arrays of the same length")
 # remove masked points.
 if hasattr(z,'mask'):
- x = x.compress(z.mask == False)
- y = y.compress(z.mask == False)
- z = z.compressed()
+ # only compress if the mask is an element-wise array, not a scalar boolean.
+ if z.mask.ndim:
+ x = x.compress(z.mask == False)
+ y = y.compress(z.mask == False)
+ z = z.compressed()
 if _use_natgrid: # use natgrid toolkit if available.
 if interp != 'nn':
 raise ValueError("only natural neighor interpolation"
From: <as...@us...> - 2009年12月11日 00:59:44
Revision: 8021
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=8021&view=rev
Author: astraw
Date: 2009年12月11日 00:59:34 +0000 (2009年12月11日)
Log Message:
-----------
rec2csv raises explicit error when recarray ndim not 1
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2009年12月11日 00:59:25 UTC (rev 8020)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2009年12月11日 00:59:34 UTC (rev 8021)
@@ -2598,6 +2598,9 @@
 return func(val)
 return newfunc
 
+ if r.ndim != 1:
+ raise ValueError('rec2csv only operates on 1 dimensional recarrays')
+
 formatd = get_formatd(r, formatd)
 funcs = []
 for i, name in enumerate(r.dtype.names):
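
A minimal sketch of the new guard, assuming this era's mlab.rec2csv API (the helper was removed from later matplotlib releases); the file names are illustrative.

    import numpy as np
    import matplotlib.mlab as mlab

    r = np.rec.fromrecords([(1, 2.0), (3, 4.0)], names='a,b')
    mlab.rec2csv(r, 'ok.csv')                 # 1-D recarray: written as before

    try:
        mlab.rec2csv(r.reshape(2, 1), 'bad.csv')
    except ValueError as err:
        print(err)                            # rec2csv only operates on 1 dimensional recarrays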
From: <ry...@us...> - 2010年11月17日 04:13:37
Revision: 8803
 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=8803&view=rev
Author: ryanmay
Date: 2010年11月17日 04:13:31 +0000 (2010年11月17日)
Log Message:
-----------
Some cosmetic changes to _spectral_helper(). Biggest change is to update the documentation to indicate that NFFT should *not* be used for zero padding, otherwise scaling will be incorrect.
Modified Paths:
--------------
 trunk/matplotlib/lib/matplotlib/mlab.py
Modified: trunk/matplotlib/lib/matplotlib/mlab.py
===================================================================
--- trunk/matplotlib/lib/matplotlib/mlab.py	2010年11月15日 10:40:58 UTC (rev 8802)
+++ trunk/matplotlib/lib/matplotlib/mlab.py	2010年11月17日 04:13:31 UTC (rev 8803)
@@ -245,12 +245,6 @@
 raise ValueError("sides must be one of: 'default', 'onesided', or "
 "'twosided'")
 
- # MATLAB divides by the sampling frequency so that density function
- # has units of dB/Hz and can be integrated by the plotted frequency
- # values. Perform the same scaling here.
- if scale_by_freq:
- scaling_factor /= Fs
-
 if cbook.iterable(window):
 assert(len(window) == NFFT)
 windowVals = window
@@ -260,7 +254,7 @@
 step = NFFT - noverlap
 ind = np.arange(0, len(x) - NFFT + 1, step)
 n = len(ind)
- Pxy = np.zeros((numFreqs,n), np.complex_)
+ Pxy = np.zeros((numFreqs, n), np.complex_)
 
 # do the ffts of the slices
 for i in range(n):
@@ -278,16 +272,18 @@
 
 # Scale the spectrum by the norm of the window to compensate for
 # windowing loss; see Bendat & Piersol Sec 11.5.2.
- Pxy *= 1 / (np.abs(windowVals)**2).sum()
+ Pxy /= (np.abs(windowVals)**2).sum()
 
 # Also include scaling factors for one-sided densities and dividing by the
 # sampling frequency, if desired. Scale everything, except the DC component
 # and the NFFT/2 component:
 Pxy[1:-1] *= scaling_factor
 
- #But do scale those components by Fs, if required
+ # MATLAB divides by the sampling frequency so that density function
+ # has units of dB/Hz and can be integrated by the plotted frequency
+ # values. Perform the same scaling here.
 if scale_by_freq:
- Pxy[[0,-1]] /= Fs
+ Pxy /= Fs
 
 t = 1./Fs * (ind + NFFT / 2.)
 freqs = float(Fs) / pad_to * np.arange(numFreqs)
@@ -306,6 +302,8 @@
 *NFFT*: integer
 The number of data points used in each block for the FFT.
 Must be even; a power of 2 is most efficient. The default value is 256.
+ This should *NOT* be used to get zero padding, or the scaling of the
+ result will be incorrect. Use *pad_to* for this instead.
 
 *Fs*: scalar
 The sampling frequency (samples per time unit). It is used
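
A hedged sketch of what the amended docstring is getting at, exercised through mlab.psd (which goes through _spectral_helper); the signal and the NFFT/pad_to values are illustrative only.

    import numpy as np
    import matplotlib.mlab as mlab

    fs = 1000.0
    t = np.arange(0.0, 1.0, 1.0 / fs)
    sig = np.sin(2 * np.pi * 100.0 * t)

    # Right way to get a denser frequency grid: keep NFFT at the block size and
    # request zero padding via pad_to, so the window and Fs scaling stay correct.
    pxx_ok, freqs_ok = mlab.psd(sig, NFFT=256, pad_to=1024, Fs=fs)

    # Inflating NFFT itself changes the block length and hence the scaling,
    # which is what the docstring now warns against when all you actually
    # want is zero padding.
    pxx_long, freqs_long = mlab.psd(sig, NFFT=1024, Fs=fs)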