In [22]:
%matplotlib inline

In [29]:
import pylab
import numpy as np
from scipy import stats
from sklearn.mixture import GMM
from astroML.resample import jackknife
from astroML.plotting import hist

In [9]:
np.random.seed(0)
X = np.random.normal(size=(10000, 1)) # 100 points
# in 1 dim
model = GMM(2) # two components
model.fit(X)
model.means_ # the locations of the best-fit


Out[9]:
array([[ 0.51770444],
       [-0.54979272]])

In [12]:
np.random.seed(0)
x = np.random.normal(loc=0, scale=1, size=10000)
jackknife(x, np.std, kwargs=dict(ddof=1, axis=1))


Out[12]:
(0.98763026955902355, 0.0069301696671178791)

In [15]:
np.random.seed(0)
vals = np.random.normal(loc=0, scale=1, size=1000)
stats.kstest(vals, "norm")


Out[15]:
KstestResult(statistic=0.037375194298040482, pvalue=0.11933334159116327)

In [16]:
np.random.seed(0)
x, y = np.random.normal(0, 1, size=(2, 1000))
stats.mannwhitneyu(x, y)


Out[16]:
MannwhitneyuResult(statistic=482654.0, pvalue=0.089596993528215041)

In [18]:
np.random.seed(0)
x, y = np.random.normal(0, 1, size=(2, 1000))
T,p = stats.wilcoxon(x, y)
stats.wilcoxon(x, y)


Out[18]:
WilcoxonResult(statistic=238373.0, pvalue=0.19357179019702442)

In [19]:
np.random.seed(0)
x = np.random.normal(0, 1, size=1000)
stats.shapiro(x)


Out[19]:
(0.9985557794570923, 0.5914123058319092)

In [21]:
np.random.seed(0)
x, y = np.random.normal(size=(2, 1000))
stats.ttest_ind(x, y)


Out[21]:
Ttest_indResult(statistic=-1.3458875936027896, pvalue=0.17849146353581696)

In [28]:
np.random.seed(0)
x = np.random.normal(size=1000)
_ = pylab.hist(x, bins=50)
counts, bins = np.histogram(x, bins=50)



In [33]:
_ = hist(x, bins='freedman', lw=0, alpha=0.6)
_ = hist(x, bins='knuth', lw=0, alpha=0.6)
_ = hist(x, bins='scott', lw=0, alpha=0.6)



In [ ]: