In [23]:
x=True

In [24]:
import numpy as np
np.random.seed(0)

In [25]:
not x


Out[25]:
False

In [26]:
X = np.random.randn(300, 2)
y = np.logical_or(X[:, 0] > 0, X[:, 1] > 0)
y_T = np.where(y)    # tuple with the indices where y is True
y_F = np.where(~y)   # tuple with the indices where y is False

In [27]:
y_T


Out[27]:
(array([  0,   1,   2,   3,   4,   5,   6,   7,   8,   9,  10,  11,  12,
         13,  14,  15,  17,  18,  21,  23,  25,  27,  28,  32,  33,  34,
         35,  36,  37,  39,  40,  41,  42,  43,  44,  45,  46,  47,  48,
         49,  50,  51,  52,  54,  55,  56,  57,  58,  59,  60,  61,  63,
         64,  65,  66,  67,  68,  69,  70,  71,  72,  73,  75,  78,  80,
         81,  82,  84,  85,  86,  87,  89,  92,  93,  94,  95,  97,  98,
         99, 101, 102, 104, 105, 106, 108, 109, 111, 112, 113, 116, 117,
        118, 119, 123, 125, 126, 127, 129, 132, 133, 134, 135, 136, 138,
        139, 140, 141, 142, 143, 144, 146, 147, 148, 149, 150, 152, 154,
        155, 157, 161, 163, 164, 165, 166, 167, 168, 169, 170, 173, 174,
        176, 179, 180, 181, 182, 185, 186, 187, 188, 189, 190, 192, 194,
        195, 198, 199, 201, 202, 203, 205, 207, 208, 209, 210, 211, 212,
        213, 214, 216, 217, 218, 219, 221, 222, 223, 225, 226, 227, 229,
        231, 232, 234, 237, 238, 239, 240, 242, 243, 244, 246, 247, 250,
        251, 254, 255, 257, 258, 259, 262, 263, 264, 265, 266, 270, 271,
        273, 274, 275, 276, 277, 278, 280, 281, 282, 283, 284, 285, 286,
        287, 289, 291, 293, 295, 297]),)
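
A quick sanity check (an added sketch, not in the original run): the True and False index sets produced by np.where should together account for all 300 rows.

In [ ]:
len(y_T[0]) + len(y_F[0]) == len(y)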

In [28]:
6/5


Out[28]:
1.2

In [29]:
X[y]


Out[29]:
array([[ 1.76405235,  0.40015721],
       [ 0.97873798,  2.2408932 ],
       [ 1.86755799, -0.97727788],
       [ 0.95008842, -0.15135721],
       [-0.10321885,  0.4105985 ],
       [ 0.14404357,  1.45427351],
       [ 0.76103773,  0.12167502],
       [ 0.44386323,  0.33367433],
       [ 1.49407907, -0.20515826],
       [ 0.3130677 , -0.85409574],
       [-2.55298982,  0.6536186 ],
       [ 0.8644362 , -0.74216502],
       [ 2.26975462, -1.45436567],
       [ 0.04575852, -0.18718385],
       [ 1.53277921,  1.46935877],
       [ 0.15494743,  0.37816252],
       [-0.34791215,  0.15634897],
       [ 1.23029068,  1.20237985],
       [-1.70627019,  1.9507754 ],
       [-1.25279536,  0.77749036],
       [-0.89546656,  0.3869025 ],
       [-0.02818223,  0.42833187],
       [ 0.06651722,  0.3024719 ],
       [ 0.17742614, -0.40178094],
       [-1.63019835,  0.46278226],
       [-0.90729836,  0.0519454 ],
       [ 0.72909056,  0.12898291],
       [ 1.13940068, -1.23482582],
       [ 0.40234164, -0.68481009],
       [-0.31155253,  0.05616534],
       [-1.16514984,  0.90082649],
       [ 0.46566244, -1.53624369],
       [ 1.48825219,  1.89588918],
       [ 1.17877957, -0.17992484],
       [-1.07075262,  1.05445173],
       [-0.40317695,  1.22244507],
       [ 0.20827498,  0.97663904],
       [ 0.3563664 ,  0.70657317],
       [ 0.01050002,  1.78587049],
       [ 0.12691209,  0.40198936],
       [ 1.8831507 , -1.34775906],
       [-1.270485  ,  0.96939671],
       [-1.17312341,  1.94362119],
       [ 1.92294203,  1.48051479],
       [ 1.86755896,  0.90604466],
       [-0.86122569,  1.91006495],
       [-0.26800337,  0.8024564 ],
       [ 0.94725197, -0.15501009],
       [ 0.61407937,  0.92220667],
       [ 0.37642553, -1.09940079],
       [ 0.29823817,  1.3263859 ],
       [-0.43515355,  1.84926373],
       [ 0.67229476,  0.40746184],
       [-0.76991607,  0.53924919],
       [-0.67433266,  0.03183056],
       [-0.63584608,  0.67643329],
       [ 0.57659082, -0.20829876],
       [ 0.39600671, -1.09306151],
       [-1.49125759,  0.4393917 ],
       [ 0.1666735 ,  0.63503144],
       [ 2.38314477,  0.94447949],
       [-0.91282223,  1.11701629],
       [-0.06824161,  1.71334272],
       [ 1.12663592, -1.07993151],
       [-0.49803245,  1.92953205],
       [ 0.94942081,  0.08755124],
       [-1.22543552,  0.84436298],
       [ 1.18802979,  0.31694261],
       [ 0.92085882,  0.31872765],
       [ 0.85683061, -0.65102559],
       [-1.03424284,  0.68159452],
       [-0.4555325 ,  0.01747916],
       [ 0.62523145, -1.60205766],
       [-1.10438334,  0.05216508],
       [-0.739563  ,  1.5430146 ],
       [-1.29285691,  0.26705087],
       [ 0.52327666, -0.17154633],
       [ 0.77179055,  0.82350415],
       [ 2.16323595,  1.33652795],
       [ 1.0996596 ,  0.65526373],
       [ 0.64013153, -1.61695604],
       [ 0.2799246 , -0.09815039],
       [ 0.91017891,  0.31721822],
       [ 0.78632796, -0.4664191 ],
       [-0.01702041,  0.37915174],
       [ 2.25930895, -0.04225715],
       [-0.46359597,  0.48148147],
       [-1.54079701,  0.06326199],
       [ 0.15650654,  0.23218104],
       [-0.54286148,  0.41605005],
       [-1.15618243,  0.7811981 ],
       [ 1.49448454, -2.06998503],
       [ 0.42625873,  0.67690804],
       [ 1.15233156,  1.07961859],
       [ 0.52106488, -0.57578797],
       [ 0.14195316, -0.31932842],
       [ 0.69153875,  0.69474914],
       [-1.5829384 ,  0.61037938],
       [-1.93627981,  0.1887786 ],
       [ 0.52389102,  0.08842209],
       [-0.31088617,  0.09740017],
       [ 0.39904635, -2.77259276],
       [ 1.95591231,  0.39009332],
       [ 0.49374178, -0.11610394],
       [-2.03068447,  2.06449286],
       [-0.11054066,  1.02017271],
       [-0.69204985,  1.53637705],
       [ 0.28634369,  0.60884383],
       [-1.04525337,  1.21114529],
       [ 0.68981816,  1.30184623],
       [ 2.3039167 , -1.06001582],
       [-0.1359497 ,  1.13689136],
       [ 0.09772497,  0.58295368],
       [-0.39944903,  0.37005589],
       [-1.30652685,  1.65813068],
       [ 0.66638308, -0.46071979],
       [ 0.69377315, -0.15957344],
       [-0.13370156,  1.07774381],
       [-0.38487981,  0.09435159],
       [ 0.27451636, -0.89091508],
       [-0.15766702,  2.2567235 ],
       [-0.70470028,  0.94326072],
       [ 0.74718833, -1.18894496],
       [ 0.77325298, -1.18388064],
       [-2.65917224,  0.60631952],
       [-1.75589058,  0.45093446],
       [-0.6840109 ,  1.6595508 ],
       [ 1.0685094 , -0.4533858 ],
       [-0.36469354,  0.15670386],
       [ 0.5785215 ,  0.34965446],
       [ 1.36453185, -0.68944918],
       [-0.47965581,  0.6203583 ],
       [ 0.69845715,  0.00377089],
       [ 0.93184837,  0.33996498],
       [-0.01568211,  0.16092817],
       [ 0.28044171, -0.99312361],
       [ 0.84163126, -0.24945858],
       [ 0.04949498,  0.49383678],
       [ 0.64331447, -1.57062341],
       [-0.20690368,  0.88017891],
       [-1.69810582,  0.38728048],
       [ 0.03863055, -1.6567151 ],
       [ 1.64813493,  0.16422776],
       [ 0.56729028, -0.2226751 ],
       [ 0.85792392,  1.14110187],
       [ 1.46657872,  0.85255194],
       [ 0.76666318,  0.35629282],
       [-1.76853845,  0.35548179],
       [ 0.81451982,  0.05892559],
       [-1.4465347 ,  0.80029795],
       [ 1.73272119,  0.68450111],
       [ 0.370825  ,  0.14206181],
       [ 1.51999486,  1.71958931],
       [ 0.92950511,  0.58222459],
       [-2.09460307,  0.12372191],
       [-0.13010695,  0.09395323],
       [ 0.94304609, -2.73967717],
       [-0.56931205,  0.26990435],
       [ 0.86896349,  0.27687191],
       [-0.97110457,  0.3148172 ],
       [ 0.82158571,  0.00529265],
       [ 0.8005648 ,  0.07826018],
       [-0.08593077,  0.19429294],
       [ 0.87583276, -0.11510747],
       [ 0.45741561, -0.96461201],
       [-1.05462846,  0.82024784],
       [ 0.46313033,  0.27909576],
       [ 0.33890413,  2.02104356],
       [ 0.1993002 , -0.05060354],
       [-0.43918952,  0.18133843],
       [-0.5028167 ,  2.41245368],
       [-2.28862004,  0.25148442],
       [ 1.73887268,  0.99439439],
       [ 1.31913688, -0.88241882],
       [ 1.12859406,  0.49600095],
       [ 0.77140595,  1.02943883],
       [ 0.86259601, -2.65561909],
       [ 1.51332808,  0.55313206],
       [-0.04570396,  0.22050766],
       [ 1.10028434,  1.29802197],
       [ 2.69622405, -0.07392467],
       [ 0.38273243, -0.03424228],
       [ 1.09634685, -0.2342158 ],
       [-1.17915793,  1.30142807],
       [ 0.89526027,  1.37496407],
       [-0.66005632,  0.17581895],
       [ 0.49869027,  1.04797216],
       [ 0.28427967,  1.74266878],
       [ 0.24211796, -0.88872026],
       [ 0.93674246,  1.41232771],
       [-2.36958691,  0.8640523 ],
       [-2.23960406,  0.40149906],
       [ 1.22487056,  0.06485611],
       [ 0.21348005, -1.20857365],
       [-0.24201983,  1.51826117],
       [ 1.0781973 , -2.55918467],
       [ 1.1813786 , -0.63190376],
       [ 0.16392857,  0.09632136],
       [ 0.94246812, -0.26759475],
       [-0.67802578,  1.29784579],
       [-2.36417382,  0.02033418],
       [ 2.01125668, -0.04459543],
       [ 0.1950697 , -1.78156286],
       [-0.72904466,  0.1965574 ],
       [ 0.35475769,  0.61688655],
       [ 0.0086279 ,  0.52700421],
       [ 0.45378191, -1.82974041],
       [ 0.03700572,  0.76790241],
       [ 0.58987982, -0.36385881],
       [-0.13105401,  1.13307988],
       [-1.13980246,  0.78495752],
       [-0.21694957,  0.44539325],
       [ 0.54331189,  0.43904296],
       [ 0.35178011,  0.37923553]])

In [30]:
!pip install xgboost


Collecting xgboost
  Downloading xgboost-0.7.post3.tar.gz (450kB)
    100% |████████████████████████████████| 460kB 2.1MB/s eta 0:00:01
Requirement already satisfied: numpy in /home/nbuser/anaconda3_501/lib/python3.6/site-packages (from xgboost)
Requirement already satisfied: scipy in /home/nbuser/anaconda3_501/lib/python3.6/site-packages (from xgboost)
Building wheels for collected packages: xgboost
  Running setup.py bdist_wheel for xgboost ... done
  Stored in directory: /home/nbuser/.cache/pip/wheels/ca/b3/02/d44d5e12c5c1eecff4a822555bac96b182551cd5e13c4795f6
Successfully built xgboost
Installing collected packages: xgboost
Successfully installed xgboost-0.7.post3
You are using pip version 9.0.1, however version 9.0.2 is available.
You should consider upgrading via the 'pip install --upgrade pip' command.

In [31]:
import xgboost as xgb

In [32]:
'''
Created on 1 Apr 2015
@author: Jamie Hall
'''
import pickle

import numpy as np
import xgboost as xgb
from sklearn.datasets import load_iris, load_digits, load_boston
from sklearn.metrics import confusion_matrix, mean_squared_error
from sklearn.model_selection import KFold, train_test_split, GridSearchCV

rng = np.random.RandomState(31337)

print("Zeros and Ones from the Digits data set: binary classification")
digits = load_digits(2)
y = digits['target']
X = digits['data']
kf = KFold(n_splits=2, shuffle=True, random_state=rng)
for train_index, test_index in kf.split(X):
    xgb_model = xgb.XGBClassifier().fit(X[train_index], y[train_index])
    predictions = xgb_model.predict(X[test_index])
    actuals = y[test_index]
    print(confusion_matrix(actuals, predictions))

print("Iris: multi-class classification")
iris = load_iris()
y = iris['target']
X = iris['data']
kf = KFold(n_splits=2, shuffle=True, random_state=rng)
for train_index, test_index in kf.split(X):
    xgb_model = xgb.XGBClassifier().fit(X[train_index], y[train_index])
    predictions = xgb_model.predict(X[test_index])
    actuals = y[test_index]
    print(confusion_matrix(actuals, predictions))

print("Boston Housing: regression")
boston = load_boston()
y = boston['target']
X = boston['data']
kf = KFold(n_splits=2, shuffle=True, random_state=rng)
for train_index, test_index in kf.split(X):
    xgb_model = xgb.XGBRegressor().fit(X[train_index], y[train_index])
    predictions = xgb_model.predict(X[test_index])
    actuals = y[test_index]
    print(mean_squared_error(actuals, predictions))

print("Parameter optimization")
y = boston['target']
X = boston['data']
xgb_model = xgb.XGBRegressor()
clf = GridSearchCV(xgb_model,
                   {'max_depth': [2, 4, 6],
                    'n_estimators': [50, 100, 200]}, verbose=1)
clf.fit(X, y)
print(clf.best_score_)
print(clf.best_params_)

# The sklearn API models are picklable
print("Pickling sklearn API models")
# must open in binary format to pickle
pickle.dump(clf, open("best_boston.pkl", "wb"))
clf2 = pickle.load(open("best_boston.pkl", "rb"))
print(np.allclose(clf.predict(X), clf2.predict(X)))

# Early-stopping

X = digits['data']
y = digits['target']
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
clf = xgb.XGBClassifier()
clf.fit(X_train, y_train, early_stopping_rounds=10, eval_metric="auc",
        eval_set=[(X_test, y_test)])


Zeros and Ones from the Digits data set: binary classification
[[87  0]
 [ 1 92]]
[[91  0]
 [ 3 86]]
Iris: multi-class classification
[[19  0  0]
 [ 0 31  3]
 [ 0  1 21]]
[[31  0  0]
 [ 0 16  0]
 [ 0  3 25]]
Boston Housing: regression
9.86281492905
15.9899625729
Parameter optimization
Fitting 3 folds for each of 9 candidates, totalling 27 fits
[Parallel(n_jobs=1)]: Done  27 out of  27 | elapsed:    2.1s finished
0.598487960649
{'max_depth': 4, 'n_estimators': 100}
Pickling sklearn API models
True
[0]	validation_0-auc:0.999497
Will train until validation_0-auc hasn't improved in 10 rounds.
[1]	validation_0-auc:0.999497
[2]	validation_0-auc:0.999497
[3]	validation_0-auc:0.999749
[4]	validation_0-auc:0.999749
[5]	validation_0-auc:0.999749
[6]	validation_0-auc:0.999749
[7]	validation_0-auc:0.999749
[8]	validation_0-auc:0.999749
[9]	validation_0-auc:0.999749
[10]	validation_0-auc:1
[11]	validation_0-auc:1
[12]	validation_0-auc:1
[13]	validation_0-auc:1
[14]	validation_0-auc:1
[15]	validation_0-auc:1
[16]	validation_0-auc:1
[17]	validation_0-auc:1
[18]	validation_0-auc:1
[19]	validation_0-auc:1
[20]	validation_0-auc:1
Stopping. Best iteration:
[10]	validation_0-auc:1

Out[32]:
XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
       colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
       max_depth=3, min_child_weight=1, missing=None, n_estimators=100,
       n_jobs=1, nthread=None, objective='binary:logistic', random_state=0,
       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
       silent=True, subsample=1)
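
A hedged follow-up (the attribute names are an assumption about the xgboost sklearn wrapper of this era, not shown in the original run): after fitting with early_stopping_rounds, the best round and its score should be stored on the estimator.

In [ ]:
# Assumed attributes set by the sklearn wrapper when early stopping is used
clf.best_iteration, clf.best_score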

fastai Linear Algebra

QR decomposition

  • For any matrix $A$, $A = QR$, where $Q$ is orthogonal and $R$ is upper-triangular.

In [33]:
import numpy as np
n = 5
A = np.random.rand(n,n)
npQ, npR = np.linalg.qr(A)

Check that $Q$ is orthogonal:


In [34]:
np.allclose(np.eye(n), npQ @ npQ.T), np.allclose(np.eye(n), npQ.T @ npQ)


Out[34]:
(True, True)

Check that $R$ is upper triangular:


In [36]:
npR


Out[36]:
array([[-1.3507867 , -1.05857666, -0.69126872, -0.93597832, -1.20211099],
       [ 0.        ,  0.89451338,  0.32043198,  0.86957418,  0.53652547],
       [ 0.        ,  0.        , -0.45573763,  0.15933525, -0.72432884],
       [ 0.        ,  0.        ,  0.        ,  0.36263356, -0.40181217],
       [ 0.        ,  0.        ,  0.        ,  0.        , -0.16596369]])
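
A programmatic check (an added sketch): $R$ should be numerically upper-triangular, and the factors should reproduce $A$.

In [ ]:
# R equals its own upper triangle, and Q @ R reconstructs A
np.allclose(npR, np.triu(npR)), np.allclose(A, npQ @ npR)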

Classical Gram-Schmidt

For each $j$, calculate a single projection $$v_j = P_ja_j$$ where $P_j$ projects onto the space orthogonal to the span of $q_1,\ldots,q_{j-1}$.


In [38]:
def cgs(A):
    """Classical Gram-Schmidt QR factorization: A = Q R."""
    m, n = A.shape
    Q = np.zeros([m, n], dtype=np.float64)
    R = np.zeros([n, n], dtype=np.float64)
    for j in range(n):
        v = A[:, j]
        # subtract from v the projections of the original column a_j onto q_1..q_{j-1}
        for i in range(j):
            R[i, j] = np.dot(Q[:, i], A[:, j])
            v = v - (R[i, j] * Q[:, i])
        # what remains is orthogonal to q_1..q_{j-1}; normalize it to get q_j
        R[j, j] = np.linalg.norm(v)
        Q[:, j] = v / R[j, j]
    return Q, R

In [39]:
Q, R = cgs(A)

In [40]:
np.allclose(A, Q @ R)


Out[40]:
True

Check if $Q$ is unitary:


In [41]:
np.allclose(np.eye(len(Q)), Q.dot(Q.T))


Out[41]:
True

In [42]:
np.allclose(npQ, -Q)


Out[42]:
False

In [43]:
npQ


Out[43]:
array([[-0.42065679, -0.22217724, -0.82687683,  0.13443385, -0.26811825],
       [-0.69038376, -0.37900741,  0.24930135, -0.29212259,  0.48190961],
       [-0.02960704,  0.68010585, -0.37282158, -0.50272193,  0.38059714],
       [-0.52490557,  0.445696  ,  0.33732086, -0.09671002, -0.63457942],
       [-0.2646048 ,  0.38183998,  0.03663688,  0.79656121,  0.385137  ]])

In [44]:
-Q


Out[44]:
array([[-0.42065679,  0.22217724, -0.82687683, -0.13443385, -0.26811825],
       [-0.69038376,  0.37900741,  0.24930135,  0.29212259,  0.48190961],
       [-0.02960704, -0.68010585, -0.37282158,  0.50272193,  0.38059714],
       [-0.52490557, -0.445696  ,  0.33732086,  0.09671002, -0.63457942],
       [-0.2646048 , -0.38183998,  0.03663688, -0.79656121,  0.385137  ]])
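
The False above is expected: a QR factorization is unique only up to the sign of each column of $Q$ (with the matching row of $R$), so np.linalg.qr and cgs can differ by per-column sign flips rather than one global sign. A hedged check (added):

In [ ]:
# The two Q factors should agree up to per-column signs
np.allclose(np.abs(npQ), np.abs(Q))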

In [45]:
np.eye(len(Q))


Out[45]:
array([[ 1.,  0.,  0.,  0.,  0.],
       [ 0.,  1.,  0.,  0.,  0.],
       [ 0.,  0.,  1.,  0.,  0.],
       [ 0.,  0.,  0.,  1.,  0.],
       [ 0.,  0.,  0.,  0.,  1.]])

In [85]:
np.allclose(np.eye(len(Q)), Q.dot(Q.T))


Out[85]:
True

In [75]:
a = np.random.randint(4, size=(2, 3))
b = np.random.randint(5, size=(3, 2))

In [76]:
a


Out[76]:
array([[3, 3, 3],
       [2, 3, 2]])

In [77]:
b


Out[77]:
array([[0, 0],
       [4, 2],
       [2, 1]])

In [81]:
a.dot(b)


Out[81]:
array([[18,  9],
       [16,  8]])

In [83]:
a @ b


Out[83]:
array([[18,  9],
       [16,  8]])

In [86]:
x = np.array([23, 1])
x.dot(x.T)


Out[86]:
530

In [87]:
x.shape


Out[87]:
(2,)

In [91]:
x.T.shape


Out[91]:
(2,)

In [94]:
y = np.expand_dims(x, -1)

In [95]:
y.dot(y.T)


Out[95]:
array([[529,  23],
       [ 23,   1]])
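
Note (added): .T is a no-op on a 1-D array (x.T.shape is still (2,)), so x.dot(x.T) above is just the scalar inner product 530. np.expand_dims(x, -1) turns x into a 2×1 column, which makes y.dot(y.T) the 2×2 outer product. A hedged check:

In [ ]:
np.array_equal(y.dot(y.T), np.outer(x, x))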

Modified Gram-Schmidt: for each $j$, calculate $j-1$ projections $$P_j = P_{\perp q_{j-1}\cdots\perp q_{2}\perp q_{1}}$$


In [96]:
import numpy as np
n = 3
A = np.random.rand(n,n).astype(np.float64)

In [97]:
def mgs(A):
    """Modified Gram-Schmidt QR factorization: A = Q R."""
    V = A.copy()
    m, n = A.shape
    Q = np.zeros([m, n], dtype=np.float64)
    R = np.zeros([n, n], dtype=np.float64)
    for i in range(n):
        # the i-th working column is already orthogonal to q_1..q_{i-1}; normalize it
        R[i, i] = np.linalg.norm(V[:, i])
        Q[:, i] = V[:, i] / R[i, i]
        # immediately remove the q_i component from every remaining column
        for j in range(i + 1, n):
            R[i, j] = np.dot(Q[:, i], V[:, j])
            V[:, j] = V[:, j] - R[i, j] * Q[:, i]
    return Q, R

In [98]:
Q, R = mgs(A)

In [99]:
np.allclose(np.eye(len(Q)), Q.dot(Q.T.conj()))


Out[99]:
True

In [100]:
np.allclose(A, np.matmul(Q,R))


Out[100]:
True

In [102]:
A.conj()


Out[102]:
array([[ 0.20825325,  0.93239394,  0.2153982 ],
       [ 0.85833764,  0.80289337,  0.15914624],
       [ 0.60571196,  0.11566187,  0.72788816]])

Convolution principles


In [107]:
from numpy.linalg import norm
import numpy as np

In [108]:
?norm

In [112]:
a = np.arange(9).reshape(-1, 3) - 4

In [113]:
a


Out[113]:
array([[-4, -3, -2],
       [-1,  0,  1],
       [ 2,  3,  4]])

norm(): computing norms

  • Default (ord=None): the Frobenius norm for matrices and the 2-norm for vectors, i.e. the square root of the sum of the squared absolute values of the elements: $$\|A\|_F = \Big[\sum_{i,j} |a_{i,j}|^2\Big]^{1/2}$$
  • ord options (from the NumPy docstring; a quick check follows the table):
    =====  ============================  ==========================
    ord    norm for matrices             norm for vectors
    =====  ============================  ==========================
    None   Frobenius norm                2-norm
    'fro'  Frobenius norm                --
    'nuc'  nuclear norm                  --
    inf    max(sum(abs(x), axis=1))      max(abs(x))
    -inf   min(sum(abs(x), axis=1))      min(abs(x))
    0      --                            sum(x != 0)
    1      max(sum(abs(x), axis=0))      as below
    -1     min(sum(abs(x), axis=0))      as below
    2      2-norm (largest sing. value)  as below
    -2     smallest singular value       as below
    other  --                            sum(abs(x)**ord)**(1./ord)
    =====  ============================  ==========================
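
A quick check of the first row of the table (an added sketch): the default matrix norm should match ord='fro' exactly.

In [ ]:
np.allclose(norm(a), norm(a, 'fro'))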

In [114]:
norm(a)


Out[114]:
7.745966692414834

In [117]:
16+9+4+1+1+4+9+16


Out[117]:
60

In [118]:
np.sqrt(_)


Out[118]:
7.745966692414834

In [119]:
norm(a, np.inf)


Out[119]:
9.0

In [120]:
max([4+3+2, 1+0+1, 2+3+4])


Out[120]:
9

In [121]:
norm(a, -np.inf)


Out[121]:
2.0

In [122]:
min([4+3+2, 1+0+1, 2+3+4])


Out[122]:
2

In [123]:
norm(a, 1)


Out[123]:
7.0

In [124]:
max([4+1+2, 3+0+3, 2+1+4])


Out[124]:
7

In [125]:
import matplotlib.pyplot as plt
%matplotlib inline

In [126]:
x = np.arange(16).reshape(-1, 4) - 4

In [129]:
plt.imshow(x)


Out[129]:
<matplotlib.image.AxesImage at 0x7fc5b5d93cf8>

In [127]:
x = x / 16

In [128]:
x


Out[128]:
array([[-0.25  , -0.1875, -0.125 , -0.0625],
       [ 0.    ,  0.0625,  0.125 ,  0.1875],
       [ 0.25  ,  0.3125,  0.375 ,  0.4375],
       [ 0.5   ,  0.5625,  0.625 ,  0.6875]])

In [130]:
plt.imshow(x)


Out[130]:
<matplotlib.image.AxesImage at 0x7fc5b479a9b0>

In [139]:
y = np.expand_dims(x.ravel(), -1).T

In [140]:
plt.matshow(y)


Out[140]:
<matplotlib.image.AxesImage at 0x7fc5b46ae320>

In [141]:
plt.gray()


<matplotlib.figure.Figure at 0x7fc5b46bcd68>

In [142]:
plt.matshow(x)


Out[142]:
<matplotlib.image.AxesImage at 0x7fc5b43c9898>

  • Constructing slice objects

In [146]:
np.index_exp[2::2]


Out[146]:
(slice(2, None, 2),)

In [147]:
np.s_[2::2]


Out[147]:
slice(2, None, 2)

In [148]:
slice(2, None, 2)


Out[148]:
slice(2, None, 2)

In [149]:
x = np.arange(6)

In [150]:
x


Out[150]:
array([0, 1, 2, 3, 4, 5])

In [151]:
np.delete(x, np.s_[2::2])


Out[151]:
array([0, 1, 3, 5])

In [152]:
x


Out[152]:
array([0, 1, 2, 3, 4, 5])
  • slice signatures (an equivalence check follows the cell below):
    class slice(stop)
    class slice(start, stop[, step])


In [155]:
np.delete(x, slice(2,None,2))


Out[155]:
array([0, 1, 3, 5])
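
A small added equivalence check: indexing with a slice object is the same as the literal slicing syntax.

In [ ]:
np.array_equal(x[slice(2, None, 2)], x[2::2])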

Array rotation


In [158]:
yt = np.arange(16).reshape(-1, 4)
yt


Out[158]:
array([[ 0,  1,  2,  3],
       [ 4,  5,  6,  7],
       [ 8,  9, 10, 11],
       [12, 13, 14, 15]])

In [161]:
# rotate once (90° counter-clockwise)
np.rot90(yt)


Out[161]:
array([[ 3,  7, 11, 15],
       [ 2,  6, 10, 14],
       [ 1,  5,  9, 13],
       [ 0,  4,  8, 12]])

In [162]:
np.rot90(yt, k=2)


Out[162]:
array([[15, 14, 13, 12],
       [11, 10,  9,  8],
       [ 7,  6,  5,  4],
       [ 3,  2,  1,  0]])

In [164]:
np.rot90(yt, axes=(1, 0))


Out[164]:
array([[12,  8,  4,  0],
       [13,  9,  5,  1],
       [14, 10,  6,  2],
       [15, 11,  7,  3]])
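
A hedged extra check (not in the original run): passing axes=(1, 0) rotates in the opposite direction, so one such rotation should equal three counter-clockwise rotations.

In [ ]:
np.array_equal(np.rot90(yt, axes=(1, 0)), np.rot90(yt, k=3))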

In [ ]: