In [1]:
%matplotlib inline


/Users/NIGG/anaconda/lib/python3.5/site-packages/matplotlib/__init__.py:1035: UserWarning: Duplicate key in file "/Users/NIGG/.matplotlib/matplotlibrc", line #515
  (fname, cnt))
/Users/NIGG/anaconda/lib/python3.5/site-packages/matplotlib/__init__.py:1035: UserWarning: Duplicate key in file "/Users/NIGG/.matplotlib/matplotlibrc", line #516
  (fname, cnt))

In [2]:
import numpy as np
import pandas as pd

# 統計用ツール
import statsmodels.api as sm
import statsmodels.tsa.api as tsa
from patsy import dmatrices

# 自作の空間統計用ツール
from spatialstat import *

#描画
import matplotlib.pyplot as plt
from pandas.tools.plotting import autocorrelation_plot
import seaborn as sns
sns.set(font=['IPAmincho'])

#深層学習
import chainer
from chainer import cuda, Function, gradient_check, Variable, optimizers, serializers, utils
from chainer import Link, Chain, ChainList
import chainer.functions as F
import chainer.links as L

import pyper

変数名とデータの内容メモ

CENSUS: 市区町村コード(9桁)
P:      成約価格
S:      専有面積
L:      土地面積
R:      部屋数
RW:     前面道路幅員
CY:     建築年
A:      建築後年数(成約時)
TS:     最寄駅までの距離
TT:     東京駅までの時間
ACC:    ターミナル駅までの時間
WOOD:   木造ダミー
SOUTH:  南向きダミー
RSD:    住居系地域ダミー
CMD:    商業系地域ダミー
IDD:    工業系地域ダミー
FAR:    建ぺい率
FLR:    容積率
TDQ:    成約時点(四半期)
X:      緯度
Y:      経度
CITY_CODE: 市区町村コード(5桁)
CITY_NAME: 市区町村名
BLOCK:     地域ブロック名

In [9]:
data = pd.read_csv("TokyoSingle.csv")
data = data.dropna()
CITY_NAME = data['CITY_CODE'].copy()

In [10]:
CITY_NAME[CITY_NAME == 13101] = '01千代田区'
CITY_NAME[CITY_NAME == 13102] = "02中央区"
CITY_NAME[CITY_NAME == 13103] = "03港区"
CITY_NAME[CITY_NAME == 13104] = "04新宿区"
CITY_NAME[CITY_NAME == 13105] = "05文京区"
CITY_NAME[CITY_NAME == 13106] = "06台東区"
CITY_NAME[CITY_NAME == 13107] = "07墨田区"
CITY_NAME[CITY_NAME == 13108] = "08江東区"
CITY_NAME[CITY_NAME == 13109] = "09品川区"
CITY_NAME[CITY_NAME == 13110] = "10目黒区"
CITY_NAME[CITY_NAME == 13111] = "11大田区"
CITY_NAME[CITY_NAME == 13112] = "12世田谷区"
CITY_NAME[CITY_NAME == 13113] = "13渋谷区"
CITY_NAME[CITY_NAME == 13114] = "14中野区"
CITY_NAME[CITY_NAME == 13115] = "15杉並区"
CITY_NAME[CITY_NAME == 13116] = "16豊島区"
CITY_NAME[CITY_NAME == 13117] = "17北区"
CITY_NAME[CITY_NAME == 13118] = "18荒川区"
CITY_NAME[CITY_NAME == 13119] = "19板橋区"
CITY_NAME[CITY_NAME == 13120] = "20練馬区"
CITY_NAME[CITY_NAME == 13121] = "21足立区"
CITY_NAME[CITY_NAME == 13122] = "22葛飾区"
CITY_NAME[CITY_NAME == 13123] = "23江戸川区"

In [11]:
#Make Japanese Block name
BLOCK = data["CITY_CODE"].copy()
BLOCK[BLOCK == 13101] = "01都心・城南"
BLOCK[BLOCK == 13102] = "01都心・城南"
BLOCK[BLOCK == 13103] = "01都心・城南"
BLOCK[BLOCK == 13104] = "01都心・城南"
BLOCK[BLOCK == 13109] = "01都心・城南"
BLOCK[BLOCK == 13110] = "01都心・城南"
BLOCK[BLOCK == 13111] = "01都心・城南"
BLOCK[BLOCK == 13112] = "01都心・城南"
BLOCK[BLOCK == 13113] = "01都心・城南"
BLOCK[BLOCK == 13114] = "02城西・城北"
BLOCK[BLOCK == 13115] = "02城西・城北"
BLOCK[BLOCK == 13105] = "02城西・城北"
BLOCK[BLOCK == 13106] = "02城西・城北"
BLOCK[BLOCK == 13116] = "02城西・城北"
BLOCK[BLOCK == 13117] = "02城西・城北"
BLOCK[BLOCK == 13119] = "02城西・城北"
BLOCK[BLOCK == 13120] = "02城西・城北"
BLOCK[BLOCK == 13107] = "03城東"
BLOCK[BLOCK == 13108] = "03城東"
BLOCK[BLOCK == 13118] = "03城東"
BLOCK[BLOCK == 13121] = "03城東"
BLOCK[BLOCK == 13122] = "03城東"
BLOCK[BLOCK == 13123] = "03城東"

In [12]:
names = list(data.columns) + ['CITY_NAME', 'BLOCK']
data = pd.concat((data, CITY_NAME, BLOCK), axis = 1)
data.columns = names

市区町村別の件数を集計


In [13]:
print(data['CITY_NAME'].value_counts())


12世田谷区    12340
20練馬区      9979
15杉並区      8131
11大田区      7052
21足立区      6479
19板橋区      4827
14中野区      3924
10目黒区      3418
22葛飾区      3165
23江戸川区     3156
09品川区      2424
16豊島区      2153
04新宿区      1885
17北区       1799
13渋谷区      1487
05文京区      1242
18荒川区      1005
08江東区       981
03港区        757
07墨田区       725
06台東区       371
02中央区        56
01千代田区       32
Name: CITY_NAME, dtype: int64

In [23]:
vars = ['P', 'S', 'L', 'R', 'RW', 'A', 'TS', 'TT', 'WOOD', 'SOUTH', 'CMD', 'IDD', 'FAR', 'X', 'Y']
eq = fml_build(vars)

y, X = dmatrices(eq, data=data, return_type='dataframe')

CITY_NAME = pd.get_dummies(data['CITY_NAME'])
TDQ = pd.get_dummies(data['TDQ'])

X = pd.concat((X, CITY_NAME, TDQ), axis=1)

datas = pd.concat((y, X), axis=1)
datas = datas[datas['12世田谷区'] == 1][0:5000]

In [24]:
class CAR(Chain):
    def __init__(self, unit1, unit2, unit3, col_num):
        self.unit1 = unit1
        self.unit2 = unit2
        self.unit3 = unit3
        super(CAR, self).__init__(
            l1 = L.Linear(col_num, unit1),
            l2 = L.Linear(self.unit1, self.unit1),
            l3 = L.Linear(self.unit1, self.unit2),
            l4 = L.Linear(self.unit2, self.unit3),
            l5 = L.Linear(self.unit3, self.unit3),
            l6 = L.Linear(self.unit3, 1),
        )
    
    def __call__(self, x, y):
        fv = self.fwd(x, y)
        loss = F.mean_squared_error(fv, y)
        return loss
    
    def fwd(self, x, y):
        h1 = F.sigmoid(self.l1(x))
        h2 = F.sigmoid(self.l2(h1))
        h3 = F.sigmoid(self.l3(h2))
        h4 = F.sigmoid(self.l4(h3))
        h5 = F.sigmoid(self.l5(h4))
        h6 = self.l6(h5)
        return h6

In [75]:
class DLmodel(object):
    def __init__(self, data, vars, bs=200, n=1000):
        self.vars = vars
        eq = fml_build(vars)
        y, X = dmatrices(eq, data=datas, return_type='dataframe')
        self.y_in = y[:-n]
        self.X_in = X[:-n]
        self.y_ex = y[-n:]
        self.X_ex = X[-n:]
        
        self.logy_in = np.log(self.y_in)
        self.logy_ex = np.log(self.y_ex)
        
        self.bs = bs
        
    def DL(self, ite=100, bs=200, add=False):
        y_in = np.array(self.y_in, dtype='float32')       
        X_in = np.array(self.X_in, dtype='float32')

        y = Variable(y_in)
        x = Variable(X_in)

        num, col_num = X_in.shape
        
        if add is False:
            self.model1 = CAR(13, 13, 3, col_num)
            
        optimizer = optimizers.Adam()
        optimizer.setup(self.model1)
        
        loss_val = 100000000

        for j in range(ite + 10000):
            sffindx = np.random.permutation(num)
            for i in range(0, num, bs):
                x = Variable(X_in[sffindx[i:(i+bs) if (i+bs) < num else num]])
                y = Variable(y_in[sffindx[i:(i+bs) if (i+bs) < num else num]])
                self.model1.zerograds()
                loss = self.model1(x, y)
                loss.backward()
                optimizer.update()
            if loss_val >= loss.data:
                loss_val = loss.data
            if j > ite:
                if loss_val >= loss.data:
                    loss_val = loss.data
                    print('epoch:', j)
                    print('train mean loss={}'.format(loss_val))
                    print(' - - - - - - - - - ')
                    break
            if j % 1000 == 0:
                print('epoch:', j)
                print('train mean loss={}'.format(loss_val))
                print(' - - - - - - - - - ')
            
    def predict(self):
        y_ex = np.array(self.y_ex, dtype='float32').reshape(len(self.y_ex))
        
        X_ex = np.array(self.X_ex, dtype='float32')
        X_ex = Variable(X_ex)
        resid_pred =  self.model1.fwd(X_ex, X_ex).data  
        print(resid_pred[:10])
        
        self.pred = resid_pred
        self.error = np.array(y_ex - self.pred.reshape(len(self.pred),))[0]
        
    def compare(self):
        plt.hist(self.error)

In [76]:
vars = ['P', 'S', 'L', 'R', 'RW', 'A', 'TS', 'TT', 'WOOD', 'SOUTH', 'CMD', 'IDD', 'FAR']
#vars += vars + list(TDQ.columns)

In [77]:
model = DLmodel(datas, vars)

In [78]:
model.DL(ite=20000, bs=200)


epoch: 0
train mean loss=56888976.0
 - - - - - - - - - 
epoch: 1000
train mean loss=48387068.0
 - - - - - - - - - 
epoch: 2000
train mean loss=46748284.0
 - - - - - - - - - 
epoch: 3000
train mean loss=44967768.0
 - - - - - - - - - 
epoch: 4000
train mean loss=41678352.0
 - - - - - - - - - 
epoch: 5000
train mean loss=41678352.0
 - - - - - - - - - 
epoch: 6000
train mean loss=41678352.0
 - - - - - - - - - 
epoch: 7000
train mean loss=40469888.0
 - - - - - - - - - 
epoch: 8000
train mean loss=40469888.0
 - - - - - - - - - 
epoch: 9000
train mean loss=38288768.0
 - - - - - - - - - 
epoch: 10000
train mean loss=37253708.0
 - - - - - - - - - 
epoch: 11000
train mean loss=37253708.0
 - - - - - - - - - 
epoch: 12000
train mean loss=37020724.0
 - - - - - - - - - 
epoch: 13000
train mean loss=35186492.0
 - - - - - - - - - 
epoch: 14000
train mean loss=34883460.0
 - - - - - - - - - 
epoch: 15000
train mean loss=33269868.0
 - - - - - - - - - 
epoch: 16000
train mean loss=33269868.0
 - - - - - - - - - 
epoch: 17000
train mean loss=31171240.0
 - - - - - - - - - 
epoch: 18000
train mean loss=31171240.0
 - - - - - - - - - 
epoch: 19000
train mean loss=30821448.0
 - - - - - - - - - 
epoch: 20000
train mean loss=29305584.0
 - - - - - - - - - 
epoch: 21000
train mean loss=29305584.0
 - - - - - - - - - 
epoch: 21037
train mean loss=29037132.0
 - - - - - - - - - 
epoch: 21609
train mean loss=28761692.0
 - - - - - - - - - 
epoch: 22000
train mean loss=28761692.0
 - - - - - - - - - 
epoch: 22391
train mean loss=28700480.0
 - - - - - - - - - 
epoch: 22637
train mean loss=28154572.0
 - - - - - - - - - 
epoch: 23000
train mean loss=28154572.0
 - - - - - - - - - 
epoch: 23155
train mean loss=28066752.0
 - - - - - - - - - 
epoch: 23589
train mean loss=27559478.0
 - - - - - - - - - 
epoch: 23716
train mean loss=26798796.0
 - - - - - - - - - 
epoch: 24000
train mean loss=26798796.0
 - - - - - - - - - 
epoch: 24378
train mean loss=25748072.0
 - - - - - - - - - 
epoch: 25000
train mean loss=25748072.0
 - - - - - - - - - 
epoch: 25087
train mean loss=24504744.0
 - - - - - - - - - 
epoch: 25880
train mean loss=24191330.0
 - - - - - - - - - 
epoch: 26000
train mean loss=24191330.0
 - - - - - - - - - 
epoch: 27000
train mean loss=24191330.0
 - - - - - - - - - 
epoch: 28000
train mean loss=24191330.0
 - - - - - - - - - 
epoch: 28167
train mean loss=23600912.0
 - - - - - - - - - 
epoch: 28654
train mean loss=23484652.0
 - - - - - - - - - 
epoch: 29000
train mean loss=23484652.0
 - - - - - - - - - 
epoch: 29516
train mean loss=22430598.0
 - - - - - - - - - 
epoch: 29634
train mean loss=22330502.0
 - - - - - - - - - 
epoch: 29937
train mean loss=22088384.0
 - - - - - - - - - 

In [79]:
model.DL(ite=20000, bs=200, add=True)


epoch: 0
train mean loss=22346982.0
 - - - - - - - - - 
epoch: 1000
train mean loss=21413112.0
 - - - - - - - - - 
epoch: 2000
train mean loss=21413112.0
 - - - - - - - - - 
epoch: 3000
train mean loss=20903448.0
 - - - - - - - - - 
epoch: 4000
train mean loss=19988990.0
 - - - - - - - - - 
epoch: 5000
train mean loss=18561308.0
 - - - - - - - - - 
epoch: 6000
train mean loss=18561308.0
 - - - - - - - - - 
epoch: 7000
train mean loss=18013032.0
 - - - - - - - - - 
epoch: 8000
train mean loss=16432581.0
 - - - - - - - - - 
epoch: 9000
train mean loss=16432581.0
 - - - - - - - - - 
epoch: 10000
train mean loss=16432581.0
 - - - - - - - - - 
epoch: 11000
train mean loss=15418825.0
 - - - - - - - - - 
epoch: 12000
train mean loss=15239754.0
 - - - - - - - - - 
epoch: 13000
train mean loss=15239754.0
 - - - - - - - - - 
epoch: 14000
train mean loss=14351450.0
 - - - - - - - - - 
epoch: 15000
train mean loss=14333427.0
 - - - - - - - - - 
epoch: 16000
train mean loss=11407301.0
 - - - - - - - - - 
epoch: 17000
train mean loss=11407301.0
 - - - - - - - - - 
epoch: 18000
train mean loss=11407301.0
 - - - - - - - - - 
epoch: 19000
train mean loss=11407301.0
 - - - - - - - - - 
epoch: 20000
train mean loss=10856371.0
 - - - - - - - - - 
epoch: 21000
train mean loss=10856371.0
 - - - - - - - - - 
epoch: 22000
train mean loss=10856371.0
 - - - - - - - - - 
epoch: 22204
train mean loss=10669836.0
 - - - - - - - - - 
epoch: 22764
train mean loss=10097909.0
 - - - - - - - - - 
epoch: 23000
train mean loss=10097909.0
 - - - - - - - - - 
epoch: 24000
train mean loss=10097909.0
 - - - - - - - - - 
epoch: 24106
train mean loss=10008961.0
 - - - - - - - - - 
epoch: 25000
train mean loss=10008961.0
 - - - - - - - - - 
epoch: 25033
train mean loss=9990300.0
 - - - - - - - - - 
epoch: 25227
train mean loss=8940188.0
 - - - - - - - - - 
epoch: 26000
train mean loss=8940188.0
 - - - - - - - - - 
epoch: 27000
train mean loss=8940188.0
 - - - - - - - - - 
epoch: 27170
train mean loss=8830927.0
 - - - - - - - - - 
epoch: 27503
train mean loss=8301732.5
 - - - - - - - - - 
epoch: 27717
train mean loss=7827611.0
 - - - - - - - - - 
epoch: 28000
train mean loss=7827611.0
 - - - - - - - - - 
epoch: 29000
train mean loss=7827611.0
 - - - - - - - - - 

In [303]:
model.predict()


[[-200.914505  ]
 [ 813.39611816]
 [-176.11975098]
 [  97.72927856]
 [ 813.26416016]
 [ 806.31604004]
 [-188.42362976]
 [-201.4822998 ]
 [  57.99349976]
 [-201.93777466]]

青がOLSの誤差、緑がOLSと深層学習を組み合わせた誤差。


In [51]:
model.compare()



In [52]:
print(np.mean(model.error1))
print(np.mean(model.error2))


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-52-a8639f66dddd> in <module>()
----> 1 print(np.mean(model.error1))
      2 print(np.mean(model.error2))

AttributeError: 'DLmodel' object has no attribute 'error1'

In [53]:
print(np.mean(np.abs(model.error1)))
print(np.mean(np.abs(model.error2)))


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-53-49cb976b3e2b> in <module>()
----> 1 print(np.mean(np.abs(model.error1)))
      2 print(np.mean(np.abs(model.error2)))

AttributeError: 'DLmodel' object has no attribute 'error1'

In [54]:
print(max(np.abs(model.error1)))
print(max(np.abs(model.error2)))


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-54-74a0f30af4ac> in <module>()
----> 1 print(max(np.abs(model.error1)))
      2 print(max(np.abs(model.error2)))

AttributeError: 'DLmodel' object has no attribute 'error1'

In [55]:
print(np.var(model.error1))
print(np.var(model.error2))


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-55-6c0a4f9e73d3> in <module>()
----> 1 print(np.var(model.error1))
      2 print(np.var(model.error2))

AttributeError: 'DLmodel' object has no attribute 'error1'

In [316]:
fig = plt.figure()
ax = fig.add_subplot(111)

errors = [model.error1, model.error2]

bp = ax.boxplot(errors)

plt.grid()
plt.ylim([-5000,5000])

plt.title('分布の箱ひげ図')

plt.show()



In [317]:
X = model.X_ex['X'].values
Y = model.X_ex['Y'].values

In [318]:
e = model.error2

In [319]:
import numpy
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.axes3d import Axes3D

fig=plt.figure()
ax=Axes3D(fig)
 
ax.scatter3D(X, Y, e)
plt.show()



In [249]:



Out[249]:
array([ 35.66455,  35.65134,  35.63957,  35.60207,  35.66327,  35.65029,
        35.64262,  35.62602,  35.60105,  35.64538,  35.63457,  35.64502,
        35.66455,  35.65314,  35.62616,  35.61948,  35.66079,  35.63714,
        35.65052,  35.65285,  35.66081,  35.65083,  35.65156,  35.62632,
        35.66519,  35.66333,  35.62063,  35.66388,  35.61562,  35.61499,
        35.61528,  35.59893,  35.60094,  35.60108,  35.65905,  35.64066,
        35.6026 ,  35.67156,  35.59519,  35.66376,  35.63561,  35.63352,
        35.66988,  35.65884,  35.67615,  35.64243,  35.64283,  35.65176,
        35.66519,  35.65055,  35.63795,  35.63552,  35.65722,  35.63948,
        35.6573 ,  35.63795,  35.63536,  35.66584,  35.63999,  35.64131,
        35.59604,  35.66175,  35.65339,  35.67573,  35.63049,  35.65297,
        35.62073,  35.65945,  35.64087,  35.63558,  35.65117,  35.65121,
        35.64002,  35.65805,  35.62836,  35.62884,  35.60686,  35.6357 ,
        35.60126,  35.64529,  35.64953,  35.64086,  35.64087,  35.67482,
        35.6604 ,  35.61882,  35.63772,  35.66388,  35.629  ,  35.64018,
        35.65722,  35.65805,  35.63635,  35.63635,  35.629  ,  35.6357 ,
        35.60186,  35.60167,  35.63684,  35.66676,  35.6233 ,  35.65425,
        35.61969,  35.63188,  35.62841,  35.65902,  35.64165,  35.64747,
        35.66146,  35.62018,  35.59893,  35.63811,  35.65907,  35.65722,
        35.65219,  35.61958,  35.63969,  35.63821,  35.63795,  35.66439,
        35.64775,  35.64712,  35.65314,  35.65285,  35.63681,  35.63496,
        35.63753,  35.62465,  35.64226,  35.64176,  35.66451,  35.67651,
        35.65781,  35.66298,  35.66298,  35.66237,  35.63262,  35.66222,
        35.66034,  35.6573 ,  35.65176,  35.64915,  35.6269 ,  35.66336,
        35.6344 ,  35.65227,  35.62104,  35.61906,  35.62021,  35.65617,
        35.62073,  35.63787,  35.64086,  35.60166,  35.61883,  35.63839,
        35.64301,  35.68041,  35.64822,  35.66089,  35.60163,  35.66145,
        35.65727,  35.60392,  35.62003,  35.66682,  35.65776,  35.65626,
        35.64225,  35.61039,  35.64093,  35.65502,  35.66023,  35.6623 ,
        35.6233 ,  35.66248,  35.61986,  35.67714,  35.61941,  35.64136,
        35.641  ,  35.66824,  35.64747,  35.64165,  35.62073,  35.62073,
        35.61787,  35.64703,  35.65219,  35.66145,  35.65596,  35.63716,
        35.62988,  35.64887,  35.66552,  35.62336,  35.63873,  35.64384,
        35.64887,  35.64661,  35.65626,  35.65431,  35.60861,  35.66163,
        35.64973,  35.64884,  35.63143,  35.63961,  35.65502,  35.65617,
        35.65617,  35.64001,  35.61688,  35.64165,  35.65922,  35.66126,
        35.63467,  35.65055,  35.66576,  35.6045 ,  35.64089,  35.65233,
        35.64747,  35.63518,  35.62668,  35.65494,  35.67666,  35.6382 ,
        35.64977,  35.62066,  35.62923,  35.62994,  35.60823,  35.6348 ,
        35.63916,  35.60968,  35.59604,  35.64093,  35.63517,  35.6493 ,
        35.65815,  35.61724,  35.65922,  35.65922,  35.63274,  35.67625,
        35.65156,  35.63627,  35.63538,  35.65902,  35.67096,  35.61947,
        35.62089,  35.63585,  35.61882,  35.63111,  35.66934,  35.65922,
        35.67028,  35.6798 ,  35.62073,  35.64621,  35.64452,  35.67573,
        35.60233,  35.60233,  35.64063,  35.63128,  35.64742,  35.66962,
        35.63519,  35.63634,  35.60332,  35.61476,  35.65502,  35.67001,
        35.60744,  35.6266 ,  35.65815,  35.65902,  35.64845,  35.63561,
        35.66586,  35.65377,  35.67693,  35.6513 ,  35.67957,  35.63474,
        35.65262,  35.65271,  35.64202,  35.65029,  35.61434,  35.63457,
        35.64933,  35.66969,  35.64147,  35.62261,  35.61682,  35.61947,
        35.62673,  35.64341,  35.65719,  35.61046,  35.61307,  35.62365,
        35.62038,  35.6738 ,  35.63251,  35.64151,  35.62619,  35.66988,
        35.60126,  35.64917,  35.6526 ,  35.61787,  35.63503,  35.66084,
        35.66453,  35.63795,  35.661  ,  35.63957,  35.64063,  35.59325,
        35.6249 ,  35.61365,  35.66388,  35.64341,  35.64147,  35.65958,
        35.65494,  35.65857,  35.60823,  35.65464,  35.65488,  35.64147,
        35.65455,  35.65554,  35.60228,  35.62112,  35.63562,  35.63143,
        35.65931,  35.6587 ,  35.62104,  35.6623 ,  35.65922,  35.64741,
        35.64742,  35.65018,  35.65973,  35.61842,  35.61842,  35.64923,
        35.64669,  35.60207,  35.63771,  35.6171 ,  35.59361,  35.63873,
        35.64747,  35.64761,  35.63552,  35.64251,  35.60312,  35.66036,
        35.62994,  35.63814,  35.62207,  35.66485,  35.67437,  35.63506,
        35.65502,  35.62238,  35.63873,  35.63561,  35.63561,  35.64208,
        35.641  ,  35.6526 ,  35.66298,  35.6078 ,  35.65253,  35.64747,
        35.64387,  35.63518,  35.63518,  35.63123,  35.62032,  35.64089,
        35.66969,  35.64736,  35.63345,  35.66453,  35.629  ,  35.62982,
        35.64708,  35.66283,  35.66388,  35.66542,  35.63123,  35.65331,
        35.65285,  35.66676,  35.63085,  35.63552,  35.63457,  35.64147,
        35.61741,  35.61958,  35.62261,  35.61947,  35.65496,  35.63457,
        35.65383,  35.67043,  35.66973,  35.62498,  35.64274,  35.63827,
        35.63799,  35.6374 ,  35.66775,  35.59604,  35.66848,  35.65018,
        35.63561,  35.64741,  35.64669,  35.64968,  35.64644,  35.66283,
        35.61958,  35.63123,  35.61365,  35.65596,  35.61947,  35.61947,
        35.61759,  35.66973,  35.65494,  35.65262,  35.65596,  35.65371,
        35.64949,  35.6357 ,  35.64548,  35.64548,  35.63528,  35.64243,
        35.65107,  35.63552,  35.66647,  35.66036,  35.61958,  35.63382,
        35.64642,  35.60559,  35.60659,  35.60228,  35.63457,  35.60395,
        35.63143,  35.64429,  35.64541,  35.63561,  35.63873,  35.6566 ,
        35.64777,  35.61083,  35.63839,  35.63382,  35.66576,  35.67957,
        35.62982,  35.66388,  35.65525,  35.63067,  35.62261,  35.60312,
        35.61763,  35.62261,  35.66089,  35.62592,  35.63017,  35.61046,
        35.66334,  35.64341,  35.63518,  35.64911,  35.60765,  35.661  ,
        35.63717,  35.6242 ,  35.66163,  35.59945,  35.65596,  35.61046,
        35.63999,  35.65667,  35.65902,  35.66334,  35.62502,  35.63561,
        35.66298,  35.67054,  35.64747,  35.60571,  35.64747,  35.61787,
        35.66416,  35.64028,  35.62698,  35.66084,  35.65262,  35.64975,
        35.61601,  35.63552,  35.65176,  35.63874,  35.64473,  35.66841,
        35.61511,  35.62986,  35.63517,  35.63151,  35.61307,  35.62569,
        35.64774,  35.61901,  35.61986,  35.66848,  35.67028,  35.66642,
        35.66145,  35.64619,  35.64736,  35.64619,  35.63964,  35.64712,
        35.66234,  35.66655,  35.63536,  35.6286 ,  35.66973,  35.65791,
        35.65719,  35.67505,  35.63961,  35.64408,  35.65314,  35.61702,
        35.63873,  35.65271,  35.59361,  35.60225,  35.60721,  35.66453,
        35.63836,  35.66126,  35.66586,  35.66005,  35.63166,  35.65494,
        35.6149 ,  35.63836,  35.66439,  35.66336,  35.64857,  35.61582,
        35.65595,  35.66303,  35.66837,  35.6623 ,  35.65922,  35.65464,
        35.64701,  35.60571,  35.63805,  35.67184,  35.65167,  35.65167,
        35.64139,  35.63691,  35.60312,  35.66962,  35.60823,  35.66682,
        35.66682,  35.59923,  35.60171,  35.61402,  35.64657,  35.6279 ,
        35.62757,  35.67555,  35.65271,  35.64628,  35.65083,  35.60233,
        35.60233,  35.67573,  35.65107,  35.64637,  35.64975,  35.66328,
        35.62354,  35.63128,  35.60312,  35.61398,  35.627  ,  35.64887,
        35.66364,  35.65525,  35.63164,  35.61046,  35.62798,  35.6389 ,
        35.59356,  35.65342,  35.62148,  35.59519,  35.63033,  35.66546,
        35.66542,  35.61738,  35.6623 ,  35.63626,  35.64703,  35.60233,
        35.63629,  35.67977,  35.62927,  35.67523,  35.67666,  35.65907,
        35.63924,  35.60385,  35.61873,  35.60004,  35.66089,  35.6581 ,
        35.65776,  35.63677,  35.63691,  35.63483,  35.61884,  35.64147,
        35.64193,  35.67043,  35.66089,  35.66163,  35.61307,  35.62334,
        35.59746,  35.64281,  35.61688,  35.64028,  35.6045 ,  35.66128,
        35.62187,  35.63518,  35.62062,  35.64085,  35.66412,  35.66412,
        35.63747,  35.65805,  35.65945,  35.629  ,  35.63704,  35.6573 ,
        35.65987,  35.63992,  35.60312,  35.64677,  35.67063,  35.64761,
        35.64147,  35.65914,  35.66175,  35.62724,  35.66441,  35.59633,
        35.62757,  35.65083,  35.63638,  35.6347 ,  35.60721,  35.60571,
        35.66128,  35.64085,  35.64496,  35.62586,  35.66653,  35.64933,
        35.61884,  35.65719,  35.63524,  35.64135,  35.65922,  35.65881,
        35.65394,  35.59299,  35.6488 ,  35.66234,  35.65573,  35.6617 ,
        35.64496,  35.64502,  35.64496,  35.6516 ,  35.5942 ,  35.65262,
        35.61691,  35.6512 ,  35.65083,  35.67154,  35.63274,  35.61702,
        35.60847,  35.65862,  35.66001,  35.59746,  35.61787,  35.66362,
        35.61884,  35.67645,  35.66969,  35.66036,  35.66227,  35.63642,
        35.64598,  35.66296,  35.64637,  35.65573,  35.63116,  35.6617 ,
        35.64842,  35.63704,  35.63821,  35.63842,  35.64842,  35.64341,
        35.65464,  35.65587,  35.63457,  35.64547,  35.64521,  35.63067,
        35.64499,  35.64628,  35.64628,  35.64421,  35.64028,  35.6171 ,
        35.6623 ,  35.64628,  35.61787,  35.64747,  35.65776,  35.66767,
        35.62054,  35.63873,  35.66676,  35.61738,  35.6105 ,  35.63799,
        35.64641,  35.66676,  35.65048,  35.64845,  35.64311,  35.64747,
        35.64541,  35.67573,  35.61798,  35.65271,  35.61906,  35.62381,
        35.6242 ,  35.64387,  35.63433,  35.65776,  35.59325,  35.627  ,
        35.66233,  35.64384,  35.64677,  35.64148,  35.60346,  35.64301,
        35.62238,  35.65596,  35.61506,  35.64897,  35.61986,  35.66416,
        35.62357,  35.62718,  35.65192,  35.66294,  35.66294,  35.65805,
        35.66034,  35.59325,  35.627  ,  35.61367,  35.65676,  35.65805,
        35.64086,  35.64161,  35.63388,  35.6344 ,  35.65955,  35.61989,
        35.65233,  35.62718,  35.65014,  35.6171 ,  35.62698,  35.62378,
        35.65573,  35.60362,  35.6344 ,  35.66269,  35.66269,  35.62249,
        35.64724,  35.66327,  35.66327,  35.65201,  35.66294,  35.63493,
        35.61626,  35.64669,  35.66436,  35.627  ,  35.63748,  35.66412,
        35.64302,  35.64999,  35.64429,  35.63033,  35.62614,  35.62799,
        35.62087,  35.63836,  35.62322,  35.64341,  35.66327,  35.64833,
        35.65304,  35.66962,  35.6249 ,  35.64637,  35.5942 ,  35.59909,
        35.64883,  35.65596,  35.61703,  35.617  ,  35.67216,  35.6738 ,
        35.6642 ,  35.64931,  35.64855,  35.61644,  35.61854,  35.66056,
        35.66634,  35.62054,  35.64712,  35.65371,  35.67675,  35.62632,
        35.66294,  35.64387,  35.627  ,  35.6622 ,  35.64789,  35.66142,
        35.62646,  35.65958,  35.63116,  35.61883,  35.62135,  35.66622,
        35.5942 ,  35.62923,  35.6526 ,  35.61702,  35.67148,  35.63211,
        35.64623,  35.64521,  35.62534,  35.59776,  35.61309,  35.65174,
        35.64685,  35.64708,  35.6149 ,  35.64599,  35.62103,  35.61525,
        35.64431,  35.64813,  35.65283,  35.63836,  35.63839,  35.67216,
        35.64883,  35.66249,  35.62502,  35.6825 ,  35.65659,  35.65291,
        35.6622 ,  35.65596,  35.65525,  35.5942 ,  35.63391,  35.63877,
        35.6149 ,  35.66767,  35.65907,  35.61499,  35.61883,  35.65554,
        35.61978,  35.65574,  35.64148,  35.6273 ,  35.67736,  35.61868,
        35.62038,  35.61986,  35.6486 ,  35.65304,  35.67148,  35.66362,
        35.63016,  35.60011,  35.66426,  35.63391,  35.62066,  35.64391,
        35.6238 ,  35.64675,  35.6149 ,  35.61499,  35.63621,  35.64804,
        35.6149 ,  35.64369,  35.62864,  35.6206 ,  35.6389 ,  35.64795,
        35.63748,  35.63802,  35.61196,  35.61234,  35.65116,  35.65083,
        35.62357,  35.67216,  35.64623,  35.62322,  35.67216,  35.65156,
        35.65371,  35.63836,  35.63771,  35.64547,  35.64309,  35.66145,
        35.65025,  35.65371,  35.63795,  35.67045,  35.63537,  35.61476,
        35.6618 ,  35.60758,  35.62207,  35.65706,  35.67216,  35.64075,
        35.62261,  35.66034,  35.66459,  35.61499,  35.64797,  35.60118,
        35.64445,  35.64127,  35.63538,  35.62555])

In [265]:
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.tri as mtri



#============
# First plot
#============
# Plot the surface.  The triangles in parameter space determine which x, y, z
# points are connected by an edge.
ax = fig.add_subplot(1, 2, 1, projection='3d')
ax.plot_trisurf(X, Y, e)
ax.set_zlim(-1, 1)
plt.show()

In [ ]: