In [315]:
%matplotlib inline
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib
import numpy as np
from scipy.spatial import distance_matrix
from scipy import sparse

SLX as conv nets

What does a SLX model do

  • Calculate average of neighbors

One interpretation of the SLX model is that it computes the average of all observations within all cells not more then three away from the farm

If we assume we define for each farm a 9x9 cell


In [316]:
N = 9
# make an empty data set
data = np.ones((N, N)) * np.nan
# fill in some fake data
for j in range(3)[::-1]:
    data[N//2 - j : N//2 + j +1, N//2 - j : N//2 + j +1] = j
data[data==2] = 1    
    
# make a figure + axes
fig, ax = plt.subplots(1, 1)
# make color map
my_cmap = matplotlib.colors.ListedColormap(['r', 'g', 'b'])
# set the 'bad' values (nan) to be white and transparent
my_cmap.set_bad(color='w', alpha=0)
# draw the grid
for x in range(N + 1):
    ax.axhline(x, lw=2, color='k', zorder=5)
    ax.axvline(x, lw=2, color='k', zorder=5)
# draw the boxes
ax.imshow(data, interpolation='none', cmap=my_cmap, extent=[0, N, 0, N], zorder=0)
# turn off the axis labels
ax.axis('off')


Out[316]:
(0.0, 9.0, 0.0, 9.0)

This could also be implemented using a 10x10 convolution filter defined as follows


In [317]:
slxFilt = np.zeros((9,9))
slxFilt[2:7,2:7]=1
slxFilt[4,4]=0


slxFilt


Out[317]:
array([[ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  0.,  1.,  1.,  1.,  1.,  1.,  0.,  0.],
       [ 0.,  0.,  1.,  1.,  1.,  1.,  1.,  0.,  0.],
       [ 0.,  0.,  1.,  1.,  0.,  1.,  1.,  0.,  0.],
       [ 0.,  0.,  1.,  1.,  1.,  1.,  1.,  0.,  0.],
       [ 0.,  0.,  1.,  1.,  1.,  1.,  1.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.]])

In [318]:
# =============================================================================
# Gernerate data
# =============================================================================

sizeGrid = 9 # size of the grid
nObs = 6 # number of observations
K = 1 # number of features

# set coefficients
b0 = np.random.normal(5, 2, 1)
b1 = np.random.normal(5, 2, K).reshape(K,1) # coef for own characteristics
b2 = np.random.normal(5, 2, K).reshape(K,1) # coef for neighbor characteristics
errStd = 0 # error added to Y
cutW = distance_matrix([[4,4]], [[4+2,4+2]]) # cut distance in W all cells not more then 2 away

In [319]:
def generate_landscape(sizeGrid,nObs,K,b0,b1,b2,errStd,cutW):
    """
    Generate a landscape with a fixed number of observations (nObs)
    
    Inputs
        nObs = Number of farms
        K = Number of characteristics X
        b0 = constant
        b1 = coef of own effect
        b2 = coef of WX
        errStd = std of error terms
        cutW = disctance cut off
    """ 
    

    # Create location of observations
    locOwn = np.array([[int(sizeGrid/2),int(sizeGrid/2)]])
    while  True:
        loc = np.random.randint(0,sizeGrid, size=(nObs,2))
        locAll = np.concatenate((locOwn, loc), axis=0)
        # Make sure that farm are not one same location
        locAll = np.unique(locAll, axis=0)
        if nObs+1 == locAll.shape[0]:
            break

    # Create own characteristics
    X = np.random.randint(0,100, size=(nObs,K))
    Xown = np.random.randint(0,100, size=(1,K))
    # Create spatial weigthing matrix W
    W = distance_matrix(locOwn, loc)<=cutW
    row_sum = W.sum(axis=1,keepdims=True) # calc row sum
    if row_sum!=0:
        W = W/row_sum  # row standardize

    # Create error
    err = np.random.normal(0, errStd, 1)
    # Calcualte Y
    Y = b0 + np.matmul(Xown,b1)+ np.matmul(np.matmul(W,X),b2)+err

    assert(Y.shape==(1,1))

    maps = np.zeros((sizeGrid,sizeGrid,K))

    # 
    for k in range(0,K):
        I = np.concatenate((locOwn[:,0],loc[:,0]),axis=0)
        J = np.concatenate((locOwn[:,1],loc[:,1]),axis=0)
        V = np.concatenate((Xown[:,k],X[:,k]),axis=0)
        A = sparse.coo_matrix((V,(I,J)),shape=(sizeGrid,sizeGrid))
        maps[:,:,k] = A.todense()
    #
    return maps,Y,X,Xown,W,loc,locOwn

In [320]:
def generate_landscape2(sizeGrid,nobsMax,K,b0,b1,b2,errStd,cutW):
    """
    Generate a landscape with a random number  of observations, only the 
    maximum number of neighboring Observations is set (nObsMax)
    
    Inputs
        nObs = Number of farms
        K = Number of characteristics X
        b0 = constant
        b1 = coef of own effect
        b2 = coef of WX
        errStd = std of error terms
        cutW = disctance cut off
    """ 
    

    # Draw nObs 
    nObs = int(np.random.randint(0,nobsMax, size=(1,1)))
    
    # Create location of observations
    locOwn = np.array([[int(sizeGrid/2),int(sizeGrid/2)]])
    while  True:
        loc = np.random.randint(0,sizeGrid, size=(nObs,2))
        locAll = np.concatenate((locOwn, loc), axis=0)
        # Make sure that farm are not one same location
        locAll = np.unique(locAll, axis=0)
        if nObs+1 == locAll.shape[0]:
            break

    # Create own characteristics
    X = np.random.randint(0,100, size=(nObs,K))
    Xown = np.random.randint(0,100, size=(1,K))
    # Create spatial weigthing matrix W
    W = distance_matrix(locOwn, loc)<=cutW
    row_sum = W.sum(axis=1,keepdims=True) # calc row sum
    if row_sum!=0:
        W = W/row_sum  # row standardize

    # Create error
    err = np.random.normal(0, errStd, 1)
    # Calcualte Y
    Y = b0 + np.matmul(Xown,b1)+ np.matmul(np.matmul(W,X),b2)+err

    assert(Y.shape==(1,1))

    maps = np.zeros((sizeGrid,sizeGrid,K))

    # 
    for k in range(0,K):
        I = np.concatenate((locOwn[:,0],loc[:,0]),axis=0)
        J = np.concatenate((locOwn[:,1],loc[:,1]),axis=0)
        V = np.concatenate((Xown[:,k],X[:,k]),axis=0)
        A = sparse.coo_matrix((V,(I,J)),shape=(sizeGrid,sizeGrid))
        maps[:,:,k] = A.todense()
    #
    return maps,Y,X,Xown,W,loc,locOwn

In [322]:


In [324]:
maps,y,x,xown,w,loc,locOwn = generate_landscape3(sizeGrid,nObs,K,b0,b1,b2,errStd,cutW)

print("y: \n",y)
print("x: \n",x)
print("xown: \n",xown)

print("map k=0: \n",maps[:,:,0])
#print("map k=1: \n",maps[:,:,1])
#print("map k=2: \n",maps[:,:,2])

print("loc: \n",loc)
print("w: \n",w)
print("locOwn: \n",locOwn)


y: 
 [[ 702.9]]
x: 
 [[ 3]
 [16]]
xown: 
 [[77]]
map k=0: 
 [[  0.   0.   0.   0.   0.   0.   0.   0.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   0.   0.]
 [  0.   0.  16.   0.   0.   0.   0.   0.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   0.   0.]
 [  0.   0.   0.   0.  77.   0.   0.   0.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   0.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   0.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   3.   0.]
 [  0.   0.   0.   0.   0.   0.   0.   0.   0.]]
loc: 
 [[7 7]
 [2 2]]
w: 
 [[False  True]]
locOwn: 
 [[4 4]]

Keras CNN model


In [325]:
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from scipy.spatial import distance_matrix
from scipy import sparse
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split

In [9]:
def mapToX2Canal(N,nObsMax):
    # %%

    #X = np.zeros((N,sizeGrid,sizeGrid,K))
    X = np.zeros((N,sizeGrid,sizeGrid,2))
    Y = np.zeros((N,1))

    for i in range(0,N):
        #
        maps,y,x,xown,w,loc,locOwn = generate_landscape2(sizeGrid,nObsMax,K,b0,b1,b2,errStd,cutW)

        Y[i,:] = y
        #X[i,:,:,:] = maps
        #X[i,:,:,:] = maps

        X[i,:,:,:] = maps
        X[i,4,4,0] = 0
        X[i,:,:,1] = np.zeros((sizeGrid,sizeGrid))
        X[i,4,4,1] = maps[4,4,0]

    return X,Y

In [10]:
def mapToX(N,nObsMax):
    # %%

    #X = np.zeros((N,sizeGrid,sizeGrid,K))
    X = np.zeros((N,sizeGrid,sizeGrid,1))
    Y = np.zeros((N,1))

    for i in range(0,N):
        #
        maps,y,x,xown,w,loc,locOwn = generate_landscape2(sizeGrid,nObsMax,K,b0,b1,b2,errStd,cutW)

        Y[i,:] = y
        X[i,:,:,:] = maps
        
    return X,Y

In [11]:
def standardSplit(X,Y,test_size=0.1,random_state=42):

    # %% standardized features and targets
    x_min_max = MinMaxScaler()
    y_min_max = MinMaxScaler()
    X_minmax = x_min_max.fit_transform(X.reshape(X.shape[0],-1)).reshape(X.shape)
    Y_minmax  = y_min_max.fit_transform(Y)

    # %% Split sample in test and training_set
    x_train, x_test, y_train, y_test = train_test_split(X_minmax, Y_minmax, test_size=test_size, random_state=random_state)
    
    return x_min_max, y_min_max, x_train, x_test, y_train, y_test

In [12]:
#model 0
def model0(input_shape,num_classes):
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3),
                     activation='relu',
                     input_shape=input_shape))
    #model.add(Conv2D(64, (3, 3), activation='relu'))
    #model.add(MaxPooling2D(pool_size=(2, 2)))
    #model.add(Dropout(0.25))
    model.add(Flatten())
    #model.add(Dense(128, activation='relu'))
    model.add(Dense(128, activation='relu'))
    #model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='relu'))

    model.compile(loss='mean_squared_error',
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['mae'])
    return model

In [13]:
#model 1
def model1(input_shape,num_classes):
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3),
                     activation='relu',
                     input_shape=input_shape))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    #model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(128, activation='relu'))
    #model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='relu'))

    model.compile(loss='mean_squared_error',
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['mae'])
    return model

In [14]:
#model 2
def model2(input_shape,num_classes):
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3),
                     activation='relu',padding='same',
                     input_shape=input_shape))
    model.add(Conv2D(64, (3, 3), activation='relu',padding='same',))
    #model.add(MaxPooling2D(pool_size=(2, 2)))
    #model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(128, activation='relu'))
    #model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='relu'))

    model.compile(loss='mean_squared_error',
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['mae'])
    return model

In [15]:
#model 3
def model3(input_shape,num_classes):
    model = Sequential()
    model.add(Conv2D(16, kernel_size=(3, 3),
                     activation='relu',
                     input_shape=input_shape, name='conv1'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(32, kernel_size=(3, 3),
                 activation='relu',
                 name='conv2'))
    #model.add(Conv2D(2, kernel_size=(3, 3),
    #         activation='relu',
    #         name='conv3'))
    #model.add(Conv2D(64, (3, 3), activation='relu'))

    #model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(64, activation='relu'))
    #model.add(Dense(128, activation='relu'))
    #model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='relu'))

    model.compile(loss='mean_squared_error',
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['mae'])
    return model

In [238]:
#model 4
def model4(input_shape,num_classes):
    model = Sequential()
    model.add(Conv2D(2, kernel_size=(4, 4),
                     activation='relu',
                     input_shape=input_shape, name='conv1'))
    #model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(4, kernel_size=(3, 3),
                 activation='relu',
                 name='conv2'))
    model.add(Conv2D(8, kernel_size=(2, 2),
                 activation='relu',
                 name='conv3'))
    #model.add(Conv2D(2, kernel_size=(3, 3),
    #         activation='relu',
    #         name='conv3'))
    #model.add(Conv2D(64, (3, 3), activation='relu'))

    #model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(12, activation='relu',name='dens1'))
    #model.add(Dense(128, activation='relu'))
    #model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='relu',name='dens2'))

    model.compile(loss='mean_squared_error',
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['mae'])
    return model

Gernerate data


In [16]:
sizeGrid = 9 # size of the grid
nObs = 6 # number of observations
K = 1 # number of features
N = 60000
nObsMax = 20

# set coefficients
b0 = np.random.normal(5, 2, 1)
b1 = np.random.normal(5, 2, K).reshape(K,1) # coef for own characteristics
b2 = np.random.normal(5, 2, K).reshape(K,1) # coef for neighbor characteristics
errStd = 0 # error added to Y
cutW = distance_matrix([[4,4]], [[4+2,4+2]]) # cut distance in W all cells not more then 2 away


# Generate data 
X,Y = mapToX(N,nObsMax)
#X,Y = mapToX2Canal(N,nObsMax)

# Standardize data and split in training and test
x_min_max, y_min_max, x_train, x_test, y_train, y_test = standardSplit(X,Y)

In [17]:
# %%
def r2(y_true, y_pred):
    """Calcualte and return R2.
    
    y_true -- the observed values
    y_pred -- the prediced values
    """
    SS_res =  np.sum(np.square(y_true - y_pred)) 
    SS_tot = np.sum(np.square(y_true - np.mean(y_true))) 
    return ( 1 - SS_res/SS_tot )

Choose model


In [269]:
input_shape = X[0,:,:,:].shape
num_classes = 1

#model = model0(input_shape,num_classes)
#model = model1(input_shape,num_classes)
#model = model2(input_shape,num_classes)
model = model3(input_shape,num_classes)
#model = model4(input_shape,num_classes)

In [240]:
tensorboardCall = keras.callbacks.TensorBoard(log_dir='./nn_spatial/run', 
                            histogram_freq=10,
                            write_graph=True, 
                            write_grads=False, 
                            write_images=False, 
                            embeddings_freq=0, 
                            embeddings_layer_names=None, 
                            embeddings_metadata=None)

Train model


In [271]:
batch_size = 128
epochs = 10

hist= model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test))


Train on 54000 samples, validate on 6000 samples
Epoch 1/10
54000/54000 [==============================] - 2s - loss: 0.0041 - mean_absolute_error: 0.0481 - val_loss: 0.0037 - val_mean_absolute_error: 0.0448
Epoch 2/10
54000/54000 [==============================] - 2s - loss: 0.0039 - mean_absolute_error: 0.0472 - val_loss: 0.0036 - val_mean_absolute_error: 0.0446
Epoch 3/10
54000/54000 [==============================] - 2s - loss: 0.0037 - mean_absolute_error: 0.0458 - val_loss: 0.0040 - val_mean_absolute_error: 0.0488
Epoch 4/10
54000/54000 [==============================] - 2s - loss: 0.0036 - mean_absolute_error: 0.0455 - val_loss: 0.0035 - val_mean_absolute_error: 0.0437
Epoch 5/10
54000/54000 [==============================] - 2s - loss: 0.0036 - mean_absolute_error: 0.0450 - val_loss: 0.0035 - val_mean_absolute_error: 0.0445
Epoch 6/10
54000/54000 [==============================] - 2s - loss: 0.0035 - mean_absolute_error: 0.0445 - val_loss: 0.0043 - val_mean_absolute_error: 0.0509
Epoch 7/10
54000/54000 [==============================] - 2s - loss: 0.0034 - mean_absolute_error: 0.0438 - val_loss: 0.0035 - val_mean_absolute_error: 0.0440
Epoch 8/10
54000/54000 [==============================] - 2s - loss: 0.0033 - mean_absolute_error: 0.0428 - val_loss: 0.0036 - val_mean_absolute_error: 0.0459
Epoch 9/10
54000/54000 [==============================] - 2s - loss: 0.0032 - mean_absolute_error: 0.0425 - val_loss: 0.0036 - val_mean_absolute_error: 0.0447
Epoch 10/10
54000/54000 [==============================] - 2s - loss: 0.0032 - mean_absolute_error: 0.0422 - val_loss: 0.0030 - val_mean_absolute_error: 0.0399

In [275]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 7, 7, 16)          160       
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 3, 3, 16)          0         
_________________________________________________________________
conv2 (Conv2D)               (None, 1, 1, 32)          4640      
_________________________________________________________________
flatten_8 (Flatten)          (None, 32)                0         
_________________________________________________________________
dense_14 (Dense)             (None, 64)                2112      
_________________________________________________________________
dense_15 (Dense)             (None, 1)                 65        
=================================================================
Total params: 6,977
Trainable params: 6,977
Non-trainable params: 0
_________________________________________________________________

In [200]:
l1 = model.get_layer('conv1')
l1.get_weights()[0][:,:,0,0]


Out[200]:
array([[ 0.2, -0.7,  0.2],
       [ 0.3,  0.1, -0.7],
       [-0.5, -0.7, -0.3]], dtype=float32)

In [93]:
print(hist.history)


{'loss': [0.0024865771004999124, 0.0024603699064227164, 0.0023128733933316888, 0.0021759412887471692, 0.0020651788402486731, 0.0019897642819279872, 0.0018330787767276719, 0.0018652071051161597, 0.0018286741286930111, 0.0016909202046830345], 'val_mean_absolute_error': [0.036505337615807848, 0.034979474405447644, 0.031514464408159255, 0.034146842579046884, 0.030385081837574639, 0.037837908814350764, 0.027459301720062893, 0.041256732950607937, 0.027224423572421073, 0.030276246080795925], 'mean_absolute_error': [0.036968810705123124, 0.036720353373774779, 0.035705618909663624, 0.034340180346259366, 0.033298837486240601, 0.032570631140912021, 0.031053587523323517, 0.031604946201598205, 0.031307050999667907, 0.029966303044447192], 'val_loss': [0.0024312197041387361, 0.0023548891202857095, 0.0020208633334065475, 0.0024172085889925558, 0.00176766185524563, 0.0025843806434422733, 0.0015974428274979195, 0.0027359500744690499, 0.001632327379969259, 0.0016920321617896357]}

In [35]:
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])


Test loss: 0.00164330925823
Test accuracy: 0.0300901780128

In [273]:
# %% Model
Yhat_test = model.predict(x_test,batch_size=32)

oY_test = y_min_max.inverse_transform(y_test)
oY_hat = y_min_max.inverse_transform(Yhat_test)
#oY_test = y_test
#oY_hat = Yhat_test
# %%
fig, ax = plt.subplots()
ax.scatter(oY_test, oY_hat, edgecolors=(0, 0, 0))
ax.plot([oY_test.min(), oY_test.max()], [oY_test.min(), oY_test.max()], 'k--', lw=4)
ax.set_xlabel('Measured')
ax.set_ylabel('Predicted')
plt.show()

r2Model = r2(oY_test,oY_hat)
print("R2 Model: ",r2Model)


R2 Model:  0.920590788407

In [ ]:


In [39]:
# %% Model that only knows own effect but this one perfectly
oX_test = x_min_max.inverse_transform(x_test.reshape(x_test.shape[0],-1)).reshape(x_test.shape)
oY_test = y_min_max.inverse_transform(y_test)
oY_hat = b0+b1*oX_test[:,4,4,0]
#oY_test = y_test
#oY_hat = Yhat_test
# %%
fig, ax = plt.subplots()
ax.scatter(oY_test, oY_hat, edgecolors=(0, 0, 0))
ax.plot([oY_test.min(), oY_test.max()], [oY_test.min(), oY_test.max()], 'k--', lw=4)
ax.set_xlabel('Measured')
ax.set_ylabel('Predicted')
plt.show()

r2Model = r2(oY_test,oY_hat)
print("R2 Model: ",r2Model)


R2 Model:  -13087.8825299

In [312]:
'''Visualization of the filters of VGG16, via gradient ascent in input space.
This script can run on CPU in a few minutes.
Results example: http://i.imgur.com/4nj4KjN.jpg
'''
from __future__ import print_function

from scipy.misc import imsave
import numpy as np
import time
from keras.applications import vgg16
from keras import backend as K
# dimensions of the generated pictures for each filter.
img_width = 9
img_height = 9
input_channel = 1

# the name of the layer we want to visualize
# (see model definition at keras/applications/vgg16.py)
layer_name = 'dense_15'

# util function to convert a tensor into a valid image


def deprocess_image(x):
    #xs = x.reshape(1,x.shape[0],x.shape[1],x.shape[2])
    #xo = x_min_max.inverse_transform(xs.reshape(xs.shape[0],-1)).reshape(xs.shape)
    #xe = xo.reshape(x.shape[0],x.shape[1],x.shape[2])
    # normalize tensor: center on 0., ensure std is 0.1
    #x -= x.mean()
    x -= 0.5
    x /= (x.std() + 1e-5)
    x *= 0.1

    # clip to [0, 1]
    x += 0.5
    x = np.clip(x, 0, 1)

    # convert to RGB array
    x *= 255
    if K.image_data_format() == 'channels_first':
        x = x.transpose((1, 2, 0))
    x = np.clip(x, 0, 255).astype('uint8')
    return x


model.summary()

# this is the placeholder for the input images
input_img = model.input

# get the symbolic outputs of each "key" layer (we gave them unique names).
layer_dict = dict([(layer.name, layer) for layer in model.layers[1:]])


def normalize(x):
    # utility function to normalize a tensor by its L2 norm
    return x / (K.sqrt(K.mean(K.square(x))) + 1e-5)


kept_filters = []
for filter_index in range(1):
    # we only scan through the first 200 filters,
    # but there are actually 512 of them
    # Hugo: Changed to 12
    print('Processing filter %d' % filter_index)
    start_time = time.time()

    # we build a loss function that maximizes the activation
    # of the nth filter of the layer considered
    layer_output = layer_dict[layer_name].output
    loss = K.mean(layer_output)
    
    # we compute the gradient of the input picture wrt this loss
    grads = K.gradients(loss, input_img)[0]

    # normalization trick: we normalize the gradient
    grads = normalize(grads)

    # this function returns the loss and grads given the input picture
    iterate = K.function([input_img], [loss, grads])

    # step size for gradient ascent
    step = 0.1

    # we start from a gray image with some random noise
    if K.image_data_format() == 'channels_first':
        input_img_data = np.random.random((1, input_channel, img_width, img_height))
    else:
        input_img_data = np.random.random((1, img_width, img_height, input_channel))
    #input_img_data = (input_img_data - 0.5) * 20 + 128
    input_img_data = np.zeros((1, img_width, img_height, input_channel))
    # we run gradient ascent for 20 steps
    for i in range(20):
        loss_value, grads_value = iterate([input_img_data])
        input_img_data += grads_value * step

        print('Current loss value:', loss_value)
        if loss_value <= 0.:
            # some filters get stuck to 0, we can skip them
            break

    # decode the resulting input image
    if loss_value > 0:
        img = deprocess_image(input_img_data[0])
        kept_filters.append((img, loss_value))
    end_time = time.time()
    print('Filter %d processed in %ds' % (filter_index, end_time - start_time))



# the filters that have the highest loss are assumed to be better-looking.
# we will only keep the top 64 filters.
kept_filters.sort(key=lambda x: x[1], reverse=True)
#kept_filters = kept_filters[:n * n]

# build a black picture with enough space for
# our 8 x 8 filters of size 128 x 128, with a 5px margin in between
n = len(kept_filters)
zoomFact = 50
img_width =9*zoomFact
img_height = 9*zoomFact
margin = 5
width = img_width +  margin
height = n * img_height + (n - 1) * margin
stitched_filters = np.zeros(( height,width, 3))

l = 0
# fill the picture with our saved filters
for i in range(0,n):
        #kept_filters[i][0][4,4] = 0
        img, loss = kept_filters[i]
        
        #img += abs(img.min())
        #img *= 255.0/img.max() 
        stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:-margin, :] = zoom(img,[zoomFact,zoomFact,1],order=0)
        l +=1
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 7, 7, 16)          160       
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 3, 3, 16)          0         
_________________________________________________________________
conv2 (Conv2D)               (None, 1, 1, 32)          4640      
_________________________________________________________________
flatten_8 (Flatten)          (None, 32)                0         
_________________________________________________________________
dense_14 (Dense)             (None, 64)                2112      
_________________________________________________________________
dense_15 (Dense)             (None, 1)                 65        
=================================================================
Total params: 6,977
Trainable params: 6,977
Non-trainable params: 0
_________________________________________________________________
Processing filter 0
Current loss value: 0.0141055
Current loss value: 0.537152
Current loss value: 2.05368
Current loss value: 3.84711
Current loss value: 5.78991
Current loss value: 7.79087
Current loss value: 9.82979
Current loss value: 11.8442
Current loss value: 13.844
Current loss value: 15.8447
Current loss value: 17.8694
Current loss value: 19.8662
Current loss value: 21.8778
Current loss value: 23.9145
Current loss value: 25.944
Current loss value: 27.9649
Current loss value: 30.0166
Current loss value: 32.0608
Current loss value: 34.0722
Current loss value: 36.1233
Filter 0 processed in 0s

In [314]:
img[:,:,0]


Out[314]:
array([[120, 119, 123, 124, 105, 120, 121, 120, 120],
       [121, 117, 120, 114, 111, 104, 117, 119, 120],
       [117, 116, 116, 132,  35,  95,  94, 123, 120],
       [122,  79, 101, 102,  98,  57, 110, 118, 120],
       [114,  95,  52, 153, 142, 160,  27,  89, 120],
       [120, 108, 132,  25, 137, 110, 117, 102, 120],
       [115, 115, 109,  96,   8, 140, 132, 126, 120],
       [120, 119, 121, 117, 125,  85, 119, 119, 120],
       [120, 120, 120, 120, 120, 120, 120, 120, 120]], dtype=uint8)

In [294]:
'''Visualization of the filters of VGG16, via gradient ascent in input space.
This script can run on CPU in a few minutes.
Results example: http://i.imgur.com/4nj4KjN.jpg
'''
from __future__ import print_function

from scipy.misc import imsave
import numpy as np
import time
from keras.applications import vgg16
from keras import backend as K

# dimensions of the generated pictures for each filter.
img_width = 9
img_height = 9
input_channel = 1

# the name of the layer we want to visualize
# (see model definition at keras/applications/vgg16.py)
layer_name = 'dense_15'

# util function to convert a tensor into a valid image


def deprocess_image(x):
    #xs = x.reshape(1,x.shape[0],x.shape[1],x.shape[2])
    #xo = x_min_max.inverse_transform(xs.reshape(xs.shape[0],-1)).reshape(xs.shape)
    #xe = xo.reshape(x.shape[0],x.shape[1],x.shape[2])
    """
    # normalize tensor: center on 0., ensure std is 0.1
    x -= x.mean()
    x /= (x.std() + 1e-5)
    x *= 0.1

    # clip to [0, 1]
    x += 0.5
    x = np.clip(x, 0, 1)

    # convert to RGB array
    x *= 255
    if K.image_data_format() == 'channels_first':
        x = x.transpose((1, 2, 0))
    #x = np.clip(x, 0, 255).astype('uint8')
    """
    return x


model.summary()

# this is the placeholder for the input images
input_img = model.input

# get the symbolic outputs of each "key" layer (we gave them unique names).
layer_dict = dict([(layer.name, layer) for layer in model.layers[1:]])


def normalize(x):
    # utility function to normalize a tensor by its L2 norm
    return x / (K.sqrt(K.mean(K.square(x))) + 1e-5)


kept_filters = []
for filter_index in range(1):
    # we only scan through the first 200 filters,
    # but there are actually 512 of them
    # Hugo: Changed to 12
    print('Processing filter %d' % filter_index)
    start_time = time.time()

    # we build a loss function that maximizes the activation
    # of the nth filter of the layer considered
    layer_output = layer_dict[layer_name].output
    loss = K.mean(layer_output)
    
    # we compute the gradient of the input picture wrt this loss
    grads = K.gradients(loss, input_img)[0]

    # normalization trick: we normalize the gradient
    grads = normalize(grads)

    # this function returns the loss and grads given the input picture
    iterate = K.function([input_img], [loss, grads])

    # step size for gradient ascent
    step = 1.

    # we start from a gray image with some random noise
    if K.image_data_format() == 'channels_first':
        input_img_data = np.random.random((1, input_channel, img_width, img_height))
    else:
        input_img_data = np.random.random((1, img_width, img_height, input_channel))
    #input_img_data = (input_img_data - 0.5) * 20 + 128

    # we run gradient ascent for 20 steps
    for i in range(2000):
        loss_value, grads_value = iterate([input_img_data])
        input_img_data += grads_value * step

        print('Current loss value:', loss_value)
        if loss_value <= 0.:
            # some filters get stuck to 0, we can skip them
            break

    # decode the resulting input image
    if loss_value > 0:
        img = deprocess_image(input_img_data[0])
        kept_filters.append((img, loss_value))
    end_time = time.time()
    print('Filter %d processed in %ds' % (filter_index, end_time - start_time))



# the filters that have the highest loss are assumed to be better-looking.
# we will only keep the top 64 filters.
kept_filters.sort(key=lambda x: x[1], reverse=True)
#kept_filters = kept_filters[:n * n]

# build a black picture with enough space for
# our 8 x 8 filters of size 128 x 128, with a 5px margin in between
n = len(kept_filters)
zoomFact = 50
img_width =9*zoomFact
img_height = 9*zoomFact
margin = 5
width = img_width +  margin
height = n * img_height + (n - 1) * margin
stitched_filters = np.zeros(( height,width, 3))

l = 0
# fill the picture with our saved filters
for i in range(0,n):
        #kept_filters[i][0][4,4] = 0
        img, loss = kept_filters[i]
        
        #img += abs(img.min())
        #img *= 255.0/img.max() 
        stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:-margin, :] = zoom(img,[zoomFact,zoomFact,1],order=0)
        l +=1
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 7, 7, 16)          160       
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 3, 3, 16)          0         
_________________________________________________________________
conv2 (Conv2D)               (None, 1, 1, 32)          4640      
_________________________________________________________________
flatten_8 (Flatten)          (None, 32)                0         
_________________________________________________________________
dense_14 (Dense)             (None, 64)                2112      
_________________________________________________________________
dense_15 (Dense)             (None, 1)                 65        
=================================================================
Total params: 6,977
Trainable params: 6,977
Non-trainable params: 0
_________________________________________________________________
Processing filter 0
Current loss value: 0.362228
Current loss value: 10.7791
Current loss value: 26.6021
Current loss value: 43.9561
Current loss value: 61.6491
Current loss value: 79.7254
Current loss value: 97.5793
Current loss value: 115.305
Current loss value: 133.527
Current loss value: 151.644
Current loss value: 169.631
Current loss value: 187.746
Current loss value: 206.187
Current loss value: 224.481
Current loss value: 243.191
Current loss value: 261.782
Current loss value: 280.093
Current loss value: 298.423
Current loss value: 316.985
Current loss value: 335.432
Current loss value: 353.967
Current loss value: 372.315
Current loss value: 390.517
Current loss value: 409.071
Current loss value: 427.636
Current loss value: 445.959
Current loss value: 464.419
Current loss value: 482.796
Current loss value: 501.12
Current loss value: 519.821
Current loss value: 538.224
Current loss value: 556.684
Current loss value: 575.552
Current loss value: 593.932
Current loss value: 612.836
Current loss value: 631.269
Current loss value: 649.704
Current loss value: 668.009
Current loss value: 687.013
Current loss value: 705.532
Current loss value: 724.56
Current loss value: 743.0
Current loss value: 761.567
Current loss value: 780.128
Current loss value: 799.121
Current loss value: 817.653
Current loss value: 836.361
Current loss value: 854.607
Current loss value: 873.583
Current loss value: 892.218
Current loss value: 911.048
Current loss value: 929.598
Current loss value: 948.477
Current loss value: 966.678
Current loss value: 985.697
Current loss value: 1004.31
Current loss value: 1023.09
Current loss value: 1041.85
Current loss value: 1060.74
Current loss value: 1079.21
Current loss value: 1097.83
Current loss value: 1116.52
Current loss value: 1135.41
Current loss value: 1153.94
Current loss value: 1172.54
Current loss value: 1191.44
Current loss value: 1210.18
Current loss value: 1228.73
Current loss value: 1247.26
Current loss value: 1265.96
Current loss value: 1284.85
Current loss value: 1303.5
Current loss value: 1322.39
Current loss value: 1340.92
Current loss value: 1359.85
Current loss value: 1378.54
Current loss value: 1397.38
Current loss value: 1416.08
Current loss value: 1434.96
Current loss value: 1453.63
Current loss value: 1472.33
Current loss value: 1490.68
Current loss value: 1509.66
Current loss value: 1528.47
Current loss value: 1547.29
Current loss value: 1565.94
Current loss value: 1584.49
Current loss value: 1603.35
Current loss value: 1621.59
Current loss value: 1640.74
Current loss value: 1659.19
Current loss value: 1677.81
Current loss value: 1696.3
Current loss value: 1715.17
Current loss value: 1733.9
Current loss value: 1752.74
Current loss value: 1771.51
Current loss value: 1790.21
Current loss value: 1808.6
Current loss value: 1827.28
Current loss value: 1846.21
Current loss value: 1864.9
Current loss value: 1883.8
Current loss value: 1902.41
Current loss value: 1920.94
Current loss value: 1939.71
Current loss value: 1958.51
Current loss value: 1977.15
Current loss value: 1996.09
Current loss value: 2014.87
Current loss value: 2033.63
Current loss value: 2052.2
Current loss value: 2071.04
Current loss value: 2089.82
Current loss value: 2108.45
Current loss value: 2127.41
Current loss value: 2146.15
Current loss value: 2164.87
Current loss value: 2183.5
Current loss value: 2202.41
Current loss value: 2221.11
Current loss value: 2239.75
Current loss value: 2258.7
Current loss value: 2277.1
Current loss value: 2296.17
Current loss value: 2314.81
Current loss value: 2333.75
Current loss value: 2352.52
Current loss value: 2371.13
Current loss value: 2389.82
Current loss value: 2408.43
Current loss value: 2427.4
Current loss value: 2445.98
Current loss value: 2464.75
Current loss value: 2483.44
Current loss value: 2502.34
Current loss value: 2520.85
Current loss value: 2539.66
Current loss value: 2558.6
Current loss value: 2577.16
Current loss value: 2595.75
Current loss value: 2614.58
Current loss value: 2633.44
Current loss value: 2652.15
Current loss value: 2670.95
Current loss value: 2689.6
Current loss value: 2708.34
Current loss value: 2726.98
Current loss value: 2745.82
Current loss value: 2764.67
Current loss value: 2783.4
Current loss value: 2802.06
Current loss value: 2820.87
Current loss value: 2839.69
Current loss value: 2858.33
Current loss value: 2877.18
Current loss value: 2896.02
Current loss value: 2914.67
Current loss value: 2933.36
Current loss value: 2952.18
Current loss value: 2971.02
Current loss value: 2989.51
Current loss value: 3008.37
Current loss value: 3026.83
Current loss value: 3045.78
Current loss value: 3064.66
Current loss value: 3083.42
Current loss value: 3102.35
Current loss value: 3120.97
Current loss value: 3139.9
Current loss value: 3158.57
Current loss value: 3177.1
Current loss value: 3195.94
Current loss value: 3214.79
Current loss value: 3233.61
Current loss value: 3252.34
Current loss value: 3271.19
Current loss value: 3290.0
Current loss value: 3308.9
Current loss value: 3327.46
Current loss value: 3346.13
Current loss value: 3364.88
Current loss value: 3383.73
Current loss value: 3402.57
Current loss value: 3421.37
Current loss value: 3440.13
Current loss value: 3458.88
Current loss value: 3477.76
Current loss value: 3496.15
Current loss value: 3514.99
Current loss value: 3533.39
Current loss value: 3552.42
Current loss value: 3571.08
Current loss value: 3590.0
Current loss value: 3608.78
Current loss value: 3627.44
Current loss value: 3646.07
Current loss value: 3664.74
Current loss value: 3683.76
Current loss value: 3702.39
Current loss value: 3721.3
Current loss value: 3739.72
Current loss value: 3758.75
Current loss value: 3777.42
Current loss value: 3796.26
Current loss value: 3815.03
Current loss value: 3833.66
Current loss value: 3852.61
Current loss value: 3871.2
Current loss value: 3890.24
Current loss value: 3909.02
Current loss value: 3927.89
Current loss value: 3946.62
Current loss value: 3965.42
Current loss value: 3984.26
Current loss value: 4002.7
Current loss value: 4021.65
Current loss value: 4040.13
Current loss value: 4059.14
Current loss value: 4077.85
Current loss value: 4096.76
Current loss value: 4115.56
Current loss value: 4134.26
Current loss value: 4152.87
Current loss value: 4171.52
Current loss value: 4190.5
Current loss value: 4209.32
Current loss value: 4228.06
Current loss value: 4246.69
Current loss value: 4265.57
Current loss value: 4284.41
Current loss value: 4303.19
Current loss value: 4321.98
Current loss value: 4340.6
Current loss value: 4359.46
Current loss value: 4377.92
Current loss value: 4396.89
Current loss value: 4415.78
Current loss value: 4434.57
Current loss value: 4453.57
Current loss value: 4472.07
Current loss value: 4490.78
Current loss value: 4509.49
Current loss value: 4528.43
Current loss value: 4547.2
Current loss value: 4565.99
Current loss value: 4584.55
Current loss value: 4603.46
Current loss value: 4622.43
Current loss value: 4641.12
Current loss value: 4659.64
Current loss value: 4678.25
Current loss value: 4697.15
Current loss value: 4715.95
Current loss value: 4734.76
Current loss value: 4753.67
Current loss value: 4772.32
Current loss value: 4791.02
Current loss value: 4809.82
Current loss value: 4828.78
Current loss value: 4847.41
Current loss value: 4866.18
Current loss value: 4885.17
Current loss value: 4903.8
Current loss value: 4922.74
Current loss value: 4941.38
Current loss value: 4960.42
Current loss value: 4979.15
Current loss value: 4997.95
Current loss value: 5016.55
Current loss value: 5035.3
Current loss value: 5054.3
Current loss value: 5072.95
Current loss value: 5091.9
Current loss value: 5110.54
Current loss value: 5129.58
Current loss value: 5148.26
Current loss value: 5167.13
Current loss value: 5185.68
Current loss value: 5204.42
Current loss value: 5223.4
Current loss value: 5242.05
Current loss value: 5260.74
Current loss value: 5279.47
Current loss value: 5298.51
Current loss value: 5317.22
Current loss value: 5336.12
Current loss value: 5354.84
Current loss value: 5373.57
Current loss value: 5392.47
Current loss value: 5411.17
Current loss value: 5430.01
Current loss value: 5448.88
Current loss value: 5467.53
Current loss value: 5486.24
Current loss value: 5505.08
Current loss value: 5523.92
Current loss value: 5542.38
Current loss value: 5561.35
Current loss value: 5580.11
Current loss value: 5598.85
Current loss value: 5617.46
Current loss value: 5636.4
Current loss value: 5655.31
Current loss value: 5674.06
Current loss value: 5692.9
Current loss value: 5711.56
Current loss value: 5730.31
Current loss value: 5749.13
Current loss value: 5767.93
Current loss value: 5786.53
Current loss value: 5805.42
Current loss value: 5824.32
Current loss value: 5843.09
Current loss value: 5862.05
Current loss value: 5880.68
Current loss value: 5899.42
Current loss value: 5918.25
Current loss value: 5936.98
Current loss value: 5955.95
Current loss value: 5974.52
Current loss value: 5993.3
Current loss value: 6012.05
Current loss value: 6030.94
Current loss value: 6049.49
Current loss value: 6068.3
Current loss value: 6087.3
Current loss value: 6105.92
Current loss value: 6124.9
Current loss value: 6143.48
Current loss value: 6162.54
Current loss value: 6181.28
Current loss value: 6200.12
Current loss value: 6218.68
Current loss value: 6237.42
Current loss value: 6256.4
Current loss value: 6275.09
Current loss value: 6293.97
Current loss value: 6312.43
Current loss value: 6331.48
Current loss value: 6350.29
Current loss value: 6369.14
Current loss value: 6387.62
Current loss value: 6406.13
Current loss value: 6425.15
Current loss value: 6443.83
Current loss value: 6462.76
Current loss value: 6481.54
Current loss value: 6500.3
Current loss value: 6518.9
Current loss value: 6537.84
Current loss value: 6556.61
Current loss value: 6575.38
Current loss value: 6593.92
Current loss value: 6612.6
Current loss value: 6631.54
Current loss value: 6650.37
Current loss value: 6669.19
Current loss value: 6688.09
Current loss value: 6706.72
Current loss value: 6725.45
Current loss value: 6744.23
Current loss value: 6762.97
Current loss value: 6781.8
Current loss value: 6800.65
Current loss value: 6819.54
Current loss value: 6838.22
Current loss value: 6857.1
Current loss value: 6875.84
Current loss value: 6894.82
Current loss value: 6913.27
Current loss value: 6932.14
Current loss value: 6950.63
Current loss value: 6969.58
Current loss value: 6988.51
Current loss value: 7007.25
Current loss value: 7026.26
Current loss value: 7044.87
Current loss value: 7063.84
Current loss value: 7082.44
Current loss value: 7101.32
Current loss value: 7119.94
Current loss value: 7138.64
Current loss value: 7157.35
Current loss value: 7176.16
Current loss value: 7195.17
Current loss value: 7213.82
Current loss value: 7232.79
Current loss value: 7251.16
Current loss value: 7270.12
Current loss value: 7288.68
Current loss value: 7307.43
Current loss value: 7326.46
Current loss value: 7345.21
Current loss value: 7363.98
Current loss value: 7382.55
Current loss value: 7401.53
Current loss value: 7420.27
Current loss value: 7439.07
Current loss value: 7457.59
Current loss value: 7476.26
Current loss value: 7495.29
Current loss value: 7514.05
Current loss value: 7532.91
Current loss value: 7551.76
Current loss value: 7570.41
Current loss value: 7589.11
Current loss value: 7607.96
Current loss value: 7626.7
Current loss value: 7645.33
Current loss value: 7664.32
Current loss value: 7683.05
Current loss value: 7701.86
Current loss value: 7720.45
Current loss value: 7739.38
Current loss value: 7758.28
Current loss value: 7776.96
Current loss value: 7795.6
Current loss value: 7814.3
Current loss value: 7833.25
Current loss value: 7852.0
Current loss value: 7870.83
Current loss value: 7889.35
Current loss value: 7908.27
Current loss value: 7927.23
Current loss value: 7945.89
Current loss value: 7964.48
Current loss value: 7983.4
Current loss value: 8002.02
Current loss value: 8020.75
Current loss value: 8039.58
Current loss value: 8058.46
Current loss value: 8077.17
Current loss value: 8095.82
Current loss value: 8114.63
Current loss value: 8133.56
Current loss value: 8152.21
Current loss value: 8170.92
Current loss value: 8189.94
Current loss value: 8208.43
Current loss value: 8227.28
Current loss value: 8245.99
Current loss value: 8265.0
Current loss value: 8283.62
Current loss value: 8302.21
Current loss value: 8320.91
Current loss value: 8339.61
Current loss value: 8358.67
Current loss value: 8377.28
Current loss value: 8396.07
Current loss value: 8414.8
Current loss value: 8433.84
Current loss value: 8452.59
Current loss value: 8471.47
Current loss value: 8489.94
Current loss value: 8508.7
Current loss value: 8527.68
Current loss value: 8546.46
Current loss value: 8565.4
Current loss value: 8583.86
Current loss value: 8602.85
Current loss value: 8621.78
Current loss value: 8640.48
Current loss value: 8659.13
Current loss value: 8677.71
Current loss value: 8696.68
Current loss value: 8715.56
Current loss value: 8734.4
Current loss value: 8753.39
Current loss value: 8771.9
Current loss value: 8790.79
Current loss value: 8809.56
Current loss value: 8828.31
Current loss value: 8847.11
Current loss value: 8865.83
Current loss value: 8884.72
Current loss value: 8903.46
Current loss value: 8922.47
Current loss value: 8941.03
Current loss value: 8959.83
Current loss value: 8978.45
Current loss value: 8997.19
Current loss value: 9016.21
Current loss value: 9034.77
Current loss value: 9053.61
Current loss value: 9072.34
Current loss value: 9091.41
Current loss value: 9110.13
Current loss value: 9128.99
Current loss value: 9147.82
Current loss value: 9166.57
Current loss value: 9185.17
Current loss value: 9203.79
Current loss value: 9222.85
Current loss value: 9241.69
Current loss value: 9260.48
Current loss value: 9279.09
Current loss value: 9298.01
Current loss value: 9316.85
Current loss value: 9335.6
Current loss value: 9354.22
Current loss value: 9372.83
Current loss value: 9391.74
Current loss value: 9410.59
Current loss value: 9429.43
Current loss value: 9448.34
Current loss value: 9467.02
Current loss value: 9485.68
Current loss value: 9504.32
Current loss value: 9523.16
Current loss value: 9542.08
Current loss value: 9560.77
Current loss value: 9579.48
Current loss value: 9598.28
Current loss value: 9617.35
Current loss value: 9636.13
Current loss value: 9654.96
Current loss value: 9673.55
Current loss value: 9692.32
Current loss value: 9711.38
Current loss value: 9730.03
Current loss value: 9748.97
Current loss value: 9767.46
Current loss value: 9786.52
Current loss value: 9805.4
Current loss value: 9824.06
Current loss value: 9842.78
Current loss value: 9861.38
Current loss value: 9880.48
Current loss value: 9899.29
Current loss value: 9918.17
Current loss value: 9936.93
Current loss value: 9955.78
Current loss value: 9974.71
Current loss value: 9993.3
Current loss value: 10012.0
Current loss value: 10030.8
Current loss value: 10049.8
Current loss value: 10068.7
Current loss value: 10087.5
Current loss value: 10106.3
Current loss value: 10125.1
Current loss value: 10144.0
Current loss value: 10162.6
Current loss value: 10181.4
Current loss value: 10200.0
Current loss value: 10219.0
Current loss value: 10237.9
Current loss value: 10256.5
Current loss value: 10275.4
Current loss value: 10294.2
Current loss value: 10313.0
Current loss value: 10331.7
Current loss value: 10350.5
Current loss value: 10369.4
Current loss value: 10388.1
Current loss value: 10407.1
Current loss value: 10425.7
Current loss value: 10444.5
Current loss value: 10463.1
Current loss value: 10482.0
Current loss value: 10501.0
Current loss value: 10519.7
Current loss value: 10538.6
Current loss value: 10557.3
Current loss value: 10576.3
Current loss value: 10595.1
Current loss value: 10613.9
Current loss value: 10632.6
Current loss value: 10651.4
Current loss value: 10670.3
Current loss value: 10689.0
Current loss value: 10708.0
Current loss value: 10726.7
Current loss value: 10745.6
Current loss value: 10764.4
Current loss value: 10783.1
Current loss value: 10801.9
Current loss value: 10820.4
Current loss value: 10839.4
Current loss value: 10858.3
Current loss value: 10877.1
Current loss value: 10895.9
Current loss value: 10914.8
Current loss value: 10933.7
Current loss value: 10952.4
Current loss value: 10971.0
Current loss value: 10989.7
Current loss value: 11008.5
Current loss value: 11027.5
Current loss value: 11046.1
Current loss value: 11064.9
Current loss value: 11083.8
Current loss value: 11102.7
Current loss value: 11121.3
Current loss value: 11140.1
Current loss value: 11158.9
Current loss value: 11177.7
Current loss value: 11196.7
Current loss value: 11215.3
Current loss value: 11234.1
Current loss value: 11252.9
Current loss value: 11271.9
Current loss value: 11290.7
Current loss value: 11309.4
Current loss value: 11328.0
Current loss value: 11346.8
Current loss value: 11365.8
Current loss value: 11384.4
Current loss value: 11403.2
Current loss value: 11422.0
Current loss value: 11441.0
Current loss value: 11459.6
Current loss value: 11478.4
Current loss value: 11497.4
Current loss value: 11516.0
Current loss value: 11535.0
Current loss value: 11553.8
Current loss value: 11572.6
Current loss value: 11591.2
Current loss value: 11610.1
Current loss value: 11628.7
Current loss value: 11647.6
Current loss value: 11666.3
Current loss value: 11685.0
Current loss value: 11703.9
Current loss value: 11722.8
Current loss value: 11741.6
Current loss value: 11760.2
Current loss value: 11779.0
Current loss value: 11797.7
Current loss value: 11816.6
Current loss value: 11835.4
Current loss value: 11854.0
Current loss value: 11872.9
Current loss value: 11891.9
Current loss value: 11910.6
Current loss value: 11929.6
Current loss value: 11948.0
Current loss value: 11966.9
Current loss value: 11985.8
Current loss value: 12004.6
Current loss value: 12023.5
Current loss value: 12042.2
Current loss value: 12061.0
Current loss value: 12079.7
Current loss value: 12098.7
Current loss value: 12117.3
Current loss value: 12136.1
Current loss value: 12155.0
Current loss value: 12173.8
Current loss value: 12192.8
Current loss value: 12211.5
Current loss value: 12230.4
Current loss value: 12248.9
Current loss value: 12267.7
Current loss value: 12286.7
Current loss value: 12305.3
Current loss value: 12324.1
Current loss value: 12342.9
Current loss value: 12361.9
Current loss value: 12380.6
Current loss value: 12399.4
Current loss value: 12418.0
Current loss value: 12436.8
Current loss value: 12455.7
Current loss value: 12474.4
Current loss value: 12493.1
Current loss value: 12511.9
Current loss value: 12531.0
Current loss value: 12549.5
Current loss value: 12568.2
Current loss value: 12587.0
Current loss value: 12605.8
Current loss value: 12624.8
Current loss value: 12643.5
Current loss value: 12662.4
Current loss value: 12681.0
Current loss value: 12699.9
Current loss value: 12718.6
Current loss value: 12737.3
Current loss value: 12756.2
Current loss value: 12774.7
Current loss value: 12793.8
Current loss value: 12812.5
Current loss value: 12831.4
Current loss value: 12849.9
Current loss value: 12868.8
Current loss value: 12887.5
Current loss value: 12906.3
Current loss value: 12925.2
Current loss value: 12943.7
Current loss value: 12962.7
Current loss value: 12981.6
Current loss value: 13000.4
Current loss value: 13019.3
Current loss value: 13037.6
Current loss value: 13056.6
Current loss value: 13075.4
Current loss value: 13094.3
Current loss value: 13113.2
Current loss value: 13131.9
Current loss value: 13150.6
Current loss value: 13169.4
Current loss value: 13188.1
Current loss value: 13207.0
Current loss value: 13225.6
Current loss value: 13244.4
Current loss value: 13263.3
Current loss value: 13282.2
Current loss value: 13300.9
Current loss value: 13319.5
Current loss value: 13338.2
Current loss value: 13357.0
Current loss value: 13376.0
Current loss value: 13394.6
Current loss value: 13413.4
Current loss value: 13432.2
Current loss value: 13451.2
Current loss value: 13469.9
Current loss value: 13488.7
Current loss value: 13507.3
Current loss value: 13526.1
Current loss value: 13545.0
Current loss value: 13563.7
Current loss value: 13582.4
Current loss value: 13601.3
Current loss value: 13620.3
Current loss value: 13638.9
Current loss value: 13657.6
Current loss value: 13676.4
Current loss value: 13695.2
Current loss value: 13714.2
Current loss value: 13732.9
Current loss value: 13751.6
Current loss value: 13770.3
Current loss value: 13789.3
Current loss value: 13808.0
Current loss value: 13826.7
Current loss value: 13845.5
Current loss value: 13864.3
Current loss value: 13883.3
Current loss value: 13902.0
Current loss value: 13920.9
Current loss value: 13939.5
Current loss value: 13958.4
Current loss value: 13977.1
Current loss value: 13995.9
Current loss value: 14014.7
Current loss value: 14033.3
Current loss value: 14052.3
Current loss value: 14071.2
Current loss value: 14090.0
Current loss value: 14108.6
Current loss value: 14127.4
Current loss value: 14146.4
Current loss value: 14165.2
Current loss value: 14184.0
Current loss value: 14202.6
Current loss value: 14221.6
Current loss value: 14240.5
Current loss value: 14259.1
Current loss value: 14277.9
Current loss value: 14296.6
Current loss value: 14315.7
Current loss value: 14334.4
Current loss value: 14353.3
Current loss value: 14371.9
Current loss value: 14390.9
Current loss value: 14409.8
Current loss value: 14428.6
Current loss value: 14447.4
Current loss value: 14466.1
Current loss value: 14485.1
Current loss value: 14503.8
Current loss value: 14522.7
Current loss value: 14541.6
Current loss value: 14560.3
Current loss value: 14579.0
Current loss value: 14597.8
Current loss value: 14616.5
Current loss value: 14635.4
Current loss value: 14654.3
Current loss value: 14673.0
Current loss value: 14691.9
Current loss value: 14710.8
Current loss value: 14729.5
Current loss value: 14748.1
Current loss value: 14767.0
Current loss value: 14785.8
Current loss value: 14804.4
Current loss value: 14823.4
Current loss value: 14842.2
Current loss value: 14861.1
Current loss value: 14879.9
Current loss value: 14898.5
Current loss value: 14917.4
Current loss value: 14936.2
Current loss value: 14955.0
Current loss value: 14973.6
Current loss value: 14992.6
Current loss value: 15011.5
Current loss value: 15030.3
Current loss value: 15049.2
Current loss value: 15067.7
Current loss value: 15086.6
Current loss value: 15105.5
Current loss value: 15124.3
Current loss value: 15142.9
Current loss value: 15161.8
Current loss value: 15180.9
Current loss value: 15199.5
Current loss value: 15218.1
Current loss value: 15236.9
Current loss value: 15255.9
Current loss value: 15274.7
Current loss value: 15293.5
Current loss value: 15312.1
Current loss value: 15331.0
Current loss value: 15350.0
Current loss value: 15368.3
Current loss value: 15387.3
Current loss value: 15406.1
Current loss value: 15425.0
Current loss value: 15443.9
Current loss value: 15462.6
Current loss value: 15481.6
Current loss value: 15500.3
Current loss value: 15519.3
Current loss value: 15537.6
Current loss value: 15556.6
Current loss value: 15575.5
Current loss value: 15594.4
Current loss value: 15613.3
Current loss value: 15632.0
Current loss value: 15651.0
Current loss value: 15669.6
Current loss value: 15688.3
Current loss value: 15706.9
Current loss value: 15725.9
Current loss value: 15744.8
Current loss value: 15763.7
Current loss value: 15782.6
Current loss value: 15801.3
Current loss value: 15820.2
Current loss value: 15838.9
Current loss value: 15857.6
Current loss value: 15876.3
Current loss value: 15895.3
Current loss value: 15914.2
Current loss value: 15933.1
Current loss value: 15951.9
Current loss value: 15970.7
Current loss value: 15989.6
Current loss value: 16008.1
Current loss value: 16027.0
Current loss value: 16045.6
Current loss value: 16064.6
Current loss value: 16083.5
Current loss value: 16102.3
Current loss value: 16121.1
Current loss value: 16139.9
Current loss value: 16158.5
Current loss value: 16177.1
Current loss value: 16196.1
Current loss value: 16215.0
Current loss value: 16233.9
Current loss value: 16252.8
Current loss value: 16271.5
Current loss value: 16290.4
Current loss value: 16309.2
Current loss value: 16327.8
Current loss value: 16346.4
Current loss value: 16365.4
Current loss value: 16384.3
Current loss value: 16403.1
Current loss value: 16422.1
Current loss value: 16440.8
Current loss value: 16459.7
Current loss value: 16478.4
Current loss value: 16497.0
Current loss value: 16515.8
Current loss value: 16534.8
Current loss value: 16553.7
Current loss value: 16572.5
Current loss value: 16591.5
Current loss value: 16610.3
Current loss value: 16629.1
Current loss value: 16647.7
Current loss value: 16666.7
Current loss value: 16685.4
Current loss value: 16704.3
Current loss value: 16723.2
Current loss value: 16741.9
Current loss value: 16760.6
Current loss value: 16779.5
Current loss value: 16798.3
Current loss value: 16816.9
Current loss value: 16835.8
Current loss value: 16854.3
Current loss value: 16873.3
Current loss value: 16892.2
Current loss value: 16911.0
Current loss value: 16929.9
Current loss value: 16948.7
Current loss value: 16967.2
Current loss value: 16986.0
Current loss value: 17004.9
Current loss value: 17023.8
Current loss value: 17042.6
Current loss value: 17061.2
Current loss value: 17080.1
Current loss value: 17099.0
Current loss value: 17117.5
Current loss value: 17136.5
Current loss value: 17155.3
Current loss value: 17174.1
Current loss value: 17192.7
Current loss value: 17211.7
Current loss value: 17230.6
Current loss value: 17249.3
Current loss value: 17268.0
Current loss value: 17286.7
Current loss value: 17305.7
Current loss value: 17324.4
Current loss value: 17343.3
Current loss value: 17362.2
Current loss value: 17380.9
Current loss value: 17399.6
Current loss value: 17418.5
Current loss value: 17437.2
Current loss value: 17456.1
Current loss value: 17475.0
Current loss value: 17493.8
Current loss value: 17512.6
Current loss value: 17531.5
Current loss value: 17550.2
Current loss value: 17568.9
Current loss value: 17587.8
Current loss value: 17606.5
Current loss value: 17625.4
Current loss value: 17644.2
Current loss value: 17663.0
Current loss value: 17681.8
Current loss value: 17700.4
Current loss value: 17719.4
Current loss value: 17738.2
Current loss value: 17757.0
Current loss value: 17775.8
Current loss value: 17794.7
Current loss value: 17813.4
Current loss value: 17832.3
Current loss value: 17851.1
Current loss value: 17869.7
Current loss value: 17888.6
Current loss value: 17907.5
Current loss value: 17926.0
Current loss value: 17945.0
Current loss value: 17963.9
Current loss value: 17982.6
Current loss value: 18001.5
Current loss value: 18020.3
Current loss value: 18038.9
Current loss value: 18057.9
Current loss value: 18076.7
Current loss value: 18095.3
Current loss value: 18114.2
Current loss value: 18133.1
Current loss value: 18151.9
Current loss value: 18170.7
Current loss value: 18189.5
Current loss value: 18208.1
Current loss value: 18227.1
Current loss value: 18245.7
Current loss value: 18264.5
Current loss value: 18283.5
Current loss value: 18302.3
Current loss value: 18321.1
Current loss value: 18340.0
Current loss value: 18358.8
Current loss value: 18377.4
Current loss value: 18396.3
Current loss value: 18414.9
Current loss value: 18433.7
Current loss value: 18452.7
Current loss value: 18471.6
Current loss value: 18490.4
Current loss value: 18509.2
Current loss value: 18528.1
Current loss value: 18546.6
Current loss value: 18565.5
Current loss value: 18584.2
Current loss value: 18603.0
Current loss value: 18622.0
Current loss value: 18640.8
Current loss value: 18659.6
Current loss value: 18678.4
Current loss value: 18697.3
Current loss value: 18715.9
Current loss value: 18734.7
Current loss value: 18753.5
Current loss value: 18772.2
Current loss value: 18791.1
Current loss value: 18809.7
Current loss value: 18828.7
Current loss value: 18847.6
Current loss value: 18866.4
Current loss value: 18885.0
Current loss value: 18903.7
Current loss value: 18922.7
Current loss value: 18941.5
Current loss value: 18960.3
Current loss value: 18978.9
Current loss value: 18997.9
Current loss value: 19016.8
Current loss value: 19035.5
Current loss value: 19054.2
Current loss value: 19072.9
Current loss value: 19091.9
Current loss value: 19110.3
Current loss value: 19129.4
Current loss value: 19148.1
Current loss value: 19167.1
Current loss value: 19185.9
Current loss value: 19204.3
Current loss value: 19223.3
Current loss value: 19242.1
Current loss value: 19261.0
Current loss value: 19279.5
Current loss value: 19298.5
Current loss value: 19317.4
Current loss value: 19336.2
Current loss value: 19354.8
Current loss value: 19373.5
Current loss value: 19392.5
Current loss value: 19411.3
Current loss value: 19430.1
Current loss value: 19448.7
Current loss value: 19467.7
Current loss value: 19486.6
Current loss value: 19505.3
Current loss value: 19524.0
Current loss value: 19542.7
Current loss value: 19561.7
Current loss value: 19580.5
Current loss value: 19599.3
Current loss value: 19618.0
Current loss value: 19636.9
Current loss value: 19655.4
Current loss value: 19674.0
Current loss value: 19693.0
Current loss value: 19711.8
Current loss value: 19730.7
Current loss value: 19749.2
Current loss value: 19768.2
Current loss value: 19787.1
Current loss value: 19805.9
Current loss value: 19824.5
Current loss value: 19843.1
Current loss value: 19862.1
Current loss value: 19880.9
Current loss value: 19899.7
Current loss value: 19918.3
Current loss value: 19937.2
Current loss value: 19956.2
Current loss value: 19974.9
Current loss value: 19993.5
Current loss value: 20012.1
Current loss value: 20031.1
Current loss value: 20049.9
Current loss value: 20068.8
Current loss value: 20087.3
Current loss value: 20106.3
Current loss value: 20125.1
Current loss value: 20143.9
Current loss value: 20162.4
Current loss value: 20181.2
Current loss value: 20200.2
Current loss value: 20219.1
Current loss value: 20237.8
Current loss value: 20256.5
Current loss value: 20275.4
Current loss value: 20294.2
Current loss value: 20312.8
Current loss value: 20331.8
Current loss value: 20350.5
Current loss value: 20369.3
Current loss value: 20388.3
Current loss value: 20406.9
Current loss value: 20425.7
Current loss value: 20444.5
Current loss value: 20463.2
Current loss value: 20482.0
Current loss value: 20500.8
Current loss value: 20519.4
Current loss value: 20538.4
Current loss value: 20557.3
Current loss value: 20576.1
Current loss value: 20594.7
Current loss value: 20613.5
Current loss value: 20632.2
Current loss value: 20651.0
Current loss value: 20669.8
Current loss value: 20688.5
Current loss value: 20707.4
Current loss value: 20726.3
Current loss value: 20745.0
Current loss value: 20763.7
Current loss value: 20782.3
Current loss value: 20801.2
Current loss value: 20820.1
Current loss value: 20838.7
Current loss value: 20857.5
Current loss value: 20876.4
Current loss value: 20895.3
Current loss value: 20914.0
Current loss value: 20932.6
Current loss value: 20951.4
Current loss value: 20970.3
Current loss value: 20989.2
Current loss value: 21008.0
Current loss value: 21026.8
Current loss value: 21045.7
Current loss value: 21064.3
Current loss value: 21083.1
Current loss value: 21101.9
Current loss value: 21120.7
Current loss value: 21139.6
Current loss value: 21158.5
Current loss value: 21177.3
Current loss value: 21196.2
Current loss value: 21214.9
Current loss value: 21233.6
Current loss value: 21252.4
Current loss value: 21271.2
Current loss value: 21289.9
Current loss value: 21308.9
Current loss value: 21327.7
Current loss value: 21346.4
Current loss value: 21365.1
Current loss value: 21384.0
Current loss value: 21402.6
Current loss value: 21421.1
Current loss value: 21440.1
Current loss value: 21458.9
Current loss value: 21477.8
Current loss value: 21496.3
Current loss value: 21515.3
Current loss value: 21534.2
Current loss value: 21553.0
Current loss value: 21571.6
Current loss value: 21590.2
Current loss value: 21609.2
Current loss value: 21628.0
Current loss value: 21646.9
Current loss value: 21665.4
Current loss value: 21684.3
Current loss value: 21703.3
Current loss value: 21722.0
Current loss value: 21740.6
Current loss value: 21759.4
Current loss value: 21778.3
Current loss value: 21797.2
Current loss value: 21815.9
Current loss value: 21834.6
Current loss value: 21853.5
Current loss value: 21872.3
Current loss value: 21890.9
Current loss value: 21909.8
Current loss value: 21928.6
Current loss value: 21947.4
Current loss value: 21966.4
Current loss value: 21985.0
Current loss value: 22003.8
Current loss value: 22022.6
Current loss value: 22041.4
Current loss value: 22060.1
Current loss value: 22078.9
Current loss value: 22097.7
Current loss value: 22116.7
Current loss value: 22135.6
Current loss value: 22154.2
Current loss value: 22173.0
Current loss value: 22191.8
Current loss value: 22210.6
Current loss value: 22229.3
Current loss value: 22248.1
Current loss value: 22266.7
Current loss value: 22285.7
Current loss value: 22304.7
Current loss value: 22323.4
Current loss value: 22342.0
Current loss value: 22360.8
Current loss value: 22379.7
Current loss value: 22398.2
Current loss value: 22417.2
Current loss value: 22435.9
Current loss value: 22454.9
Current loss value: 22473.7
Current loss value: 22492.4
Current loss value: 22511.4
Current loss value: 22529.9
Current loss value: 22548.8
Current loss value: 22567.4
Current loss value: 22586.3
Current loss value: 22605.2
Current loss value: 22624.0
Current loss value: 22642.6
Current loss value: 22661.5
Current loss value: 22680.0
Current loss value: 22698.9
Current loss value: 22717.8
Current loss value: 22736.6
Current loss value: 22755.4
Current loss value: 22774.3
Current loss value: 22793.0
Current loss value: 22811.8
Current loss value: 22830.6
Current loss value: 22849.2
Current loss value: 22868.1
Current loss value: 22887.0
Current loss value: 22905.8
Current loss value: 22924.6
Current loss value: 22943.2
Current loss value: 22962.1
Current loss value: 22980.9
Current loss value: 22999.5
Current loss value: 23018.5
Current loss value: 23037.3
Current loss value: 23056.1
Current loss value: 23075.0
Current loss value: 23093.8
Current loss value: 23112.4
Current loss value: 23131.3
Current loss value: 23150.1
Current loss value: 23168.7
Current loss value: 23187.7
Current loss value: 23206.5
Current loss value: 23225.2
Current loss value: 23243.9
Current loss value: 23262.8
Current loss value: 23281.7
Current loss value: 23300.4
Current loss value: 23319.1
Current loss value: 23337.8
Current loss value: 23356.8
Current loss value: 23375.3
Current loss value: 23394.3
Current loss value: 23413.2
Current loss value: 23432.1
Current loss value: 23451.0
Current loss value: 23469.2
Current loss value: 23488.3
Current loss value: 23507.0
Current loss value: 23526.0
Current loss value: 23544.4
Current loss value: 23563.5
Current loss value: 23582.3
Current loss value: 23601.2
Current loss value: 23619.7
Current loss value: 23638.5
Current loss value: 23657.5
Current loss value: 23676.3
Current loss value: 23695.0
Current loss value: 23713.7
Current loss value: 23732.6
Current loss value: 23751.5
Current loss value: 23770.2
Current loss value: 23788.8
Current loss value: 23807.5
Current loss value: 23826.5
Current loss value: 23845.3
Current loss value: 23864.2
Current loss value: 23883.0
Current loss value: 23901.9
Current loss value: 23920.7
Current loss value: 23939.3
Current loss value: 23958.2
Current loss value: 23976.8
Current loss value: 23995.9
Current loss value: 24014.7
Current loss value: 24033.6
Current loss value: 24052.3
Current loss value: 24071.2
Current loss value: 24089.7
Current loss value: 24108.4
Current loss value: 24127.5
Current loss value: 24146.2
Current loss value: 24165.2
Current loss value: 24184.0
Current loss value: 24202.8
Current loss value: 24221.3
Current loss value: 24240.2
Current loss value: 24259.1
Current loss value: 24277.7
Current loss value: 24296.5
Current loss value: 24315.3
Current loss value: 24334.3
Current loss value: 24352.7
Current loss value: 24371.8
Current loss value: 24390.4
Current loss value: 24409.2
Current loss value: 24428.2
Current loss value: 24446.7
Current loss value: 24465.6
Current loss value: 24484.3
Current loss value: 24503.3
Current loss value: 24521.9
Current loss value: 24540.6
Current loss value: 24559.5
Current loss value: 24578.3
Current loss value: 24597.3
Current loss value: 24615.8
Current loss value: 24634.8
Current loss value: 24653.5
Current loss value: 24672.5
Current loss value: 24691.0
Current loss value: 24709.9
Current loss value: 24728.6
Current loss value: 24747.3
Current loss value: 24766.4
Current loss value: 24785.2
Current loss value: 24804.0
Current loss value: 24822.7
Current loss value: 24841.7
Current loss value: 24860.3
Current loss value: 24879.0
Current loss value: 24897.9
Current loss value: 24916.7
Current loss value: 24935.7
Current loss value: 24954.3
Current loss value: 24973.3
Current loss value: 24992.1
Current loss value: 25011.0
Current loss value: 25029.6
Current loss value: 25048.4
Current loss value: 25067.3
Current loss value: 25086.1
Current loss value: 25104.7
Current loss value: 25123.6
Current loss value: 25142.5
Current loss value: 25161.2
Current loss value: 25179.9
Current loss value: 25198.7
Current loss value: 25217.3
Current loss value: 25236.3
Current loss value: 25255.1
Current loss value: 25274.1
Current loss value: 25292.7
Current loss value: 25311.7
Current loss value: 25330.4
Current loss value: 25349.1
Current loss value: 25367.7
Current loss value: 25386.6
Current loss value: 25405.5
Current loss value: 25424.4
Current loss value: 25443.3
Current loss value: 25462.0
Current loss value: 25480.9
Current loss value: 25499.6
Current loss value: 25518.3
Current loss value: 25536.9
Current loss value: 25555.9
Current loss value: 25574.7
Current loss value: 25593.6
Current loss value: 25612.5
Current loss value: 25631.2
Current loss value: 25650.1
Current loss value: 25668.9
Current loss value: 25687.5
Current loss value: 25706.1
Current loss value: 25725.1
Current loss value: 25743.9
Current loss value: 25762.8
Current loss value: 25781.6
Current loss value: 25800.3
Current loss value: 25819.0
Current loss value: 25837.6
Current loss value: 25856.6
Current loss value: 25875.4
Current loss value: 25894.2
Current loss value: 25913.0
Current loss value: 25931.9
Current loss value: 25950.5
Current loss value: 25969.4
Current loss value: 25988.0
Current loss value: 26006.9
Current loss value: 26025.8
Current loss value: 26044.7
Current loss value: 26063.5
Current loss value: 26082.4
Current loss value: 26101.2
Current loss value: 26119.8
Current loss value: 26138.7
Current loss value: 26157.4
Current loss value: 26176.2
Current loss value: 26195.2
Current loss value: 26213.9
Current loss value: 26232.7
Current loss value: 26251.7
Current loss value: 26270.3
Current loss value: 26289.1
Current loss value: 26307.9
Current loss value: 26326.7
Current loss value: 26345.4
Current loss value: 26364.3
Current loss value: 26382.8
Current loss value: 26401.8
Current loss value: 26420.7
Current loss value: 26439.5
Current loss value: 26458.1
Current loss value: 26476.8
Current loss value: 26495.8
Current loss value: 26514.6
Current loss value: 26533.4
Current loss value: 26552.0
Current loss value: 26571.0
Current loss value: 26589.8
Current loss value: 26608.5
Current loss value: 26627.1
Current loss value: 26645.8
Current loss value: 26664.8
Current loss value: 26683.7
Current loss value: 26702.4
Current loss value: 26721.1
Current loss value: 26740.1
Current loss value: 26758.9
Current loss value: 26777.3
Current loss value: 26796.3
Current loss value: 26815.0
Current loss value: 26834.0
Current loss value: 26852.4
Current loss value: 26871.5
Current loss value: 26890.2
Current loss value: 26909.1
Current loss value: 26927.6
Current loss value: 26946.3
Current loss value: 26965.3
Current loss value: 26984.1
Current loss value: 27003.1
Current loss value: 27021.9
Current loss value: 27040.7
Current loss value: 27059.3
Current loss value: 27078.1
Current loss value: 27097.1
Current loss value: 27115.4
Current loss value: 27134.6
Current loss value: 27153.4
Current loss value: 27172.3
Current loss value: 27191.1
Current loss value: 27209.9
Current loss value: 27228.6
Current loss value: 27247.3
Current loss value: 27266.4
Current loss value: 27284.7
Current loss value: 27303.9
Current loss value: 27322.4
Current loss value: 27341.3
Current loss value: 27360.0
Current loss value: 27378.9
Current loss value: 27397.6
Current loss value: 27416.4
Current loss value: 27435.4
Current loss value: 27453.8
Current loss value: 27472.9
Current loss value: 27491.7
Current loss value: 27510.7
Current loss value: 27529.4
Current loss value: 27548.2
Current loss value: 27567.0
Current loss value: 27585.7
Current loss value: 27604.6
Current loss value: 27623.1
Current loss value: 27642.1
Current loss value: 27660.9
Current loss value: 27679.8
Current loss value: 27698.2
Current loss value: 27717.0
Current loss value: 27736.0
Current loss value: 27754.6
Current loss value: 27773.4
Current loss value: 27792.2
Current loss value: 27811.2
Current loss value: 27830.0
Current loss value: 27848.7
Current loss value: 27867.3
Current loss value: 27886.0
Current loss value: 27905.1
Current loss value: 27923.6
Current loss value: 27942.5
Current loss value: 27961.3
Current loss value: 27980.2
Current loss value: 27998.8
Current loss value: 28017.5
Current loss value: 28036.2
Current loss value: 28055.0
Current loss value: 28074.1
Current loss value: 28092.9
Current loss value: 28111.8
Current loss value: 28130.5
Current loss value: 28149.4
Current loss value: 28168.0
Current loss value: 28186.8
Current loss value: 28205.7
Current loss value: 28224.3
Current loss value: 28243.4
Current loss value: 28262.1
Current loss value: 28281.0
Current loss value: 28299.8
Current loss value: 28318.6
Current loss value: 28337.3
Current loss value: 28356.0
Current loss value: 28375.0
Current loss value: 28393.7
Current loss value: 28412.8
Current loss value: 28431.5
Current loss value: 28450.4
Current loss value: 28468.8
Current loss value: 28487.6
Current loss value: 28506.6
Current loss value: 28525.2
Current loss value: 28544.0
Current loss value: 28562.8
Current loss value: 28581.8
Current loss value: 28600.4
Current loss value: 28619.1
Current loss value: 28637.8
Current loss value: 28656.6
Current loss value: 28675.6
Current loss value: 28694.1
Current loss value: 28713.0
Current loss value: 28731.8
Current loss value: 28750.8
Current loss value: 28769.3
Current loss value: 28788.1
Current loss value: 28807.0
Current loss value: 28825.8
Current loss value: 28844.4
Current loss value: 28863.3
Current loss value: 28882.2
Current loss value: 28901.0
Current loss value: 28919.9
Current loss value: 28938.5
Current loss value: 28957.2
Current loss value: 28976.1
Current loss value: 28995.0
Current loss value: 29014.0
Current loss value: 29032.7
Current loss value: 29051.6
Current loss value: 29070.3
Current loss value: 29089.0
Current loss value: 29107.8
Current loss value: 29126.6
Current loss value: 29145.4
Current loss value: 29164.2
Current loss value: 29183.1
Current loss value: 29201.8
Current loss value: 29220.8
Current loss value: 29239.5
Current loss value: 29258.3
Current loss value: 29277.0
Current loss value: 29295.8
Current loss value: 29314.6
Current loss value: 29333.4
Current loss value: 29352.0
Current loss value: 29370.9
Current loss value: 29389.9
Current loss value: 29408.5
Current loss value: 29427.3
Current loss value: 29445.9
Current loss value: 29464.8
Current loss value: 29483.6
Current loss value: 29502.5
Current loss value: 29521.4
Current loss value: 29540.0
Current loss value: 29559.2
Current loss value: 29577.6
Current loss value: 29596.4
Current loss value: 29615.0
Current loss value: 29633.9
Current loss value: 29652.8
Current loss value: 29671.5
Current loss value: 29690.2
Current loss value: 29709.1
Current loss value: 29728.1
Current loss value: 29746.7
Current loss value: 29765.5
Current loss value: 29784.3
Current loss value: 29803.1
Current loss value: 29822.1
Current loss value: 29840.7
Current loss value: 29859.5
Current loss value: 29878.3
Current loss value: 29897.2
Current loss value: 29915.8
Current loss value: 29934.6
Current loss value: 29953.5
Current loss value: 29972.2
Current loss value: 29991.3
Current loss value: 30009.8
Current loss value: 30028.7
Current loss value: 30047.4
Current loss value: 30066.3
Current loss value: 30084.9
Current loss value: 30103.7
Current loss value: 30122.6
Current loss value: 30141.5
Current loss value: 30160.5
Current loss value: 30179.2
Current loss value: 30198.1
Current loss value: 30216.8
Current loss value: 30235.4
Current loss value: 30254.0
Current loss value: 30273.0
Current loss value: 30291.9
Current loss value: 30310.7
Current loss value: 30329.7
Current loss value: 30348.4
Current loss value: 30367.3
Current loss value: 30386.0
Current loss value: 30404.7
Current loss value: 30423.2
Current loss value: 30442.2
Current loss value: 30461.0
Current loss value: 30479.9
Current loss value: 30498.4
Current loss value: 30517.4
Current loss value: 30536.3
Current loss value: 30555.0
Current loss value: 30573.7
Current loss value: 30592.4
Current loss value: 30611.4
Current loss value: 30630.2
Current loss value: 30649.0
Current loss value: 30667.6
Current loss value: 30686.5
Current loss value: 30705.1
Current loss value: 30724.0
Current loss value: 30742.7
Current loss value: 30761.5
Current loss value: 30780.4
Current loss value: 30799.3
Current loss value: 30817.9
Current loss value: 30836.7
Current loss value: 30855.5
Current loss value: 30874.1
Current loss value: 30893.0
Current loss value: 30911.8
Current loss value: 30930.7
Current loss value: 30949.5
Current loss value: 30968.5
Current loss value: 30987.0
Current loss value: 31005.8
Current loss value: 31024.4
Current loss value: 31043.3
Current loss value: 31062.2
Current loss value: 31080.9
Current loss value: 31099.9
Current loss value: 31118.7
Current loss value: 31137.7
Current loss value: 31156.2
Current loss value: 31175.0
Current loss value: 31193.5
Current loss value: 31212.5
Current loss value: 31231.3
Current loss value: 31250.1
Current loss value: 31268.7
Current loss value: 31287.7
Current loss value: 31306.6
Current loss value: 31325.2
Current loss value: 31344.0
Current loss value: 31362.7
Current loss value: 31381.7
Current loss value: 31400.5
Current loss value: 31419.2
Current loss value: 31437.9
Current loss value: 31456.8
Current loss value: 31475.7
Current loss value: 31494.4
Current loss value: 31513.1
Current loss value: 31531.8
Current loss value: 31550.8
Current loss value: 31569.6
Current loss value: 31588.3
Current loss value: 31607.0
Current loss value: 31626.0
Current loss value: 31644.9
Current loss value: 31663.2
Current loss value: 31682.2
Current loss value: 31701.0
Current loss value: 31719.9
Current loss value: 31738.8
Current loss value: 31757.4
Current loss value: 31776.2
Current loss value: 31795.0
Current loss value: 31813.8
Current loss value: 31832.6
Current loss value: 31851.4
Current loss value: 31870.2
Current loss value: 31889.0
Current loss value: 31907.6
Current loss value: 31926.6
Current loss value: 31945.4
Current loss value: 31964.2
Current loss value: 31982.8
Current loss value: 32001.5
Current loss value: 32020.5
Current loss value: 32039.3
Current loss value: 32058.1
Current loss value: 32076.8
Current loss value: 32095.7
Current loss value: 32114.5
Current loss value: 32133.2
Current loss value: 32151.9
Current loss value: 32170.6
Current loss value: 32189.6
Current loss value: 32208.4
Current loss value: 32227.3
Current loss value: 32245.9
Current loss value: 32264.6
Current loss value: 32283.6
Current loss value: 32302.4
Current loss value: 32321.5
Current loss value: 32340.1
Current loss value: 32359.0
Current loss value: 32377.5
Current loss value: 32396.5
Current loss value: 32415.1
Current loss value: 32433.9
Current loss value: 32452.8
Current loss value: 32471.6
Current loss value: 32490.6
Current loss value: 32509.3
Current loss value: 32528.2
Current loss value: 32546.9
Current loss value: 32565.6
Current loss value: 32584.5
Current loss value: 32603.2
Current loss value: 32621.9
Current loss value: 32640.9
Current loss value: 32659.8
Current loss value: 32678.5
Current loss value: 32697.2
Current loss value: 32715.9
Current loss value: 32734.9
Current loss value: 32753.7
Current loss value: 32772.4
Current loss value: 32791.1
Current loss value: 32810.1
Current loss value: 32828.8
Current loss value: 32847.6
Current loss value: 32866.5
Current loss value: 32885.2
Current loss value: 32904.1
Current loss value: 32922.6
Current loss value: 32941.7
Current loss value: 32960.4
Current loss value: 32979.2
Current loss value: 32997.8
Current loss value: 33016.5
Current loss value: 33035.5
Current loss value: 33054.2
Current loss value: 33073.3
Current loss value: 33092.1
Current loss value: 33110.9
Current loss value: 33129.7
Current loss value: 33148.3
Current loss value: 33167.1
Current loss value: 33185.9
Current loss value: 33204.9
Current loss value: 33223.5
Current loss value: 33242.2
Current loss value: 33260.8
Current loss value: 33279.7
Current loss value: 33298.7
Current loss value: 33317.5
Current loss value: 33336.5
Current loss value: 33355.1
Current loss value: 33374.1
Current loss value: 33392.7
Current loss value: 33411.4
Current loss value: 33430.1
Current loss value: 33449.0
Current loss value: 33467.9
Current loss value: 33486.8
Current loss value: 33505.7
Current loss value: 33524.4
Current loss value: 33543.0
Current loss value: 33561.8
Current loss value: 33580.7
Current loss value: 33599.2
Current loss value: 33618.3
Current loss value: 33637.0
Current loss value: 33656.0
Current loss value: 33674.9
Current loss value: 33693.5
Current loss value: 33712.3
Current loss value: 33731.1
Current loss value: 33750.1
Current loss value: 33768.9
Current loss value: 33787.8
Current loss value: 33806.3
Current loss value: 33825.2
Current loss value: 33843.9
Current loss value: 33862.7
Current loss value: 33881.4
Current loss value: 33900.2
Current loss value: 33919.3
Current loss value: 33938.0
Current loss value: 33956.8
Current loss value: 33975.5
Current loss value: 33994.3
Current loss value: 34013.4
Current loss value: 34032.1
Current loss value: 34051.1
Current loss value: 34069.5
Current loss value: 34088.4
Current loss value: 34107.3
Current loss value: 34126.1
Current loss value: 34145.1
Current loss value: 34163.7
Current loss value: 34182.5
Current loss value: 34201.1
Current loss value: 34219.9
Current loss value: 34238.9
Current loss value: 34257.6
Current loss value: 34276.4
Current loss value: 34295.2
Current loss value: 34314.2
Current loss value: 34332.9
Current loss value: 34351.7
Current loss value: 34370.7
Current loss value: 34389.4
Current loss value: 34408.3
Current loss value: 34427.1
Current loss value: 34445.7
Current loss value: 34464.6
Current loss value: 34483.3
Current loss value: 34502.1
Current loss value: 34521.0
Current loss value: 34539.6
Current loss value: 34558.6
Current loss value: 34577.4
Current loss value: 34596.2
Current loss value: 34614.9
Current loss value: 34633.8
Current loss value: 34652.7
Current loss value: 34671.4
Current loss value: 34690.3
Current loss value: 34709.2
Current loss value: 34727.8
Current loss value: 34746.7
Current loss value: 34765.5
Current loss value: 34784.4
Current loss value: 34803.3
Current loss value: 34822.0
Current loss value: 34840.7
Current loss value: 34859.5
Current loss value: 34878.4
Current loss value: 34897.2
Current loss value: 34916.1
Current loss value: 34934.8
Current loss value: 34953.8
Current loss value: 34972.6
Current loss value: 34991.3
Current loss value: 35010.0
Current loss value: 35028.7
Current loss value: 35047.8
Current loss value: 35066.6
Current loss value: 35085.5
Current loss value: 35104.0
Current loss value: 35122.8
Current loss value: 35141.9
Current loss value: 35160.6
Current loss value: 35179.7
Current loss value: 35198.3
Current loss value: 35217.3
Current loss value: 35236.0
Current loss value: 35254.8
Current loss value: 35273.7
Current loss value: 35292.4
Current loss value: 35311.5
Current loss value: 35330.1
Current loss value: 35348.8
Current loss value: 35367.4
Current loss value: 35386.4
Current loss value: 35405.3
Current loss value: 35424.1
Current loss value: 35443.1
Current loss value: 35461.8
Current loss value: 35480.8
Current loss value: 35499.3
Current loss value: 35518.2
Current loss value: 35537.1
Current loss value: 35555.9
Current loss value: 35574.5
Current loss value: 35593.5
Current loss value: 35612.4
Current loss value: 35630.9
Current loss value: 35649.8
Current loss value: 35668.6
Current loss value: 35687.6
Current loss value: 35706.5
Current loss value: 35725.4
Current loss value: 35744.0
Current loss value: 35762.7
Current loss value: 35781.8
Current loss value: 35800.5
Current loss value: 35819.5
Current loss value: 35837.9
Current loss value: 35857.0
Current loss value: 35875.6
Current loss value: 35894.4
Current loss value: 35913.4
Current loss value: 35932.2
Current loss value: 35951.2
Current loss value: 35970.0
Current loss value: 35988.9
Current loss value: 36007.4
Current loss value: 36026.3
Current loss value: 36045.3
Current loss value: 36064.1
Current loss value: 36083.1
Current loss value: 36101.5
Current loss value: 36120.5
Current loss value: 36139.2
Current loss value: 36158.0
Current loss value: 36177.0
Current loss value: 36195.7
Current loss value: 36214.4
Current loss value: 36233.2
Current loss value: 36252.2
Current loss value: 36270.9
Current loss value: 36289.6
Current loss value: 36308.3
Current loss value: 36327.2
Current loss value: 36346.2
Current loss value: 36365.0
Current loss value: 36383.9
Current loss value: 36402.4
Current loss value: 36421.3
Current loss value: 36440.2
Current loss value: 36459.0
Current loss value: 36477.7
Current loss value: 36496.6
Current loss value: 36515.5
Current loss value: 36534.2
Current loss value: 36552.9
Current loss value: 36571.6
Current loss value: 36590.6
Current loss value: 36609.4
Current loss value: 36628.3
Current loss value: 36647.2
Current loss value: 36665.6
Current loss value: 36684.6
Current loss value: 36703.4
Current loss value: 36722.3
Current loss value: 36741.2
Current loss value: 36759.9
Current loss value: 36778.5
Current loss value: 36797.3
Current loss value: 36816.2
Current loss value: 36835.1
Current loss value: 36853.8
Current loss value: 36872.6
Current loss value: 36891.5
Current loss value: 36910.3
Current loss value: 36929.0
Current loss value: 36948.0
Current loss value: 36966.7
Current loss value: 36985.7
Current loss value: 37004.5
Current loss value: 37023.3
Current loss value: 37041.9
Current loss value: 37060.6
Current loss value: 37079.7
Current loss value: 37098.4
Current loss value: 37117.4
Current loss value: 37136.3
Current loss value: 37155.1
Current loss value: 37173.9
Current loss value: 37192.5
Current loss value: 37211.3
Current loss value: 37230.1
Current loss value: 37249.1
Current loss value: 37267.9
Current loss value: 37286.7
Current loss value: 37305.5
Current loss value: 37324.2
Current loss value: 37343.2
Current loss value: 37361.9
Current loss value: 37380.9
Current loss value: 37399.5
Current loss value: 37418.3
Current loss value: 37436.9
Current loss value: 37455.8
Current loss value: 37474.9
Current loss value: 37493.6
Current loss value: 37512.5
Current loss value: 37531.3
Filter 0 processed in 3s

In [295]:



Out[295]:
array([[    0.8,   -23.4,    42.2,  -645.7,    60.3,   154.2,    25.1,
            6.4,     1. ],
       [  -10.7,   107.8,    15. ,   311. , -2800.8,  -659.9,   231.2,
           26.2,     0.5],
       [ -201.3,  -449.1,  -506.2, -1783.7, -2903.2, -5117.8,   121.5,
           41.8,     0.2],
       [   75. , -2099.9, -1168.3,   907.6, -6273.2, -1759.6, -1167. ,
         -359.5,     0.5],
       [ -143.4,  -287.9, -3676. ,  -944.3,  3617.1, -5716.7, -2125.4,
         -105.4,     0.2],
       [   -1.1, -2056.6,  -730.6, -3844.1, -8804.5,   419.1,   -14.6,
           77.6,     0.5],
       [ -483.1,  -144.4, -2785.8,  3187.4, -3687.8, -4939.8,   -45. ,
          -43.8,     0.4],
       [   12.5,   165.5,    85.4,  -294.3,  -418.8,   383.5,  -267. ,
            1.9,     0.4],
       [    0.5,     0.5,     0.7,     0.6,     0.3,     0.7,     0.8,
            0.9,     0. ]])

In [292]:
l = 0
# fill the picture with our saved filters
for i in range(0,n):
        #kept_filters[i][0][4,4] = 0
        img, loss = kept_filters[i]
        
        img += abs(img.min())
        img *= 255.0/img.max() 
        stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:-margin, :] = zoom(img,[zoomFact,zoomFact,1],order=0)
        l +=1
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)

In [243]:
'''Visualization of the filters of VGG16, via gradient ascent in input space.
This script can run on CPU in a few minutes.
Results example: http://i.imgur.com/4nj4KjN.jpg
'''
from __future__ import print_function

from scipy.misc import imsave
import numpy as np
import time
from keras.applications import vgg16
from keras import backend as K

# dimensions of the generated pictures for each filter.
img_width = 9
img_height = 9
input_channel = 1

# the name of the layer we want to visualize
# (see model definition at keras/applications/vgg16.py)
layer_name = 'dens2'

# util function to convert a tensor into a valid image


def deprocess_image(x):
    xs = x.reshape(1,x.shape[0],x.shape[1],x.shape[2])
    xo = x_min_max.inverse_transform(xs.reshape(xs.shape[0],-1)).reshape(xs.shape)
    xe = xo.reshape(x.shape[0],x.shape[1],x.shape[2])
    """
    # normalize tensor: center on 0., ensure std is 0.1
    x -= x.mean()
    x /= (x.std() + 1e-5)
    x *= 0.1

    # clip to [0, 1]
    x += 0.5
    x = np.clip(x, 0, 1)

    # convert to RGB array
    x *= 255
    if K.image_data_format() == 'channels_first':
        x = x.transpose((1, 2, 0))
    #x = np.clip(x, 0, 255).astype('uint8')
    """
    return xe


model.summary()

# this is the placeholder for the input images
input_img = model.input

# get the symbolic outputs of each "key" layer (we gave them unique names).
layer_dict = dict([(layer.name, layer) for layer in model.layers[1:]])


def normalize(x):
    # utility function to normalize a tensor by its L2 norm
    return x / (K.sqrt(K.mean(K.square(x))) + 1e-5)


kept_filters = []
for filter_index in range(8):
    # we only scan through the first 200 filters,
    # but there are actually 512 of them
    # Hugo: Changed to 12
    print('Processing filter %d' % filter_index)
    start_time = time.time()

    # we build a loss function that maximizes the activation
    # of the nth filter of the layer considered
    layer_output = layer_dict[layer_name].output
    if K.image_data_format() == 'channels_first':
        loss = K.mean(layer_output[:, filter_index, :, :])
    else:
        loss = K.mean(layer_output[:, :, :, filter_index])

    # we compute the gradient of the input picture wrt this loss
    grads = K.gradients(loss, input_img)[0]

    # normalization trick: we normalize the gradient
    grads = normalize(grads)

    # this function returns the loss and grads given the input picture
    iterate = K.function([input_img], [loss, grads])

    # step size for gradient ascent
    step = 1.

    # we start from a gray image with some random noise
    if K.image_data_format() == 'channels_first':
        input_img_data = np.random.random((1, input_channel, img_width, img_height))
    else:
        input_img_data = np.random.random((1, img_width, img_height, input_channel))
    #input_img_data = (input_img_data - 0.5) * 20 + 128

    # we run gradient ascent for 20 steps
    for i in range(20):
        loss_value, grads_value = iterate([input_img_data])
        input_img_data += grads_value * step

        print('Current loss value:', loss_value)
        if loss_value <= 0.:
            # some filters get stuck to 0, we can skip them
            break

    # decode the resulting input image
    if loss_value > 0:
        img = deprocess_image(input_img_data[0])
        kept_filters.append((img, loss_value))
    end_time = time.time()
    print('Filter %d processed in %ds' % (filter_index, end_time - start_time))



# the filters that have the highest loss are assumed to be better-looking.
# we will only keep the top 64 filters.
kept_filters.sort(key=lambda x: x[1], reverse=True)
#kept_filters = kept_filters[:n * n]

# build a black picture with enough space for
# our 8 x 8 filters of size 128 x 128, with a 5px margin in between
n = len(kept_filters)
zoomFact = 50
img_width =9*zoomFact
img_height = 9*zoomFact
margin = 5
width = img_width +  margin
height = n * img_height + (n - 1) * margin
stitched_filters = np.zeros(( height,width, 3))

l = 0
# fill the picture with our saved filters
for i in range(0,n):
    
        img, loss = kept_filters[i]
        img += abs(img.min())
        img *= 255.0/img.max() 
        stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:-margin, :] = zoom(img,[zoomFact,zoomFact,1],order=0)
        l +=1
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 6, 6, 2)           34        
_________________________________________________________________
conv2 (Conv2D)               (None, 4, 4, 4)           76        
_________________________________________________________________
conv3 (Conv2D)               (None, 3, 3, 8)           136       
_________________________________________________________________
flatten_6 (Flatten)          (None, 72)                0         
_________________________________________________________________
dens1 (Dense)                (None, 12)                876       
_________________________________________________________________
dens2 (Dense)                (None, 1)                 13        
=================================================================
Total params: 1,135
Trainable params: 1,135
Non-trainable params: 0
_________________________________________________________________
Processing filter 0
---------------------------------------------------------------------------
InvalidArgumentError                      Traceback (most recent call last)
~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\common_shapes.py in _call_cpp_shape_fn_impl(op, input_tensors_needed, input_tensors_as_shapes_needed, debug_python_shape_fn, require_shape_fn)
    670           graph_def_version, node_def_str, input_shapes, input_tensors,
--> 671           input_tensors_as_shapes, status)
    672   except errors.InvalidArgumentError as err:

~\AppData\Local\conda\conda\envs\tensorflow\lib\contextlib.py in __exit__(self, type, value, traceback)
     65             try:
---> 66                 next(self.gen)
     67             except StopIteration:

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\errors_impl.py in raise_exception_on_not_ok_status()
    465           compat.as_text(pywrap_tensorflow.TF_Message(status)),
--> 466           pywrap_tensorflow.TF_GetCode(status))
    467   finally:

InvalidArgumentError: Index out of range using input dim 2; input has only 2 dims for 'strided_slice_219' (op: 'StridedSlice') with input shapes: [?,1], [4], [4], [4] and with computed input tensors: input[3] = <1 1 1 1>.

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
<ipython-input-243-08aa19f6cc71> in <module>()
     74         loss = K.mean(layer_output[:, filter_index, :, :])
     75     else:
---> 76         loss = K.mean(layer_output[:, :, :, filter_index])
     77 
     78     # we compute the gradient of the input picture wrt this loss

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\ops\array_ops.py in _SliceHelper(tensor, slice_spec, var)
    497         ellipsis_mask=ellipsis_mask,
    498         var=var,
--> 499         name=name)
    500 
    501 

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\ops\array_ops.py in strided_slice(input_, begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask, var, name)
    661       ellipsis_mask=ellipsis_mask,
    662       new_axis_mask=new_axis_mask,
--> 663       shrink_axis_mask=shrink_axis_mask)
    664 
    665   parent_name = name

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\ops\gen_array_ops.py in strided_slice(input, begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask, name)
   3513                                 ellipsis_mask=ellipsis_mask,
   3514                                 new_axis_mask=new_axis_mask,
-> 3515                                 shrink_axis_mask=shrink_axis_mask, name=name)
   3516   return result
   3517 

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\op_def_library.py in apply_op(self, op_type_name, name, **keywords)
    765         op = g.create_op(op_type_name, inputs, output_types, name=scope,
    766                          input_types=input_types, attrs=attr_protos,
--> 767                          op_def=op_def)
    768         if output_structure:
    769           outputs = op.outputs

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\ops.py in create_op(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_shapes, compute_device)
   2506                     original_op=self._default_original_op, op_def=op_def)
   2507     if compute_shapes:
-> 2508       set_shapes_for_outputs(ret)
   2509     self._add_op(ret)
   2510     self._record_op_seen_by_control_dependencies(ret)

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\ops.py in set_shapes_for_outputs(op)
   1871       shape_func = _call_cpp_shape_fn_and_require_op
   1872 
-> 1873   shapes = shape_func(op)
   1874   if shapes is None:
   1875     raise RuntimeError(

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\ops.py in call_with_requiring(op)
   1821 
   1822   def call_with_requiring(op):
-> 1823     return call_cpp_shape_fn(op, require_shape_fn=True)
   1824 
   1825   _call_cpp_shape_fn_and_require_op = call_with_requiring

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\common_shapes.py in call_cpp_shape_fn(op, input_tensors_needed, input_tensors_as_shapes_needed, debug_python_shape_fn, require_shape_fn)
    608     res = _call_cpp_shape_fn_impl(op, input_tensors_needed,
    609                                   input_tensors_as_shapes_needed,
--> 610                                   debug_python_shape_fn, require_shape_fn)
    611     if not isinstance(res, dict):
    612       # Handles the case where _call_cpp_shape_fn_impl calls unknown_shape(op).

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\tensorflow\python\framework\common_shapes.py in _call_cpp_shape_fn_impl(op, input_tensors_needed, input_tensors_as_shapes_needed, debug_python_shape_fn, require_shape_fn)
    674       missing_shape_fn = True
    675     else:
--> 676       raise ValueError(err.message)
    677 
    678   if missing_shape_fn:

ValueError: Index out of range using input dim 2; input has only 2 dims for 'strided_slice_219' (op: 'StridedSlice') with input shapes: [?,1], [4], [4], [4] and with computed input tensors: input[3] = <1 1 1 1>.

In [205]:
img += abs(img.min())

img *= 255.0/img.max() 
img


Out[205]:
array([[[ 109.6],
        [  69.2],
        [ 106.3],
        [ 129.5],
        [  71.9],
        [ 103.5],
        [ 108.5],
        [ 105.5],
        [ 107.9]],

       [[ 125.3],
        [ 163.6],
        [ 116. ],
        [  94.2],
        [ 118.3],
        [ 155.2],
        [ 121.7],
        [ 125. ],
        [ 106.5]],

       [[  42.4],
        [  72.9],
        [  89.6],
        [ 152. ],
        [   0. ],
        [ 165. ],
        [ 110.1],
        [ 117.1],
        [ 108.1]],

       [[ 122.7],
        [  36.5],
        [ 161.4],
        [ 255. ],
        [  80.5],
        [  43.5],
        [ 144.4],
        [ 110.1],
        [ 106.6]],

       [[  76.8],
        [ 137.4],
        [ 126.3],
        [  56.3],
        [  73.5],
        [  34.2],
        [  75.5],
        [   7.5],
        [ 107.9]],

       [[ 114. ],
        [  90.7],
        [ 104.8],
        [  66.9],
        [ 135.3],
        [ 165.8],
        [  93.1],
        [  17.4],
        [ 108.3]],

       [[ 124.4],
        [ 141.4],
        [ 124.3],
        [  78.9],
        [  84.2],
        [ 124.2],
        [ 100.9],
        [  64.3],
        [ 106.9]],

       [[  94.4],
        [ 121.7],
        [  94.1],
        [ 107.5],
        [ 104.8],
        [ 137.1],
        [ 104.6],
        [ 107.9],
        [ 106.4]],

       [[ 107.5],
        [ 107.5],
        [ 107.6],
        [ 107.1],
        [ 106.6],
        [ 108. ],
        [ 108. ],
        [ 107.5],
        [ 106.6]]])

In [200]:
# build a black picture with enough space for
# our 8 x 8 filters of size 128 x 128, with a 5px margin in between
n = len(kept_filters)
zoomFact = 50
img_width =9*zoomFact
img_height = 9*zoomFact
margin = 5
width = img_width +  margin
height = n * img_height + (n - 1) * margin
stitched_filters = np.zeros(( height,width, 3))

l = 0
# fill the picture with our saved filters
for i in range(0,n):
    
        img, loss = kept_filters[i]
        stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:-margin, :] = zoom(img,[zoomFact,zoomFact,1])
        l +=1
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)

In [194]:
stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,:, :].shape


Out[194]:
(0, 455, 3)

In [197]:
(img_width + margin) * i + img_width


Out[197]:
2725

Sensitivity analysis

... with repsect to own characteristics


In [131]:
# Pick a landscape
i = 500
marEff = np.zeros((x_test.shape[0],1))
for i in range(0,x_test.shape[0]):
    
    xi = x_test[i:i+1,:,:,:]
    yi = y_test[i,:]

    yi_hat = model.predict(xi)
    oyi_hat = y_min_max.inverse_transform(yi_hat.reshape(-1, 1))
    oyi = y_min_max.inverse_transform(yi.reshape(-1, 1))

    #print("oyi:",oyi," oyi_hat: ",oyi_hat )

    #Make a change to own characteristic

    oxi = x_min_max.inverse_transform(xi.reshape(xi.shape[0],-1)).reshape(xi.shape)
    oxi[:,4,4,0] +=1

    xi_prime = x_min_max.transform(oxi.reshape(oxi.shape[0],-1)).reshape(oxi.shape)
    # Make new prediction
    yi_hat_prime = model.predict(xi_prime)
    oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))
    #print('oyi_hat_prime: ',oyi_hat_prime )
    #print('del pred: ', oyi_hat_prime-oyi_hat )
    #print('b1',b1)
    marEff[i,:] = oyi_hat_prime-oyi_hat

In [41]:
print("average marginal effect own characteristics: \n",np.mean(marEff))
print('b1: \n', b1)


average marginal effect own characteristics: 
 9.23862591434
b1: 
 [[ 9.43021722]]

... with repsect to neighboring characteristics

Lets consider specific examples of landscapes: One neighbor case!

Set own characteristic to a fixed values and move around one single neighbor and predict outcome


In [44]:
nSize = 80
ownSize = 30

res = np.zeros((sizeGrid,sizeGrid))

for k in range(0,sizeGrid):
    for l in range(0,sizeGrid):
        imap = np.zeros((1,sizeGrid,sizeGrid,1))
        imap[0,4,4,0] = ownSize
        imap[0,k,l,0] = nSize
        #imap[0,4,4,0] = 0
        
        imap_prime = x_min_max.transform(imap.reshape(imap.shape[0],-1)).reshape(imap.shape)
        # Make new prediction
        yi_hat_prime = model.predict(imap_prime)
        oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))
        res[k,l] = oyi_hat_prime

# calculate effect of neighbor by substracting own effect (b1*ownSize) and devide by neighboring size nSize        
print((res-b1*ownSize)/nSize)        
print(b2)


[[-0.09590673  0.02645464  0.02578478  0.03501482  0.03645869 -0.25616865
  -0.09725675 -0.1731452  -0.10088529]
 [-0.07680463  0.07941027  0.12521868  0.38151942  0.56627703  0.25255366
   0.38236819  0.27172671  0.35986834]
 [ 0.09205752  0.16718722  2.40748148  2.61892939  2.49451838  2.62981615
   2.86107645  0.20646334  0.26639567]
 [-0.19768438  0.36203661  2.64389544  2.4985116   2.60336848  2.60755778
   2.55488902  0.17171793  0.2555974 ]
 [-0.12507696  0.36411105  2.69355135  2.6327096   5.93218813  2.58887034
   2.69915629  0.20320788  0.025208  ]
 [-0.07430791  0.2000043   2.73191386  2.63744288  2.75621272  2.69569903
   3.091393    0.39864588  0.27037402]
 [-0.16688146  0.15544901  2.46705599  2.71511012  2.57985545  2.60664797
   2.53672114  0.20020113  0.12577982]
 [-0.15882444  0.15481539  0.05049334  0.41724558  0.22630625  0.26732874
   0.13495989  0.13673869  0.15309954]
 [-0.09507932 -0.14950056 -0.20892744 -0.01390257 -0.15436086 -0.23943853
   0.00812083  0.12192812 -0.14874372]]
[[ 3.76041529]]

Test sens


In [48]:
res = np.zeros((sizeGrid,sizeGrid))

fig = plt.subplots(9, 9, sharex=True, sharey=True)
i = 0
for k in range(0,sizeGrid):
    for l in range(0,sizeGrid):
        i +=1
        seli = (x_test[:,k,l,0]>0)
        imap = np.copy(x_test[seli,:,:,:])
        iY = y_test[seli,:]
        oimap = x_min_max.inverse_transform(imap.reshape(imap.shape[0],-1)).reshape(imap.shape)
        oiY = y_min_max.inverse_transform(iY)

        yi_hat = model.predict(imap)
        oimap_prime = np.copy(oimap)
        oimap_prime[:,k,l,:] += 10
        
        imap_prime = x_min_max.transform(oimap_prime.reshape(oimap_prime.shape[0],-1)).reshape(oimap_prime.shape)

        yi_hat_prime = model.predict(imap_prime)

        oyi_hat = y_min_max.inverse_transform(yi_hat.reshape(-1, 1))
        oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))
        #res[k,l] = np.mean(oyi_hat-oyi_hat_prime)
        res[k,l] = np.mean(oyi_hat_prime-oyi_hat)
        
        # Plot hist
        if k==0 & l==0:
            ax = plt.subplot(9, 9, i)
            ax0 = ax
        else:
            ax = plt.subplot(9, 9, i, sharex=ax0)
        
        ax.hist(oyi_hat_prime-oyi_hat, bins=100)
        #ax.axvline(x=0,color='black')
        for label in ax.get_xticklabels() + ax.get_yticklabels():
            label.set_visible(False)



In [26]:
%matplotlib notebook

# use gridspec to partition the figure into subplots
import matplotlib.gridspec as gridspec

In [35]:
plt.figure()
gspec = gridspec.GridSpec(9, 9)

for k in range(0,sizeGrid):
    for l in range(0,sizeGrid):
        ax = plt.subplot(gspec[k, l])
        ax.hist(oyi_hat_prime-oyi_hat, bins=100)
        for label in ax.get_xticklabels() + ax.get_yticklabels():
            label.set_visible(False)



In [36]:
oyi_hat_prime-oyi_hat


Out[36]:
array([[ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.],
       [ 0.]], dtype=float32)

In [49]:
np.set_printoptions(precision=1,suppress=True)
print(res)
print("b1: ",str(b1))
print("b2: ",str(b2))


[[ -0.3  -0.7   0.   -0.9  -0.2   0.1  -0.5   0.2   0. ]
 [ -0.6  -0.5   0.4   1.    1.3   0.3   0.8  -0.3   0. ]
 [  0.5   0.4   9.3   8.4   8.6   9.5   9.2   0.    0. ]
 [ -0.1   1.4   9.1   9.2  12.2  10.5   9.5   0.9   0. ]
 [ -0.4   1.9   8.5  10.5  42.2  12.4   8.6   1.1   0. ]
 [ -0.1   1.1   9.8  11.3  12.5  10.4   9.9   0.8   0. ]
 [ -0.2   0.1   8.7  10.    9.6  10.    9.1  -0.3   0. ]
 [ -0.4  -0.5   0.4   1.1   1.5   0.4   0.8  -0.1   0. ]
 [  0.    0.    0.    0.    0.    0.    0.    0.    0. ]]
b1:  [[ 4.3]]
b2:  [[ 4.8]]

In [118]:
imap.shape


Out[118]:
(703, 9, 9, 1)

In [117]:
imap_prime.shape


Out[117]:
(1, 9, 9, 1)

In [92]:
np.mean(oyi_hat_prime-oyi_hat)


Out[92]:
-362.79788

In [ ]:


In [ ]:


In [ ]:


In [123]:
xx = np.pad(imap,pad_width=1,mode='constant')

In [136]:
nSize = 80
ownSize = 30

res = np.zeros((sizeGrid,sizeGrid))

for k in range(0,sizeGrid):
    for l in range(0,sizeGrid):
        imap = np.zeros((1,sizeGrid,sizeGrid,2))
        imap[0,4,4,1] = ownSize
        imap[0,k-1:k+2,l-1:l+2,0] = nSize
        imap[0,4,4,0] = 0
        #print(imap[0,:,:,0])
        imap_prime = x_min_max.transform(imap.reshape(imap.shape[0],-1)).reshape(imap.shape)
        # Make new prediction
        yi_hat_prime = model.predict(imap_prime)
        oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))
        res[k,l] = oyi_hat_prime

# calculate effect of neighbor by substracting own effect (b1*ownSize) and devide by neighboring size nSize        
print((res-b1*ownSize)/nSize)        
print(b2)


[[ 0.1  0.1  0.1  0.1  0.1  0.1  0.1  0.1  0.1]
 [ 0.1  5.   5.3  6.   6.3  5.8  5.4  4.9  0.2]
 [ 0.1  5.9  6.   6.6  6.3  6.1  5.7  5.1  0.1]
 [ 0.1  6.3  6.2  6.7  6.2  6.3  6.2  5.3  0.5]
 [ 0.1  6.4  6.7  7.   6.4  6.8  6.6  5.7  0.7]
 [ 0.1  6.3  5.8  6.3  5.9  6.5  6.   5.8  0.9]
 [ 0.1  6.   6.6  6.4  6.6  6.8  6.   5.9  0.9]
 [ 0.1  5.5  6.3  6.4  6.5  6.4  5.4  5.   0.5]
 [ 0.1  0.6  0.5  0.4  0.1 -0.1 -0.1  0.2 -0.1]]
[[ 6.8]]

In [126]:
imap[0,:,:,0]


Out[126]:
array([[  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
       [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,  80.]])

In [98]:
# Pick a landscape

pick_i = np.random.randint(0,x_test.shape[0], size=(200,1))

sensAvg = np.zeros((pick_i.shape[0]*sizeGrid*sizeGrid,3))
rr = 0
marEff = np.zeros((pick_i.shape[0],sizeGrid,sizeGrid))
#marEff = np.zeros((10,sizeGrid,sizeGrid))
for ii in range(0,pick_i.shape[0]):

    i = int(pick_i[ii])
    xi = x_test[i:i+1,:,:,:]
    yi = y_test[i,:]

    yi_hat = model.predict(xi)
    oyi_hat = y_min_max.inverse_transform(yi_hat.reshape(-1, 1))
    oyi = y_min_max.inverse_transform(yi.reshape(-1, 1))
    
    for k in range(0,sizeGrid):
        for l in range(0,sizeGrid):


            #Make a change to neigh. characteristic

            oxi = x_min_max.inverse_transform(xi.reshape(xi.shape[0],-1)).reshape(xi.shape)
            
            #Save neighboring average before 
            xx = np.copy(oxi[:,2:7,2:7,0])
            xx[xx==0] = np.nan
            sensAvg[rr,0] = np.nanmean(xx)
            
            numNeigh = np.count_nonzero(~np.isnan(xx))
            
            # Check if k and l are withing cutoff
            if (k>=2) & (k<7) & (l>=2) & (l<7):
                
                oldMean = np.nanmean(xx)
                if np.isnan(oldMean):
                    oldMean=0
                oldSum = np.nansum(xx)
                if np.isnan(oldSum):
                    oldSum=0 
                if oxi[:,k,l,0]>0:
                    addPart = (oldMean+1)*(numNeigh)-oldSum
                else:
                    addPart = (oldMean+1)*(numNeigh+1)-oldSum
            else:
                addPart = 1
            
            if np.isnan(addPart):
                addPart = 1
            
            # Add old average +2 such that average is increased by 1
            oxi[:,k,l,0] += addPart
            
            #Save neighboring average after
            xx = np.copy(oxi[:,2:7,2:7,0])
            xx[xx==0] = np.nan
            sensAvg[rr,1] = np.nanmean(xx)
            
            xi_prime = x_min_max.transform(oxi.reshape(oxi.shape[0],-1)).reshape(oxi.shape)
            # Make new prediction
            yi_hat_prime = model.predict(xi_prime)
            oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))
            
            marEff[ii,k,l] = oyi_hat_prime-oyi_hat
            
            sensAvg[rr,2] = oyi_hat_prime-oyi_hat
            
            rr +=1


C:\Users\storm\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\ipykernel_launcher.py:30: RuntimeWarning: Mean of empty slice
C:\Users\storm\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\ipykernel_launcher.py:59: RuntimeWarning: Mean of empty slice
C:\Users\storm\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\ipykernel_launcher.py:37: RuntimeWarning: Mean of empty slice

In [99]:
np.set_printoptions(precision=1,suppress=True)
meanMarEff = np.mean(marEff,axis=0)
meanMarEff[4,4] = 0
print('Mean marginal effects of neighboring characteristics \n',meanMarEff)
print('b2 \n',b2)


Mean marginal effects of neighboring characteristics 
 [[ 0.8  1.3  1.3  1.6  1.7  1.6  1.9  1.8  1.6]
 [ 1.1  1.5  0.2 -0.1 -0.1  0.   0.5  2.1  2.3]
 [ 1.2  0.7  4.3  0.5  1.2 -0.2  0.1  0.7  1.8]
 [ 1.7  0.5 -1.7  1.4  0.6  0.2  2.6 -0.1  1.9]
 [ 1.7  0.4 -0.   0.9  0.  -0.9 -2.2 -0.4  1.9]
 [ 1.6  0.3  2.   2.4  4.8  1.   0.5 -0.2  1.8]
 [ 1.2  0.7 -1.7 -0.5 -1.7 -0.6  0.  -0.2  1.6]
 [ 1.3  1.8  0.4  0.  -0.5 -0.3 -0.   1.8  1.8]
 [ 0.9  1.6  1.4  1.4  1.2  1.5  1.3  1.7  1.3]]
b2 
 [[ 6.8]]

In [537]:
np.set_printoptions(precision=3)
aa = sensAvg[sensAvg[:,0]!=sensAvg[:,1]]
aa = aa[~np.isnan(np.sum(aa,axis=1))]

plt.scatter((aa[:,1]-aa[:,0]),aa[:,2]/(aa[:,1]-aa[:,0]))
print('mean',np.mean(aa[:,2]/(aa[:,1]-aa[:,0])))
print('median',np.median(aa[:,2]/(aa[:,1]-aa[:,0])))
print('b2',b2)


mean -21.7486537909
median 5.05365448534
b2 [[ 7.997]]

In [538]:
np.set_printoptions(precision=3)
aa = sensAvg[sensAvg[:,0]!=sensAvg[:,1]]
aa = aa[~np.isnan(np.sum(aa,axis=1))]
aa = aa[(aa[:,1]-aa[:,0])>0,:]
plt.scatter((aa[:,1]-aa[:,0]),aa[:,2]/(aa[:,1]-aa[:,0]))
print('mean',np.mean(aa[:,2]/(aa[:,1]-aa[:,0])))
print('median',np.median(aa[:,2]/(aa[:,1]-aa[:,0])))
print('b2',b2)


mean 207.00957546
median 8.09351806641
b2 [[ 7.997]]

In [494]:
ll = 25
k = 6
l = 2

xx = np.copy(oxi[:,2:7,2:7,0])
xx[xx==0] = np.nan
print(xx)
#print(np.nanmean(xx))
print(sensAvg[rr-ll,0])
print(sensAvg[rr-ll,1])

print((sensAvg[rr-ll,1]-sensAvg[rr-ll,0]))

print(marEff[ii,k,l])
print(b2)
print(marEff[ii,k,l]/(sensAvg[rr-ll,1]-sensAvg[rr-ll,0]))


[[[ nan  nan  nan  nan  nan]
  [ nan  nan  nan  nan  nan]
  [ nan  nan  nan  nan  nan]
  [ nan  nan  nan  nan  nan]
  [ nan  nan  nan  nan  nan]]]
nan
1.0
nan
11.8434906006
[[ 7.997]]
nan

In [266]:
k = 6
l = 1
oxi = x_min_max.inverse_transform(xi.reshape(xi.shape[0],-1)).reshape(xi.shape)
oxi[:,k,l,0] +=1

xi_prime = x_min_max.transform(oxi.reshape(oxi.shape[0],-1)).reshape(oxi.shape)
# Make new prediction
yi_hat_prime = model.predict(xi_prime)
oyi_hat_prime = y_min_max.inverse_transform(yi_hat_prime.reshape(-1, 1))

marEff[ii,k,l] = oyi_hat_prime-oyi_hat

In [294]:
xx = oxi[:,2:7,2:7]
xx[xx==0] = np.nan


Out[294]:
array([[[ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan]],

       [[ nan,  nan],
        [ nan,  nan],
        [ 11.,  nan],
        [ nan,  nan],
        [ nan,  nan]],

       [[ nan,  nan],
        [ nan,  nan],
        [ nan,  39.],
        [ nan,  nan],
        [ 66.,  nan]],

       [[ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan]],

       [[ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan],
        [ nan,  nan]]])

In [268]:
x_min_max.inverse_transform(xi.reshape(xi.shape[0],-1)).reshape(xi.shape)[:,:,:,0]


Out[268]:
array([[[  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,  28.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
        [ 43.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
        [  0.,   0.,   0.,   0.,  39.,   0.,   0.,  32.,   0.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,  68.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.],
        [  0.,   0.,   0.,   0.,   0.,   0.,   0.,  38.,   0.]]])

In [269]:
print(oyi_hat_prime)
print(oyi_hat)
print(oyi_hat_prime-oyi_hat)
print(b2)


[[ 669.8]]
[[ 662.3]]
[[ 7.5]]
[[ 8.]]

In [253]:
((39+1)/2)*b2


Out[253]:
array([[ 159.9]])

In [255]:
39*b2


Out[255]:
array([[ 311.9]])

In [272]:
oxi[:,2:7,2:7].shape


Out[272]:
(1, 5, 5, 2)

In [45]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1 (Conv2D)               (None, 7, 7, 2)           20        
_________________________________________________________________
flatten_1 (Flatten)          (None, 98)                0         
_________________________________________________________________
dense_1 (Dense)              (None, 64)                6336      
_________________________________________________________________
dense_2 (Dense)              (None, 1)                 65        
=================================================================
Total params: 6,421
Trainable params: 6,421
Non-trainable params: 0
_________________________________________________________________

In [502]:
#import pydot
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
plot_model(model, to_file='SLX_CNN_Model.png')
#SVG(model_to_dot(model).create(prog='dot', format='svg'))


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\keras\utils\vis_utils.py in _check_pydot()
     22         # to check the pydot/graphviz installation.
---> 23         pydot.Dot.create(pydot.Dot())
     24     except Exception:

AttributeError: 'NoneType' object has no attribute 'Dot'

During handling of the above exception, another exception occurred:

ImportError                               Traceback (most recent call last)
<ipython-input-502-c8b39929b864> in <module>()
      3 from keras.utils.vis_utils import model_to_dot
      4 from keras.utils import plot_model
----> 5 plot_model(model, to_file='SLX_CNN_Model.png')
      6 #SVG(model_to_dot(model).create(prog='dot', format='svg'))

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\keras\utils\vis_utils.py in plot_model(model, to_file, show_shapes, show_layer_names, rankdir)
    129             'LR' creates a horizontal plot.
    130     """
--> 131     dot = model_to_dot(model, show_shapes, show_layer_names, rankdir)
    132     _, extension = os.path.splitext(to_file)
    133     if not extension:

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\keras\utils\vis_utils.py in model_to_dot(model, show_shapes, show_layer_names, rankdir)
     50     from ..models import Sequential
     51 
---> 52     _check_pydot()
     53     dot = pydot.Dot()
     54     dot.set('rankdir', rankdir)

~\AppData\Local\conda\conda\envs\tensorflow\lib\site-packages\keras\utils\vis_utils.py in _check_pydot()
     25         # pydot raises a generic Exception here,
     26         # so no specific class can be caught.
---> 27         raise ImportError('Failed to import pydot. You must install pydot'
     28                           ' and graphviz for `pydotprint` to work.')
     29 

ImportError: Failed to import pydot. You must install pydot and graphviz for `pydotprint` to work.

In [ ]: