In [1]:
import keras as k
import keras.backend as b
import keras.layers as l
import numpy as np
import sys
import theano as t
import keras.initializations as i
sys.path.append('../src/mane/')
In [2]:
import graph as g
fb = g.graph_from_pickle('../src/mane/data/egonets.graph')
In [3]:
def row_dot(inputs):
    # Row-wise dot product of two (batch, dim) tensors -> (batch, 1).
    return b.batch_dot(inputs[0], inputs[1], axes=[1, 1])

def nce_loss(y_true, y_pred):
    # Negative-sampling logistic loss on raw logits; expects labels in {-1, +1}.
    y_true = y_true.reshape(y_pred.shape)
    return -b.log(b.sigmoid(y_pred * y_true))

def init_uniform(shape, name=None):
    # Uniform init in [-1, 1] (Keras' default scale is 0.05).
    return i.uniform(shape=shape, scale=1, name=name)
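A quick sanity check (an editorial sketch, not part of the original run): because 1 - sigmoid(x) = sigmoid(-x), nce_loss on raw logits with ±1 labels computes the same value as binary cross-entropy on the sigmoid of those logits with 0/1 labels, which is why the model below can compile with 'binary_crossentropy' instead of the custom loss.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

logits = np.array([2.0, -0.5, 1.3, -3.0])
labels01 = np.array([1, 0, 1, 0])     # binary_crossentropy convention
labels_pm = 2 * labels01 - 1          # nce_loss convention: {-1, +1}

bce = -(labels01 * np.log(sigmoid(logits))
        + (1 - labels01) * np.log(1 - sigmoid(logits)))
nce = -np.log(sigmoid(logits * labels_pm))
assert np.allclose(bce, nce)          # identical, since 1 - sigmoid(x) = sigmoid(-x)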
In [4]:
t_in = l.Input(batch_shape=(100, 1), dtype='int32', name='target')  # walk-node ids
c_in = l.Input(batch_shape=(100, 1), dtype='int32', name='class')   # context / negative-sample ids
In [5]:
emb_t = l.Embedding(input_dim=len(fb), output_dim=200,
                    name='target_emb', input_length=1,
                    init=init_uniform)(t_in)
emb_t = l.Reshape(target_shape=(200,))(emb_t)
emb_c = l.Embedding(input_dim=len(fb), output_dim=200,
                    name='nce_emb', input_length=1,
                    init=init_uniform)(c_in)
emb_c = l.Reshape(target_shape=(200,))(emb_c)
emb_b = l.Embedding(input_dim=len(fb), output_dim=1,
                    name='nce_bias', input_length=1,
                    init='zero')(c_in)
emb_b = l.Reshape(target_shape=(1,))(emb_b)
In [6]:
dot_prod = l.merge([emb_t, emb_c], mode=row_dot, output_shape=(1,), name='row_wise_dot')
logits = l.merge([dot_prod, emb_b], mode='sum', output_shape=(1,), name='logits')
sigm = l.Activation('sigmoid', name='label')(logits)
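In NumPy terms, the forward pass is a per-row dot product between the target and context embeddings plus a per-context bias, squashed through a sigmoid. A minimal sketch (V, W_t, W_c, W_b are stand-ins for len(fb) and the three embedding tables above):

import numpy as np

V, d, batch = 1000, 200, 100                 # V stands in for len(fb)
W_t = np.random.uniform(-1, 1, (V, d))       # 'target_emb' table
W_c = np.random.uniform(-1, 1, (V, d))       # 'nce_emb' table
W_b = np.zeros((V, 1))                       # 'nce_bias' table

t_idx = np.random.randint(0, V, size=batch)  # 'target' input ids
c_idx = np.random.randint(0, V, size=batch)  # 'class' input ids

logits = np.sum(W_t[t_idx] * W_c[c_idx], axis=1, keepdims=True) + W_b[c_idx]
prob = 1.0 / (1.0 + np.exp(-logits))         # the 'label' output, shape (100, 1)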
In [7]:
model = k.models.Model(input=[t_in, c_in], output=sigm)
In [8]:
model.compile(loss='binary_crossentropy', optimizer='adam')
In [9]:
model.summary()
In [25]:
data_generator = fb.gen_walk('random_walk')
In [26]:
model.fit_generator(data_generator, nb_epoch=1, samples_per_epoch=3433150)
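fit_generator only constrains the shape of what gen_walk must yield; its actual implementation lives in this repo's graph module and isn't shown here. A hypothetical stand-in illustrating the expected batch format (a dict keyed by the two input names, plus 0/1 labels for the 'label' output):

import numpy as np

def dummy_walk_batches(num_nodes, batch_size=100):
    # Hypothetical stand-in for fb.gen_walk('random_walk').
    while True:
        targets = np.random.randint(0, num_nodes, size=(batch_size, 1))
        contexts = np.random.randint(0, num_nodes, size=(batch_size, 1))
        labels = np.random.randint(0, 2, size=(batch_size, 1))  # 1 = observed pair, 0 = noise
        yield {'target': targets, 'class': contexts}, labels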
In [20]:
next(data_generator)
Out[20]:
In [21]:
la = next(data_generator)
In [24]:
len(la[0]['class'])
Out[24]: