In [1]:
import pandas as pd
import numpy as np
import tensorflow as tf

import pickle
import os
import random

from sklearn.manifold import TSNE
import matplotlib.pyplot as plt

import collections

In [2]:
seq_df = pd.read_table('data/family_classification_sequences.tab')
seq_df.head()


Out[2]:
Sequences
0 MAFSAEDVLKEYDRRRRMEALLLSLYYPNDRKLLDYKEWSPPRVQV...
1 MSIIGATRLQNDKSDTYSAGPCYAGGCSAFTPRGTCGKDWDLGEQT...
2 MQNPLPEVMSPEHDKRTTTPMSKEANKFIRELDKKPGDLAVVSDFV...
3 MDSLNEVCYEQIKGTFYKGLFGDFPLIVDKKTGCFNATKLCVLGGK...
4 MEAKNITIDNTTYNFFKFYNINQPLTNLKYLNSERLCFSNAVMGKI...

In [3]:
def make_codones(sseq):
    """ Split a sequence into non-overlapping 3-letter chunks, dropping any trailing remainder. """
    crop = len(sseq) % 3
    cropped_seq = sseq[:-crop] if crop > 0 else sseq

    return [cropped_seq[i:i+3] for i in range(0, len(cropped_seq), 3)]

def seq_to3(seq):
    """ Return the codon splittings for all three reading frames of seq. """
    splittings = [make_codones(seq[i:]) for i in range(3)]
    return splittings

def create_all_codones(df):
    """ Collect the three reading-frame splittings of every sequence in the dataframe. """
    codones = []

    for i in range(df.shape[0]):
        row = df.iloc[i, 0]
        codones.extend(seq_to3(row))
    return codones
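
For example, on a short toy string (hypothetical input, not taken from the dataset), seq_to3 returns the codon triples for all three reading frames:

seq_to3('MAFSAEDV')
# -> [['MAF', 'SAE'], ['AFS', 'AED'], ['FSA', 'EDV']]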

In [4]:
def read_or_create(read_path, producer):
    if os.path.isfile(read_path):
        print('reading', read_path)
        with open(read_path, 'rb') as fp:
            return pickle.load(fp)
    result = producer()
    print('saving', read_path)
    with open(read_path, 'wb') as fp:
        pickle.dump(result, fp)
    return result

In [5]:
all_codones = read_or_create(read_path='data/all_codones.pickle',
                             producer=lambda: create_all_codones(seq_df))


reading data/all_codones.pickle

In [6]:
len(all_codones)


Out[6]:
972054

In [7]:
def generate_sample(index_words_list, context_window_size):
    """ Form training pairs according to the skip-gram model. """
    for index_words in index_words_list:
        for index, center in enumerate(index_words):
            # sample a window size, then pair the center with every word in it
            context = random.randint(1, context_window_size)
            # targets within the sampled window before the center word
            for target in index_words[max(0, index - context): index]:
                yield center, target
            # targets within the sampled window after the center word
            for target in index_words[index + 1: index + context + 1]:
                yield center, target
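
# Toy check of the sampling logic (values are illustrative): with
# index_words_list=[[5, 2, 9]] and context_window_size=1, the generator
# yields the skip-gram pairs (5, 2), (2, 5), (2, 9), (9, 2).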


def get_batch(iterator, batch_size):
    """ Group a numerical stream into batches and yield them as NumPy arrays. """
    while True:
        center_batch = np.zeros(batch_size, dtype=np.int32)
        target_batch = np.zeros([batch_size, 1], dtype=np.int32)
        for index in range(batch_size):
            center_batch[index], target_batch[index] = next(iterator)
        yield center_batch, target_batch
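
# Each yielded batch is a pair of NumPy arrays: centers of shape
# (batch_size,) and targets of shape (batch_size, 1), matching the
# placeholders defined in SkipGramModel below.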


def flatten(x):
    return [item for sublist in x for item in sublist]


def cod_to_dict(cod, dictionary):
    return [dictionary[key] for key in cod]

# build the vocabulary sorted by frequency: the most common codon gets index 0
def make_dictionary(all_codones):
    flat_codones = flatten(all_codones)
    count = collections.Counter(flat_codones).most_common()
    dictionary = {}
    for word, _ in count:
        dictionary[word] = len(dictionary)
    return dictionary
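
# e.g. the most frequent codon maps to index 0 ('AAA' for this corpus, as the
# t-SNE table below shows), the next most frequent to 1, and so on.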

def process_data(all_codones, dictionary, batch_size, skip_window):
    cod_dicts = [cod_to_dict(cod, dictionary) for cod in all_codones]
    single_gen = generate_sample(cod_dicts, context_window_size=skip_window)
    batch_gen = get_batch(single_gen, batch_size=batch_size)
    return batch_gen

In [8]:
dictionary = make_dictionary(all_codones)

In [9]:
len(dictionary)


Out[9]:
9424

In [10]:
BATCH_SIZE = 128
SKIP_WINDOW = 12  # the context window

batch_gen = process_data(all_codones, dictionary, BATCH_SIZE, SKIP_WINDOW)

In [11]:
######################

In [12]:
def weight_variable(shape, name):
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial, name=name)

def bias_variable(shape, name):
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial, name=name)

def conv2d(x, W, name=None):
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1],
                        padding='SAME', name=name)
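
As a quick shape sanity check (a minimal sketch; the conv layer itself stays disabled in build_graph below): a 1x1 filter with unit strides and SAME padding is shape-preserving, so passing the reshaped embedding matrix through it keeps its dimensions.

# Sketch: a [1, 1, 1, 1] filter with SAME padding leaves spatial dims unchanged.
x_check = tf.zeros([1, 4, 5, 1])
w_check = weight_variable([1, 1, 1, 1], name='w_check')
print(conv2d(x_check, w_check).shape)  # (1, 4, 5, 1)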

In [13]:
class SkipGramModel:
    """ Build the graph for word2vec model """

    def __init__(self, vocab_size, embed_size, batch_size, num_sampled, learning_rate):
        self.vocab_size = vocab_size
        self.embed_size = embed_size
        self.batch_size = batch_size
        self.num_sampled = num_sampled
        self.lr = learning_rate
        self.global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')

    def _create_placeholders(self):
        with tf.name_scope("data"):
            self.center_words = tf.placeholder(tf.int32, shape=[self.batch_size], name='center_words')
            self.target_words = tf.placeholder(tf.int32, shape=[self.batch_size, 1], name='target_words')

    def _create_matrix(self):
        with tf.name_scope("matrix"):
            self.embed_matrix = tf.Variable(tf.random_uniform([self.vocab_size,
                                                               self.embed_size], -1.0, 1.0),
                                            name='matrix')

    def _create_conv_layer(self):
        # optional 1x1 convolution over the embedding matrix (left disabled in build_graph)
        with tf.name_scope("conv"):
            W_conv = weight_variable([1, 1, 1, 1], name='W_conv')
            b_conv = bias_variable([1], name='b_conv')
            x = tf.reshape(self.embed_matrix, [-1, self.vocab_size, self.embed_size, 1])
            conv = conv2d(x, W_conv, name='Conv2D')
            self.embed_matrix = tf.reshape(conv, [self.vocab_size, self.embed_size],
                                           name='conv_reshaped')
            
    def _create_loss(self):
        with tf.name_scope("loss"):
            
            embed = tf.nn.embedding_lookup(self.embed_matrix, self.center_words, name='embed')

            # construct variables for NCE loss
            nce_weight = tf.Variable(tf.truncated_normal([self.vocab_size, self.embed_size],
                                                         stddev=1.0 / (self.embed_size ** 0.5)),
                                     name='nce_weight')
            nce_bias = tf.Variable(tf.zeros([self.vocab_size]), name='nce_bias')

            # define loss function to be NCE loss function
            self.loss = tf.reduce_mean(tf.nn.nce_loss(weights=nce_weight,
                                                      biases=nce_bias,
                                                      labels=self.target_words,
                                                      inputs=embed,
                                                      num_sampled=self.num_sampled,
                                                      num_classes=self.vocab_size), name='loss')

    def _create_optimizer(self):
        self.optimizer = tf.train.GradientDescentOptimizer(self.lr).minimize(self.loss,
                                                                             global_step=self.global_step)

    def _create_summaries(self):
        with tf.name_scope("summaries"):
            tf.summary.scalar("loss", self.loss)
            tf.summary.histogram("histogram loss", self.loss)
            # since there are several summaries, merge them into a single op
            # so they are easier to run and manage
            self.summary_op = tf.summary.merge_all()

    def build_graph(self):
        """ Build the graph for our model """
        self._create_placeholders()
        self._create_matrix()
        #self._create_conv_layer()
        self._create_loss()
        self._create_optimizer()
        self._create_summaries()
        # create the saver only after all variables exist, so it tracks
        # embed_matrix, nce_weight, nce_bias and global_step (a Saver built
        # in __init__ would only know about global_step)
        self.saver = tf.train.Saver()

In [14]:
VOCAB_SIZE = len(dictionary)
EMBED_SIZE = 100  # dimension of the word embedding vectors
NUM_SAMPLED = 7  # Number of negative examples to sample.
LEARNING_RATE = .5
NUM_TRAIN_STEPS = 1500000
SKIP_STEP = 2000

g = tf.Graph()
with g.as_default():
    model = SkipGramModel(VOCAB_SIZE, EMBED_SIZE, BATCH_SIZE, NUM_SAMPLED, LEARNING_RATE)
    model.build_graph()


INFO:tensorflow:Summary name histogram loss is illegal; using histogram_loss instead.

In [15]:
######################

In [16]:
def make_dir(path):
    """ Create a directory if there isn't one already. """
    try:
        os.mkdir(path)
    except OSError:
        pass

In [53]:
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.3)

def train_model(model, batch_gen, num_train_steps, learning_rate, skip_step):
    make_dir('checkpoints')
    with tf.Session(graph=g, config=tf.ConfigProto(gpu_options=gpu_options)) as sess:

        # try to resume from the copied checkpoint directory; fall back to
        # freshly initialized variables if no usable checkpoint is found
        save_dir = 'checkpoints_copy'
        try:
            print("Trying to restore last checkpoint ...")
            last_chk_path = tf.train.latest_checkpoint(checkpoint_dir=save_dir)
            model.saver.restore(sess, save_path=last_chk_path)
            print("Restored checkpoint from:", last_chk_path)
        except Exception:
            print("Failed to restore checkpoint. Initializing variables instead.")
            sess.run(tf.global_variables_initializer())

        total_loss = 0.0  # running sum used to report the average loss over each SKIP_STEP window
        writer = tf.summary.FileWriter('improved_graph/lr' + str(learning_rate), sess.graph)
        initial_step = model.global_step.eval()
        for index in range(initial_step, initial_step + num_train_steps):
            centers, targets = next(batch_gen)
            feed_dict = {model.center_words: centers, model.target_words: targets}
            loss_batch, _, summary = sess.run([model.loss, model.optimizer, model.summary_op],
                                              feed_dict=feed_dict)
            writer.add_summary(summary, global_step=index)
            total_loss += loss_batch
            if (index + 1) % skip_step == 0:
                print('Average loss at step {}: {:5.1f}'.format(index, total_loss / skip_step))
                total_loss = 0.0
                model.saver.save(sess, 'checkpoints/skip-gram', index)

        final_embed_matrix = sess.run(model.embed_matrix)
        return final_embed_matrix

In [18]:
final_embed_matrix = train_model(model, batch_gen, NUM_TRAIN_STEPS, LEARNING_RATE, SKIP_STEP)


Average loss at step 1999:  24.8
Average loss at step 3999:  19.1
Average loss at step 5999:  15.0
Average loss at step 7999:  13.6
Average loss at step 9999:   9.9
Average loss at step 11999:   9.4
Average loss at step 13999:   6.3
Average loss at step 15999:   8.4
Average loss at step 17999:   6.8
Average loss at step 19999:   6.4
[... ~740 similar lines elided: the average loss stays mostly between 1 and 3, occasionally dipping below 1, for the remainder of the run ...]
Average loss at step 1499999:   2.7

In [19]:
final_embed_matrix


Out[19]:
array([[-0.1788726 ,  0.06352279,  0.20417669, ..., -0.07852899,
        -0.15395242, -0.19479749],
       [-0.14539281,  0.15225504,  0.1984829 , ...,  0.12135678,
        -0.02620173,  0.35002044],
       [ 0.14615406, -0.1313851 , -0.26296726, ..., -0.50978965,
         0.14209567,  0.23275888],
       ..., 
       [-0.35499239,  0.92123199,  0.20749044, ...,  0.96454573,
        -0.46314907, -0.59946823],
       [-0.93901634, -0.12750483, -0.81264901, ..., -0.85041928,
        -0.41288972,  0.9166038 ],
       [ 0.66656709, -0.34473491,  0.47611141, ...,  0.71734643,
        -0.52254581,  0.75010371]], dtype=float32)

In [20]:
final_embed_matrix[110]


Out[20]:
array([-0.05808124, -0.2702741 ,  0.23374355,  0.03329588,  0.28464177,
       -0.18135138, -0.27312088, -0.0792769 ,  0.06765673,  0.19695969,
        0.30224526, -0.43541247,  0.20541178,  0.05715238,  0.418706  ,
        0.33900061, -0.29642683, -0.28246671,  0.30710667,  0.34872133,
       -0.03053589, -0.25851986, -0.33247155, -0.27733579, -0.03450498,
       -0.16856   ,  0.33631071, -0.25987461,  0.27384746,  0.29659611,
        0.13111487,  0.21963502, -0.20245473, -0.26549467,  0.38368383,
        0.1618522 , -0.02639238, -0.34695369,  0.0803695 ,  0.22643267,
       -0.04453176,  0.28512827,  0.35255834, -0.0803884 ,  0.02346297,
        0.14236982,  0.28951535, -0.11098605,  0.25529975, -0.19437127,
       -0.21824153, -0.05479385,  0.0009965 ,  0.27405843, -0.06208887,
       -0.21950285, -0.44743136,  0.06627847, -0.09608825,  0.19581346,
        0.2664412 , -0.02507019, -0.22346064, -0.09773409,  0.1966729 ,
        0.33563221, -0.05667553, -0.06290171, -0.34061858, -0.03765723,
        0.168558  ,  0.10894039,  0.62495232,  0.13127114,  0.10348286,
        0.18239497,  0.31972665,  0.12350008,  0.13685551,  0.04288222,
        0.15473145, -0.09042376,  0.05402598, -0.26299793,  0.19564402,
        0.45056111,  0.29884335, -0.25826862,  0.35278326,  0.41062415,
       -0.37663293,  0.01747831,  0.26347664,  0.09417771,  0.28301045,
       -0.09387814,  0.06271276, -0.00431372,  0.36771649,  0.14793944], dtype=float32)
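
A quick way to eyeball the trained space (a minimal sketch, assuming the dictionary and final_embed_matrix objects from the cells above): cosine-nearest neighbours of a given codon.

def nearest_codones(query, k=5):
    # cosine similarity of every embedding against the query codon's vector
    inv_dictionary = {v: c for c, v in dictionary.items()}
    normed = final_embed_matrix / np.linalg.norm(final_embed_matrix, axis=1, keepdims=True)
    sims = normed @ normed[dictionary[query]]
    best = np.argsort(-sims)[1:k + 1]  # drop the query itself
    return [(inv_dictionary[i], float(sims[i])) for i in best]

# nearest_codones('AAA')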

In [21]:
######################

In [22]:
tsne = TSNE(n_components=2, random_state=42)
XX = tsne.fit_transform(final_embed_matrix)

In [23]:
tsne_df = pd.DataFrame(XX, columns=['x0', 'x1'])
unique_codones = sorted(dictionary, key=dictionary.get)
tsne_df['codone'] = list(unique_codones)
tsne_df.head()


Out[23]:
x0 x1 codone
0 -2.772461 -7.154655 AAA
1 -0.867646 0.684946 LLL
2 3.345885 2.249863 ALA
3 -1.151602 -6.515843 LAA
4 -2.337872 -3.876973 AAL

In [24]:
filename = 'data/acid_properties.csv'
props = pd.read_csv(filename)

In [25]:
######################

In [26]:
def acid_dict(some_c, props):
    """ Average each physico-chemical property over the residues of a codon. """
    prop_by_letter = [props[props.acid == let].iloc[:, 1:] for let in some_c]
    df_concat = pd.concat(prop_by_letter)
    res = df_concat.mean()
    dres = dict(res)
    dres['acid'] = some_c
    return dres
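
For instance (a sketch; the numbers depend on the property table), acid_dict averages every numeric column over the codon's three residues:

# acid_dict('AAL', props)
# -> {'hydrophobicity': ..., 'mass': ..., 'number_of_atoms': ...,
#     'volume': ..., 'acid': 'AAL'}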

In [27]:
save_path = 'data/all_acid_dicts.pickle'
producer = lambda: [acid_dict(some_c, props) for some_c in tsne_df.codone]
all_acid_dicts = read_or_create(save_path, producer)

all_acid_df = pd.DataFrame(all_acid_dicts)
final_df = all_acid_df.join(tsne_df.set_index('codone'), on='acid')


reading data/all_acid_dicts.pickle

In [31]:
def plot_embedding_properties(final_df, save=False):
    plt.figure(figsize=(25, 20))
    for i, p in enumerate(['hydrophobicity', 'mass', 'number_of_atoms', 'volume']):
        plt.subplot(2,2,i+1)
        plt.title(p, fontsize=25)
        plt.scatter(final_df.x0, final_df.x1, c=final_df[p], s=10)
    if save:
        plt.savefig('hw_pure.png')
    else:
        plt.show()

plot_embedding_properties(final_df)