In [1]:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import math
import os
import random
import zipfile

import numpy as np
from six.moves import urllib
from six.moves import xrange  # pylint: disable=redefined-builtin
import tensorflow as tf

In [2]:
import pandas as pd
import numpy as np
import tensorflow as tf

import pickle
import os
import random

from sklearn.manifold import TSNE
import matplotlib.pyplot as plt

import collections

In [3]:
seq_df = pd.read_table('data/family_classification_sequences.tab')
seq_df.head()


Out[3]:
Sequences
0 MAFSAEDVLKEYDRRRRMEALLLSLYYPNDRKLLDYKEWSPPRVQV...
1 MSIIGATRLQNDKSDTYSAGPCYAGGCSAFTPRGTCGKDWDLGEQT...
2 MQNPLPEVMSPEHDKRTTTPMSKEANKFIRELDKKPGDLAVVSDFV...
3 MDSLNEVCYEQIKGTFYKGLFGDFPLIVDKKTGCFNATKLCVLGGK...
4 MEAKNITIDNTTYNFFKFYNINQPLTNLKYLNSERLCFSNAVMGKI...

In [4]:
def make_codones(sseq):
    crop = len(sseq) % 3
    cropped_seq = sseq[:-crop] if crop > 0 else sseq

    return [cropped_seq[i:i+3] for i in range(0, len(cropped_seq), 3)]

def seq_to3(seq):
    splittings = [make_codones(seq[i:]) for i in range(3)]
    return splittings

def create_all_codones(df):
    codones = []

    for i in range(df.shape[0]):
        row = df.iloc[i, :][0]
        codones.extend(seq_to3(row))
    return codones

In [5]:
def read_or_create(read_path, producer):
    if os.path.isfile(read_path):
        print('reading', read_path)
        with open(read_path, 'rb') as fp:
            return pickle.load(fp)
    result = producer()
    print('saving', read_path)
    with open(read_path, 'wb') as fp:
        pickle.dump(result, fp)
    return result

In [6]:
all_codones = read_or_create(read_path='data/all_codones.pickle',
                             producer= lambda: create_all_codones(seq_df))


reading data/all_codones.pickle

In [7]:
len(all_codones)


Out[7]:
972054

In [8]:
# Step 2: Build the dictionary and replace rare words with UNK token.
vocabulary_size = 9000

def flatten(x):
    return [item for sublist in x for item in sublist]

def build_dataset(words, n_words):
    """Process raw inputs into a dataset."""
    count = [['UNK', -1]]
    count.extend(collections.Counter(words).most_common(n_words - 1))
    dictionary = dict()
    for word, _ in count:
        dictionary[word] = len(dictionary)
    data = list()
    unk_count = 0
    for word in words:
        if word in dictionary:
            index = dictionary[word]
        else:
            index = 0  # dictionary['UNK']
        unk_count += 1
        data.append(index)
    count[0][1] = unk_count
    reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
    return data, count, dictionary, reversed_dictionary

data, count, dictionary, reverse_dictionary = build_dataset(flatten(all_codones),
                                                            vocabulary_size)

In [9]:
#del vocabulary  # Hint to reduce memory.
print('Most common words (+UNK)', count[:5])
print('Sample data', data[:10], [reverse_dictionary[i] for i in data[:10]])

data_index = 0


Most common words (+UNK) [['UNK', 137373126], ('AAA', 152576), ('LLL', 131965), ('ALA', 127540), ('LAA', 126274)]
Sample data [4302, 767, 154, 3031, 2278, 4107, 9, 10, 4268, 4374] ['MAF', 'SAE', 'DVL', 'KEY', 'DRR', 'RRM', 'EAL', 'LLS', 'LYY', 'PND']

In [10]:
# Step 3: Function to generate a training batch for the skip-gram model.
def generate_batch(batch_size, num_skips, skip_window):
    global data_index
    assert batch_size % num_skips == 0
    assert num_skips <= 2 * skip_window
    batch = np.ndarray(shape=(batch_size), dtype=np.int32)
    labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
    span = 2 * skip_window + 1  # [ skip_window target skip_window ]
    buffer = collections.deque(maxlen=span)
    if data_index + span > len(data):
        data_index = 0
    buffer.extend(data[data_index:data_index + span])
    data_index += span
    for i in range(batch_size // num_skips):
        target = skip_window  # target label at the center of the buffer
        targets_to_avoid = [skip_window]
        for j in range(num_skips):
            while target in targets_to_avoid:
                target = random.randint(0, span - 1)
            targets_to_avoid.append(target)
            batch[i * num_skips + j] = buffer[skip_window]
            labels[i * num_skips + j, 0] = buffer[target]
        if data_index == len(data):
            buffer[:] = data[:span]
            data_index = span
        else:
            buffer.append(data[data_index])
            data_index += 1
    # Backtrack a little bit to avoid skipping words in the end of a batch
    data_index = (data_index + len(data) - span) % len(data)
    return batch, labels

In [13]:
# Step 4: Build and train a skip-gram model.

batch_size = 130
embedding_size = 100  # Dimension of the embedding vector.
skip_window = 2       # How many words to consider left and right.
num_skips = 2         # How many times to reuse an input to generate a label.

# We pick a random validation set to sample nearest neighbors. Here we limit the
# validation samples to the words that have a low numeric ID, which by
# construction are also the most frequent.
valid_size = 16     # Random set of words to evaluate similarity on.
valid_window = 50  # Only pick dev samples in the head of the distribution.
valid_examples = np.random.choice(valid_window, valid_size, replace=False)
num_sampled = 32    # Number of negative examples to sample.

graph = tf.Graph()

with graph.as_default():

    # Input data.
    train_inputs = tf.placeholder(tf.int32, shape=[batch_size])
    train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])
    valid_dataset = tf.constant(valid_examples, dtype=tf.int32)

    # Look up embeddings for inputs.
    embeddings = tf.Variable(tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
    embed = tf.nn.embedding_lookup(embeddings, train_inputs)

    # Construct the variables for the NCE loss
    nce_weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],
                            stddev=1.0 / math.sqrt(embedding_size)))
    nce_biases = tf.Variable(tf.zeros([vocabulary_size]))

    # Compute the average NCE loss for the batch.
    # tf.nce_loss automatically draws a new sample of the negative labels each
    # time we evaluate the loss.
    loss = tf.reduce_mean(
      tf.nn.nce_loss(weights=nce_weights,
                     biases=nce_biases,
                     labels=train_labels,
                     inputs=embed,
                     num_sampled=num_sampled,
                     num_classes=vocabulary_size))

    # Construct the SGD optimizer using a learning rate of 0.5
    optimizer = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

    # Compute the cosine similarity between minibatch examples and all embeddings.
    norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
    normalized_embeddings = embeddings / norm
    valid_embeddings = tf.nn.embedding_lookup(
      normalized_embeddings, valid_dataset)
    similarity = tf.matmul(
      valid_embeddings, normalized_embeddings, transpose_b=True)

    # Add variable initializer.
    init = tf.global_variables_initializer()

In [24]:
# Step 5: Begin training.
num_steps = 1000001
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.1)

with tf.Session(graph=graph, config=tf.ConfigProto(gpu_options=gpu_options)) as session:
    # We must initialize all variables before we use them.
    init.run()
    print('Initialized')

    average_loss = 0
    for step in xrange(num_steps):
        batch_inputs, batch_labels = generate_batch(
          batch_size, num_skips, skip_window)
        feed_dict = {train_inputs: batch_inputs, train_labels: batch_labels}

        # We perform one update step by evaluating the optimizer op (including it
        # in the list of returned values for session.run()
        _, loss_val = session.run([optimizer, loss], feed_dict=feed_dict)
        average_loss += loss_val

        if step % 2000 == 0:
            if step > 0:
                average_loss /= 2000
            # The average loss is an estimate of the loss over the last 2000 batches.
            print('Average loss at step ', step, ': ', average_loss)
            average_loss = 0

    final_embeddings = normalized_embeddings.eval()


Initialized
Average loss at step  0 :  119.126029968
Average loss at step  2000 :  61.1495944575
Average loss at step  4000 :  25.8924007711
Average loss at step  6000 :  11.727699588
Average loss at step  8000 :  7.03665991879
Average loss at step  10000 :  4.34988836366
Average loss at step  12000 :  4.42586186707
Average loss at step  14000 :  4.53659113109
Average loss at step  16000 :  4.1681057359
Average loss at step  18000 :  3.99903420579
Average loss at step  20000 :  3.2787261517
Average loss at step  22000 :  3.54625270468
Average loss at step  24000 :  3.6601667673
Average loss at step  26000 :  3.80738984627
Average loss at step  28000 :  3.80188235402
Average loss at step  30000 :  3.92436711299
Average loss at step  32000 :  3.87874091816
Average loss at step  34000 :  3.42209392667
Average loss at step  36000 :  2.61261815296
Average loss at step  38000 :  3.26761925304
Average loss at step  40000 :  4.06432568169
Average loss at step  42000 :  3.99964493847
Average loss at step  44000 :  4.01190854883
Average loss at step  46000 :  4.08558911824
Average loss at step  48000 :  3.76766406906
Average loss at step  50000 :  3.90899339104
Average loss at step  52000 :  4.04778058445
Average loss at step  54000 :  4.00031730092
Average loss at step  56000 :  3.97761685324
Average loss at step  58000 :  4.00943110311
Average loss at step  60000 :  4.07195993745
Average loss at step  62000 :  4.01821309149
Average loss at step  64000 :  3.78878077334
Average loss at step  66000 :  4.03495088553
Average loss at step  68000 :  3.95521069634
Average loss at step  70000 :  3.90212012184
Average loss at step  72000 :  3.99363900089
Average loss at step  74000 :  3.70062898862
Average loss at step  76000 :  3.71992637277
Average loss at step  78000 :  3.66881508118
Average loss at step  80000 :  3.6436969195
Average loss at step  82000 :  3.59785213071
Average loss at step  84000 :  3.92053696084
Average loss at step  86000 :  3.44885598665
Average loss at step  88000 :  3.7085479551
Average loss at step  90000 :  3.65722840309
Average loss at step  92000 :  3.81723995703
Average loss at step  94000 :  3.50954942298
Average loss at step  96000 :  3.38499734014
Average loss at step  98000 :  3.38563372475
Average loss at step  100000 :  3.89357519871
Average loss at step  102000 :  4.11270222557
Average loss at step  104000 :  3.61834502113
Average loss at step  106000 :  3.84507328439
Average loss at step  108000 :  3.15525771284
Average loss at step  110000 :  3.44166543514
Average loss at step  112000 :  3.66516802478
Average loss at step  114000 :  3.47526733917
Average loss at step  116000 :  3.67610444653
Average loss at step  118000 :  3.84662543333
Average loss at step  120000 :  3.80732675433
Average loss at step  122000 :  3.94069340956
Average loss at step  124000 :  4.01920956182
Average loss at step  126000 :  3.99649083769
Average loss at step  128000 :  3.44821328455
Average loss at step  130000 :  3.6333952297
Average loss at step  132000 :  4.06467120099
Average loss at step  134000 :  3.92057398844
Average loss at step  136000 :  3.39996988112
Average loss at step  138000 :  2.87374107689
Average loss at step  140000 :  2.60789674276
Average loss at step  142000 :  2.84049584624
Average loss at step  144000 :  2.7857802166
Average loss at step  146000 :  2.36684657329
Average loss at step  148000 :  1.98557331991
Average loss at step  150000 :  3.32123425409
Average loss at step  152000 :  4.05211616933
Average loss at step  154000 :  3.67943640348
Average loss at step  156000 :  3.96858145052
Average loss at step  158000 :  4.00736879611
Average loss at step  160000 :  3.91861312473
Average loss at step  162000 :  4.07063932872
Average loss at step  164000 :  4.02153898859
Average loss at step  166000 :  3.12508211523
Average loss at step  168000 :  3.44618395048
Average loss at step  170000 :  4.12333977485
Average loss at step  172000 :  3.75003493166
Average loss at step  174000 :  3.93262283897
Average loss at step  176000 :  4.11046838999
Average loss at step  178000 :  4.05429406583
Average loss at step  180000 :  3.9731704222
Average loss at step  182000 :  4.00487256348
Average loss at step  184000 :  3.90849013305
Average loss at step  186000 :  4.00094261241
Average loss at step  188000 :  3.88912317061
Average loss at step  190000 :  3.88107669032
Average loss at step  192000 :  3.68086536026
Average loss at step  194000 :  3.53149941957
Average loss at step  196000 :  3.98274851906
Average loss at step  198000 :  3.78647843987
Average loss at step  200000 :  3.62539402062
Average loss at step  202000 :  3.92829506385
Average loss at step  204000 :  3.74466307878
Average loss at step  206000 :  3.68918133736
Average loss at step  208000 :  3.99113954854
Average loss at step  210000 :  3.66902638066
Average loss at step  212000 :  4.03265728635
Average loss at step  214000 :  3.63641955787
Average loss at step  216000 :  4.00437462217
Average loss at step  218000 :  3.53712545213
Average loss at step  220000 :  3.89292595768
Average loss at step  222000 :  3.62105072767
Average loss at step  224000 :  3.80519144952
Average loss at step  226000 :  3.79368976575
Average loss at step  228000 :  3.2020428232
Average loss at step  230000 :  2.76883738774
Average loss at step  232000 :  2.69276540625
Average loss at step  234000 :  2.60941100121
Average loss at step  236000 :  4.0491347878
Average loss at step  238000 :  3.98549493086
Average loss at step  240000 :  3.95798052251
Average loss at step  242000 :  4.05932395315
Average loss at step  244000 :  3.95672905433
Average loss at step  246000 :  3.96949056971
Average loss at step  248000 :  3.86376481633
Average loss at step  250000 :  3.92728506088
Average loss at step  252000 :  3.41569460523
Average loss at step  254000 :  3.18544876683
Average loss at step  256000 :  3.73844458693
Average loss at step  258000 :  3.96467925411
Average loss at step  260000 :  3.41644651413
Average loss at step  262000 :  3.92711339468
Average loss at step  264000 :  3.91395852959
Average loss at step  266000 :  4.0830812937
Average loss at step  268000 :  3.93778098893
Average loss at step  270000 :  3.95348739946
Average loss at step  272000 :  3.96167342305
Average loss at step  274000 :  3.08926038212
Average loss at step  276000 :  3.29989641812
Average loss at step  278000 :  3.8794646033
Average loss at step  280000 :  4.09745702612
Average loss at step  282000 :  3.89756890613
Average loss at step  284000 :  3.60646986526
Average loss at step  286000 :  3.99830664855
Average loss at step  288000 :  3.82278461227
Average loss at step  290000 :  4.02027485836
Average loss at step  292000 :  3.31828936595
Average loss at step  294000 :  4.02582477999
Average loss at step  296000 :  3.81781618202
Average loss at step  298000 :  3.99572535551
Average loss at step  300000 :  3.77326693082
Average loss at step  302000 :  3.12646301329
Average loss at step  304000 :  3.67783437586
Average loss at step  306000 :  3.73613705873
Average loss at step  308000 :  3.59921461284
Average loss at step  310000 :  3.6095014047
Average loss at step  312000 :  3.87657125032
Average loss at step  314000 :  3.95851281166
Average loss at step  316000 :  3.9764292649
Average loss at step  318000 :  3.58220039356
Average loss at step  320000 :  3.94653614712
Average loss at step  322000 :  3.720163423
Average loss at step  324000 :  3.56642918301
Average loss at step  326000 :  3.66721898174
Average loss at step  328000 :  3.83152716225
Average loss at step  330000 :  4.0558986212
Average loss at step  332000 :  3.34858522427
Average loss at step  334000 :  3.54756380683
Average loss at step  336000 :  3.24299510968
Average loss at step  338000 :  3.96572959661
Average loss at step  340000 :  3.7940499264
Average loss at step  342000 :  4.03896022034
Average loss at step  344000 :  3.83318865609
Average loss at step  346000 :  3.89208621132
Average loss at step  348000 :  3.86148700178
Average loss at step  350000 :  3.53505287451
Average loss at step  352000 :  3.22938461024
Average loss at step  354000 :  3.8848562516
Average loss at step  356000 :  3.31536461008
Average loss at step  358000 :  2.93489264023
Average loss at step  360000 :  2.63819637197
Average loss at step  362000 :  2.98502131158
Average loss at step  364000 :  4.05092249942
Average loss at step  366000 :  3.95739364934
Average loss at step  368000 :  3.84283522844
Average loss at step  370000 :  3.70203513646
Average loss at step  372000 :  3.66868379104
Average loss at step  374000 :  3.4091258831
Average loss at step  376000 :  4.06076966739
Average loss at step  378000 :  4.0926863215
Average loss at step  380000 :  3.96751191795
Average loss at step  382000 :  3.74349420786
Average loss at step  384000 :  2.95387060857
Average loss at step  386000 :  3.24166875076
Average loss at step  388000 :  3.91270519984
Average loss at step  390000 :  3.95668291235
Average loss at step  392000 :  3.65617422414
Average loss at step  394000 :  2.56673428872
Average loss at step  396000 :  3.8659454838
Average loss at step  398000 :  3.87266151071
Average loss at step  400000 :  4.03922977996
Average loss at step  402000 :  3.94601182526
Average loss at step  404000 :  3.68007462901
Average loss at step  406000 :  3.88067942572
Average loss at step  408000 :  3.62295263529
Average loss at step  410000 :  3.46835929561
Average loss at step  412000 :  3.65445437789
Average loss at step  414000 :  4.05470535553
Average loss at step  416000 :  3.96624928701
Average loss at step  418000 :  3.8157374711
Average loss at step  420000 :  4.05416299701
Average loss at step  422000 :  4.01554898298
Average loss at step  424000 :  3.81042160892
Average loss at step  426000 :  3.37893894482
Average loss at step  428000 :  3.87441713405
Average loss at step  430000 :  3.86783204293
Average loss at step  432000 :  3.65294597566
Average loss at step  434000 :  3.94580326164
Average loss at step  436000 :  3.98247951066
Average loss at step  438000 :  3.83750182402
Average loss at step  440000 :  3.59764712918
Average loss at step  442000 :  3.85137526321
Average loss at step  444000 :  3.30985073692
Average loss at step  446000 :  4.01371329772
Average loss at step  448000 :  3.56705274636
Average loss at step  450000 :  3.99787095642
Average loss at step  452000 :  3.72514075524
Average loss at step  454000 :  3.91909309196
Average loss at step  456000 :  3.66607662511
Average loss at step  458000 :  3.81898567253
Average loss at step  460000 :  3.88718395978
Average loss at step  462000 :  3.93906749415
Average loss at step  464000 :  3.46467433429
Average loss at step  466000 :  3.97269784188
Average loss at step  468000 :  3.77697219557
Average loss at step  470000 :  3.18046859893
Average loss at step  472000 :  3.9524744468
Average loss at step  474000 :  3.99995392096
Average loss at step  476000 :  3.90242221797
Average loss at step  478000 :  3.74906534445
Average loss at step  480000 :  3.72255106813
Average loss at step  482000 :  3.62028006399
Average loss at step  484000 :  3.86086698365
Average loss at step  486000 :  3.94298090953
Average loss at step  488000 :  3.79800077397
Average loss at step  490000 :  3.67882431483
Average loss at step  492000 :  3.97426085067
Average loss at step  494000 :  3.95908077896
Average loss at step  496000 :  3.47798164189
Average loss at step  498000 :  3.49843656421
Average loss at step  500000 :  3.93079060453
Average loss at step  502000 :  3.69443558729
Average loss at step  504000 :  3.48006289864
Average loss at step  506000 :  3.87340049934
Average loss at step  508000 :  3.55336252189
Average loss at step  510000 :  3.70970340955
Average loss at step  512000 :  3.9084632591
Average loss at step  514000 :  3.43588912189
Average loss at step  516000 :  3.82652820206
Average loss at step  518000 :  3.97687758148
Average loss at step  520000 :  3.60218194193
Average loss at step  522000 :  3.42877672189
Average loss at step  524000 :  3.93728478396
Average loss at step  526000 :  3.59296439159
Average loss at step  528000 :  3.28555182534
Average loss at step  530000 :  3.81742381811
Average loss at step  532000 :  3.72024451208
Average loss at step  534000 :  3.98224378049
Average loss at step  536000 :  4.03965680349
Average loss at step  538000 :  3.9861843251
Average loss at step  540000 :  3.75858036697
Average loss at step  542000 :  3.53878676963
Average loss at step  544000 :  3.07333242232
Average loss at step  546000 :  3.70270115626
Average loss at step  548000 :  3.96055596244
Average loss at step  550000 :  4.04783767962
Average loss at step  552000 :  4.04177865171
Average loss at step  554000 :  3.84992902064
Average loss at step  556000 :  3.56693650341
Average loss at step  558000 :  3.59482140797
Average loss at step  560000 :  3.63450617212
Average loss at step  562000 :  3.34965990138
Average loss at step  564000 :  3.67434575313
Average loss at step  566000 :  3.71838268328
Average loss at step  568000 :  2.8887899254
Average loss at step  570000 :  3.50496822971
Average loss at step  572000 :  2.87180133441
Average loss at step  574000 :  3.62785212433
Average loss at step  576000 :  3.64093511793
Average loss at step  578000 :  3.94582883883
Average loss at step  580000 :  4.03352475274
Average loss at step  582000 :  4.01124733841
Average loss at step  584000 :  4.02803626275
Average loss at step  586000 :  4.04240604782
Average loss at step  588000 :  3.79423961008
Average loss at step  590000 :  3.70509081548
Average loss at step  592000 :  3.51949239814
Average loss at step  594000 :  3.87839257896
Average loss at step  596000 :  3.92019653869
Average loss at step  598000 :  3.85037129807
Average loss at step  600000 :  3.67841539454
Average loss at step  602000 :  3.83621081042
Average loss at step  604000 :  3.7845600248
Average loss at step  606000 :  3.84742611736
Average loss at step  608000 :  3.98529296803
Average loss at step  610000 :  3.96580527878
Average loss at step  612000 :  3.75711395466
Average loss at step  614000 :  3.64366654301
Average loss at step  616000 :  3.84276127493
Average loss at step  618000 :  3.53182745725
Average loss at step  620000 :  3.94750693047
Average loss at step  622000 :  3.92251111281
Average loss at step  624000 :  3.70414858413
Average loss at step  626000 :  3.44345592028
Average loss at step  628000 :  3.4733272745
Average loss at step  630000 :  3.95874742818
Average loss at step  632000 :  3.91192687941
Average loss at step  634000 :  3.73677934486
Average loss at step  636000 :  3.7655741446
Average loss at step  638000 :  3.6725953604
Average loss at step  640000 :  3.39735464382
Average loss at step  642000 :  3.34796014601
Average loss at step  644000 :  3.7475180999
Average loss at step  646000 :  3.5792993989
Average loss at step  648000 :  3.78419495273
Average loss at step  650000 :  3.9523264389
Average loss at step  652000 :  3.82278872764
Average loss at step  654000 :  3.85914325726
Average loss at step  656000 :  3.86383314097
Average loss at step  658000 :  3.84453871918
Average loss at step  660000 :  3.48142753267
Average loss at step  662000 :  3.78367291892
Average loss at step  664000 :  3.4662844575
Average loss at step  666000 :  3.07735001373
Average loss at step  668000 :  3.48037478232
Average loss at step  670000 :  3.73653779733
Average loss at step  672000 :  3.58240143311
Average loss at step  674000 :  3.78240312707
Average loss at step  676000 :  3.6556512208
Average loss at step  678000 :  3.54137724203
Average loss at step  680000 :  4.14104429734
Average loss at step  682000 :  3.77023518616
Average loss at step  684000 :  3.88225945234
Average loss at step  686000 :  3.2757957375
Average loss at step  688000 :  3.08582961267
Average loss at step  690000 :  3.35837932378
Average loss at step  692000 :  3.30401721323
Average loss at step  694000 :  2.70941208306
Average loss at step  696000 :  3.05649915022
Average loss at step  698000 :  3.25425812343
Average loss at step  700000 :  4.02279094058
Average loss at step  702000 :  4.00740947771
Average loss at step  704000 :  3.1121950137
Average loss at step  706000 :  2.77743973753
Average loss at step  708000 :  2.32957498714
Average loss at step  710000 :  3.39215748942
Average loss at step  712000 :  3.37749999213
Average loss at step  714000 :  2.96630233583
Average loss at step  716000 :  4.2398018682
Average loss at step  718000 :  3.80606846631
Average loss at step  720000 :  3.99840133929
Average loss at step  722000 :  4.09281390643
Average loss at step  724000 :  3.81891062462
Average loss at step  726000 :  3.92831310713
Average loss at step  728000 :  3.77012417293
Average loss at step  730000 :  3.67117787617
Average loss at step  732000 :  3.46938789392
Average loss at step  734000 :  3.53024798489
Average loss at step  736000 :  3.37046450347
Average loss at step  738000 :  3.57182955229
Average loss at step  740000 :  4.03005469978
Average loss at step  742000 :  3.74208204532
Average loss at step  744000 :  3.9628334564
Average loss at step  746000 :  3.76446300036
Average loss at step  748000 :  3.97897696114
Average loss at step  750000 :  3.29946661222
Average loss at step  752000 :  3.2122924124
Average loss at step  754000 :  3.75051852274
Average loss at step  756000 :  3.95314256883
Average loss at step  758000 :  3.75137947899
Average loss at step  760000 :  3.85390408397
Average loss at step  762000 :  3.55421551287
Average loss at step  764000 :  3.77973961204
Average loss at step  766000 :  3.77341864222
Average loss at step  768000 :  3.50986721534
Average loss at step  770000 :  3.92477182496
Average loss at step  772000 :  3.96833926809
Average loss at step  774000 :  3.82259867442
Average loss at step  776000 :  4.02170427823
Average loss at step  778000 :  3.57859000093
Average loss at step  780000 :  3.73805037689
Average loss at step  782000 :  3.84640007758
Average loss at step  784000 :  3.78137674654
Average loss at step  786000 :  4.03022279811
Average loss at step  788000 :  3.75677310479
Average loss at step  790000 :  3.61514839941
Average loss at step  792000 :  3.07029638785
Average loss at step  794000 :  2.73917092168
Average loss at step  796000 :  3.13750098866
Average loss at step  798000 :  3.45186756968
Average loss at step  800000 :  3.49827256316
Average loss at step  802000 :  2.98938052636
Average loss at step  804000 :  3.57626739341
Average loss at step  806000 :  3.94320663393
Average loss at step  808000 :  3.69862916261
Average loss at step  810000 :  3.93712006366
Average loss at step  812000 :  4.04175051296
Average loss at step  814000 :  3.90587647218
Average loss at step  816000 :  3.93629096663
Average loss at step  818000 :  3.69892529202
Average loss at step  820000 :  3.71494105184
Average loss at step  822000 :  3.20149250978
Average loss at step  824000 :  3.9005826894
Average loss at step  826000 :  3.60882700759
Average loss at step  828000 :  3.75368388921
Average loss at step  830000 :  3.54332648093
Average loss at step  832000 :  3.7988607837
Average loss at step  834000 :  3.88100214434
Average loss at step  836000 :  3.99332412922
Average loss at step  838000 :  3.7746885463
Average loss at step  840000 :  3.65251970655
Average loss at step  842000 :  3.89932671654
Average loss at step  844000 :  4.01310121906
Average loss at step  846000 :  3.8967320497
Average loss at step  848000 :  3.81769780421
Average loss at step  850000 :  3.89187510407
Average loss at step  852000 :  3.77035244966
Average loss at step  854000 :  3.99029542553
Average loss at step  856000 :  3.78694486749
Average loss at step  858000 :  3.34722984838
Average loss at step  860000 :  2.78106876457
Average loss at step  862000 :  2.49957658201
Average loss at step  864000 :  2.14309600896
Average loss at step  866000 :  2.15214239249
Average loss at step  868000 :  3.88837835318
Average loss at step  870000 :  4.11665901482
Average loss at step  872000 :  4.0257419672
Average loss at step  874000 :  4.01976076913
Average loss at step  876000 :  3.34193235111
Average loss at step  878000 :  3.78487754649
Average loss at step  880000 :  3.15544565821
Average loss at step  882000 :  3.53115876853
Average loss at step  884000 :  4.0942779566
Average loss at step  886000 :  4.00463715196
Average loss at step  888000 :  3.78228797132
Average loss at step  890000 :  3.74674371338
Average loss at step  892000 :  3.28984229523
Average loss at step  894000 :  3.97753263569
Average loss at step  896000 :  3.95500701374
Average loss at step  898000 :  3.73017018628
Average loss at step  900000 :  3.58603824222
Average loss at step  902000 :  3.43651346284
Average loss at step  904000 :  3.51565404844
Average loss at step  906000 :  3.89684177095
Average loss at step  908000 :  3.53647247091
Average loss at step  910000 :  3.94289418364
Average loss at step  912000 :  3.18497651362
Average loss at step  914000 :  3.65658980548
Average loss at step  916000 :  3.2287833699
Average loss at step  918000 :  3.99560763955
Average loss at step  920000 :  3.67411530697
Average loss at step  922000 :  3.44806361651
Average loss at step  924000 :  3.18636469388
Average loss at step  926000 :  3.31088513547
Average loss at step  928000 :  3.50122531992
Average loss at step  930000 :  3.28062318963
Average loss at step  932000 :  3.16026620138
Average loss at step  934000 :  3.67589388871
Average loss at step  936000 :  4.1872833302
Average loss at step  938000 :  4.02456256723
Average loss at step  940000 :  3.89639034379
Average loss at step  942000 :  3.6623539263
Average loss at step  944000 :  3.14787442183
Average loss at step  946000 :  3.67750448525
Average loss at step  948000 :  4.12483517337
Average loss at step  950000 :  3.94900021815
Average loss at step  952000 :  3.86042083168
Average loss at step  954000 :  3.59857352728
Average loss at step  956000 :  3.96898870999
Average loss at step  958000 :  3.90635740793
Average loss at step  960000 :  3.78139202893
Average loss at step  962000 :  4.02272753215
Average loss at step  964000 :  3.45346943313
Average loss at step  966000 :  3.52783829921
Average loss at step  968000 :  3.51899553269
Average loss at step  970000 :  3.40819113529
Average loss at step  972000 :  3.67416924244
Average loss at step  974000 :  3.73567245537
Average loss at step  976000 :  3.77570666528
Average loss at step  978000 :  3.92931683648
Average loss at step  980000 :  3.67024605119
Average loss at step  982000 :  3.66562316275
Average loss at step  984000 :  3.75733993471
Average loss at step  986000 :  3.79348353648
Average loss at step  988000 :  3.49998480648
Average loss at step  990000 :  3.86291727626
Average loss at step  992000 :  3.86313721418
Average loss at step  994000 :  3.9765391047
Average loss at step  996000 :  3.74463638395
Average loss at step  998000 :  3.82135864002
Average loss at step  1000000 :  3.38083948189

In [25]:
final_embeddings


Out[25]:
array([[ 0.10082047, -0.18633492, -0.13755505, ..., -0.03174205,
         0.13989469, -0.11684187],
       [-0.08840837, -0.03059632, -0.00652557, ..., -0.05839119,
         0.06080177, -0.13072729],
       [-0.11391483, -0.00997427,  0.04847915, ..., -0.17545949,
         0.05758522, -0.10118128],
       ..., 
       [-0.05109098,  0.07239907,  0.12829277, ..., -0.05017259,
        -0.12748541,  0.10859746],
       [ 0.05100953,  0.04071346, -0.00584286, ...,  0.06296492,
         0.008503  ,  0.14932406],
       [-0.02431265,  0.10759754,  0.06825366, ...,  0.13342872,
        -0.02238155,  0.14059585]], dtype=float32)

In [26]:
tsne = TSNE(n_components=2, random_state=42)
XX = tsne.fit_transform(final_embeddings)

In [27]:
tsne_df = pd.DataFrame(XX, columns=['x0', 'x1'])
unique_codones = sorted(dictionary, key=dictionary.get)
tsne_df['codone'] = list(unique_codones)
tsne_df.head()


Out[27]:
x0 x1 codone
0 -0.626161 -1.229317 UNK
1 -0.566174 0.541346 AAA
2 -2.262350 -0.848446 LLL
3 -0.223504 -1.765902 ALA
4 -0.537833 0.245597 LAA

In [28]:
filename = 'data/acid_properties.csv'
props = pd.read_csv(filename)

In [29]:
def acid_dict(some_c, props):
    prop_by_letter = [props[props.acid == let].iloc[:, 1:] for let in some_c]   
    df_concat = pd.concat(prop_by_letter)
    res = df_concat.mean()
    dres = dict(res)
    dres['acid'] = some_c
    return dres

In [30]:
save_path = 'data/all_acid_dicts.pickle'
producer = lambda: [acid_dict(some_c, props) for some_c in tsne_df.codone]
all_acid_dicts = read_or_create(save_path, producer)

all_acid_df = pd.DataFrame(all_acid_dicts)
final_df = all_acid_df.join(tsne_df.set_index('codone'), on='acid')


reading data/all_acid_dicts.pickle

In [31]:
def plot_embedding_properties(final_df):
    plt.figure(figsize=(25, 20))
    for i, p in enumerate(['hydrophobicity', 'mass', 'number_of_atoms', 'volume']):
        plt.subplot(2,2,i+1)
        plt.title(p, fontsize=25)
        plt.scatter(final_df.x0, final_df.x1, c=final_df[p], s=10)
    plt.show()

plot_embedding_properties(final_df)



In [ ]: