In [1]:
# Notebook setup: render matplotlib figures inline and automatically
# reload edited modules (useful while developing the local EDeN sources)
# without restarting the kernel.
%matplotlib inline
%load_ext autoreload
%autoreload 2
In [2]:
from eden.converter.molecule import obabel
import networkx as nx
import pybel
import requests
import os.path
from itertools import tee
from numpy.random import randint
from numpy.random import uniform
from eden.graph import Vectorizer
from sklearn.linear_model import SGDClassifier
import datetime, time
from eden.util import random_bipartition_iter
from eden.model import ActiveLearningBinaryClassificationModel
from eden.util import configure_logging
import logging
# Attach EDeN's logging to the root logger; verbosity=2 enables the
# library's more detailed progress output during fitting/optimization.
configure_logging(logging.getLogger(),verbosity=2)
In [3]:
def make_iterable(filename, file_format):
    """Yield one molecule record at a time from a molecule file.

    Parameters
    ----------
    filename : str
        Path to the input file.
    file_format : str
        'sdf' — multi-line records terminated by a line containing '$$$$';
        each yielded string includes the terminator line.
        'smi' — one molecule per line; lines are yielded unchanged.

    Yields
    ------
    str
        One molecule record (any other file_format yields nothing,
        matching the original behavior).
    """
    if file_format == 'sdf':
        with open(filename) as f:
            # Accumulate lines in a list and join once per record:
            # repeated `s = s + line` concatenation is quadratic.
            record_lines = []
            for line in f:
                record_lines.append(line)
                if line.strip() == '$$$$':
                    yield ''.join(record_lines)
                    record_lines = []
            # Bug fix: the original dropped a trailing record when the
            # file did not end with a '$$$$' terminator. Yield it unless
            # it is only whitespace (e.g. trailing blank lines).
            tail = ''.join(record_lines)
            if tail.strip():
                yield tail
    elif file_format == 'smi':
        with open(filename) as f:
            for line in f:
                yield line
This is where the data sets are defined:
In [4]:
# Data-set selection: PubChem BioAssay id used to build the SDF file names.
AID = 720577
#AID=2401
# NOTE(review): absolute local path — consider making this configurable.
DATA_DIR = '/home/liconj/proj/thesis/EDeN/examples/model_comparison/data'
active_fname = '%s/AID%s_active.sdf' % (DATA_DIR, AID)
inactive_fname = '%s/AID%s_inactive.sdf' % (DATA_DIR, AID)
Below, the model parameters are configured and the functions for training and testing the model are defined.
In [5]:
# Hyper-parameter configuration for the "default" (2D-graph) model.
model_fname = DATA_DIR + '/AID%s.model' % AID
model_type = "default"
n_conf = 10                        # conformers per molecule
n_iter = 50                        # randomized-search iterations
active_set_size = 5
n_active_learning_iterations = 0
threshold = 1
train_test_split = 0.8             # fraction of data used for training

# Candidate values sampled during hyper-parameter optimization of the
# pre-processor (one random draw per search iteration).
pre_processor_parameters = dict(
    k=randint(1, 10, size=n_iter),
    threshold=randint(3, 10, size=n_iter),
    model_type=[model_type],
    n_conf=[n_conf],
)
def pre_processor(data, model_type="3d", **kwargs):
    """Convert raw molecule data into EDeN graph iterables.

    Parameters
    ----------
    data : input accepted by the obabel converters (e.g. an iterable of
        molecule records).
    model_type : str
        "default" -> obabel.obabel_to_eden (2D graphs);
        "3d"      -> obabel.obabel_to_eden3d (3D graphs).
    **kwargs : forwarded to the selected converter.

    Returns
    -------
    iterable of graphs.

    Raises
    ------
    ValueError
        If `model_type` is not one of "default" or "3d".
    """
    if model_type == "default":
        return obabel.obabel_to_eden(data, **kwargs)
    elif model_type == "3d":
        return obabel.obabel_to_eden3d(data, **kwargs)
    # Bug fix: the original fell through with `iterable` unbound and
    # raised a confusing NameError; fail fast with a clear message.
    raise ValueError("unknown model_type: %r" % model_type)
# Vectorizer: maps EDeN graphs to sparse feature vectors.
vectorizer = Vectorizer()
# Linear classifier trained with stochastic gradient descent.
# NOTE(review): class_weight='auto' was removed in newer scikit-learn
# versions in favor of 'balanced' — confirm the pinned sklearn version.
estimator = SGDClassifier(class_weight='auto', shuffle=True)
# Make predictive model
model = ActiveLearningBinaryClassificationModel(pre_processor,
estimator=estimator,
vectorizer=vectorizer,
n_jobs=2,
pre_processor_n_jobs=2,
n_blocks = 10,
fit_vectorizer=True)
In [6]:
########
# Create iterables from files
########
# Consistency fix: use the file names assembled from AID/DATA_DIR in the
# configuration cell instead of hard-coded 'AID720577_*.sdf' literals,
# so changing AID is enough to switch data sets.
iterable_pos = make_iterable(active_fname, 'sdf')
iterable_neg = make_iterable(inactive_fname, 'sdf')
# Duplicate each generator: one copy is exhausted below to count records.
iterable_pos, iterable_pos_ = tee(iterable_pos)
iterable_neg, iterable_neg_ = tee(iterable_neg)
start = time.time()
print('# positives: %d # negatives: %d (%.1f sec %s)'%(sum(1 for x in iterable_pos_), sum(1 for x in iterable_neg_), time.time() - start, str(datetime.timedelta(seconds=(time.time() - start)))))
# Re-tee so the surviving copies can be split again below.
iterable_pos, iterable_pos_ = tee(iterable_pos)
iterable_neg, iterable_neg_ = tee(iterable_neg)
# Split train/test
iterable_pos_train, iterable_pos_test = random_bipartition_iter(iterable_pos, relative_size=train_test_split)
iterable_neg_train, iterable_neg_test = random_bipartition_iter(iterable_neg, relative_size=train_test_split)
In [7]:
%%time
# Optimize hyperparameters and fit model
# Since this model is fitted much more slowly, use a single vectorizer
#vectorizer_parameters={'complexity':[2,3,4],
# 'discretization_size':randint(2, 3,size=n_iter),
# 'discretization_dimension':randint(2, 3,size=n_iter)}
vectorizer_parameters={'complexity':[4,5,6], 'n':[2,3,4]}
estimator_parameters={'n_iter':randint(5, 100, size=n_iter),
'penalty':['l1','l2','elasticnet'],
'l1_ratio':uniform(0.1,0.9, size=n_iter),
'loss':['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'],
'power_t':uniform(0.1, size=n_iter),
'alpha': [10**x for x in range(-8,-2)],
'eta0': [10**x for x in range(-4,-1)],
'learning_rate': ["invscaling", "constant", "optimal"]}
model.optimize(iterable_pos_train, iterable_neg_train,
model_name=model_fname,
n_active_learning_iterations=0,
size_positive=-1,
size_negative=active_set_size,
n_iter=n_iter, cv=3,
pre_processor_parameters=pre_processor_parameters,
vectorizer_parameters=vectorizer_parameters,
estimator_parameters=estimator_parameters)
In [8]:
%%time
# Estimate predictive performance
# Evaluates the fitted model on the held-out test iterables produced by
# random_bipartition_iter above.
model.estimate( iterable_pos_test, iterable_neg_test )
Out[8]:
In [13]:
# Hyper-parameter configuration for the "3d" model variant.
model_fname = DATA_DIR + '/AID%s.model3d' % AID
model_type = "3d"
n_conf = 10                        # conformers per molecule
n_iter = 200                       # randomized-search iterations
active_set_size = 5
n_active_learning_iterations = 0
train_test_split = 0.8

# Candidate values sampled during hyper-parameter optimization of the
# pre-processor (one random draw per search iteration).
pre_processor_parameters = dict(
    k=randint(1, 10, size=n_iter),
    threshold=randint(3, 10, size=n_iter),
    model_type=[model_type],
    n_conf=[n_conf],
)
# NOTE(review): this re-definition is identical to the earlier
# `pre_processor` and silently shadows it — consider defining it once.
def pre_processor(data, model_type="3d", **kwargs):
    """Convert raw molecule data into EDeN graph iterables.

    model_type "default" uses obabel.obabel_to_eden (2D graphs);
    "3d" uses obabel.obabel_to_eden3d. Extra kwargs are forwarded to
    the selected converter.

    Raises
    ------
    ValueError
        If `model_type` is not one of "default" or "3d".
    """
    if model_type == "default":
        return obabel.obabel_to_eden(data, **kwargs)
    elif model_type == "3d":
        return obabel.obabel_to_eden3d(data, **kwargs)
    # Bug fix: the original fell through with `iterable` unbound and
    # raised a confusing NameError; fail fast with a clear message.
    raise ValueError("unknown model_type: %r" % model_type)
# Vectorizer: maps EDeN graphs to sparse feature vectors.
vectorizer = Vectorizer()
# Linear classifier trained with stochastic gradient descent.
# NOTE(review): class_weight='auto' was removed in newer scikit-learn
# versions in favor of 'balanced' — confirm the pinned sklearn version.
estimator = SGDClassifier(class_weight='auto', shuffle=True)
# Make predictive model
# Single-process configuration (n_jobs=1) — the 3d pre-processing is
# presumably too heavy to parallelize here; confirm before changing.
model3d = ActiveLearningBinaryClassificationModel(pre_processor,
estimator=estimator,
vectorizer=vectorizer,
n_jobs = 1,
pre_processor_n_jobs = 1,
n_blocks = 10,
fit_vectorizer=True)
In [14]:
########
# Create iterables from files
########
# Consistency fix: use the file names assembled from AID/DATA_DIR in the
# configuration cell instead of hard-coded 'AID720577_*.sdf' literals,
# so changing AID is enough to switch data sets.
iterable_pos = make_iterable(active_fname, 'sdf')
iterable_neg = make_iterable(inactive_fname, 'sdf')
# Duplicate each generator: one copy is exhausted below to count records.
iterable_pos, iterable_pos_ = tee(iterable_pos)
iterable_neg, iterable_neg_ = tee(iterable_neg)
start = time.time()
print('# positives: %d # negatives: %d (%.1f sec %s)'%(sum(1 for x in iterable_pos_), sum(1 for x in iterable_neg_), time.time() - start, str(datetime.timedelta(seconds=(time.time() - start)))))
# Re-tee so the surviving copies can be split again below.
iterable_pos, iterable_pos_ = tee(iterable_pos)
iterable_neg, iterable_neg_ = tee(iterable_neg)
# Split train/test
iterable_pos_train, iterable_pos_test = random_bipartition_iter(iterable_pos, relative_size=train_test_split)
iterable_neg_train, iterable_neg_test = random_bipartition_iter(iterable_neg, relative_size=train_test_split)
In [15]:
model3d.fit_vectorizer
Out[15]:
In [16]:
#%%time
# Optimize hyperparameters and fit model.
vectorizer_parameters = dict(complexity=[6], n=[2, 3, 4])
estimator_parameters = dict(
    n_iter=randint(5, 100, size=n_iter),
    penalty=['l1', 'l2', 'elasticnet'],
    l1_ratio=uniform(0.1, 0.9, size=n_iter),
    loss=['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'],
    power_t=uniform(0.1, size=n_iter),
    alpha=[10 ** x for x in range(-8, -2)],
    eta0=[10 ** x for x in range(-4, -1)],
    learning_rate=["invscaling", "constant", "optimal"],
)
model3d.optimize(iterable_pos_train, iterable_neg_train,
                 model_name=model_fname,
                 n_iter=n_iter, cv=3,
                 pre_processor_parameters=pre_processor_parameters,
                 vectorizer_parameters=vectorizer_parameters,
                 estimator_parameters=estimator_parameters)
In [ ]:
%%time
# Estimate predictive performance
# Evaluates the 3d model on the held-out test iterables produced by
# random_bipartition_iter above.
model3d.estimate( iterable_pos_test, iterable_neg_test )
In [ ]:
def test_obabel_model(fname, model_type = "default", model_fname=None):
    """Load a saved model and score the molecules in `fname`.

    Parameters
    ----------
    fname : str
        Path to the molecule file passed to the obabel converter.
    model_type : str
        "default" (2D graphs) or "3d" (3D graphs).
    model_fname : str
        Path of the serialized model to load.

    Returns
    -------
    The values returned by the model's decision_function over the
    converted molecules.

    Raises
    ------
    ValueError
        If `model_type` is not one of "default" or "3d".
    """
    from eden.model import ActiveLearningBinaryClassificationModel
    model = ActiveLearningBinaryClassificationModel()
    model.load(model_fname)
    # create iterable from files
    from eden.converter.molecule import obabel
    if model_type == "default":
        iterable = obabel.obabel_to_eden(fname)
    elif model_type == "3d":
        iterable = obabel.obabel_to_eden3d(fname)
    else:
        # Bug fix: previously an unknown model_type left `iterable`
        # unbound and raised a NameError on the next line.
        raise ValueError("unknown model_type: %r" % model_type)
    predictions = model.decision_function(iterable)
    return predictions