This notebook exercises the integration of RADICAL-Pilot (RP) with an AdaptiveMD workflow: it creates a project, registers an OpenMM engine and a PyEMMA modeller as generators, requests a compute resource, queues trajectories plus a modelling task, and drives their execution through the RP client.
In [1]:
    
from adaptivemd import Project, File, Configuration
from adaptivemd.mongodb import MongoDBStorage
from adaptivemd.engine.openmm import OpenMMEngine
from adaptivemd.analysis.pyemma import PyEMMAAnalysis
from adaptivemd.rp.client import Client
    
    
In [3]:
    
# Optionally point AdaptiveMD at a remote MongoDB instead of the default:
#Project.set_dblocation("user:user@ds159013.mlab.com", 59013)
#Project.set_dblocation("user:user@two.radical-project.org", 32769)
print(MongoDBStorage._db_url)
project_name = 'rp_testing_modeller_1'
    
    
In [3]:
    
Project.list()
    
    Out[3]:
In [4]:
    
# Remove any earlier copy of this project so the run starts clean
Project.delete(project_name)
#[Project.delete(p) for p in Project.list()]
Project.list()
    
    Out[4]:
In [5]:
    
project = Project(project_name)
# Older API: resource = LocalResource(); project.initialize(resource)
project.initialize()
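
In a later session the same project can be re-opened simply by constructing it by name; a small sketch (the assumption, based on the delete-and-initialize flow above, is that initialize() is only needed when a project is first created):

# Sketch: re-attach to an existing project without re-initializing it
existing = Project(project_name)
print(len(existing.trajectories))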
    
In [6]:
    
# Example resource configurations (all disabled here); the local entry is the
# simplest, while the Titan/Rhea entries show the fields needed for an HPC
# target:
#configs = [ Configuration("local-1", resource_name="local.localhost") ]
#
#configs.append( Configuration(name="titan.1",
#                       shared_path="/lustre/atlas/scratch/jrossyra/bip149/admd/",
#                       queues=["batch"],
#                       allocation="bip149",
#                       cores_per_node=16,
#                       resource_name="ornl.titan")
#              )
#
#configs.append( Configuration(name="titan.2",
#                       shared_path="/lustre/atlas/scratch/jrossyra/bip141/admd/",
#                       queues=["batch"],
#                       allocation="bip141",
#                       cores_per_node=16,
#                       resource_name="ornl.titan")
#              )
#
#configs.append( Configuration(name="rhea.1",
#                       shared_path="/lustre/atlas/scratch/jrossyra/bip149/admd/",
#                       queues=["batch"],
#                       allocation="bip149",
#                       cores_per_node=16,
#                       resource_name="ornl.rhea")
#              )
#
#configs.append( Configuration(name="rhea.2",
#                       shared_path="/lustre/atlas/scratch/jrossyra/bip141/admd/",
#                       queues=["batch"],
#                       allocation="bip141",
#                       cores_per_node=16,
#                       resource_name="ornl.rhea")
#              )
#
#[project.storage.save(c) for c in configs]
    
    Out[6]:
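
A minimal configuration can also be saved directly; the sketch below simply uncomments the local entry from the block above (no new fields are assumed):

local_config = Configuration("local-1", resource_name="local.localhost")
project.storage.save(local_config)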
In [7]:
    
# The new project now appears in the listing
Project.list()
    
    Out[7]:
In [8]:
    
# Loading the file contents into the project database via .load() is not
# necessary when executing with RP, so plain File references are used:
#pdb_file = File('file://../files/alanine/alanine.pdb').named('initial_pdb').load()
#system_file = File('file://../files/alanine/system.xml').load()
#integrator_file = File('file://../files/alanine/integrator.xml').load()
pdb_file = File('file://../files/alanine/alanine.pdb').named('initial_pdb')
system_file = File('file://../files/alanine/system.xml')
integrator_file = File('file://../files/alanine/integrator.xml')
    
In [9]:
    
# MD engine: OpenMM with the alanine system; the args string is passed to
# the engine's run script ('-p CPU' selects the CPU platform)
engine = OpenMMEngine(
    pdb_file=pdb_file,
    system_file=system_file,
    integrator_file=integrator_file,
    args='-r --report-interval 1 -p CPU'
).named('openmm')

# Two trajectory output streams: the full system every 10 frames and the
# protein selection every frame
engine.add_output_type('master', 'master.dcd', stride=10)
engine.add_output_type('protein', 'protein.dcd', stride=1, selection='protein')

# Analysis generator: PyEMMA operating on the protein-only output, using
# inverse backbone distances as features
modeller = PyEMMAAnalysis(
    engine=engine,
    outtype='protein',
    features={'add_inverse_distances': {'select_Backbone': None}}
).named('pyemma')

# Arguments forwarded to modeller.execute(): TICA and MSM parameters
margs = dict(tica_stride=1, tica_lag=5, tica_dim=2,
             clust_stride=1, msm_states=10, msm_lag=5)
    
In [10]:
    
# Register both generators with the project so their tasks can be created
# and stored alongside it
project.generators.add(engine)
project.generators.add(modeller)
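
Both generators are now persisted with the project; a quick check (a sketch that only assumes the generator bundle is iterable and that objects named via .named() expose a .name attribute):

# List the names of the stored generators
for gen in project.generators:
    print(gen.name)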
    
In [11]:
    
# TODO: have this return the resource so it can be given directly
#       to the queueing function, while keeping the option of using
#       the resource name as well
# Request a resource for this project on the local machine; the two
# positional arguments are assumed to be the CPU count and walltime
project.request_resource(2, 10, destination='local.localhost')
#project.request_resource(2, 30, destination='ornl.titan')
    
    Out[11]:
In [12]:
    
# Create 4 new trajectories of 100 frames each, starting from the initial
# pdb structure, and queue them on the local resource
trajectories = project.new_trajectory(engine['pdb_file'], 100, engine, 4)
project.queue(trajectories, resource_name="local.localhost")
print(trajectories)
    
    
In [13]:
    
# The RP client connects to the project's MongoDB and executes queued tasks
client = Client('mongodb://user:user@two.radical-project.org:32769/', project_name)
# Note: to override the RP database, set os.environ in this process; the
# os.system('export ...') variant would only affect a subshell.
#import os
#os.environ['RADICAL_PILOT_DBURL'] = 'mongodb://rp:rp@ds015335.mlab.com:15335/rp'
#os.system('export RP_ENABLE_OLD_DEFINES=True')
client.start()
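
For reference, the start / wait / stop pattern used in the remaining cells condenses to the sketch below; the trajectory count in the condition is only an example, not part of this run:

client.start()
project.wait_until(lambda: len(project.trajectories) >= 4)  # block until the queued work is done
client.stop()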
    
    
    
    
    
In [14]:
    
# Queue a mixed bag of work: 3 more trajectories plus a modelling task
# built from the first batch of trajectories
mixedbag = project.new_trajectory(engine['pdb_file'], 100, engine, 3)
#mixedbag=[]
mtask = modeller.execute(trajectories, **margs)
mixedbag.append(mtask)
project.queue(mixedbag, resource_name='local.localhost')
    
    
In [16]:
    
len(project.trajectories)
    
    Out[16]:
In [17]:
    
t = trajectories[0]
    
In [18]:
    
t.est_exec_time
    
    
In [18]:
    
client.stop()
    
In [16]:
    
# Same pattern again, this time modelling against all trajectories stored
# in the project
mixedbag = project.new_trajectory(engine['pdb_file'], 100, engine, 3)
mtask = modeller.execute(list(project.trajectories), **margs)
mixedbag.append(mtask)
project.queue(mixedbag, resource_name='local.localhost')
    
    
In [13]:
    
# Block until the project holds 4 trajectories
f = lambda: len(project.trajectories) == 4
project.wait_until(f)
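
Any zero-argument callable works as the condition; for instance, a sketch that also waits for the second batch (4 + 3 = 7 trajectories is an illustrative count, not something executed here):

project.wait_until(lambda: len(project.trajectories) >= 7)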
    
In [15]:
    
client.stop()
    
In [15]:
    
len(project.tasks)
    
    Out[15]:
In [15]:
    
len(project.trajectories)
    
    Out[15]:
In [13]:
    
ta = project.tasks.last
    
In [15]:
    
# Print a short description and the execution-time estimate for every task
for ta in project.tasks:
    print(ta.description[:55])
    print(ta.est_exec_time)
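
If the per-task estimates are numeric they can be aggregated into a rough total for the whole workload; this is only a sketch and assumes est_exec_time supports summation:

# Rough total of the estimated execution time over all tasks (assumption:
# est_exec_time is a number in consistent units)
total_est = sum(ta.est_exec_time for ta in project.tasks)
print(total_est)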
    
    
In [ ]: