In [1]:
import openpathsampling as paths
import numpy as np
import openpathsampling.engines.openmm as peng
Load the first frame from the test PDB file.
In [2]:
# Load the first frame of the test PDB as an OpenMM engine snapshot; it is
# used as the template for storage and for CV evaluation throughout.
template = peng.snapshot_from_pdb('../resources/AD_initial_frame.pdb')
Create a simple CV without an underlying function.
In [3]:
# A bare CollectiveVariable with no underlying function: its values must be
# assigned manually (done further below via `cv0[template] = 10.0`).
cv0 = paths.CollectiveVariable('func0')
Create a complicated function.
In [4]:
center = 1


def dist(snapshot, center, np):
    """Return the sum of the first atom's coordinates minus ``center``.

    ``center`` and ``np`` are explicit parameters so the CV machinery can
    supply them as stored keyword arguments when the function is recreated.
    """
    coordinate_sum = np.sum(snapshot.coordinates._value[0])
    return coordinate_sum - center
Create collective variables from this function. Note that you have to specify `center` and `np` to make this work.
In [5]:
# Wrap `dist` as FunctionCVs with different caching/evaluation options; the
# extra kwargs (`center`, `np`) are forwarded to `dist` on every evaluation.
dist_kwargs = {'center': center, 'np': np}
cv1 = paths.FunctionCV('func1', dist, cv_time_reversible=False, **dist_kwargs).with_diskcache()
cv2 = paths.FunctionCV('func2', dist, cv_wrap_numpy_array=True, **dist_kwargs).with_diskcache()
cv3 = paths.FunctionCV('func3', dist, cv_wrap_numpy_array=True, cv_time_reversible=True, **dist_kwargs).with_diskcache()
Create storage to test save and load.
In [6]:
#! lazy
# Open a fresh storage file in write mode and save the template snapshot.
storage = paths.Storage('can_be_deleted.nc', mode='w')
template_ref = storage.snapshots.save(template)
print(template_ref)
Save the CVs.
In [7]:
#! lazy
# Save all four CVs in one call and show what storage.save returns.
saved = storage.save([cv0, cv1, cv2, cv3])
print(saved)
In [8]:
#! lazy
# Show the CV store's index mapping.
cv_index = storage.cvs.index
print(cv_index)
Set the CV value for the storage.template
In [9]:
# Manually assign the CV value for the template snapshot; cv0 has no
# underlying function, so this is the only way it gets a value.
cv0[template] = 10.0
and create a disk cache store for it
In [10]:
# Attach a disk cache for cv0; allow_incomplete=True presumably permits
# caching values for only a subset of snapshots (only the template has
# a value so far) — TODO confirm against the netcdfplus docs.
storage.cvs.add_diskcache(cv0, allow_incomplete=True)
And we should have a float store
In [11]:
# Write cv0's cached values out to the storage file.
storage.cvs.sync(cv0)
In [12]:
# The disk cache should now hold exactly the one value assigned above.
stored_values = cv0._store_dict.value_store.vars['value'][:]
assert(stored_values == [10.0])
Test function for reversed template.
In [13]:
# Evaluate dist directly on the reversed template (coordinates are
# presumably unchanged under time reversal — confirm).
dd = dist(template.reversed, center, np)
print(dd)
Evaluate the existing CVs on lists of snapshots.
In [14]:
# Evaluate both CVs on a two-snapshot list.
for cv in (cv0, cv1):
    print(cv([template, template]))
In [15]:
# Compare the container type and the element type returned by each CV.
for cv in (cv0, cv1):
    result = cv([template, template])
    print(type(result))
    print(type(result[0]))
In [16]:
#! skip
# Raw stored JSON strings of the CVs (presumably skipped by the
# automated output comparison — hence the #! skip directive).
print(storage.cvs.variables['json'][:])
In [17]:
# Reconstruct each of the four CVs from its stored JSON representation.
cv0j, cv1j, cv2j, cv3j = [storage.cvs.vars['json'][i] for i in range(4)]
In [18]:
# Same check for every reconstructed CV: identical snapshots must yield
# identical values. (Loop replaces four copy-pasted cells of assertions.)
for cvj in (cv0j, cv1j, cv2j, cv3j):
    res = cvj([template, template, template])
    assert(res[0] == res[1] == res[2])
In [19]:
# A trivial trajectory: four copies of the same snapshot.
t = paths.Trajectory([template] * 4)
In [20]:
#! lazy
# Python 2 `print` statement converted to the print() function, matching
# the Python 3 style the rest of the notebook already uses.
print(storage.save(t))
In [21]:
def ff(t, cv3):
    """Return the largest value of CV ``cv3`` over trajectory ``t``."""
    cv_values = cv3(t)
    return max(cv_values)
In [22]:
# A pseudo-attribute on trajectories: the maximum of cv3 over the frames.
a = paths.netcdfplus.FunctionPseudoAttribute('max_cv', paths.Trajectory, ff, cv3=cv3)
In [23]:
#! lazy
# Python 2 `print` statement converted to the print() function for
# consistency with the Python 3 calls used earlier in the notebook.
print(storage.attributes.save(a))
In [24]:
# Evaluate the attribute on the trajectory (print() for Python 3).
print(a(t))
In [25]:
# A lazy proxy referring to the stored trajectory only by its UUID.
p = paths.netcdfplus.LoaderProxy(storage.trajectories, t.__uuid__)
In [26]:
# The attribute should evaluate on a proxy as well (print() for Python 3).
print(a(p))
In [27]:
# Back attribute `a` with a ValueStore on the trajectory store
# (print() for Python 3).
print(storage.trajectories.add_attribute(paths.netcdfplus.ValueStore, a, t, allow_incomplete=False))
In [28]:
# Fetch the value store that now backs attribute `a`.
ats = storage.trajectories.attribute_list[a]
In [29]:
# Show the stored attribute values (print() for Python 3).
print(ats.vars['value'][:])
In [30]:
# Same values read back through the attribute's own store dict
# (print() for Python 3).
print(a._store_dict.value_store.vars['value'][:])
In [31]:
# Write the cached values for attribute `a` to disk.
storage.trajectories.sync_attribute(a)
In [32]:
# Check whether a cache exists for attribute `a` (cell output below).
storage.attributes.has_cache(a)
Out[32]:
In [33]:
# Sync attribute `a` in the attributes store.
storage.attributes.sync(a)
In [34]:
# Close the write-mode storage before reopening it read-only.
storage.close()
In [35]:
# Reopen the same file read-only to verify everything round-trips.
storage = paths.Storage('can_be_deleted.nc', mode='r')
In [36]:
# Load a stored attribute by index; presumably index 4 is `a`, following
# the four CVs saved earlier — confirm (cell output below).
storage.attributes[4]
Out[36]:
In [37]:
# Done: close the read-only storage.
storage.close()
In [ ]: