This is a more extensive integration test, checking whether all the features of netcdfplus work as expected.
In [1]:
from __future__ import print_function
import openpathsampling as paths
from openpathsampling.netcdfplus import (
NetCDFPlus,
ObjectStore,
StorableObject,
NamedObjectStore,
UniqueNamedObjectStore,
DictStore,
ImmutableDictStore,
VariableStore,
StorableNamedObject
)
import numpy as np
In [2]:
# force numpy print options for test comparison
# Fixed-precision float formatting keeps printed arrays byte-stable across numpy versions.
np.set_printoptions(precision=6, formatter={'float_kind': lambda x: "{:.6f}".format(x)})
In [3]:
class Node(StorableObject):
    """A minimal storable object wrapping a single payload value."""

    def __init__(self, value):
        super(Node, self).__init__()
        # the payload that the object stores persist and restore
        self.value = value

    def __repr__(self):
        return 'Node({0})'.format(self.value)
In [4]:
class NamedNode(StorableNamedObject):
    """A storable object with a `.name` property, wrapping a single value."""

    def __init__(self, value):
        super(NamedNode, self).__init__()
        # the payload that the named object stores persist and restore
        self.value = value

    def __repr__(self):
        return 'Node({0})'.format(self.value)
In [5]:
# Create a fresh NetCDFPlus file; mode='w' truncates any existing file.
st = NetCDFPlus('test_netcdfplus.nc', mode='w')
In [6]:
class NodeIntStore(VariableStore):
    """A VariableStore that persists Node objects through a single 'value' int variable."""

    def __init__(self):
        # store Node objects; 'value' is the only attribute written to disk
        super(NodeIntStore, self).__init__(Node, ['value'])

    def initialize(self):
        # NOTE(review): `super(VariableStore, self)` skips VariableStore.initialize
        # and calls the grandparent's initialize instead — presumably intentional,
        # so that the 'value' variable is created manually below; confirm against
        # the netcdfplus VariableStore API.
        super(VariableStore, self).initialize()
        # Add here the stores to be imported
        self.create_variable('value', 'int')
In [7]:
# Register one store per storage feature under test: plain named, unique-named,
# mutable dict, immutable dict, and the custom VariableStore defined above.
st.create_store('nodesnamed', NamedObjectStore(NamedNode))
st.create_store('nodesunique', UniqueNamedObjectStore(NamedNode))
st.create_store('dict', DictStore())
st.create_store('dictimmutable', ImmutableDictStore())
st.create_store('varstore', NodeIntStore())
And the default store. The last store for a particular object is used as the default if no specific store is specified.
In [8]:
# Registered last, so this becomes the default store for Node objects.
st.create_store('nodes', ObjectStore(Node))
In [9]:
#! lazy
# lazy because output checking fails in Py3k tests -- why is that?
# find_store resolves the default store registered for a class
print(st.find_store(Node))
Initialize the store
In [10]:
# create the netCDF variables for all registered stores
st.finalize_stores()
In [11]:
v = st.variables['nodes_uuid']
In [12]:
v.chunking()
Out[12]:
In [13]:
print(st.find_store(Node))
In [14]:
# trailing ';' suppresses the notebook's Out[] echo
st.nodes.save(Node(10));
In [15]:
st.close()
In [16]:
# reopen in append mode to verify stores are reloaded from disk
st = NetCDFPlus('test_netcdfplus.nc', mode='a')
set caching of the new stores
In [17]:
# limit every store's cache to 10 entries
for store in st.stores:
    store.set_caching(10)
Check if the stores were correctly loaded
In [18]:
assert('nodes' in st.objects)
In [19]:
assert('stores' in st.objects)
In [20]:
# the single Node saved before closing must still be there
assert(len(st.nodes) == 1)
In [21]:
# six user stores plus the built-in 'stores' store itself = 7
assert(len(st.stores) == 7)
In [22]:
for store in st.stores:
    print('{:40} {:30}'.format(str(store), str(store.cache)))
Get a list of all possible variable types
In [23]:
# sorted for deterministic, comparable output
print(sorted(st.get_var_types()))
Make a dimension of length 2 to simplify dimension naming.
Now we construct for each type a corresponding variable of dimensions 2x2x2.
In [24]:
st.create_dimension('pair', 2)
In [25]:
# one 2x2x2 test variable per supported variable type, named after its type
for var_type in st.get_var_types():
    st.create_variable(var_type, var_type, dimensions=('pair', 'pair', 'pair'))
In [26]:
st.update_delegates()
In [27]:
for var_name, var in sorted(st.variables.items()):
    print(var_name, var.dimensions)
In [28]:
for var in sorted(st.vars):
    print(var)
In [29]:
st.vars['bool'][:] = True
In [30]:
print(st.vars['bool'][:])
In [31]:
st.vars['float'][1,1] = 1.0
In [32]:
print(st.vars['float'][:])
In [33]:
# 'index' variables accept None — presumably stored as a sentinel value; confirm
st.vars['index'][0,1,0] = 10
st.vars['index'][0,1,1] = -1
st.vars['index'][0,0] = None
In [34]:
print(st.vars['index'][0,1])
print(st.vars['index'][0,0])
In [35]:
st.vars['int'][0,1,0] = 10
st.vars['int'][0,1,1] = -1
In [36]:
print(st.vars['int'][:])
The variable type JSON encode the given object as a JSON string in the shortest possible way. This includes using references to storable objects.
In [37]:
st.vars['json'][0,1,1] = {'Hallo': 2, 'Test': 3}
In [38]:
print(st.vars['json'][0,1,1])
In [39]:
# storable objects written to a 'json' variable are stored by reference
st.vars['json'][0,1,0] = Node(10)
In [40]:
#! lazy
# .variables gives the raw netCDF strings, not reconstructed objects
print(st.variables['json'][0,1,:])
All object types registered as being Storable by subclassing from openpathsampling.base.StorableObject
.
A JSON serializable object. This can be normal very simple python objects, plus numpy arrays, and objects that implement to_dict
and from_dict
. This is almost the same as JSON except if the object to be serialized is a storable object itself, it will not be referenced but the object itself will be turned into a JSON representation.
In [41]:
nn = Node(10)
# 'jsonobj' serializes the object itself into JSON instead of a reference
st.vars['jsonobj'][1,0,0] = nn
In [42]:
print(st.variables['jsonobj'][1,0,0])
In [43]:
st.vars['jsonobj'][1,0,0]
Out[43]:
In [44]:
# full-array write, then overwrite one 2x2 slice
st.vars['numpy.float32'][:] = np.ones((2,2,2)) * 3.0
st.vars['numpy.float32'][0] = np.ones((2,2)) * 7.0
In [45]:
print(st.vars['numpy.float32'][:])
You can store objects of a type which you have previously added. For loading you need to make sure that the class (and the store if set manually) is present when you load from the store.
In [46]:
st.vars['obj.nodes'][0,0,0] = Node(1)
st.vars['obj.nodes'][0,1,0] = Node('Second')
st.vars['obj.nodes'][0,0,1] = Node('Third')
In [47]:
# st.vars['obj.nodes'][1] = Node(20)
In [48]:
print(st.variables['obj.nodes'][:])
print(st.variables['nodes_json'][:])
In [49]:
# reading through .vars reconstructs the stored object
print(st.vars['obj.nodes'][0,0,0])
print(type(st.vars['obj.nodes'][0,0,0]))
Lazy loading will reconstruct an object using proxies. These proxies behave almost like the loaded object, but will delay loading of the object until it is accessed. Saving for lazy objects is the same as for regular objects. Only loading return a proxy object.
In [50]:
st.vars['lazyobj.nodes'][0,0,0] = Node('First')
The type of the returned object is LoaderProxy
while the class is the actual class is the baseclass loaded by the store to not trigger loading when the __class__
attribute is accessed. The actual object can be accessed by __subject__
and doing so will trigger loading the object. All regular attributes will be delegated to __subject__.attribute
and also trigger loading.
In [51]:
#! lazy
proxy = st.vars['lazyobj.nodes'][0,0,0]
# type() shows the proxy, __class__ shows the proxied class without loading
print('Type: ', type(proxy))
print('Class: ', proxy.__class__)
# accessing __subject__ (or any attribute) triggers the actual load
print('Content:', proxy.__subject__.__dict__)
print('Access: ', proxy.value)
Note that there are now 6 Node
objects.
In [52]:
print(st.nodes[:])
In [53]:
obj = Node('BlaBla')
st.nodes.save(obj);
Saving without specifying should use store nodes
which was defined last.
In [54]:
# st.save() routes Node objects to the default 'nodes' store
print(len(st.nodes))
obj = Node('BlaBlaBla')
st.save(obj)
print(len(st.nodes))
Get the index of the obj in the storage
In [55]:
print(st.idx(obj))
And test the different ways to access the contained json
In [56]:
print(st.nodes.variables['json'][st.idx(obj)])
In [57]:
print(st.variables['nodes_json'][st.idx(obj)])
In [58]:
# .vars bypasses the store's cache, so this yields a fresh object, not `obj`
print(st.nodes.vars['json'][st.idx(obj)])
print(st.nodes.vars['json'][st.idx(obj)] is obj)
In [59]:
# the store itself uses its cache, so identity is preserved here
print(st.nodes[st.idx(obj)])
print(st.nodes[st.idx(obj)] is obj)
One important difference is that a store like nodes
has a cache (which we set to 10 before). Using vars
will not use a store and hence create a new object!
ObjectStores are responsible to save and load objects. There are now 6 types available.
The basic store which we have used before
Supports giving objects names
In [60]:
n = NamedNode(3)
NamedObjects have a .name
property, which has a default.
In [61]:
print(n.name)
and can be set.
In [62]:
# the name may be reassigned freely before saving
n.name = 'OneNode'
print(n.name)
n.name = 'MyNode'
print(n.name)
Once the object is saved, the name cannot be changed anymore.
In [63]:
st.nodesnamed.save(n);
In [64]:
# renaming after save must raise ValueError
try:
    n.name = 'NewName'
except ValueError as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
usually names are not unique (see next store). So we can have more than one object with the same name.
In [65]:
n2 = NamedNode(9)
n2.name = 'MyNode'
st.nodesnamed.save(n2);
See the list of named objects
In [66]:
# name_idx maps each name to the set of indices that carry it
print(st.nodesnamed.name_idx)
This store forces names to be unique
In [67]:
# n is already saved in nodesnamed; saving it again in the unique store is fine
st.nodesunique.save(n);
Note here that an object can be stored more than once in a storage, but only if more than one store supports the object type.
In [68]:
# n2 reuses the name 'MyNode', which the unique store must reject
try:
    st.nodesunique.save(n2)
except RuntimeWarning as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
As said before this can only happen if you have more than one store for the same object type.
In [69]:
print(st.nodesunique.name_idx)
Some more tests. First, saving unnamed objects. This is okay. Only given names should be unique.
In [70]:
# unnamed objects keep their default names, so saving several is allowed
n3 = NamedNode(10)
n4 = NamedNode(12)
st.nodesunique.save(n3);
st.nodesunique.save(n4);
In [71]:
n5 = NamedNode(1)
n5.name = 'MyNode'
In [72]:
# a duplicate explicit name must be rejected
try:
    st.nodesunique.save(n5)
except RuntimeWarning as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
This works since it does a rename before saving.
In [73]:
# passing a name to save() renames the object before storing it
st.nodesunique.save(n5, 'NextNode');
In [74]:
n6 = NamedNode(1)
n6.name = 'SecondNode'
In [75]:
# the rename target itself collides with an existing name -> still an error
try:
    st.nodesunique.save(n6, 'MyNode')
except RuntimeWarning as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
A dictstore works a like a dictionary on disk. The content is returned using dict()
In [76]:
# the DictStore starts empty
print(dict(st.dict))
print(st.dict.name_idx)
In [77]:
n1 = NamedNode(1)
n2 = NamedNode(2)
n3 = NamedNode(3)
st.dict['Number1'] = n1
In [78]:
for key in sorted(st.dict):
    obj = st.dict[key]
    idxs = sorted(st.dict.name_idx[key])
    print(key, ':', str(obj), idxs)
In [79]:
st.dict['Number2'] = n2
In [80]:
for key in sorted(st.dict):
    obj = st.dict[key]
    idxs = sorted(st.dict.name_idx[key])
    print(key, ':', str(obj), idxs)
In [81]:
# reassigning an existing key appends a new entry; lookup returns the latest
st.dict['Number1'] = n3
In [82]:
for key in sorted(st.dict):
    obj = st.dict[key]
    idxs = sorted(st.dict.name_idx[key])
    print(key, ':', str(obj), idxs)
In [83]:
print(st.dict['Number1'])
In [84]:
print(st.dict.find('Number1'))
In [85]:
# raw JSON entries on disk, including superseded ones
print('[', ', '.join(st.dict.variables['json'][:]), ']')
In [86]:
for key in sorted(st.dict):
    obj = st.dict[key]
    idxs = sorted(st.dict.name_idx[key])
    print(key, ':', str(obj), idxs)
This adds the check that already used names cannot be used again
In [87]:
# the first assignment succeeds; reusing the key must raise
try:
    st.dictimmutable['Number1'] = n1
    st.dictimmutable['Number1'] = n2
except RuntimeWarning as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
Store a node with an int as we defined for our VariableStore
In [88]:
a = Node(30)
st.varstore.save(a);
clear the cache
In [89]:
# force the next access to reload from disk rather than the cache
st.varstore.clear_cache()
And try loading
In [90]:
assert(st.varstore[0].value == 30)
Try storing non int() parseable value
In [91]:
# 'value' is declared as an int variable, so a string payload must fail
try:
    a = Node('test')
    print(st.varstore.save(a))
except ValueError as e:
    print('# We had an exception')
    print(e)
else:
    raise RuntimeWarning('This should have produced an error')
In [92]:
st_uuid = NetCDFPlus('test_netcdfplus_uuid.nc', mode='w')
st_uuid.create_store('nodes', ObjectStore(Node))
st_uuid.finalize_stores()
# copy the first node into the new file
st_uuid.save(st.nodes[0])
st_uuid.close()
In [93]:
st.close()
In [94]:
# the uuid file serves as a read-only fallback for the new storage
st_fb = NetCDFPlus('test_netcdfplus_fb.nc', mode='w', fallback=NetCDFPlus('test_netcdfplus_uuid.nc'))
st_fb.create_store('nodes', ObjectStore(Node))
st_fb.finalize_stores()
In [95]:
st_fb.exclude_from_fallback
Out[95]:
In [96]:
assert(st_fb.fallback.nodes[0] in st_fb.fallback)
In [97]:
# membership in the main storage also considers the fallback
assert(st_fb.fallback.nodes[0] in st_fb)
In [98]:
assert(st.nodes[0] in st_fb)
In [99]:
assert(st.nodes[0] in st_fb.fallback)
Try saving object in fallback
In [100]:
print(hex(st_fb.nodes.save(st_fb.fallback.nodes[0])))
In [101]:
# saving an object already present in the fallback does not duplicate it
assert(len(st_fb.nodes) == 0)
In [102]:
assert(st_fb.fallback.nodes[0] in st_fb)
assert(st_fb.fallback.nodes[0] in st_fb.fallback)
assert(st.nodes[0] in st_fb)
assert(st.nodes[0] in st_fb.fallback)
In [103]:
st_fb.fallback.close()
st_fb.close()