In [493]:
import os, sys, glob, string, shutil, re
import yaml, json
from collections import OrderedDict
class UnsortableList(list):
    # No-op sort: keeps PyYAML's representer from reordering mapping keys.
    def sort(self, *args, **kwargs):
        pass

class UnsortableOrderedDict(OrderedDict):
    # OrderedDict whose items() returns an UnsortableList, so insertion order
    # survives a dump.
    def items(self, *args, **kwargs):
        return UnsortableList(OrderedDict.items(self, *args, **kwargs))
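# A minimal usage sketch (not from the original notebook): with the older
# PyYAML releases current at the time, the representer calls mapping.items()
# and then .sort() in place before dumping, so registering represent_dict for
# UnsortableOrderedDict keeps keys in insertion order. This assumes that
# in-place-sort behavior; newer PyYAML versions sort differently.
yaml.add_representer(UnsortableOrderedDict,
                     yaml.representer.SafeRepresenter.represent_dict)
demo = UnsortableOrderedDict([('z_key', 1), ('a_key', 2)])
print yaml.dump(demo, default_flow_style=False)  # 'z_key' stays before 'a_key'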
In [494]:
class OrderedDictYAMLLoader(yaml.Loader):
    """
    A YAML loader that loads mappings into ordered dictionaries.
    """
    def __init__(self, *args, **kwargs):
        yaml.Loader.__init__(self, *args, **kwargs)
        self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map)
        self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map)

    def construct_yaml_map(self, node):
        data = OrderedDict()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_mapping(self, node, deep=False):
        if isinstance(node, yaml.MappingNode):
            self.flatten_mapping(node)
        else:
            raise yaml.constructor.ConstructorError(
                None, None,
                'expected a mapping node, but found %s' % node.id,
                node.start_mark)
        mapping = OrderedDict()
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            try:
                hash(key)
            except TypeError, exc:
                raise yaml.constructor.ConstructorError(
                    'while constructing a mapping', node.start_mark,
                    'found unacceptable key (%s)' % exc, key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping
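# Illustrative only: the loader above is not exercised elsewhere in this
# section, but it plugs into yaml.load via the Loader keyword and returns
# mappings with their original key order.
ordered = yaml.load('b: 2\na: 1\n', Loader=OrderedDictYAMLLoader)
print ordered.keys()  # ['b', 'a'] -- insertion order preserved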
In [495]:
def find_prov(content):
    # Extract the names listed in a component's _output_var_names list.
    prov = re.search(r'_output_var_names\s*=\s*\[(.*?)\]', content, re.DOTALL)
    if prov:
        prov = re.split(r'\n.*?', prov.groups()[0])
        prov = [re.sub(r'\s*##.*', '', i) for i in prov]
        prov = [re.sub(r'\s*\'', '', i) for i in prov]
        prov = [re.sub(r',\s*', '', i) for i in prov]
        prov = [re.sub(r'#\s*.*', '', i) for i in prov]
        prov = [re.sub(r' ', '', i) for i in prov]
        prov = [re.sub(r'\r', '', i) for i in prov]
        prov = [i for i in prov if len(i) > 0]
        return prov

def find_uses(content):
    # Extract the names listed in a component's _input_var_names list.
    uses = re.search(r'_input_var_names\s*=\s*\[(.*?)\]', content, re.DOTALL)
    if uses:
        uses = re.split(r'\n.*?', uses.groups()[0])
        uses = [re.sub(r'\s*##.*', '', i) for i in uses]
        uses = [re.sub(r'\s*\'', '', i) for i in uses]
        uses = [re.sub(r',\s*', '', i) for i in uses]
        uses = [re.sub(r'#\s*.*', '', i) for i in uses]
        uses = [re.sub(r' ', '', i) for i in uses]
        uses = [re.sub(r'\r', '', i) for i in uses]
        uses = [i for i in uses if len(i) > 0]
        return uses
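# Illustrative only (hypothetical source text, not an actual TopoFlow file):
# find_prov() pulls the quoted names out of an _output_var_names block and
# strips quotes, commas, spaces, and inline comments.
sample = """
_output_var_names = [
    'channel_water__depth',     # inline comment is stripped
    'channel_water__discharge' ]
"""
print find_prov(sample)  # ['channel_water__depth', 'channel_water__discharge']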
In [558]:
def match_keys(toKey, fromKey):
    # For each component in toKey, group its variable names by the component
    # on the other side (fromKey) that exchanges the same variable. Group keys
    # are the first '_'-delimited token of the matching component's name;
    # variables with no match are grouped under ''.
    allU = UnsortableOrderedDict()
    for k in toKey.keys():
        vals = toKey[k]
        # for each variable, every fromKey component that also declares it
        keys_ = [[name for name in fromKey.keys() for i in fromKey[name] if i == j] for j in vals]
        keys = [list({j.split('_')[0] for j in i}) if len(i) > 0 else [''] for i in keys_]
        keys_lin = list({keys[i][j] for i in range(len(keys)) for j in range(len(keys[i]))})
        keys_pairs = [[keys[i][j], vals[i]] for i in range(len(keys)) for j in range(len(keys[i]))]
        allk = {keys_lin[i]: [] for i in range(len(keys_lin))}
        for i in range(len(keys_pairs)):
            allk[keys_pairs[i][0]].append(keys_pairs[i][1])
        allU[k] = allk
    return allU
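# Illustrative only (hypothetical component and variable names): match_keys
# groups one side's variables by the prefix of the matching component on the
# other side.
P = UnsortableOrderedDict([('channels_kinematic_wave', ['channel_water__depth'])])
U = UnsortableOrderedDict([('met_base', ['channel_water__depth'])])
print match_keys(P, U)['channels_kinematic_wave']  # {'met': ['channel_water__depth']}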
In [559]:
def create_provides_dot_json(comp_dir, prov):
    # prov maps a connection id to the exchange items this component provides.
    Pls = []
    for k in prov.keys():
        p = UnsortableOrderedDict()
        p['id'] = k
        p['required'] = False
        p['exchange_items'] = prov[k]
        Pls.append(p)
    with open(comp_dir + '/provides.json', 'w') as o:
        json.dump(Pls, o, indent=2, sort_keys=False)

def create_uses_dot_json(comp_dir, uses):
    # uses maps a connection id to the exchange items this component needs.
    Pls = []
    for k in uses.keys():
        p = UnsortableOrderedDict()
        p['id'] = k
        p['required'] = False
        p['exchange_items'] = uses[k]
        Pls.append(p)
    with open(comp_dir + '/uses.json', 'w') as o:
        json.dump(Pls, o, indent=2, sort_keys=False)
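# Illustrative only (hypothetical id and variable name): write a provides.json
# to a scratch directory and inspect it; each entry carries 'id', 'required',
# and 'exchange_items', in that order.
import tempfile
scratch = tempfile.mkdtemp()
create_provides_dot_json(scratch, {'met': ['channel_water__depth']})
print open(scratch + '/provides.json').read()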
In [560]:
yaml_root = 'yaml/'
components_root = 'components/'
source_dir = 'topoflow/'

source_dirs = glob.iglob(source_dir + '*.py')  # component .py files
Uses = UnsortableOrderedDict()
Provides = UnsortableOrderedDict()
for comp in source_dirs:
    comp_name = string.split(comp, '/')[-1][:-3]
    print 'Updating uses/provides for ' + comp_name
    # make new dirs
    # comp_dir = components_root + string.lower(comp_name)
    # if not os.path.exists(comp_dir):
    #     os.makedirs(comp_dir)
    #     os.makedirs(comp_dir + '/db')
    with open(comp, 'r') as content_file:
        content = content_file.read()
    # provides.json
    provides = find_prov(content)
    if provides:
        Provides[comp_name] = provides
    # uses.json
    uses = find_uses(content)
    if uses:
        Uses[comp_name] = uses

# Group each component's outputs by the components that consume them, and its
# inputs by the components that supply them.
keyed_provides = match_keys(Provides, Uses)
keyed_uses = match_keys(Uses, Provides)

# o = open(components_root + '/_uses_extra.json', 'w')
# json.dump(uses_extra, o, indent=2, sort_keys=False)
# o.close()
# o = open(components_root + '/_provides_extra.json', 'w')
# json.dump(provides_extra, o, indent=2, sort_keys=False)
# o.close()

source_dirs = glob.iglob(source_dir + '*.py')  # iterate the .py files again
for comp in source_dirs:
    comp_name = string.split(comp, '/')[-1][:-3]
    if comp_name in keyed_provides or comp_name in keyed_uses:
        comp_dir = components_root + string.lower(comp_name)
        if not os.path.exists(comp_dir):
            os.makedirs(comp_dir)
        if comp_name in keyed_provides:
            create_provides_dot_json(comp_dir, keyed_provides[comp_name])
        if comp_name in keyed_uses:
            create_uses_dot_json(comp_dir, keyed_uses[comp_name])

# inBoth = set(Uses) & set(Provides)
# inOneButNotOther = set(Uses) | set(Provides)
# inUsesNotProvides = {i for i in Uses if set(i) | set(Provides)}
In [ ]: