First you need to import the necessary modules ...
In [1]:
# Tutorial imports: pyasp term sets, URL fetching, and the meneco API.
from pyasp.term import *
from urllib.request import urlopen
# Fix: the original read `from __meneco__ import ...`, which is not a valid
# module name (likely markdown-bold garbling); the package is `meneco`.
from meneco import query, utils, sbml
Next, you can load a draft network from an sbml file and convert it into logic facts ...
In [2]:
# Fetch the draft metabolic network (SBML) and convert it into logic facts
# tagged with the network name 'draft'.
draft_url = 'https://raw.githubusercontent.com/bioasp/meneco/master/Ectodata/ectocyc.sbml'
draftnet = sbml.readSBMLnetwork(urlopen(draft_url), 'draft')
load the seeds ...
In [3]:
# Fetch the seed metabolites (the compounds assumed available) as logic facts.
seeds_url = 'https://raw.githubusercontent.com/bioasp/meneco/master/Ectodata/seeds.sbml'
seeds = sbml.readSBMLseeds(urlopen(seeds_url))
and load the targets ...
In [4]:
# Fetch the target metabolites (the compounds the network should produce).
targets_url = 'https://raw.githubusercontent.com/bioasp/meneco/master/Ectodata/targets.sbml'
targets = sbml.readSBMLtargets(urlopen(targets_url))
Then you can check the draft network for unproducible targets ...
In [5]:
# Collect every target the draft network cannot produce from the seeds.
unproducible = TermSet()
model = query.get_unproducible(draftnet, targets, seeds)
for atom in model:
    # str(atom) has the form 'unproducible("...")'; drop the 13-character
    # predicate prefix before converting back into a term set.
    unproducible = unproducible.union(String2TermSet(str(atom)[13:]))
unproducible = TermSet(unproducible)
utils.print_met(unproducible.to_list())
You can load another reaction network, such as the MetaCyc repair database ...
In [6]:
# MetaCyc 16.5 serves as the repair database of candidate reactions.
repair_url = 'https://raw.githubusercontent.com/bioasp/meneco/master/Ectodata/metacyc_16-5.sbml'
repairnet = sbml.readSBMLnetwork(urlopen(repair_url), 'repairnet')
and combine the draft network with the repair database ...
In [7]:
# Merge the draft network with the repair database into one term set.
combinet = TermSet(draftnet.union(repairnet))
and then check for which targets producibility cannot be restored even with the combined networks ...
In [8]:
# Targets that remain unproducible even with the full repair database merged in.
unrepairable = TermSet()
model = query.get_unproducible(combinet, targets, seeds)
for a in model:
    # str(a) has the form 'unproducible("...")'; strip the predicate prefix.
    target = str(a)[13:]
    t = String2TermSet(target)
    unrepairable = unrepairable.union(t)
unrepairable = TermSet(unrepairable)
# Consistency fix: pass a list to print_met like the parallel unproducible
# step does, instead of the raw TermSet.
utils.print_met(unrepairable.to_list())
and for which targets the production paths are repairable ...
In [9]:
# Repairable targets: unproducible in the draft network but producible once
# the repair database is added (set difference of the two result sets).
repairable = unproducible.difference(unrepairable)
utils.print_met(repairable)
You can compute the essential reactions for the repairable targets ...
In [10]:
# For each repairable target, compute the reactions that appear in *every*
# completion restoring it (its essential reactions), and accumulate the
# union over all targets.
essential_reactions = TermSet()
for tgt in repairable:
    one_target = TermSet()
    one_target.add(tgt)
    print('\nComputing essential reactions for', tgt, '...', end=' ')
    essentials = query.get_intersection_of_completions(draftnet, repairnet, seeds, one_target)
    print('done.')
    print(' ', len(essentials), 'essential reactions found:')
    utils.print_met(essentials.to_list())
    essential_reactions = essential_reactions.union(essentials)
essential_reactions = TermSet(essential_reactions)
print('\nOverall', len(essential_reactions), 'essential reactions found.')
utils.print_met(essential_reactions)
You can compute a completion of minimal size suitable to produce all repairable targets ...
In [11]:
# Ask for a minimum-cardinality completion covering all repairable targets;
# the optimum size is the first score of the first (optimal) model.
models = query.get_minimal_completion_size(draftnet, repairnet, seeds, repairable)
best = models[0]
optimum = best.score[0]
print(' minimal size =', optimum)
utils.print_met(best.to_list())
We can compute the reactions common to all completions of a given size ...
In [12]:
# Reactions shared by every optimal completion of the given size.
intersection_model = query.get_intersection_of_optimal_completions(draftnet, repairnet, seeds, repairable, optimum)
utils.print_met(intersection_model.to_list())
We can compute the union of all completions of a given size ...
In [13]:
# Union of all optimal completions: every reaction used in at least one solution.
union_model = query.get_union_of_optimal_completions(draftnet, repairnet, seeds, repairable, optimum)
utils.print_met(union_model.to_list())
And finally compute all (for this notebook we print only the first three) completions with a given size ...
In [14]:
# Enumerate all optimal completions and print only the first three.
models = query.get_optimal_completions(draftnet, repairnet, seeds, repairable, optimum)
# Robustness fix: slicing avoids an IndexError when fewer than three
# completions exist (the original indexed models[0..2] unconditionally).
for i, completion in enumerate(models[:3], start=1):
    print('Completion ' + str(i) + ':')
    utils.print_met(completion.to_list())
That's all folks!