In [2]:
# Project modules for probabilistic-graph sequence modeling.
import synchwordfinder as swf    # synchronization-word search
import graphgenerator as gg      # Mk1/Mk2/Mk4/CRISSiS reduction algorithms
import probabilisticgraph as pg  # probabilistic graph container
import dmarkov as dm             # D-Markov machine construction
import sequenceanalyzer as sa    # sequence statistics (probs, entropy, KLD)
import yaml

# Experiment configuration shared by all cells below.
graph_path = 'logisticmap'  # dataset / graph family under analysis
alpha = 0.95                # confidence level for the statistical test
alpha2 = 0.99               # secondary confidence level
Lmax = 14                   # maximum history depth of the rooted tree graph
lseq = 10000000             # length of every generated sequence
test = 'chi-squared'        # statistical test used for state comparison
sw_file = 'synchwords'      # file stem for saved synchronization words

# Baseline subsequence probabilities of the original sequence; used for the
# KL-divergence comparisons in the analysis cells.
with open('results/' + graph_path + '/probabilities/original.yaml','r') as f:
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary objects; acceptable only because this file is produced by
    # this project. Consider yaml.safe_load if the file holds plain data.
    base_probs = yaml.load(f)[0]

In [1]:
# Build D-Markov machines of increasing depth from the depth-Lmax rooted tree
# and save each one to disk.
for depth in range(4, 12):
    source_graph = pg.ProbabilisticGraph(path='graphs/' + graph_path + '/rtp_L' + str(Lmax) + '.yaml')
    machine = dm.DMarkov(source_graph, depth)
    machine.save_graph_file('graphs/' + graph_path + '/dmarkov_d' + str(depth) + '.yaml')
# Sequence generation kept disabled, as in the original run:
#     s, a = machine.generate_sequence(lseq, machine.states[0])
#     with open('sequences/' + graph_path + '/len_' + str(lseq) + '_dmarkov_d' + str(depth) + '.yaml', 'w') as f:
#         yaml.dump(s, f)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-1-ea749a1f2ab3> in <module>()
      2 rng = range(4,12)
      3 for d in rng:
----> 4     g = pg.ProbabilisticGraph(path='graphs/' + graph_path + '/rtp_L' + str(Lmax) + '.yaml')
      5     h = dm.DMarkov(g, d)
      6     h.save_graph_file('graphs/' + graph_path + '/dmarkov_d' + str(d) + '.yaml')

NameError: name 'pg' is not defined

In [3]:
# Reduce each D-Markov graph with the Mk4 algorithm and generate one long
# sequence from every resulting machine.
for level in range(4, 12):
    generator = gg.GraphGenerator('graphs/' + graph_path + '/dmarkov_d' + str(level) + '.yaml',
                                  ['e'],
                                  'graphs/' + graph_path + '/L' + str(level) + '_alpha' + str(alpha) + '_v2',
                                  '')
    machine = generator.mk4(graph_path, level)
    seq, _alphabet = machine.generate_sequence(lseq, machine.states[0])
    seq_path = ('sequences/' + graph_path + '/len_' + str(lseq) + '_L' + str(level)
                + '_alpha' + str(alpha) + '_mk4_v2.yaml')
    with open(seq_path, 'w') as f:
        yaml.dump(seq, f)


/home/franch/anaconda2/lib/python2.7/site-packages/scipy/stats/stats.py:4351: RuntimeWarning: divide by zero encountered in true_divide
  terms = (f_obs - f_exp)**2 / f_exp
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-3-3674664b2609> in <module>()
      7                           '')
      8 
----> 9     h = g.mk4(graph_path, l)
     10     s, alph = h.generate_sequence(lseq, h.states[0])
     11     seqpath = 'sequences/' + graph_path + '/len_' + str(lseq) + '_L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml'

/home/franch/PycharmProjects/master_project/graphgenerator.pyc in mk4(self, graphpath, d, n, test, alpha, alpha2, stds, iters, l2)
    233                     new_parts.append(part)
    234             p = new_parts
--> 235             reduced_graph = self.apply_moore(p, test, alpha, l2)
    236             self.reconnect()
    237             reduced_graph.save_graph_file(self.save_path + '_mk4.yaml')

/home/franch/PycharmProjects/master_project/graphgenerator.pyc in apply_moore(self, p, test, alpha, l2)
    311         partition_set = ps.PartitionSet(p)
    312         reduced_classes = mr.moore(partition_set, self.original_graph, simple=False)
--> 313         reduced_graph = reduced_classes.recover_graph(self.original_graph, self.base_probs)
    314         reduced_graph = pg.ProbabilisticGraph(reduced_graph.states, reduced_graph.alphabet)
    315         reduced_graph.reassign_dest_edges(reduced_graph.states)

/home/franch/PycharmProjects/master_project/partitionset.pyc in recover_graph(self, g, base_probs)
     35             oedge = []
     36             for a in g.alphabet:
---> 37                 t = s.next_state_from_edge(a)
     38                 if t:
     39                     for p in self.partitions:

AttributeError: 'NoneType' object has no attribute 'next_state_from_edge'

In [ ]:
#Sequence analysis for Mk4
# For each Mk4 sequence, compute (or reload) the statistics selected by the
# flags in `stats`; divergences vs. the original sequence are collected into
# K (kld) and Phi (l1metric) and dumped at the end.
rng = range(4, 13)
K = []    # KL divergence per L, written to results/.../kld/
Phi = []  # second divergence series, written to results/.../l1metric/
L = 10    # maximum subsequence length for probability estimation
k = 9     # subsequence length at which the KL divergence is evaluated
a = 20    # maximum autocorrelation lag (used only if stats['autocorr'])
stats = {
    'probs': True,
    'cond_probs': True,
    'cond_entropy': True,
    'autocorr': False,
    'kld': True,
    'phi': True
}
for l in rng:
    s = sa.SequenceAnalyzer('sequences/' + graph_path + '/len_' + str(lseq) + '_L' + str(l) + '_alpha' + str(alpha) \
                            + '_mk4_v2.yaml')
    if stats['probs']:
        p = s.calc_probs(L)
        with open('results/' + graph_path + '/probabilities/L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'w') as f:
            yaml.dump(p, f)
    else:
        with open('results/' + graph_path + '/probabilities/L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'r') as f:
            # NOTE(review): yaml.load without a Loader is unsafe on untrusted
            # input; acceptable only because this file is written above.
            p, alph = yaml.load(f)
            s.probabilities = p
            s.alphabet = alph
    if stats['cond_probs']:
        cp = s.calc_cond_probs(L-1)
        with open('results/' + graph_path + '/probabilities/cond_L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'w') as f:
            yaml.dump(cp, f)
    else:
        with open('results/' + graph_path + '/probabilities/cond_L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'r') as f:
            s.conditional_probabilities = yaml.load(f)
    if stats['cond_entropy']:
        h = s.calc_cond_entropy(L-1)
        with open('results/' + graph_path + '/cond_entropies/L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'w') as f:
            yaml.dump(h, f)
    if stats['autocorr']:
        ac = s.calc_autocorrelation(a)
        with open('results/' + graph_path + '/autocorrelations/L' + str(l) + '_alpha' + str(alpha) + '_mk4_v2.yaml',
                  'w') as f:
            yaml.dump(ac, f)
    if stats['kld']:
        K.append(s.calc_kldivergence(base_probs, k))
    if stats['phi']:
        # NOTE(review): this duplicates the KL computation above even though
        # the result is saved under l1metric/ -- confirm whether a dedicated
        # L1/phi metric method of SequenceAnalyzer was intended here.
        Phi.append(s.calc_kldivergence(base_probs, k))

if stats['kld']:
    with open('results/' + graph_path + '/kld/mk4_v2.yaml','w') as f:
        yaml.dump(K, f)
if stats['phi']:  # fixed: was gated on stats['kld'] (copy-paste bug)
    with open('results/' + graph_path + '/l1metric/mk4_v2.yaml','w') as f:
        yaml.dump(Phi, f)

In [ ]:
# Search the graph for synchronization words and persist them for the
# termination / Mk1 / Mk2 cells below.
word_length = 6
finder = swf.SynchWordFinder(graph_path, word_length, Lmax, alpha, test, range(1, 4))
sw = finder.find_synch_words()
sw_path = 'synch_words/' + graph_path + '/' + sw_file + '.yaml'
with open(sw_path, 'w') as f:
    yaml.dump(sw, f)

In [ ]:
# Expand the last level of the depth-Lmax rooted tree under each termination
# criterion, producing the input graphs for the Mk1/Mk2 algorithms.
terms = [
    'dmark',
    'omega_inverted'
]
with open('synch_words/' + graph_path + '/' + sw_file + '.yaml','r') as f:
    sw = yaml.load(f)
for term in terms:
    for level in range(4, 13, 2):
        base_graph = pg.ProbabilisticGraph(path='graphs/' + graph_path + '/rtp_L' + str(Lmax) + '.yaml')
        expanded = base_graph.expand_last_level(level, term, alpha, test, sw)
        expanded.save_graph_file('graphs/' + graph_path + '/rtp_L' + str(level) + '_alpha' + str(alpha) + '_' + term + '.yaml')

In [ ]:
# Run Mk1 and Mk2 on every (termination, L) graph produced above and generate
# one sequence from each resulting machine.
algos = [
    'mk1',
    'mk2_moore'
]
terms = [
    'dmark',
    'omega_inverted'
]
l2 = 3
with open('synch_words/' + graph_path + '/' + sw_file + '.yaml','r') as f:
    sw = yaml.load(f)
for level in range(4, 13, 2):
    for term in terms:
        generator = gg.GraphGenerator('graphs/' + graph_path + '/rtp_L' + str(level) + '_alpha' + str(alpha) + '_' + term + '.yaml',
                                      sw,
                                      'graphs/' + graph_path + '/L' + str(level) + '_alpha' + str(alpha) + '_' + term,
                                      '')
        for algorithm in algos:
            reduce_fn = generator.mk1 if algorithm == 'mk1' else generator.mk2_moore
            machine = reduce_fn(test, alpha, l2)
            seq, _alphabet = machine.generate_sequence(lseq, machine.states[0])
            seq_path = ('sequences/' + graph_path + '/len_' + str(lseq) + '_L' + str(level)
                        + '_alpha' + str(alpha) + '_' + term + '_' + algorithm + '.yaml')
            with open(seq_path, 'w') as f:
                yaml.dump(seq, f)

In [ ]:
# Run CRISSiS for each L2 value and generate one sequence per machine.
# NOTE(review): relies on `sw` still holding the synchronization words loaded
# in an earlier cell -- confirm cell order before a Restart & Run All.
for l2 in range(1, 4):
    generator = gg.GraphGenerator('graphs/' + graph_path + '/rtp_L' + str(Lmax) + '.yaml',
                                  sw,
                                  'graphs/' + graph_path + '/L2' + str(l2) + '_alpha' + str(alpha),
                                  '')
    machine = generator.crissis(test, alpha, l2)
    seq, _alphabet = machine.generate_sequence(lseq, machine.states[0])
    seq_path = 'sequences/' + graph_path + '/len_' + str(lseq) + '_L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml'
    with open(seq_path, 'w') as f:
        yaml.dump(seq, f)

In [ ]:
#Sequence analysis for D-Markov
# For each D-Markov sequence, compute (or reload) the statistics selected by
# the flags in `stats`; divergences are collected into K (kld) and Phi
# (l1metric) and dumped at the end.
# NOTE(review): this range includes d=12, but the generation cell above only
# builds d in range(4, 12) -- confirm the d=12 sequence exists.
rng = range(4, 13)
K = []    # KL divergence per d, written to results/.../kld/
Phi = []  # second divergence series, written to results/.../l1metric/
L = 10    # maximum subsequence length for probability estimation
k = 9     # subsequence length at which the KL divergence is evaluated
a = 20    # maximum autocorrelation lag (used only if stats['autocorr'])
stats = {
    'probs': True,
    'cond_probs': True,
    'cond_entropy': True,
    'autocorr': False,
    'kld': True,
    'phi': True
}
for d in rng:
    s = sa.SequenceAnalyzer('sequences/' + graph_path + '/len_' + str(lseq) + '_dmarkov_d' + str(d) + '.yaml')
    if stats['probs']:
        p = s.calc_probs(L)
        with open('results/' + graph_path + '/probabilities/dmarkov_d' + str(d) + '.yaml','w') as f:
            yaml.dump(p, f)
    else:
        with open('results/' + graph_path + '/probabilities/dmarkov_d' + str(d) + '.yaml','r') as f:
            # NOTE(review): yaml.load without a Loader is unsafe on untrusted
            # input; acceptable only because this file is written above.
            p, alph = yaml.load(f)
            s.probabilities = p
            s.alphabet = alph
    if stats['cond_probs']:
        cp = s.calc_cond_probs(L-1)
        with open('results/' + graph_path + '/probabilities/cond_dmarkov_d' + str(d) + '.yaml','w') as f:
            yaml.dump(cp, f)
    else:
        with open('results/' + graph_path + '/probabilities/cond_dmarkov_d' + str(d) + '.yaml','r') as f:
            s.conditional_probabilities = yaml.load(f)
    if stats['cond_entropy']:
        h = s.calc_cond_entropy(L-1)
        with open('results/' + graph_path + '/cond_entropies/dmarkov_d' + str(d) + '.yaml','w') as f:
            yaml.dump(h, f)
    if stats['autocorr']:
        ac = s.calc_autocorrelation(a)
        with open('results/' + graph_path + '/autocorrelations/dmarkov_d' + str(d) + '.yaml','w') as f:
            yaml.dump(ac, f)
    if stats['kld']:
        K.append(s.calc_kldivergence(base_probs, k))
    if stats['phi']:
        # NOTE(review): duplicates the KL computation above; confirm whether a
        # dedicated L1/phi metric method of SequenceAnalyzer was intended.
        Phi.append(s.calc_kldivergence(base_probs, k))

if stats['kld']:
    with open('results/' + graph_path + '/kld/dmarkov.yaml','w') as f:
        yaml.dump(K, f)
if stats['phi']:  # fixed: was gated on stats['kld'] (copy-paste bug)
    with open('results/' + graph_path + '/l1metric/dmarkov.yaml','w') as f:
        yaml.dump(Phi, f)

In [ ]:
#Sequence analysis for CRISSiS
# For each CRISSiS sequence, compute (or reload) the statistics selected by
# the flags in `stats`; divergences are collected into K (kld) and Phi
# (l1metric) and dumped at the end.
rng = range(1, 4)
K = []    # KL divergence per l2, written to results/.../kld/
Phi = []  # second divergence series, written to results/.../l1metric/
L = 10    # maximum subsequence length for probability estimation
k = 6     # subsequence length at which the KL divergence is evaluated
a = 20    # maximum autocorrelation lag (used only if stats['autocorr'])
stats = {
    'probs': True,
    'cond_probs': True,
    'cond_entropy': True,
    'autocorr': False,
    'kld': True,
    'phi': True
}
for l2 in rng:
    s = sa.SequenceAnalyzer('sequences/' + graph_path + '/len_' + str(lseq) + '_L2_' + str(l2) + '_alpha' + str(alpha) \
                            + '_crissis.yaml')
    if stats['probs']:
        p = s.calc_probs(L)
        with open('results/' + graph_path + '/probabilities/L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'w') as f:
            yaml.dump(p, f)
    else:
        with open('results/' + graph_path + '/probabilities/L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'r') as f:
            # NOTE(review): yaml.load without a Loader is unsafe on untrusted
            # input; acceptable only because this file is written above.
            p, alph = yaml.load(f)
            s.probabilities = p
            s.alphabet = alph
    if stats['cond_probs']:
        cp = s.calc_cond_probs(L-1)
        with open('results/' + graph_path + '/probabilities/cond_L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'w') as f:
            yaml.dump(cp, f)
    else:
        with open('results/' + graph_path + '/probabilities/cond_L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'r') as f:
            s.conditional_probabilities = yaml.load(f)
    if stats['cond_entropy']:
        h = s.calc_cond_entropy(L-1)
        with open('results/' + graph_path + '/cond_entropies/L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'w') as f:
            yaml.dump(h, f)
    if stats['autocorr']:
        ac = s.calc_autocorrelation(a)
        # fixed: this file was opened with mode 'r' while being written to,
        # which would raise an IOError on yaml.dump.
        with open('results/' + graph_path + '/autocorrelations/L2_' + str(l2) + '_alpha' + str(alpha) + '_crissis.yaml',
                  'w') as f:
            yaml.dump(ac, f)
    if stats['kld']:
        K.append(s.calc_kldivergence(base_probs, k))
    if stats['phi']:
        # NOTE(review): duplicates the KL computation above; confirm whether a
        # dedicated L1/phi metric method of SequenceAnalyzer was intended.
        Phi.append(s.calc_kldivergence(base_probs, k))

if stats['kld']:
    with open('results/' + graph_path + '/kld/crissis.yaml','w') as f:
        yaml.dump(K, f)
if stats['phi']:  # fixed: was gated on stats['kld'] (copy-paste bug)
    with open('results/' + graph_path + '/l1metric/crissis.yaml','w') as f:
        yaml.dump(Phi, f)

In [ ]:
#Sequence analysis for Mk1 and Mk2
# For every (algorithm, termination, L) combination, compute (or reload) the
# statistics selected by `stats`; K (kld) and Phi (l1metric) are dumped once
# per (algorithm, termination) pair.
rng = range(4, 13, 2)
L = 10    # maximum subsequence length for probability estimation
k = 6     # subsequence length at which the KL divergence is evaluated
a = 20    # maximum autocorrelation lag (used only if stats['autocorr'])
algos = [
    'mk1',
    'mk2_moore'
]
terms = [
    'dmark',
    'omega_inverted'
]
stats = {
    'probs': False,
    'cond_probs': True,
    'cond_entropy': True,
    'autocorr': False,
    'kld': True,
    'phi': True
}

# fixed: the loop variable was `a`, shadowing the autocorrelation lag above
# and passing the algorithm name to calc_autocorrelation.
for algo in algos:
    for t in terms:
        # fixed: K/Phi are now reset per (algo, term) so each output file
        # holds only its own series; previously they accumulated across all
        # earlier combinations.
        K = []
        Phi = []
        for l in rng:
            s = sa.SequenceAnalyzer('sequences/' + graph_path + '/len_' + str(lseq) + '_L' + str(l) + '_alpha' + \
                                    str(alpha) + '_' + t + '_' + algo + '.yaml')
            if stats['probs']:
                p = s.calc_probs(L)
                with open('results/' + graph_path + '/probabilities/L' + str(l) + '_alpha' + str(alpha) + '_' + t + '_' + \
                          algo + '.yaml', 'w') as f:
                    yaml.dump(p, f)
            else:
                with open('results/' + graph_path + '/probabilities/L' + str(l) + '_alpha' + str(alpha) + '_' + t + '_' + \
                          algo + '.yaml', 'r') as f:
                    # NOTE(review): yaml.load without a Loader is unsafe on
                    # untrusted input; these files are produced by this notebook.
                    p, alph = yaml.load(f)
                    s.probabilities = p
                    s.alphabet = alph
            if stats['cond_probs']:
                cp = s.calc_cond_probs(L-1)
                with open('results/' + graph_path + '/probabilities/cond_L' + str(l) + '_alpha' + str(alpha) + '_' + t + \
                          '_' + algo + '.yaml', 'w') as f:
                    yaml.dump(cp, f)
            else:
                with open('results/' + graph_path + '/probabilities/cond_L' + str(l) + '_alpha' + str(alpha) + '_' + t + \
                          '_' + algo + '.yaml', 'r') as f:
                    s.conditional_probabilities = yaml.load(f)
            if stats['cond_entropy']:
                h = s.calc_cond_entropy(L-1)
                with open('results/' + graph_path + '/cond_entropies/L' + str(l) + '_alpha' + str(alpha) + '_' + t + '_' + \
                          algo + '.yaml', 'w') as f:
                    yaml.dump(h, f)
            if stats['autocorr']:
                ac = s.calc_autocorrelation(a)
                with open('results/' + graph_path + '/autocorrelations/L' + str(l) + '_alpha' + str(alpha) + '_' + t + '_' + \
                          algo + '.yaml', 'w') as f:
                    yaml.dump(ac, f)
            if stats['kld']:
                K.append(s.calc_kldivergence(base_probs, k))
            if stats['phi']:
                # NOTE(review): duplicates the KL computation above; confirm
                # whether a dedicated L1/phi metric method was intended.
                Phi.append(s.calc_kldivergence(base_probs, k))

        if stats['kld']:
            with open('results/' + graph_path + '/kld/' + algo + '_' + t + '.yaml','w') as f:
                yaml.dump(K, f)
        if stats['phi']:  # fixed: was gated on stats['kld'] (copy-paste bug)
            with open('results/' + graph_path + '/l1metric/' + algo + '_' + t + '.yaml','w') as f:
                yaml.dump(Phi, f)

In [ ]:


In [ ]: