Convergent Cross Mapping

This notebook and package reproduce the results from *Detecting Causality in Complex Ecosystems* (Sugihara et al., 2012).


In [1]:
import sys
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('ticks')
sns.set_context(context='paper',font_scale=1.5)
%matplotlib inline

In [2]:
%load_ext autoreload
%autoreload 2

In [3]:
#alter the line below to correspond to your file system
nonlinpy_dir = '/Users/nickc/Documents/skccm'
# Guard against appending a duplicate entry every time this cell is re-run.
if nonlinpy_dir not in sys.path:
    sys.path.append(nonlinpy_dir)

In [4]:
import skccm.paper as paper

In [5]:
import skccm.data as data

Work Flow

A coupled logistic map. Here is what we are going to do:

  1. Generate the time series
  2. Calculate the mutual information of the time series
  3. Embed the time series (not going to explore embedding dimension, just lag)
  4. Analyze forecast skill for a range of library lengths

1. Generate the time series


In [7]:
# Parameters for the coupled logistic map (the system analyzed in the paper
# referenced at the top of the notebook). x1 drives x2 strongly (b12 = 0.2)
# while x2 barely drives x1 (b21 = 0.01).
rx1 = 3.72 #determines chaotic behavior of the x1 series
rx2 = 3.72 #determines chaotic behavior of the x2 series
b12 = 0.2 #Influence of x1 on x2
b21 = 0.01 #Influence of x2 on x1
ts_length = 1000  # number of samples to generate
x1,x2 = data.coupled_logistic(rx1,rx2,b12,b21,ts_length)

In [8]:
# Show the first 100 samples of each series, stacked with shared axes.
fig, ax = plt.subplots(nrows=2, sharex=True, sharey=True)
for axis, series in zip(ax, (x1, x2)):
    axis.plot(series[:100])
ax[0].set_yticks([.1, .3, .5, .7, .9])
ax[1].set_xlabel('Time')
sns.despine()



In [9]:
#fig.savefig('../figures/coupled_logistic.png',bbox_inches='tight')

2. Calculate the mutual information


In [10]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)

In [11]:
# Mutual information for lags 1..10 (used below to pick the embedding lag).
mi1 = e1.mutual_information(10)
mi2 = e2.mutual_information(10)

In [ ]:


In [12]:
# Mutual information vs. lag for both series.
fig, ax = plt.subplots(nrows=2, sharex=True, sharey=True)
lags = np.arange(1, 11)
ax[0].plot(lags, mi1)
ax[1].plot(lags, mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [13]:
#fig.savefig('../figures/mutual_info.png',bbox_inches='tight')

3. Embed the time series


In [14]:
# Embed each series in a 2-d state space with unit lag,
# i.e. vectors of the form [x(t), x(t-1)] (see the scatter labels below).
lag = 1
embed = 2
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)

Get the indices of the embedded vectors (needed by the CCM prediction step)


In [15]:
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [16]:
# Scatter the 2-d embeddings side by side; with lag = 1 the axes are x(t) vs x(t-1).
fig, ax = plt.subplots(ncols=2, sharey=True, sharex=True, figsize=(10, 4))
ax[0].scatter(X1[:, 0], X1[:, 1])
ax[0].set_xlabel('X1(t)')
ax[0].set_ylabel('X1(t-1)')
ax[1].scatter(X2[:, 0], X2[:, 1])
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-1)')
sns.despine()



In [17]:
#fig.savefig('../figures/x_embedded.png',bbox_inches='tight')

4. Forecast skill as a function of library length

Same legend as above.

Split it into a testing set and training set


In [18]:
# Library lengths to test: 50 integer values from 10 up to half the embedded length.
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')

In [19]:
lib_lens


Out[19]:
array([ 10,  19,  29,  39,  49,  59,  69,  79,  89,  99, 109, 119, 129,
       139, 149, 159, 169, 179, 189, 199, 209, 219, 229, 239, 249, 259,
       269, 279, 289, 299, 309, 319, 329, 339, 349, 359, 369, 379, 389,
       399, 409, 419, 429, 439, 449, 459, 469, 479, 489, 499])

In [20]:
# Cross map in both directions over all library lengths.
# sc1 is the skill of X1 xmap X2, sc2 of X2 xmap X1 (see the plot labels below).
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [22]:
# Forecast skill as a function of library length, one curve per direction.
fig, ax = plt.subplots()
for skill, lbl in ((sc1, 'X1 xmap X2'), (sc2, 'X2 xmap X1')):
    ax.plot(lib_lens, skill, label=lbl)
ax.set_xlabel('Library Length')
ax.set_ylabel('Forecast Skill')
sns.despine()



In [23]:
#fig.savefig('../figures/paper/xmap_lib_len.png',bbox_inches='tight')

For a single library length

Reproducing the plot from the paper

The paper explores different values for b12 and b21 in Figure 3B.


In [24]:
X1.shape


Out[24]:
(999, 2)

In [26]:
# Setup for sweeping the coupling strengths b12 and b21 (Figure 3B of the paper).
rx1 = 3.7 #determines chaotic behavior of the x1 series
rx2 = 3.7 #determines chaotic behavior of the x2 series

ts_length = 1000

#variables for embedding
lag = 1
embed = 2

CCM = paper.CCM() #initiate the ccm object

#store values
num_b = 20 #number of bs to test
sc1_store = np.empty((num_b,num_b))
sc2_store = np.empty((num_b,num_b))

#values over which to test
max_b = .4 #maximum b values
b_range = np.linspace(0,max_b,num=num_b)

This takes roughly 20 min to run.


In [27]:
#loop through b values for b12 and b21
for ii,b12 in enumerate(b_range):
    for jj, b21 in enumerate(b_range):

        x1,x2 = data.coupled_logistic(rx1,rx2,b12,b21,ts_length)

        em_x1 = paper.Embed(x1)
        em_x2 = paper.Embed(x2)

        X1 = em_x1.embed_vectors_1d(lag,embed)
        X2 = em_x2.embed_vectors_1d(lag,embed)

        # BUG FIX: the indices must come from the embeddings of the freshly
        # generated series (em_x1/em_x2). The original used the stale e1/e2
        # objects built earlier from a different realization of the map.
        X1_ind = em_x1.embed_indices(lag,embed)
        X2_ind = em_x2.embed_indices(lag,embed)

        CCM = paper.CCM()
        CCM.fit(X1,X2)
        x1p, x2p = CCM.predict_drop_in_list([500],X1_ind,X2_ind)
        sc1,sc2 = CCM.score()

        sc1_store[ii,jj] = sc1[0] #only the one prediction out
        sc2_store[ii,jj] = sc2[0]

# Skill difference over the grid; the sign indicates the dominant causal direction.
sc_diff = sc2_store-sc1_store

In [28]:
# Contour of the skill difference over the (b12, b21) grid.
fig,ax = plt.subplots()
v = np.linspace(-1.6,1.6,21)  # contour levels
cax = ax.contourf(b_range,b_range,sc_diff,v,cmap='magma')
fig.colorbar(cax,ticks=v[::2])  # label every other level
ax.set_xlabel('B12')
ax.set_ylabel('B21')


Out[28]:
<matplotlib.text.Text at 0x10e779d68>

In [29]:
#fig.savefig('../figures/train_test_split/xmap_changingB.png',bbox_inches='tight')

Randomly forced


In [30]:
# x2 is a logistic map forced by a random external series x1
# (presumably what data.driven_rand_logistic generates -- see section title).
rx2 = 3.72 #determines chaotic behavior of the x2 series
b12 = .2 #Influence of x1 on x2

ts_length = 1000
x1,x2 = data.driven_rand_logistic(rx2,b12,ts_length)

In [31]:
fig,ax = plt.subplots(nrows=2,sharex=True,sharey=True)
ax[0].plot(x1[0:100])
ax[1].plot(x2[0:100])
ax[0].set_yticks([.1,.3,.5,.7,.9])
ax[1].set_xlabel('Time')
sns.despine()



In [32]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)

In [33]:
mi1 = e1.mutual_information(10)
mi2 = e2.mutual_information(10)

In [34]:
fig,ax = plt.subplots(nrows=2,sharex=True,sharey=True)
ax[0].plot(np.arange(1,11),mi1)
ax[1].plot(np.arange(1,11),mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [35]:
lag = 1
embed = 2
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [36]:
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4)) 
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
ax[0].set_ylabel('X1(t-1)')
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-1)')
sns.despine()



In [37]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')

In [38]:
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [39]:
# CONSISTENCY FIX: plot skill against the library length (as the earlier xmap
# figures do) instead of the bare index, and label the curves and axes.
plt.plot(lib_lens, sc1, label='X1 xmap X2')
plt.plot(lib_lens, sc2, label='X2 xmap X1')
plt.xlabel('Library Length')
plt.ylabel('Forecast Skill')


Out[39]:
[<matplotlib.lines.Line2D at 0x10e79e3c8>]

Periodic Forcing


In [40]:
rx2 = 3.72 #determines chaotic behavior of the x2 series
b12 = .5 #Influence of x1 on x2

ts_length = 1000
x1,x2 = data.driving_sin(rx2,b12,ts_length)

In [41]:
fig,ax = plt.subplots(nrows=2,sharex=True)
ax[0].plot(x1[0:100])
ax[1].plot(x2[0:100])
ax[1].set_xlabel('Time')
sns.despine()



In [44]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)

In [45]:
mi1 = e1.mutual_information(10)
mi2 = e2.mutual_information(10)

In [46]:
fig,ax = plt.subplots(nrows=2,sharex=True,sharey=True)
ax[0].plot(np.arange(1,11),mi1)
ax[1].plot(np.arange(1,11),mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [47]:
lag1 = 4
lag2 = 1
embed = 2
X1 = e1.embed_vectors_1d(lag1,embed)
X2 = e2.embed_vectors_1d(lag2,embed)
X1_ind = e1.embed_indices(lag1,embed)
X2_ind = e2.embed_indices(lag2,embed)

In [48]:
# Scatter the 2-d embeddings of the periodically forced system.
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4))
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
# BUG FIX: X1 was embedded with lag1 = 4, so the second coordinate is X1(t-4),
# not X1(t-1). Build the labels from the actual lags.
ax[0].set_ylabel('X1(t-{0})'.format(lag1))
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-{0})'.format(lag2))
sns.despine()



In [50]:
CCM = paper.CCM()

In [52]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')

X1 and X2 need to have the same length. This can be corrected in future versions.


In [54]:
print('X1 Shape:', X1.shape)
print('X2 Shape:', X2.shape)


X1 Shape: (996, 2)
X2 Shape: (999, 2)

In [55]:
# Truncate X2 (and its indices) so both embeddings have the same length;
# the larger lag on X1 left it a few rows shorter (996 vs 999, per the shapes above).
X2 = X2[:len(X1)]
X2_ind = X2_ind[:len(X1)]

In [56]:
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [57]:
# CONSISTENCY FIX: use lib_lens on the x-axis and label the figure,
# matching the earlier forecast-skill plots.
plt.plot(lib_lens, sc1, label='X1 xmap X2')
plt.plot(lib_lens, sc2, label='X2 xmap X1')
plt.xlabel('Library Length')
plt.ylabel('Forecast Skill')


Out[57]:
[<matplotlib.lines.Line2D at 0x11670d358>]

Lagged Coupled Logistic


In [68]:
rx1 = 3.72 #determines chaotic behavior of the x1 series
rx2 = 3.72 #determines chaotic behavior of the x2 series
b12 = 0.01 #Influence of x1 on x2
b21 = 0.3 #Influence of x2 on x1
ts_length = 1000
x1,x2 = data.lagged_coupled_logistic(rx1,rx2,b12,b21,ts_length)

In [69]:
fig,ax = plt.subplots(nrows=2,sharex=True)
ax[0].plot(x1[0:100])
ax[1].plot(x2[0:100])
ax[1].set_xlabel('Time')
sns.despine()



In [70]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)

In [71]:
mi1 = e1.mutual_information(10)
mi2 = e2.mutual_information(10)

In [72]:
fig,ax = plt.subplots(nrows=2,sharex=True,sharey=True)
ax[0].plot(np.arange(1,11),mi1)
ax[1].plot(np.arange(1,11),mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [73]:
lag1 = 1
lag2 = 1
embed = 2
X1 = e1.embed_vectors_1d(lag1,embed)
X2 = e2.embed_vectors_1d(lag2,embed)
X1_ind = e1.embed_indices(lag1,embed)
X2_ind = e2.embed_indices(lag2,embed)

In [74]:
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4)) 
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
ax[0].set_ylabel('X1(t-1)')
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-1)')
sns.despine()



In [75]:
CCM = paper.CCM()
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [76]:
plt.plot(lib_lens,sc1)
plt.plot(lib_lens,sc2)


Out[76]:
[<matplotlib.lines.Line2D at 0x1176f82b0>]

In [ ]:

Random Linearly Increasing


In [77]:
# Two independent white-noise series with different linear trends;
# there is no causal coupling between them (a negative control for CCM).
# NOTE(review): no random seed is set, so results differ from run to run.
ts_length=1000
x1 = np.random.randn(ts_length,) + np.linspace(0,25,ts_length)
x2 = np.random.randn(ts_length,) + np.linspace(0,10,ts_length)

In [78]:
plt.plot(x1)
plt.plot(x2)


Out[78]:
[<matplotlib.lines.Line2D at 0x1049b8c50>]

In [80]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)
mi1 = e1.mutual_information(20)
mi2 = e2.mutual_information(20)

In [81]:
fig,ax = plt.subplots(nrows=2,sharex=True)
ax[0].plot(np.arange(1,21),mi1)
ax[1].plot(np.arange(1,21),mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [82]:
lag = 2
embed = 2
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [83]:
# Scatter the 2-d embeddings of the trending-noise series.
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4))
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
# BUG FIX: the embedding above uses lag = 2, so the second coordinate is
# X(t-2), not X(t-1). Derive the labels from the actual lag.
ax[0].set_ylabel('X1(t-{0})'.format(lag))
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-{0})'.format(lag))
sns.despine()



In [84]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [85]:
plt.plot(lib_lens,sc1)
plt.plot(lib_lens,sc2)


Out[85]:
[<matplotlib.lines.Line2D at 0x117557d68>]

Cosine and Sine Waves


In [86]:
ts_length=500
x1 = np.random.randn(ts_length,) + 10*np.sin(np.linspace(0,10*np.pi,ts_length))
x2 = np.random.randn(ts_length,) + 20*np.cos(np.linspace(0,10*np.pi,ts_length))

#x1[x1<0] = np.random.randn(np.sum(x1<0),)
#x2[x2<0] = np.random.randn(np.sum(x2<0),)

In [87]:
plt.plot(x1)
plt.plot(x2,alpha=.5)


Out[87]:
[<matplotlib.lines.Line2D at 0x116576be0>]

In [92]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)
mi1 = e1.mutual_information(40)
mi2 = e2.mutual_information(40)

In [94]:
fig,ax = plt.subplots(nrows=2,sharex=True)
ax[0].plot(np.arange(1,41),mi1)
ax[1].plot(np.arange(1,41),mi2)
ax[1].set_xlabel('Lag')
sns.despine()



In [95]:
lag = 8
embed = 2
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [96]:
# Scatter the 2-d embeddings of the sine/cosine series.
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4))
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
# BUG FIX: the embedding above uses lag = 8, so the second coordinate is
# X(t-8), not X(t-1). Derive the labels from the actual lag.
ax[0].set_ylabel('X1(t-{0})'.format(lag))
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-{0})'.format(lag))
sns.despine()



In [97]:
lib_lens


Out[97]:
array([ 10,  19,  29,  39,  49,  59,  69,  79,  89,  99, 109, 119, 129,
       139, 149, 159, 169, 179, 189, 199, 209, 219, 229, 239, 249, 259,
       269, 279, 289, 299, 309, 319, 329, 339, 349, 359, 369, 379, 389,
       399, 409, 419, 429, 439, 449, 459, 469, 479, 489, 499])

In [98]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [99]:
# CONSISTENCY FIX: use lib_lens on the x-axis and label the figure,
# matching the earlier forecast-skill plots.
plt.plot(lib_lens, sc1, label='X1 xmap X2')
plt.plot(lib_lens, sc2, label='X2 xmap X1')
plt.xlabel('Library Length')
plt.ylabel('Forecast Skill')


Out[99]:
[<matplotlib.lines.Line2D at 0x1176db9e8>]

Mean Zero, Unit Standard Deviation


In [100]:
ts_length=500
x1 = np.random.randn(ts_length,) + 10*np.sin(np.linspace(0,10*np.pi,ts_length))
x2 = np.random.randn(ts_length,) + 20*np.cos(np.linspace(0,10*np.pi,ts_length))

In [101]:
plt.plot(x1)
plt.plot(x2,alpha=.5)


Out[101]:
[<matplotlib.lines.Line2D at 0x11a7270f0>]

In [102]:
def normalize(x):
    """Return a copy of ``x`` rescaled to zero mean and unit standard deviation."""
    centered = x - x.mean()
    return centered / x.std()

In [103]:
x1 = normalize(x1)
x2 = normalize(x2)

In [104]:
plt.plot(x1)
plt.plot(x2,alpha=.5)


Out[104]:
[<matplotlib.lines.Line2D at 0x11b291ef0>]

In [105]:
e1 = paper.Embed(x1)
e2 = paper.Embed(x2)
mi1 = e1.mutual_information(20)
mi2 = e2.mutual_information(20)

In [107]:
lag = 8
embed = 2
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [108]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [109]:
plt.plot(lib_lens,sc1)
plt.plot(lib_lens,sc2)


Out[109]:
[<matplotlib.lines.Line2D at 0x11a74b978>]

Lorenz


In [110]:
X = data.lorenz()

In [111]:
fig,ax = plt.subplots(3,figsize=(10,5),sharex=True)
ax[0].plot(X[:,0])
ax[1].plot(X[:,1])
ax[2].plot(X[:,2])
sns.despine()



In [112]:
e1 = paper.Embed(X[:,0])
e2 = paper.Embed(X[:,1])
mi1 = e1.mutual_information(100)
mi2 = e2.mutual_information(100)

In [113]:
plt.plot(mi1)
plt.plot(mi2)


Out[113]:
[<matplotlib.lines.Line2D at 0x1176beb00>]

In [114]:
lag = 18
embed = 3
X1 = e1.embed_vectors_1d(lag,embed)
X2 = e2.embed_vectors_1d(lag,embed)
X1_ind = e1.embed_indices(lag,embed)
X2_ind = e2.embed_indices(lag,embed)

In [115]:
fig,ax = plt.subplots(ncols=2,sharey=True,sharex=True,figsize=(10,4)) 
ax[0].scatter(X1[:,0],X1[:,1])
ax[1].scatter(X2[:,0],X2[:,1])
ax[0].set_xlabel('X1(t)')
ax[0].set_ylabel('X1(t-1)')
ax[1].set_xlabel('X2(t)')
ax[1].set_ylabel('X2(t-1)')
sns.despine()



In [116]:
len_X1 = len(X1)
lib_lens = np.linspace(10, len_X1/2, num=50, dtype='int')
CCM = paper.CCM()
CCM.fit(X1,X2)
x1p, x2p = CCM.predict_drop_in_list(lib_lens,X1_ind,X2_ind)
sc1,sc2 = CCM.score()

In [117]:
plt.plot(lib_lens,sc1)
plt.plot(lib_lens,sc2)


Out[117]:
[<matplotlib.lines.Line2D at 0x117160278>]

In [ ]:


In [ ]:


In [ ]:


In [ ]:


In [ ]:


In [ ]:


In [ ]:


In [78]:
# NOTE(review): `ccm` is not imported in any visible cell (only skccm.paper and
# skccm.data are) -- this section appears to use an older API; confirm the import.
CCM = ccm.ccm(score_metric='corrcoef')

In [79]:
x1_len = len(X1)

In [80]:
x1tr, x1te, x2tr, x2te = ccm.utilities.train_test_split(X1,X2)

In [81]:
lib_lens


Out[81]:
array([ 10,  34,  58,  82, 106, 130, 154, 178, 202, 226, 250, 274, 298,
       322, 346, 370, 394, 418, 442, 466])

In [82]:
lib_lens = np.arange(5,len(x1tr)/2,len(x1tr)/20,dtype='int')
sc1, sc2 = CCM.predict_causation(x1tr, x1te, x2tr, x2te,lib_lens)

In [83]:
plt.plot(lib_lens,sc1)
plt.plot(lib_lens,sc2)


Out[83]:
[<matplotlib.lines.Line2D at 0x11add1668>]

Brown Noise

seed = 11, produces small variance in x2 and large variance in x1


In [84]:
def brown_noise_gen(ts_length,seed):
    """Generate two independent Brownian-motion (random-walk) series.

    Seeds numpy's global RNG with ``seed``, then draws Gaussian increments in
    the order x1[0], x2[0], x1-step, x2-step, ... so that results are exactly
    reproducible for a given seed.
    """
    np.random.seed(seed)
    x1 = np.zeros(ts_length)
    x2 = np.zeros(ts_length)
    x1[0] = np.random.randn()
    x2[0] = np.random.randn()
    for step in range(1, ts_length):
        x1[step] = x1[step - 1] + np.random.randn()
        x2[step] = x2[step - 1] + np.random.randn()

    return x1,x2

In [85]:
# Monte Carlo: run CCM on many independent Brownian-noise pairs and record the
# summed skill difference S for each trial (a false-positive check for CCM).
ts_length=2000
iterations = 1000
S = np.zeros(iterations)
lib_lens = np.arange(50,ts_length/2,ts_length/2/20,dtype='int')
sc1 = np.zeros((iterations,len(lib_lens)))
sc2 = np.zeros((iterations,len(lib_lens)))
# CONSISTENCY FIX: size the stores and the generator call from ts_length
# instead of repeating the magic number 2000.
x1_store = np.zeros((iterations,ts_length))
x2_store = np.zeros((iterations,ts_length))
# NOTE(review): `ccm` is not imported in any visible cell (only skccm.paper
# and skccm.data are) -- confirm the cell that defines it.
for ii in range(iterations):

    # seed with the iteration index so every trial is reproducible
    x1,x2 = brown_noise_gen(ts_length,ii)

    x1_store[ii,:] = x1
    x2_store[ii,:] = x2

    em_x1 = ccm.embed(x1)
    em_x2 = ccm.embed(x2)
    lag1 = 20
    lag2 = 20
    embed = 3

    X1 = em_x1.embed_vectors_1d(lag1,embed)
    X2 = em_x2.embed_vectors_1d(lag2,embed)

    x1tr, x1te, x2tr, x2te = ccm.utilities.train_test_split(X1,X2,percent=.5)
    CCM = ccm.ccm(score_metric='corrcoef')

    sc1[ii,:], sc2[ii,:] = CCM.predict_causation(x1tr, x1te, x2tr, x2te,lib_lens)
    S[ii] = np.sum(sc1[ii,:]-sc2[ii,:])

In [86]:
X1.shape


Out[86]:
(1960, 3)

In [87]:
fig,ax = plt.subplots(ncols=2,figsize=(10,5))
ax[0].hist(np.abs(S))
ax[1].hist(S);



In [88]:
#fig.savefig('../figures/ccm_hist.png',bbox_inches='tight')

In [89]:
top_vals = np.argsort(np.abs(S))[::-1]
print(top_vals[0:10])


[ 26 369 306 424 311 697 714 149 687 575]

In [90]:
# One panel per trial, ordered by decreasing |S| (largest skill differences first).
fig, axes = plt.subplots(4, 4, figsize=(16, 16), sharex=True, sharey=True)
ax = axes.ravel()

for panel in range(16):
    trial = top_vals[panel]
    ax[panel].plot(sc1[trial, :])
    ax[panel].plot(sc2[trial, :])
    ax[panel].set_ylim(-1, 1)



In [91]:
#fig.savefig('../figures/top_ccm.png',bbox_inches='tight')

plot the top eight and their corresponding ts


In [92]:
fig,axes = plt.subplots(4,4,figsize=(16,16))
ax = axes.ravel()
# Panel layout in the raveled 4x4 grid: skill curves go in rows 1 and 3
# (ts_list), with the corresponding raw time series directly below them
# in rows 2 and 4 (c_list).
ts_list = [0,1,2,3,8,9,10,11]
c_list = [4,5,6,7,12,13,14,15]
for ii in range(8):
    ax[ts_list[ii]].plot(sc1[top_vals[ii],:])
    ax[ts_list[ii]].plot(sc2[top_vals[ii],:])
    ax[ts_list[ii]].set_ylim(-1.,1.)
    
    ax[c_list[ii]].plot(x1_store[top_vals[ii],:])
    ax[c_list[ii]].plot(x2_store[top_vals[ii],:])
sns.despine()



In [93]:
#fig.savefig('../figures/top_ccm_with_series.png',bbox_inches='tight')

In [ ]: