In [11]:
import deconvolution_methods as DM
import numpy as np
from sklearn.metrics import adjusted_mutual_info_score as AMI
from scipy.stats import spearmanr,pearsonr
import pandas as pd
def bin_variable(var1):
    '''Standardize a variable (when its spread is non-trivial) and bin it with Freedman-Diaconis bin edges.'''
    var1 = np.array(var1).astype(float)

    if np.std(var1) > 0.01:
        var1 = (var1 - np.mean(var1)) / np.std(var1)
    else:
        var1 = var1 - np.mean(var1)
    return np.digitize(var1, np.histogram(var1, bins='fd')[1])

ajd_mi_bin=lambda x,y : AMI(bin_variable(x),bin_variable(y))
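
As a quick sanity check of the binned-AMI score (a minimal sketch on synthetic vectors, not part of the analysis): a variable should share a large adjusted mutual information with a noisy monotone transform of itself and essentially none with independent noise.

rng = np.random.RandomState(0)
x = rng.rand(805)
print(ajd_mi_bin(x, x ** 2 + 0.01 * rng.randn(805)))   # strong dependence -> large AMI
print(ajd_mi_bin(x, rng.rand(805)))                    # independent noise -> AMI near 0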

In [12]:
# load the in-silico expression data: rows are samples, columns are genes G1..G1643
data_in_silico=pd.read_csv('net1_expression_data.tsv',sep='\t')
print(data_in_silico)


           G1        G2        G3        G4        G5        G6        G7  \
0    0.425448  0.017829  0.907989  0.448247  0.172324  0.273489  0.843766   
1    0.442400  0.050525  0.869368  0.445851  0.173311  0.274889  0.764049   
2    1.056847  0.208454  0.467448  0.505077  0.244883  0.208451  0.665355   
3    1.117226  0.003001  0.317654  0.387204  0.253792  0.179360  0.939244   
4    0.971068  0.001056  0.354651  0.474532  0.207718  0.102833  0.745871   
..        ...       ...       ...       ...       ...       ...       ...   

[805 rows x 1643 columns]

In [13]:
# Pearson correlation between genes; np.corrcoef treats rows as variables, hence the transpose
P_corr=np.corrcoef(data_in_silico.transpose())

In [4]:
print(P_corr.shape)
print(len(list(data_in_silico['G1'])))


(1643, 1643)
805

In [25]:
S_corr=np.ones((1643,1643))
AMI_fd=np.ones((1643,1643))
# pairwise Spearman correlation and binned adjusted mutual information between all gene columns
for idx_i,i in enumerate(data_in_silico.columns):
    for idx_j,j in enumerate(data_in_silico.columns):
        if i!=j:
            S_corr[idx_i,idx_j]=spearmanr(list(data_in_silico[i]),list(data_in_silico[j]))[0]
            AMI_fd[idx_i,idx_j]=ajd_mi_bin(list(data_in_silico[i]),list(data_in_silico[j]))
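
The double loop above visits every ordered pair of the 1643 genes, so each symmetric entry is computed twice. As a sketch of a faster equivalent (assuming the same column ordering as data_in_silico.columns, and using the fact that both measures are symmetric), scipy's spearmanr can rank-correlate all columns in a single call, and the AMI matrix only needs its upper triangle:

cols = list(data_in_silico.columns)
S_corr_fast = spearmanr(data_in_silico.values)[0]   # full 1643x1643 Spearman matrix in one call
AMI_fast = np.ones((len(cols), len(cols)))
for a in range(len(cols)):
    for b in range(a + 1, len(cols)):               # mirror the upper triangle instead of recomputing
        AMI_fast[a, b] = AMI_fast[b, a] = ajd_mi_bin(data_in_silico[cols[a]], data_in_silico[cols[b]])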

In [35]:
print(S_corr)


[[ 1.         -0.00225177 -0.21438459 ...,  0.02601459 -0.06175255
   0.12834364]
 [-0.00225177  1.         -0.13923577 ..., -0.17931808  0.34836044
   0.0686446 ]
 [-0.21438459 -0.13923577  1.         ...,  0.43508113 -0.20182508
  -0.05472033]
 ..., 
 [ 0.02601459 -0.17931808  0.43508113 ...,  1.         -0.41291694
   0.09282972]
 [-0.06175255  0.34836044 -0.20182508 ..., -0.41291694  1.         -0.12056149]
 [ 0.12834364  0.0686446  -0.05472033 ...,  0.09282972 -0.12056149  1.        ]]

In [34]:
print(AMI_fd)


[[ 1.          0.01878533  0.01644456 ...,  0.01831519  0.00912871
   0.00339487]
 [ 0.01878533  1.          0.03327592 ...,  0.0381582   0.04501715
   0.01816231]
 [ 0.01644456  0.03327592  1.         ...,  0.06667567  0.03245433
   0.01355769]
 ..., 
 [ 0.01831519  0.0381582   0.06667567 ...,  1.          0.06838425
   0.0034866 ]
 [ 0.00912871  0.04501715  0.03245433 ...,  0.06838425  1.          0.02390381]
 [ 0.00339487  0.01816231  0.01355769 ...,  0.0034866   0.02390381  1.        ]]

In [32]:
# reload a previously saved Spearman correlation matrix and write the AMI matrix to disk
S_corr=np.loadtxt('S_corrM.nptxt')
np.savetxt('AMI_fd.csv',AMI_fd,delimiter=",")

In [20]:
import deconvolution_methods as DM
??DM

In [16]:
# %load ND.py
def ND(mat,beta=0.90,alpha=1,control=0):
    '''
    This is a python implementation/translation of network deconvolution by MIT-KELLIS LAB
    
    
     LICENSE: MIT-KELLIS LAB
    
    
     AUTHORS:
        Algorithm was programmed by Soheil Feizi.
        Paper authors are S. Feizi, D. Marbach, M. Médard and M. Kellis
    Python implementation: Gideon Rosenthal
    
    REFERENCES:
       For more details, see the following paper:
        Network Deconvolution as a General Method to Distinguish
        Direct Dependencies over Networks
        By: Soheil Feizi, Daniel Marbach,  Muriel Médard and Manolis Kellis
        Nature Biotechnology
    
    --------------------------------------------------------------------------
     ND.m: network deconvolution
    --------------------------------------------------------------------------
    
    DESCRIPTION:
    
     USAGE:
        mat_nd = ND(mat)
        mat_nd = ND(mat,beta)
        mat_nd = ND(mat,beta,alpha,control)
    
    
     INPUT ARGUMENTS:
     mat           Input matrix, if it is a square matrix, the program assumes
                   it is a relevance matrix where mat(i,j) represents the similarity content
                   between nodes i and j. Elements of matrix should be
                   non-negative.
     optional parameters:
     beta          Scaling parameter, the program maps the largest absolute eigenvalue
                   of the direct dependency matrix to beta. It should be
                   between 0 and 1.
     alpha         fraction of edges of the observed dependency matrix to be kept in
                   deconvolution process.
     control       if 0, displaying direct weights for observed
                   interactions, if 1, displaying direct weights for both observed and
                   non-observed interactions.
    
     OUTPUT ARGUMENTS:
    
     mat_nd        Output deconvolved matrix (direct dependency matrix). Its components
                   represent direct edge weights of observed interactions.
                   Choosing top direct interactions (a cut-off) depends on the application and
                   is not implemented in this code.
    
     To apply ND on regulatory networks, follow steps explained in Supplementary notes
     1.4.1 and 2.1 and 2.3 of the paper.
     In this implementation, input matrices are made symmetric.
    
    **************************************************************************
     loading scaling and thresholding parameters
    '''
    import numpy as np
    import scipy.stats.mstats as stat
    from numpy import linalg as LA

    if beta >= 1 or beta <= 0:
        raise ValueError('beta should be in (0,1)')

    if alpha > 1 or alpha <= 0:
        raise ValueError('alpha should be in (0,1]')
    
    '''
    ***********************************
     Processing the input matrix
     diagonal values are filtered
    '''
    
    n = mat.shape[0]
    np.fill_diagonal(mat, 0)
    
    '''
    Thresholding the input matrix
    '''
    y =stat.mquantiles(mat[:],prob=[1-alpha])
    th = mat>=y
    mat_th=mat*th;

    '''
    making the matrix symmetric if it is not already
    '''
    mat_th = (mat_th+mat_th.T)/2

    
    '''
    ***********************************
    eigen decomposition
    '''
    print('Decomposition and deconvolution...')

    Dv, U = LA.eigh(mat_th)
    D = np.diag(Dv)
    # largest-magnitude positive and negative eigenvalues, clamped at zero as in the MATLAB original
    lam_n = np.abs(min(np.min(Dv), 0))
    lam_p = np.abs(max(np.max(Dv), 0))

    
    m1=lam_p*(1-beta)/beta
    m2=lam_n*(1+beta)/beta
    m=max(m1,m2)
    
    #network deconvolution
    for i in range(D.shape[0]):
        D[i,i] = (D[i,i])/(m+D[i,i])
    
    mat_new1 = np.dot(U,np.dot(D,LA.inv(U)))
    
                    
    '''
    
    ***********************************
     displaying direct weights
    '''
    if control == 0:
        ind_edges = (mat_th > 0) * 1.0
        ind_nonedges = (mat_th == 0) * 1.0
        m1 = np.max(mat * ind_nonedges)
        m2 = np.min(mat_new1)
        # shift deconvolved weights of observed edges above the largest non-edge weight
        mat_new2 = (mat_new1 + max(m1 - m2, 0)) * ind_edges + (mat * ind_nonedges)
    else:
        m2 = np.min(mat_new1)
        mat_new2 = mat_new1 + max(-m2, 0)
    
    
    '''
    ***********************************
     linearly mapping the deconvolved matrix to be between 0 and 1
    '''
    m1 = np.min(mat_new2)
    m2 = np.max(mat_new2)
    mat_nd = (mat_new2 - m1) / (m2 - m1)


    return mat_nd
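
The core of the routine above is the eigenvalue rescaling d -> d/(m + d), with m derived from beta so that the deconvolved spectrum stays within [-beta, beta] before the final linear mapping to [0, 1]. A minimal smoke test (a sketch on a random toy matrix, not the DREAM data; note that ND zeroes the diagonal of its argument in place, hence the copy):

rng = np.random.RandomState(1)
toy = np.abs(rng.rand(5, 5))
toy = (toy + toy.T) / 2.0                          # ND expects a symmetric, non-negative relevance matrix
toy_nd = ND(toy.copy(), beta=0.9)                  # copy, because ND modifies its input's diagonal
print(toy_nd.shape, toy_nd.min(), toy_nd.max())    # (5, 5) with values linearly mapped into [0, 1]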

In [17]:
ND_S= ND(S_corr)
ND_P= ND(P_corr)
ND_AMI=ND(AMI_fd)


Decomposition and deconvolution...
Decomposition and deconvolution...
Decomposition and deconvolution...

In [18]:
np.savetxt('ND_S.csv',ND_S,delimiter=",")
np.savetxt('ND_P.csv',ND_P,delimiter=",")
np.savetxt('ND_AMI.csv',ND_AMI,delimiter=",")

In [ ]:
PartCorr_S= DM.deconvolution_methods(3,S_corr)
PartCorr_P= DM.deconvolution_methods(3,P_corr)
PartCorr_AMI=DM.deconvolution_methods(3,AMI_fd)
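
The deconvolution_methods module is not shown here; judging only from the variable names, mode 3 appears to return partial correlations (an assumption, not confirmed by the module's source). One standard way to compute partial correlations from a correlation matrix C is through its (pseudo-)inverse P: rho_ij = -P_ij / sqrt(P_ii * P_jj). A minimal stand-in sketch of that formula, using a pseudo-inverse because a 1643x1643 correlation matrix estimated from 805 samples is rank-deficient:

def partial_corr_from_corr(C):
    # hypothetical helper, not the DM implementation
    P = np.linalg.pinv(C)                  # pseudo-inverse handles the singular case
    d = np.sqrt(np.abs(np.diag(P)))
    pc = -P / np.outer(d, d)
    np.fill_diagonal(pc, 0.0)              # self partial correlations are not meaningful here
    return pc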

In [25]:
# post-processing: replace NaNs with zeros and take absolute values
PartCorr_S=np.absolute(np.nan_to_num(PartCorr_S))
PartCorr_P=np.absolute(np.nan_to_num(PartCorr_P))
PartCorr_AMI=np.absolute(np.nan_to_num(PartCorr_AMI))

print(PartCorr_S)


[[ 0.          0.13000715  0.27215561 ...,  0.11628633  0.05761709
   0.74997131]
 [ 0.13000715  0.          0.48133178 ...,  0.00307377  0.07546168
   0.18489732]
 [ 0.27215561  0.48133178  0.         ...,  0.2458947   0.14180642
   1.88940185]
 ..., 
 [ 0.11628633  0.00307377  0.2458947  ...,  0.          0.06462468
   0.56901616]
 [ 0.05761709  0.07546168  0.14180642 ...,  0.06462468  0.          0.14999466]
 [ 0.74997131  0.18489732  1.88940185 ...,  0.56901616  0.14999466  0.        ]]

In [26]:
np.savetxt('PartCorr_S.csv',PartCorr_S,delimiter=",")
np.savetxt('PartCorr_P.csv',PartCorr_P,delimiter=",")
np.savetxt('PartCorr_AMI.csv',PartCorr_AMI,delimiter=",")

In [28]:
# export the expression matrix with genes as rows, without header or index
data_in_silico.transpose().to_csv('data_in_silico.csv',sep=',',header=False,index=False)

In [ ]: