Simulation experiment

This notebook compares GWAS methods (a FaST-LMM linear mixed model and linear regression, with and without principal-component covariates) on simulated SNP data with population structure: first with a continuous phenotype, and then with an ascertained case/control phenotype.

In [1]:
# set some ipython notebook properties
%matplotlib inline

# set degree of verbosity (adapt to INFO for more verbose output)
import logging
logging.basicConfig(level=logging.ERROR)

# set figure sizes
import pylab
pylab.rcParams['figure.figsize'] = (24.0, 8.0)

# set display width for pandas data frames
import pandas as pd
pd.set_option('display.width', 1000)

In [2]:
# generate continuous data
from snp_gen import snp_gen
from pysnptools.snpreader import SnpData, Bed

snpdata = snp_gen(fst=0.05, dfr=0.7, iid_count=5000, sid_count=10000, chr_count=10)
# ensure the file name is unique
import uuid
bed_fn = "data/5k_%s" % str(uuid.uuid4())[0:13]
Bed.write(snpdata, bed_fn)


WARNING:root:Because of rounding the actual number of iids is 4990 rather than the requested 5000
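
The warning just reflects how snp_gen rounds the requested iid_count. As a quick sanity check (a small sketch, using only the pysnptools Bed reader already imported above), the file that was just written can be opened lazily and its dimensions inspected:

# sanity-check the Bed file that was just written (lazy reader; nothing is loaded yet)
snpreader = Bed(bed_fn)
print(snpreader.iid_count)        # 4990, matching the rounding warning above
print(snpreader.sid_count)        # 10000
snpdata_back = snpreader.read()   # load the genotype matrix into memory
print(snpdata_back.val.shape)     # (iid_count, sid_count) numpy array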

In [3]:
# visualize data set
from cluster_data import cluster_data
cluster_data(bed_fn)
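
cluster_data visualizes the structure of the simulated genotypes. As a rough complementary view (a sketch, not part of the benchmark), the standardized genotypes can be projected onto their top principal components, which should reveal the structure that snp_gen introduces through fst and dfr:

# project individuals onto the top two genotype principal components
import numpy as np
from pysnptools.snpreader import Bed

G = Bed(bed_fn)[:, ::10].read().standardize().val   # every 10th SNP keeps the SVD quick
U, s, Vt = np.linalg.svd(G, full_matrices=False)    # columns of G are mean-centered by standardize()
pcs = U[:, :2] * s[:2]                              # PC1/PC2 coordinates of each individual
pylab.scatter(pcs[:, 0], pcs[:, 1], s=2)
pylab.xlabel("PC1"); pylab.ylabel("PC2")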



In [4]:
# example definition of methods to compare
import numpy as np
from fastlmm.association import single_snp

def execute_lmm(test_snps, pheno, G0, covar):
    """
    linear mixed model (FaST-LMM single_snp) with G0 as the genetic similarity
    matrix and the supplied covariates as fixed effects
    """

    result = {}
    fs_result = {}
    
    result["full"] = single_snp(test_snps, pheno, G0=G0, covar=covar).sort(["Chr", "ChrPos"])["PValue"].as_matrix()

    return result, fs_result

def execute_linear_regression(test_snps, pheno, G0, covar):
    """
    implementation of linear regression with and without covariates
    """
    
    result = {}
    fs_result = {}
    
    # linear regression with an intercept only ("linreg") and with the
    # supplied covariates, i.e. the principal components ("linreg_cov_pcs")
    from fastlmm.inference.linear_regression import f_regression_cov
    G_test = test_snps.read().standardize().val
    _, result["linreg"] = f_regression_cov(G_test.copy(), pheno["vals"].copy(), np.ones((len(pheno["vals"]),1)))
    _, result["linreg_cov_pcs"] = f_regression_cov(G_test.copy(), pheno["vals"].copy(), covar["vals"].copy())
    
    return result, fs_result
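
Any function with this signature can be appended to the methods list passed to run_simulation below. Judging from the two functions above, the contract is: take the test SNPs, a phenotype dict with a "vals" array, a SnpReader G0 for the similarity matrix, and a covariate dict, and return a dict mapping a label to p-values ordered by (Chr, ChrPos), plus a (possibly empty) feature-selection dict. A minimal hypothetical stub that follows the same pattern, useful only as a null baseline:

def execute_random_baseline(test_snps, pheno, G0, covar):
    """
    hypothetical null baseline: uniform random "p-values", one per test SNP,
    returned through the same (result, fs_result) interface as the methods above
    """
    result = {}
    fs_result = {}
    result["random"] = np.random.uniform(size=test_snps.sid_count)
    return result, fs_result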



In [5]:
from fastlmm.util.runner import Local
from semisynth_simulations import run_simulation
out_prefix = "results/tmp_result_"
description = "simulation"

num_causals = 10
num_repeats = 10
num_pcs = 5

#from methods import execute_lmm, execute_linear_regression, execute_dual_fs, execute_fs
methods = [execute_lmm, execute_linear_regression]

run_simulation(bed_fn, out_prefix, methods, num_causals, num_repeats, num_pcs, description, Local())


time taken to draw figure 0.174000024796
showing figure!
Out[5]:
[{'method': 'full',
  'num_trials': 10000,
  'power': ([0.001,
    0.0005,
    0.0001,
    5e-05,
    1e-05,
    5e-06,
    1e-06,
    5e-07,
    1e-07,
    5e-08,
    1e-08],
   array([ 1.        ,  0.88888889,  0.88888889,  0.88888889,  0.77777778,
           0.77777778,  0.77777778,  0.77777778,  0.66666667,  0.66666667,
           0.66666667])),
  'prc': (array([ 0.47368421,  0.44444444,  0.47058824,  0.5       ,  0.53333333,
           0.57142857,  0.61538462,  0.66666667,  0.72727273,  0.8       ,
           0.88888889,  1.        ,  1.        ,  1.        ,  1.        ]),
   array([ 1.        ,  0.88888889,  0.88888889,  0.88888889,  0.88888889,
           0.88888889,  0.88888889,  0.88888889,  0.88888889,  0.88888889,
           0.88888889,  0.88888889,  0.77777778,  0.66666667,  0.        ]),
   0.93989603638726438),
  'roc': (array([ 0.    ,  0.    ,  0.    , ...,  0.9998,  0.9999,  1.    ]),
   array([ 0.66666667,  0.77777778,  0.88888889, ...,  1.        ,
           1.        ,  1.        ]),
   0.99988888888888883),
  't1err': ([0.001, 0.0005, 0.0001, 5e-05, 1e-05, 5e-06, 1e-06],
   array([ 0.001 ,  0.0005,  0.    ,  0.    ,  0.    ,  0.    ,  0.    ]))},
 {'method': 'linreg',
  'num_trials': 10000,
  'power': ([0.001,
    0.0005,
    0.0001,
    5e-05,
    1e-05,
    5e-06,
    1e-06,
    5e-07,
    1e-07,
    5e-08,
    1e-08],
   array([ 1.        ,  1.        ,  0.77777778,  0.77777778,  0.77777778,
           0.77777778,  0.77777778,  0.77777778,  0.77777778,  0.77777778,
           0.77777778])),
  'prc': (array([ 0.01319648,  0.01174743,  0.01176471, ...,  0.03664921,
           0.03723404,  1.        ]),
   array([ 1.        ,  0.88888889,  0.88888889, ...,  0.77777778,
           0.77777778,  0.        ]),
   0.40602386116052624),
  'roc': (array([ 0.    ,  0.0181,  0.0184, ...,  0.9998,  0.9999,  1.    ]),
   array([ 0.        ,  0.77777778,  0.77777778, ...,  1.        ,
           1.        ,  1.        ]),
   0.97827222222222221),
  't1err': ([0.001, 0.0005, 0.0001, 5e-05, 1e-05, 5e-06, 1e-06],
   array([ 0.0961,  0.0837,  0.0592,  0.0526,  0.0375,  0.0336,  0.0255]))},
 {'method': 'linreg_cov_pcs',
  'num_trials': 10000,
  'power': ([0.001,
    0.0005,
    0.0001,
    5e-05,
    1e-05,
    5e-06,
    1e-06,
    5e-07,
    1e-07,
    5e-08,
    1e-08],
   array([ 0.88888889,  0.88888889,  0.88888889,  0.88888889,  0.88888889,
           0.88888889,  0.88888889,  0.88888889,  0.77777778,  0.77777778,
           0.77777778])),
  'prc': (array([ 0.02122642,  0.01891253,  0.01895735, ...,  0.875     ,
           1.        ,  1.        ]),
   array([ 1.        ,  0.88888889,  0.88888889, ...,  0.77777778,
           0.77777778,  0.        ]),
   0.87800154630744753),
  'roc': (array([  0.00000000e+00,   1.00000000e-04,   1.00000000e-04, ...,
            9.99800000e-01,   9.99900000e-01,   1.00000000e+00]),
   array([ 0.77777778,  0.77777778,  0.88888889, ...,  1.        ,
           1.        ,  1.        ]),
   0.99537777777777781),
  't1err': ([0.001, 0.0005, 0.0001, 5e-05, 1e-05, 5e-06, 1e-06],
   array([ 0.022 ,  0.0159,  0.0058,  0.0043,  0.0019,  0.0014,  0.0006]))}]
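
The returned list holds one dict per method variant; the last element of each 'roc' and 'prc' tuple is the area under the curve, and 't1err' pairs each alpha level with the empirical type-1 error. A short summary sketch, assuming the structure shown in Out[5]:

# `_` is IPython's most recent output, i.e. the list shown in Out[5];
# alternatively, assign the return value of run_simulation to a variable.
results = _
for res in results:
    fpr, tpr, roc_auc = res["roc"]              # ROC curve and its area under the curve
    precision, recall, prc_auc = res["prc"]     # precision-recall curve and its AUC
    alphas, t1err = res["t1err"]                # empirical type-1 error at each alpha
    print("%-16s ROC AUC=%.4f  PRC AUC=%.4f  type-1 error at alpha=%g: %.4f"
          % (res["method"], roc_auc, prc_auc, alphas[0], t1err[0]))

To spread the repeats over several processes, a different runner (for example LocalMultiProc from fastlmm.util.runner, if it is available in this fastlmm version) can be passed to run_simulation in place of Local().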

In [6]:
from GWAS_benchmark.semisynth_simulations import simulate_ascertained

snp_args = {"fst": 0.2, "dfr": 0.1, "sid_count": 10000, "maf_low":.05}
phenotype_args = {"genetic_var": 0.5, "noise_var": 0.5}
# TODO: make this a tuple of (function, kwargs)
from GWAS_benchmark.methods import execute_lmm, execute_linear_regression
methods = [execute_lmm] #, execute_linear_regression]

prevalence = 0.2
num_causal = 20
num_repeats = 50
iid_count= 500
description = "ascertained"

res = simulate_ascertained(methods, prevalence, iid_count, num_causal, num_repeats, description, snp_args, phenotype_args)


Because of rounding the actual number of iids is 1244 rather than the requested 1250
iid_count is 496 instead of 500 because of rounding
NaN beta value seen, may be due to an SNC (a constant SNP)
[... the same three messages repeat once for each of the 50 simulation repeats ...]
time taken to draw figure 0.134999990463
showing figure!
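
The repeated messages above come from regenerating the SNP data and phenotype for each of the 50 repeats. Conceptually, an ascertained case/control study oversamples cases relative to the population prevalence; one common way to model the case labels (a toy illustration only, not necessarily what simulate_ascertained does internally) is to threshold a continuous liability at the (1 - prevalence) quantile:

import numpy as np

def threshold_liability(liability, prevalence):
    """toy liability-threshold model: the top `prevalence` fraction become cases"""
    cutoff = np.percentile(liability, 100.0 * (1.0 - prevalence))
    return (liability > cutoff).astype(float)   # 1.0 = case, 0.0 = control

# example: roughly 20% of individuals become cases, matching prevalence = 0.2 above
cases = threshold_liability(np.random.randn(496), prevalence)
print(cases.mean())   # close to 0.2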