In [1]:
from lnl import lnl

In [2]:
from scipy.optimize import minimize

In [6]:
# Starting point for the axion-model fit:
#   [omega_m, H0, m_chi_over_H0, log10(chi0)]
x0 = [0.10, 70,
      5, -0.5]

def fun(x):
    """Objective for the optimizer: lnl of the axion cosmology at x.

    Note chi0 is sampled in log10 space (x[3] = log10(chi0)), so any use of
    the best-fit vector elsewhere must exponentiate x[3].
    """
    cosmo = {
        'omega_m': x[0],
        'H0': x[1],
        'm_chi_over_H0': x[2],
        'chi0': 10 ** x[3],   # log10-sampled parameter
        'chidot0': 0,         # field starts at rest
        'r_s': 147.5,         # sound horizon in Mpc (fixed)
    }
    return lnl(cosmo)

In [7]:
# Intended parameter ranges. COBYLA does not take a `bounds` argument
# (hence the commented-out line below), so these are informational only.
bounds = [(0.1, 0.15), (60, 80), (0, 100), (-4, 0)]

bestfit_axion = minimize(
    fun,
    x0,
    method='COBYLA',
    # bounds=bounds,
    options={
        'tol': 1.0e-6,                    # lower bound on trust-region size
        'rhobeg': [.005, 1, .1, .1],      # initial step per parameter
        'catol': 1e-6,                    # constraint-violation tolerance
    },
).x


lnl is 113.014639
lnl is 87.561308
lnl is 87.555103
lnl is 87.555182
lnl is 87.446351
lnl is 66.357403
lnl is 49.306659
lnl is 36.488220
lnl is 33.239578
lnl is 27.746180
lnl is 24.008014
lnl is 19.971601
lnl is 18.927412
lnl is 18.667376
lnl is 18.720492
lnl is 19.997947
lnl is 18.766258
lnl is 18.537714
lnl is 18.618791
lnl is 18.601996
lnl is 18.945661
lnl is 18.502196
lnl is 18.565673
lnl is 18.495404
lnl is 18.437139
lnl is 18.423001
lnl is 18.430799
lnl is 18.415647
lnl is 18.449549
lnl is 18.418333
lnl is 18.385498
lnl is 18.361066
lnl is 18.349660
lnl is 18.328396
lnl is 18.305895
lnl is 18.284919
lnl is 18.263950
lnl is 18.244403
lnl is 18.228673
lnl is 18.210472
lnl is 18.187515
lnl is 18.164700
lnl is 18.145360
lnl is 18.125055
lnl is 18.110191
lnl is 18.095276
lnl is 18.072551
lnl is 18.050557
lnl is 18.030358
lnl is 18.030920
lnl is 18.027607
lnl is 18.005747
lnl is 17.984297
lnl is 17.964307
lnl is 17.948753
lnl is 17.931856
lnl is 17.908064
lnl is 17.885147
lnl is 17.862886
lnl is 17.841500
lnl is 17.820441
lnl is 17.802820
lnl is 17.786407
lnl is 17.764325
lnl is 17.742676
lnl is 17.721247
lnl is 17.700760
lnl is 17.685371
lnl is 17.669329
lnl is 17.646690
lnl is 17.625133
lnl is 17.603825
lnl is 17.583231
lnl is 17.562303
lnl is 17.541488
lnl is 17.520854
lnl is 17.501821
lnl is 17.484917
lnl is 17.465213
lnl is 17.445418
lnl is 17.425170
lnl is 17.404869
lnl is 17.385815
lnl is 17.364131
lnl is 17.342990
lnl is 17.322310
lnl is 17.301007
lnl is 17.279947
lnl is 17.258879
lnl is 17.238524
lnl is 17.218719
lnl is 17.197414
lnl is 17.176236
lnl is 17.155214
lnl is 17.134229
lnl is 17.113344
lnl is 17.092473
lnl is 17.071854
lnl is 17.051670
lnl is 17.031306
lnl is 17.012556
lnl is 16.991776
lnl is 16.971801
lnl is 16.951099
lnl is 16.930844
lnl is 16.910602
lnl is 16.890584
lnl is 16.870519
lnl is 16.851216
lnl is 16.830865
lnl is 16.810560
lnl is 16.790465
lnl is 16.770910
lnl is 16.751048
lnl is 16.730988
lnl is 16.711340
lnl is 16.691310
lnl is 16.674036
lnl is 16.653678
lnl is 16.633806
lnl is 16.615213
lnl is 16.595481
lnl is 16.575680
lnl is 16.556015
lnl is 16.536420
lnl is 16.516976
lnl is 16.497487
lnl is 16.479057
lnl is 16.461513
lnl is 16.441728
lnl is 16.422202
lnl is 16.402995
lnl is 16.383910
lnl is 16.364602
lnl is 16.345635
lnl is 16.326595
lnl is 16.307580
lnl is 16.288801
lnl is 16.270082
lnl is 16.251619
lnl is 16.233531
lnl is 16.214878
lnl is 16.196351
lnl is 16.177435
lnl is 16.158615
lnl is 16.139926
lnl is 16.121144
lnl is 16.102631
lnl is 16.083958
lnl is 16.065346
lnl is 16.047498
lnl is 16.030099
lnl is 16.012988
lnl is 15.994557
lnl is 15.976130
lnl is 15.957753
lnl is 15.939467
lnl is 15.922570
lnl is 15.905080
lnl is 15.888293
lnl is 15.874781
lnl is 15.857517
lnl is 15.839643
lnl is 15.821770
lnl is 15.804079
lnl is 15.788370
lnl is 15.770999
lnl is 15.753801
lnl is 15.736760
lnl is 15.719880
lnl is 15.703641
lnl is 15.698691
lnl is 15.676936
lnl is 15.659038
lnl is 15.642309
lnl is 15.629737
lnl is 15.612994
lnl is 15.595429
lnl is 15.577878
lnl is 15.560908
lnl is 15.543452
lnl is 15.526108
lnl is 15.510403
lnl is 15.493150
lnl is 15.475955
lnl is 15.459241
lnl is 15.442509
lnl is 15.426400
lnl is 15.409196
lnl is 15.392166
lnl is 15.375371
lnl is 15.358384
lnl is 15.341503
lnl is 15.324593
lnl is 15.308327
lnl is 15.292273
lnl is 15.275675
lnl is 15.259705
lnl is 15.242979
lnl is 15.226318
lnl is 15.209758
lnl is 15.193154
lnl is 15.176872
lnl is 15.160882
lnl is 15.144441
lnl is 15.128381
lnl is 15.112223
lnl is 15.100343
lnl is 15.082904
lnl is 15.066683
lnl is 15.051292
lnl is 15.035096
lnl is 15.019126
lnl is 15.002989
lnl is 14.987246
lnl is 14.971091
lnl is 14.955570
lnl is 14.939897
lnl is 14.926883
lnl is 14.910906
lnl is 14.894921
lnl is 14.879282
lnl is 14.863444
lnl is 14.847799
lnl is 14.831982
lnl is 14.819840
lnl is 14.803424
lnl is 14.787572
lnl is 14.771910
lnl is 14.756740
lnl is 14.741074
lnl is 14.725716
lnl is 14.710241
lnl is 14.695745
lnl is 14.680527
lnl is 14.665061
lnl is 14.649689
lnl is 14.634303
lnl is 14.618963
lnl is 14.603852
lnl is 14.588665
lnl is 14.573629
lnl is 14.558501
lnl is 14.543410
lnl is 14.529418
lnl is 14.514899
lnl is 14.499568
lnl is 14.484462
lnl is 14.469486
lnl is 14.454651
lnl is 14.439781
lnl is 14.425068
lnl is 14.410769
lnl is 14.396504
lnl is 14.381972
lnl is 14.367233
lnl is 14.352803
lnl is 14.338737
lnl is 14.324629
lnl is 14.310379
lnl is 14.295819
lnl is 14.281294
lnl is 14.266835
lnl is 14.252436
lnl is 14.238166
lnl is 14.223842
lnl is 14.209847
lnl is 14.195620
lnl is 14.181636
lnl is 14.167736
lnl is 14.153864
lnl is 14.140356
lnl is 14.126984
lnl is 14.113497
lnl is 14.100937
lnl is 14.088430
lnl is 14.074977
lnl is 14.061065
lnl is 14.047293
lnl is 14.033539
lnl is 14.020339
lnl is 14.008268
lnl is 13.996086
lnl is 13.982764
lnl is 13.969194
lnl is 13.955960
lnl is 13.948236
lnl is 13.933997
lnl is 13.920566
lnl is 13.907323
lnl is 13.895729
lnl is 13.882186
lnl is 13.870239
lnl is 13.856120
lnl is 13.842566
lnl is 13.829003
lnl is 13.815952
lnl is 13.802532
lnl is 13.789118
lnl is 13.775861
lnl is 13.762457
lnl is 13.750331
lnl is 13.739102
lnl is 13.725173
lnl is 13.712000
lnl is 13.699654
lnl is 13.686750
lnl is 13.673750
lnl is 13.661272
lnl is 13.649364
lnl is 13.638243
lnl is 13.630672
lnl is 13.615733
lnl is 13.603450
lnl is 13.591406
lnl is 13.579502
lnl is 13.572387
lnl is 13.560566
lnl is 13.551052
lnl is 13.537054
lnl is 13.527416
lnl is 13.521594
lnl is 13.517563
lnl is 13.536010
lnl is 13.513949
lnl is 13.500288
lnl is 13.499754
lnl is 13.495831
lnl is 13.484694
lnl is 13.473483
lnl is 13.466133
lnl is 13.464696
lnl is 13.470384
lnl is 13.448133
lnl is 13.437288
lnl is 13.436054
lnl is 13.441034
lnl is 13.423604
lnl is 13.411967
lnl is 13.400306
lnl is 13.387520
lnl is 13.375298
lnl is 13.362645
lnl is 13.350088
lnl is 13.337595
lnl is 13.326227
lnl is 13.313959
lnl is 13.301393
lnl is 13.288992
lnl is 13.276841
lnl is 13.264587
lnl is 13.252293
lnl is 13.240825
lnl is 13.228969
lnl is 13.216803
lnl is 13.204720
lnl is 13.193700
lnl is 13.181534
lnl is 13.169437
lnl is 13.157623
lnl is 13.145571
lnl is 13.133768
lnl is 13.122080
lnl is 13.110180
lnl is 13.098218
lnl is 13.086832
lnl is 13.075161
lnl is 13.063267
lnl is 13.052044
lnl is 13.041528
lnl is 13.030489
lnl is 13.018978
lnl is 13.007298
lnl is 12.995579
lnl is 12.983999
lnl is 12.972339
lnl is 12.960726
lnl is 12.949179
lnl is 12.937575
lnl is 12.926408
lnl is 12.914975
lnl is 12.903758
lnl is 12.892351
lnl is 12.881053
lnl is 12.869845
lnl is 12.859060
lnl is 12.847986
lnl is 12.836856
lnl is 12.825875
lnl is 12.815648
lnl is 12.806003
lnl is 12.795161
lnl is 12.784167
lnl is 12.773369
lnl is 12.764302
lnl is 12.755009
lnl is 12.744402
lnl is 12.734857
lnl is 12.730841
lnl is 12.734713
lnl is 12.736130
lnl is 12.719598
lnl is 12.718687
lnl is 12.726016
lnl is 12.710674
lnl is 12.699420
lnl is 12.690004
lnl is 12.681288
lnl is 12.672212
lnl is 12.669813
lnl is 12.657390
lnl is 12.646442
lnl is 12.639425
lnl is 12.631538
lnl is 12.622892
lnl is 12.625026
lnl is 12.620971
lnl is 12.611010
lnl is 12.604713
lnl is 12.602649
lnl is 12.597284
lnl is 12.586979
lnl is 12.581057
lnl is 12.578674
lnl is 12.598740
lnl is 12.577500
lnl is 12.564763
lnl is 12.568494
lnl is 12.559743
lnl is 12.557279
lnl is 12.557296
lnl is 12.559176
lnl is 12.551801
lnl is 12.547448
lnl is 12.542644
lnl is 12.541425
lnl is 12.535948
lnl is 12.530518
lnl is 12.525117
lnl is 12.519737
lnl is 12.514372
lnl is 12.509023
lnl is 12.503688
lnl is 12.498375
lnl is 12.493137
lnl is 12.488028
lnl is 12.482720
lnl is 12.477449
lnl is 12.472410
lnl is 12.467128
lnl is 12.463418
lnl is 12.459056
lnl is 12.454102
lnl is 12.448817
lnl is 12.443539
lnl is 12.438271
lnl is 12.433017
lnl is 12.427805
lnl is 12.422570
lnl is 12.417353
lnl is 12.412137
lnl is 12.406935
lnl is 12.401783
lnl is 12.396602
lnl is 12.391450
lnl is 12.386394
lnl is 12.381175
lnl is 12.375988
lnl is 12.371624
lnl is 12.366676
lnl is 12.361404
lnl is 12.356192
lnl is 12.351040
lnl is 12.345943
lnl is 12.340928
lnl is 12.335775
lnl is 12.330723
lnl is 12.325594
lnl is 12.320624
lnl is 12.315736
lnl is 12.310569
lnl is 12.305448
lnl is 12.300356
lnl is 12.295533
lnl is 12.290457
lnl is 12.285382
lnl is 12.280303
lnl is 12.275373
lnl is 12.270405
lnl is 12.265427
lnl is 12.260499
lnl is 12.255790
lnl is 12.251755
lnl is 12.246482
lnl is 12.241499
lnl is 12.236565
lnl is 12.231961
lnl is 12.227899
lnl is 12.222896
lnl is 12.218438
lnl is 12.213702
lnl is 12.209010
lnl is 12.204380
lnl is 12.200544
lnl is 12.195343
lnl is 12.190483
lnl is 12.185823
lnl is 12.180980
lnl is 12.176178
lnl is 12.171810
lnl is 12.167056
lnl is 12.162706
lnl is 12.158727
lnl is 12.153824
lnl is 12.149151
lnl is 12.144593
lnl is 12.140460
lnl is 12.135660
lnl is 12.131960
lnl is 12.127022
lnl is 12.122186
lnl is 12.117479
lnl is 12.113159
lnl is 12.108325
lnl is 12.103509
lnl is 12.098777
lnl is 12.095147
lnl is 12.090724
lnl is 12.085834
lnl is 12.081001
lnl is 12.076190
lnl is 12.071393
lnl is 12.066666
lnl is 12.061918
lnl is 12.057171
lnl is 12.052389
lnl is 12.047804
lnl is 12.043154
lnl is 12.040296
lnl is 12.035746
lnl is 12.030973
lnl is 12.026126
lnl is 12.021263
lnl is 12.016420
lnl is 12.011594
lnl is 12.006781
lnl is 12.002027
lnl is 11.997233
lnl is 11.992447
lnl is 11.987722
lnl is 11.983043
lnl is 11.978361
lnl is 11.973957
lnl is 11.969245
lnl is 11.964523
lnl is 11.959790
lnl is 11.955331
lnl is 11.950622
lnl is 11.945930
lnl is 11.941216
lnl is 11.936566
lnl is 11.931883
lnl is 11.927305
lnl is 11.923109
lnl is 11.918339
lnl is 11.913667
lnl is 11.909031
lnl is 11.904379
lnl is 11.900228
lnl is 11.895789
lnl is 11.891078
lnl is 11.886412
lnl is 11.882509
lnl is 11.877878
lnl is 11.873248
lnl is 11.868604
lnl is 11.864092
lnl is 11.859511
lnl is 11.854965
lnl is 11.850403
lnl is 11.845787
lnl is 11.841195
lnl is 11.836683
lnl is 11.832572
lnl is 11.827889
lnl is 11.823277
lnl is 11.818697
lnl is 11.814131
lnl is 11.809610
lnl is 11.805051
lnl is 11.800502
lnl is 11.795951
lnl is 11.791492
lnl is 11.786937
lnl is 11.782430
lnl is 11.777891
lnl is 11.773355
lnl is 11.768821
lnl is 11.764302
lnl is 11.759785
lnl is 11.755272
lnl is 11.750820
lnl is 11.746399
lnl is 11.742055
lnl is 11.737574
lnl is 11.733085
lnl is 11.728605
lnl is 11.724139
lnl is 11.719678
lnl is 11.715716
lnl is 11.711217
lnl is 11.706795
lnl is 11.702353
lnl is 11.697933
lnl is 11.693510
lnl is 11.689665
lnl is 11.685283
lnl is 11.680879
lnl is 11.676443
lnl is 11.672015
lnl is 11.667625
lnl is 11.663438
lnl is 11.659209
lnl is 11.654818
lnl is 11.650630
lnl is 11.646227
lnl is 11.641836
lnl is 11.637539
lnl is 11.633153
lnl is 11.628772
lnl is 11.624436
lnl is 11.620089
lnl is 11.615725
lnl is 11.611514
lnl is 11.607162
lnl is 11.602831
lnl is 11.599153
lnl is 11.595246
lnl is 11.590874
lnl is 11.586526
lnl is 11.582212
lnl is 11.577909
lnl is 11.573617
lnl is 11.569470
lnl is 11.565139
lnl is 11.560835
lnl is 11.556692
lnl is 11.552387
lnl is 11.548115
lnl is 11.543865
lnl is 11.539702
lnl is 11.535423
lnl is 11.531172
lnl is 11.526913
lnl is 11.522703
lnl is 11.518598
lnl is 11.514376
lnl is 11.510158
lnl is 11.505936
lnl is 11.501705
lnl is 11.497548
lnl is 11.493331
lnl is 11.489121
lnl is 11.484921
lnl is 11.480773
lnl is 11.476715
lnl is 11.472503
lnl is 11.468296
lnl is 11.464091
lnl is 11.459894
lnl is 11.455702
lnl is 11.451514
lnl is 11.447338
lnl is 11.443158
lnl is 11.438998
lnl is 11.434828
lnl is 11.430678
lnl is 11.426520
lnl is 11.422378
lnl is 11.418235
lnl is 11.414101
lnl is 11.409978
lnl is 11.405853
lnl is 11.401756
lnl is 11.397642
lnl is 11.393577
lnl is 11.389613
lnl is 11.385515
lnl is 11.381482
lnl is 11.377416
lnl is 11.373647
lnl is 11.370560
lnl is 11.366408
lnl is 11.362211
lnl is 11.358089
lnl is 11.354015
lnl is 11.349937
lnl is 11.345883
lnl is 11.341821
lnl is 11.337803
lnl is 11.333833
lnl is 11.329765
lnl is 11.325737
lnl is 11.321932
lnl is 11.317944
lnl is 11.313891
lnl is 11.309892
lnl is 11.305984
lnl is 11.302027
lnl is 11.298033
lnl is 11.294045
lnl is 11.290143
lnl is 11.286254
lnl is 11.282293
lnl is 11.278665
lnl is 11.274749
lnl is 11.270789
lnl is 11.266853
lnl is 11.262902
lnl is 11.259003
lnl is 11.255026
lnl is 11.251092
lnl is 11.247238
lnl is 11.243368
lnl is 11.239513
lnl is 11.235602
lnl is 11.231677
lnl is 11.227761
lnl is 11.224570
lnl is 11.220882
lnl is 11.216802
lnl is 11.212919
lnl is 11.209256
lnl is 11.205580
lnl is 11.202871
lnl is 11.198646
lnl is 11.194787
lnl is 11.190943
lnl is 11.187087
lnl is 11.183193
lnl is 11.179411
lnl is 11.175655
lnl is 11.171739
lnl is 11.167837
lnl is 11.163964
lnl is 11.160098
lnl is 11.156258
lnl is 11.152421
lnl is 11.148767
lnl is 11.144918
lnl is 11.141157
lnl is 11.137301
lnl is 11.133464
lnl is 11.129620
lnl is 11.125788
lnl is 11.121974
lnl is 11.118147
lnl is 11.114461
lnl is 11.110642
lnl is 11.106829
lnl is 11.103079
lnl is 11.099274
lnl is 11.095462
lnl is 11.091665
lnl is 11.087874
lnl is 11.084076
lnl is 11.080287
lnl is 11.076506
lnl is 11.072737
lnl is 11.068993
lnl is 11.065243
lnl is 11.061488
lnl is 11.057772
lnl is 11.054106
lnl is 11.050389
lnl is 11.046641
lnl is 11.042891
lnl is 11.039176
lnl is 11.035483
lnl is 11.032008
lnl is 11.028218
lnl is 11.024475
lnl is 11.020746
lnl is 11.017018
lnl is 11.013310
lnl is 11.009593
lnl is 11.005882
lnl is 11.002241
lnl is 10.998584
lnl is 10.994870
lnl is 10.991174
lnl is 10.987526
lnl is 10.983848
lnl is 10.980221
lnl is 10.976651
lnl is 10.973106
lnl is 10.969422
lnl is 10.965748
lnl is 10.962085
lnl is 10.958442
lnl is 10.954807
lnl is 10.951201
lnl is 10.947607
lnl is 10.944050
lnl is 10.940405
lnl is 10.936754
lnl is 10.933114
lnl is 10.929484
lnl is 10.925865
lnl is 10.922730
lnl is 10.919119
lnl is 10.915480
lnl is 10.911878
lnl is 10.908324
lnl is 10.905088
lnl is 10.902112
lnl is 10.898420
lnl is 10.894825
lnl is 10.891269
lnl is 10.887669
lnl is 10.884081
lnl is 10.880510
lnl is 10.876938
lnl is 10.873361
lnl is 10.869869
lnl is 10.866286
lnl is 10.862721
lnl is 10.859252
lnl is 10.855683
lnl is 10.852132
lnl is 10.848773
lnl is 10.845418
lnl is 10.842300
lnl is 10.838815
lnl is 10.835320
lnl is 10.831829
lnl is 10.828386
lnl is 10.824920
lnl is 10.821519
lnl is 10.818407
lnl is 10.814894
lnl is 10.811573
lnl is 10.808270
lnl is 10.805473
lnl is 10.803942
lnl is 10.799941
lnl is 10.796482
lnl is 10.793911
lnl is 10.792039
lnl is 10.788794
lnl is 10.786806
lnl is 10.782880
lnl is 10.779395
lnl is 10.775979
lnl is 10.772926
lnl is 10.769463
lnl is 10.765983
lnl is 10.762510
lnl is 10.759045
lnl is 10.755590
lnl is 10.752149
lnl is 10.748888
lnl is 10.745420
lnl is 10.741979
lnl is 10.738551
lnl is 10.735288
lnl is 10.731847
lnl is 10.728430
lnl is 10.725064
lnl is 10.721696
lnl is 10.718308
lnl is 10.715475
lnl is 10.712211
lnl is 10.709277
lnl is 10.706282
lnl is 10.703672
lnl is 10.704209
lnl is 10.703057
lnl is 10.700187
lnl is 10.699013
lnl is 10.699812
lnl is 10.701087
lnl is 10.695982
lnl is 10.694677
lnl is 10.693605
lnl is 10.692652
lnl is 10.697599
lnl is 10.691721
lnl is 10.687829
lnl is 10.687732
lnl is 10.686801
lnl is 10.683326
lnl is 10.680241
lnl is 10.678456
lnl is 10.675855
lnl is 10.672172
lnl is 10.669081
lnl is 10.666058
lnl is 10.663739
lnl is 10.660298
lnl is 10.657029
lnl is 10.653746
lnl is 10.650494
lnl is 10.647264
lnl is 10.644067
lnl is 10.640932
lnl is 10.637807
lnl is 10.634835
lnl is 10.631828
lnl is 10.629065
lnl is 10.626286
lnl is 10.623792
lnl is 10.621512
lnl is 10.619482
lnl is 10.618206
lnl is 10.612749
lnl is 10.609562
lnl is 10.609039
lnl is 10.610470
lnl is 10.605612
lnl is 10.603069
lnl is 10.599655
lnl is 10.596457
lnl is 10.593875
lnl is 10.591756
lnl is 10.591950
lnl is 10.590779
lnl is 10.588457
lnl is 10.588349
lnl is 10.587922
lnl is 10.588819
lnl is 10.586780
lnl is 10.584597
lnl is 10.582811
lnl is 10.581429
lnl is 10.580057
lnl is 10.578825
lnl is 10.579366
lnl is 10.578742
lnl is 10.577107
lnl is 10.575729
lnl is 10.574497
lnl is 10.573454
lnl is 10.573514
lnl is 10.572908
lnl is 10.571358
lnl is 10.570163
lnl is 10.569109
lnl is 10.568659
lnl is 10.566988
lnl is 10.565365
lnl is 10.563753
lnl is 10.562171
lnl is 10.561242
lnl is 10.559797
lnl is 10.558558
lnl is 10.556897
lnl is 10.555264
lnl is 10.553636
lnl is 10.552011
lnl is 10.550388
lnl is 10.548767
lnl is 10.547151
lnl is 10.545536
lnl is 10.543925
lnl is 10.542317
lnl is 10.540713
lnl is 10.539147
lnl is 10.537566
lnl is 10.535969
lnl is 10.534446
lnl is 10.532851
lnl is 10.531291
lnl is 10.529734
lnl is 10.528554
lnl is 10.526876
lnl is 10.525246
lnl is 10.523641
lnl is 10.522043
lnl is 10.520448
lnl is 10.518876
lnl is 10.517285
lnl is 10.515704
lnl is 10.514136
lnl is 10.512546
lnl is 10.511114
lnl is 10.509796
lnl is 10.508203
lnl is 10.506615
lnl is 10.505064
lnl is 10.503468
lnl is 10.501874
lnl is 10.500282
lnl is 10.498691
lnl is 10.497103
lnl is 10.495518
lnl is 10.493931
lnl is 10.492356
lnl is 10.490772
lnl is 10.489189

In [8]:
chi2 = 2 * fun(bestfit_axion)
print 'chi2 is ', chi2
print 'bestfit is', bestfit_axion
x = bestfit_axion
from get_background import get_hubble_rate
da, h = get_hubble_rate(**{		'omega_m': x[0],
					'H0': x[1],
					'm_chi_over_H0' : x[2],
					'chi0' : x[3],
					'chidot0': 0,
					'r_s':147.5
					 })
z = linspace(0,20, 10000)
plot(z, da(z)/147.5)
scatter(array([0.35, 0.57, 2.4]), array([6.875, 9.191, 10.8]))
figure()
plot(z, h(z)*3e5)
y = array([12895, 14231, 3e5/9])/147.5
scatter(array([0.35, 0.57, 2.4]), y)


lnl is 10.489189
chi2 is  20.9783773498
bestfit is [  0.13803337  70.00685416   4.9794276   -0.68307001]
/Users/follin/projects/anaconda/lib/python2.7/site-packages/scipy/integrate/quadpack.py:321: IntegrationWarning: The maximum number of subdivisions (50) has been achieved.
  If increasing the limit yields no improvement it is advised to analyze 
  the integrand in order to determine the difficulties.  If the position of a 
  local difficulty can be determined (singularity, discontinuity) one will 
  probably gain from splitting up the interval and calling the integrator 
  on the subranges.  Perhaps a special-purpose integrator should be used.
  warnings.warn(msg, IntegrationWarning)
Out[8]:
<matplotlib.collections.PathCollection at 0x107183a90>

In [9]:
# Starting point for the LCDM comparison fit: [omega_m, H0].
x0 = [1.39245792e-01, 7.22385516e+01]

def fun(x):
    """Objective for the LCDM fit: lnl with the axion field switched off.

    The field is effectively disabled by setting chi0 = 0 and giving the
    mass a tiny placeholder value.
    """
    params = {
        'omega_m': x[0],
        'H0': x[1],
        'm_chi_over_H0': .0001,  # negligible-mass placeholder
        'chi0': 0,               # no axion field
        'chidot0': 0,
        'r_s': 147.5,            # sound horizon in Mpc (fixed)
    }
    return lnl(params)

In [10]:
# Intended ranges for [omega_m, H0]; COBYLA has no `bounds` argument, so
# they are informational only (hence the commented-out line).
bounds = [(0.1, 0.15), (60, 80)]

bestfit_lcdm = minimize(
    fun,
    x0,
    method='COBYLA',
    # bounds=bounds,
    options={
        'tol': 1.0e-6,          # lower bound on trust-region size
        'rhobeg': [.005, 1],    # initial step per parameter
        'catol': 1e-6,          # constraint-violation tolerance
    },
).x


lnl is 6.010635
lnl is 7.811573
lnl is 6.010640
lnl is 7.843757
lnl is 6.466785
lnl is 6.123910
lnl is 6.010635
lnl is 6.124415
lnl is 6.039048
lnl is 6.017726
lnl is 6.012409
lnl is 6.010635
lnl is 6.012408
lnl is 6.011078
lnl is 6.010746
lnl is 6.010662
lnl is 6.010635
lnl is 6.010662
lnl is 6.010642
lnl is 6.010636
lnl is 6.010635
lnl is 6.010635
lnl is 6.010635
lnl is 6.010635
lnl is 6.010635

In [11]:
chi2 = 2 * fun(bestfit_lcdm)
print 'chi2 is ', chi2
print 'bestfit is', bestfit_lcdm
x = bestfit_lcdm
from get_background import get_hubble_rate
da_lcdm, h_lcdm = get_hubble_rate(**{	'omega_m': x[0],
					'H0': x[1],
					'm_chi_over_H0' : .0001,
					'chi0' : 0,
					'chidot0': 0,
					'r_s':147.5
					 })
z = linspace(0,20, 10000)
plot(z, da_lcdm(z)/147.5)
scatter(array([0.35, 0.57, 2.4]), array([6.875, 9.191, 10.8]))
figure()
plot(z, h_lcdm(z)*3e5)
y = array([12895, 14231, 3e5/9])/147.5
scatter(array([0.35, 0.57, 2.4]), y)


lnl is 6.010635
chi2 is  12.0212694148
bestfit is [  0.13924479  72.2385516 ]
Out[11]:
<matplotlib.collections.PathCollection at 0x10741cb10>

In [12]:
# Ratio of the axion-model Hubble rate to the LCDM one over 0 < z < 5.
z = linspace(0,5,1000)
plot(z, h(z)/h_lcdm(z))


Out[12]:
[<matplotlib.lines.Line2D at 0x10718eb50>]

In [13]:
plot(z, da(z)/da_lcdm(z))


-c:1: RuntimeWarning: invalid value encountered in divide
Out[13]:
[<matplotlib.lines.Line2D at 0x10753c510>]

In [14]:
from scipy.optimize import show_options
# Dump the per-method option docs for `minimize` (COBYLA is the one used above).
show_options(solver='minimize')#, method = 'COBYLA')


**Minimize options**

*BFGS* options:

    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.
    norm : float
        Order of norm (Inf is max, -Inf is min).
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.

*Nelder-Mead* options:

    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    ftol : float
        Relative error in ``fun(xopt)`` acceptable for convergence.
    maxfev : int
        Maximum number of function evaluations to make.

*Newton-CG* options:

    xtol : float
        Average relative error in solution `xopt` acceptable for
        convergence.
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.

*CG* options:

    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.
    norm : float
        Order of norm (Inf is max, -Inf is min).
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.

*Powell* options:

    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    ftol : float
        Relative error in ``fun(xopt)`` acceptable for convergence.
    maxfev : int
        Maximum number of function evaluations to make.
    direc : ndarray
        Initial set of direction vectors for the Powell method.

*Anneal* options:

    ftol : float
        Relative error in ``fun(x)`` acceptable for convergence.
    schedule : str
        Annealing schedule to use. One of: 'fast', 'cauchy' or
        'boltzmann'.
    T0 : float
        Initial Temperature (estimated as 1.2 times the largest
        cost-function deviation over random points in the range).
    Tf : float
        Final goal temperature.
    maxfev : int
        Maximum number of function evaluations to make.
    maxaccept : int
        Maximum changes to accept.
    boltzmann : float
        Boltzmann constant in acceptance test (increase for less
        stringent test at each temperature).
    learn_rate : float
        Scale constant for adjusting guesses.
    quench, m, n : float
        Parameters to alter fast_sa schedule.
    lower, upper : float or ndarray
        Lower and upper bounds on `x`.
    dwell : int
        The number of times to search the space at each temperature.

*L-BFGS-B* options:

    ftol : float
        The iteration stops when ``(f^k -
        f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= ftol``.
    gtol : float
        The iteration will stop when ``max{|proj g_i | i = 1, ..., n}
        <= gtol`` where ``pg_i`` is the i-th component of the
        projected gradient.
    maxcor : int
        The maximum number of variable metric corrections used to
        define the limited memory matrix. (The limited memory BFGS
        method does not store the full hessian but uses this many terms
        in an approximation to it.)
    maxiter : int
        Maximum number of function evaluations.

*TNC* options:

    ftol : float
        Precision goal for the value of f in the stoping criterion.
        If ftol < 0.0, ftol is set to 0.0 defaults to -1.
    xtol : float
        Precision goal for the value of x in the stopping
        criterion (after applying x scaling factors).  If xtol <
        0.0, xtol is set to sqrt(machine_precision).  Defaults to
        -1.
    gtol : float
        Precision goal for the value of the projected gradient in
        the stopping criterion (after applying x scaling factors).
        If gtol < 0.0, gtol is set to 1e-2 * sqrt(accuracy).
        Setting it to 0.0 is not recommended.  Defaults to -1.
    scale : list of floats
        Scaling factors to apply to each variable.  If None, the
        factors are up-low for interval bounded variables and
        1+|x] fo the others.  Defaults to None
    offset : float
        Value to subtract from each variable.  If None, the
        offsets are (up+low)/2 for interval bounded variables
        and x for the others.
    maxCGit : int
        Maximum number of hessian*vector evaluations per main
        iteration.  If maxCGit == 0, the direction chosen is
        -gradient if maxCGit < 0, maxCGit is set to
        max(1,min(50,n/2)).  Defaults to -1.
    maxiter : int
        Maximum number of function evaluation.  if None, `maxiter` is
        set to max(100, 10*len(x0)).  Defaults to None.
    eta : float
        Severity of the line search. if < 0 or > 1, set to 0.25.
        Defaults to -1.
    stepmx : float
        Maximum step for the line search.  May be increased during
        call.  If too small, it will be set to 10.0.  Defaults to 0.
    accuracy : float
        Relative precision for finite difference calculations.  If
        <= machine_precision, set to sqrt(machine_precision).
        Defaults to 0.
    minfev : float
        Minimum function value estimate.  Defaults to 0.
    rescale : float
        Scaling factor (in log10) used to trigger f value
        rescaling.  If 0, rescale at each iteration.  If a large
        value, never rescale.  If < 0, rescale is set to 1.3.

*COBYLA* options:

    tol : float
        Final accuracy in the optimization (not precisely guaranteed).
        This is a lower bound on the size of the trust region.
    rhobeg : float
        Reasonable initial changes to the variables.
    maxfev : int
        Maximum number of function evaluations.
    catol : float
        Absolute tolerance for constraint violations (default: 1e-6).

*SLSQP* options:

    ftol : float
        Precision goal for the value of f in the stopping criterion.
    eps : float
        Step size used for numerical approximation of the jacobian.
    maxiter : int
        Maximum number of iterations.

*dogleg* options:

    initial_trust_radius : float
        Initial trust-region radius.
    max_trust_radius : float
        Maximum value of the trust-region radius. No steps that are longer
        than this value will be proposed.
    eta : float
        Trust region related acceptance stringency for proposed steps.
    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.

*trust-ncg* options:

    See dogleg options.

In [14]:


In [ ]: