In [1]:
import tensorflow as tf
import numpy as np

import itertools # for fast looping
import time # for timing loop
from iminuit import Minuit
from scipy.optimize import minimize

In [2]:
x = tf.placeholder(tf.float32, shape = (None))
w = tf.Variable(1.)
p = tf.Variable(0.)
a = tf.Variable(1.)

# Build the computational graph (a function is a graph!)
f = a * tf.sin(w*x + p)

In [3]:
def eval_func(f):
    
    with tf.Session() as sess:
        # Create TF session and initialise variables
        init = tf.global_variables_initializer()
        sess.run(init)
        
        # Run calculation of y by feeding data to tensor x
        return sess.run(f, feed_dict = { x : [1., 2., 3., 4.] })

In [4]:
y_data = eval_func(f)
print(y_data)


[ 0.84147096  0.90929741  0.14112    -0.7568025 ]

Other Examples


In [5]:
mu = 0.
sigma = 3.

# pdf of Gaussian of variable x with mean mu and standard deviation sigma
dist = tf.contrib.distributions.Normal(loc=mu, scale=sigma)

In [6]:
with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    
    # Cumulative distribution funtion of pdf evalauted at x=1
    sess.run(dist.cdf(1.))
    
    # Evaluate the pdf at x=0
    sess.run(dist.prob(0.))
    
    sample_data = sess.run(dist.sample(10))
    
    y_data = sess.run(dist.prob(sample_data))

In [7]:
print(y_data)


[ 0.12275931  0.11710991  0.09628345  0.13253607  0.11454652  0.0119292
  0.10539442  0.08797453  0.13228333  0.12415046]

In [8]:
def sample_model(model, n_samples):
    x = model.sample(n_samples)
    y = model.prob(x)
    
    with tf.Session() as sess:
        init = tf.global_variables_initializer()
        sess.run(init)
        return sess.run(x), sess.run(y)

In [9]:
test_x, test_y = sample_model(dist, 100)

In [10]:
print(test_x)
print(test_y)


[-0.15095371 -1.72796488  3.68452597  6.00855017 -1.90036845  2.21521759
 -0.2099297  -5.33368587  4.3518219   1.2742157   0.22073576  3.10999203
 -2.91720247 -4.96751451 -6.65307808  1.69550872 -1.5565964  -2.35418344
  2.95723152 -1.3394407   0.23221141  1.27179468  3.40411878 -2.58556747
  0.8326112  -1.32705569 -5.21293211 -3.42798471  1.29671311 -3.8755374
 -0.3775202  -5.31450081  3.62241459 -5.20924664 -2.33888936  3.51585603
  3.18749762  0.07891298 -2.52393961 -0.91975915 -5.28871346 -1.18423879
  1.17885232 -0.26625836  1.37547731 -3.81738138  1.34128082 -0.40326306
 -4.56505775  7.30844212  0.15224279  4.81127548 -0.26438731  3.37008977
  0.28073943  3.30063128 -3.39587069  4.62578583  3.96464252  0.27808604
 -3.62360096  0.34765559 -0.34262559 -0.60462564  2.26074076 -2.36454177
  0.76187122 -1.60278618  2.54460907 -8.04659939  0.82805765 -2.84289026
 -0.15960288 -0.67983669 -0.25671631  0.75115442 -1.85519266 -4.70066833
 -1.05734432 -2.32298136  1.05323911 -0.72019267 -3.90225005  0.8506642
  3.08191347 -3.17732143 -2.1929965   1.32945418 -2.53144979  5.75971079
  1.21981454  0.9983741   3.37324953  5.14251614 -5.11341763  3.2789104
 -3.3894248   1.99055636 -6.83252764  0.56594664]
[ 0.08646944  0.0183047   0.12966521  0.1282081   0.10909559  0.09652932
  0.05088519  0.01106634  0.07064892  0.1012982   0.01661846  0.04378203
  0.13280499  0.12879156  0.13293804  0.12469175  0.07791191  0.07436842
  0.11841778  0.13146384  0.03870311  0.1260381   0.1317956   0.0151082
  0.07208879  0.13277644  0.1082635   0.11704901  0.06390198  0.12583219
  0.06218906  0.11512426  0.13278188  0.08260816  0.03387494  0.13263725
  0.08390611  0.1245744   0.06298031  0.04955095  0.13023351  0.03960036
  0.08117835  0.11092733  0.10723278  0.12717985  0.09728557  0.04014325
  0.10822286  0.07371283  0.12642875  0.13240916  0.01142457  0.13172695
  0.03077989  0.09613007  0.11101575  0.028079    0.13295041  0.12538011
  0.04186556  0.10521404  0.0235424   0.1227861   0.13112055  0.0767953
  0.02823305  0.12928753  0.09859751  0.13296312  0.11054991  0.0883957
  0.11865954  0.02932186  0.13174218  0.09668152  0.0766444   0.12357482
  0.10561705  0.09922032  0.0811692   0.12761149  0.095386    0.10730815
  0.12497756  0.09957343  0.12185752  0.1222527   0.00844319  0.06273917
  0.10481539  0.13221242  0.13283321  0.12817015  0.09407915  0.07680036
  0.11387476  0.13297862  0.02473459  0.12641691]

In [11]:
def normal_log(X, mu, sigma, TYPE=np.float32):
    return -tf.log(tf.constant(np.sqrt(2 * np.pi), dtype=TYPE) * sigma) - \
        tf.pow(X - mu, 2) / (tf.constant(2, dtype=TYPE) * tf.pow(sigma, 2))

In [12]:
def nll(X, mu, sigma, TYPE=np.float32):
    return -tf.reduce_sum(normal_log(X, mu, sigma, TYPE))

In [13]:
# MLE attempt
TYPE = np.float64

n_events = 1000000 # time of fit is very dependent on n_events
n_trials = 10

sess = tf.Session()

def func(mu_, sigma_):
    return sess.run(nll_, feed_dict={mu: mu_, sigma: sigma_})

# Gilles example
def func_scipy(x):
    return sess.run(nll_, feed_dict={mu: x[0], sigma: x[1]})


start_time = time.time()
for _ in itertools.repeat(None, n_trials):
    data = np.random.normal(0.5, 1.5, n_events).astype(TYPE)
    
    # Define data as a variable so that it will be cached
    X = tf.Variable(data, name='data')
    
    mu = tf.Variable(TYPE(1), name='mu')
    sigma = tf.Variable(TYPE(2), name='sigma')
    
    init = tf.global_variables_initializer()
    sess.run(init)
    
    nll_ = nll(X, mu, sigma, TYPE)
    
    # To guard against excessive output
    if n_trials > 1:
        print_level = 0
    else:
        print_level = 1
    
#     minuit = Minuit(func, mu_=10, sigma_=10, error_mu_=0.5, error_sigma_=0.5,
#            limit_mu_=(-1, 100), limit_sigma_=(0, 100), errordef=1, print_level=print_level)
#     minuit.migrad()
#     minuit.minos()
    
    # Gilles example
    ret = minimize(func_scipy, x0=[10, 10], bounds=[(-1, 100), (0.00001, 100)])
    #print(ret.x, ret.fun) # x is an array of fit values, fun is the value of the function passed
    
end_time = time.time()
time_duration = end_time - start_time
mean_fit_time = time_duration/n_trials

# mu_ = minuit.values['mu_']
# sigma_ = minuit.values['sigma_']

sess.close()

#print("mu = {}, sigma = {}".format(mu_, sigma_))
print("mu = {}, sigma = {}".format(ret.x[0], ret.x[1]))

print("\nfit {} times in {} seconds".format(n_trials, time_duration))
print("The average fit time is {} seconds".format(mean_fit_time))


mu = 0.4981923162521545, sigma = 1.500902383935564

fit 10 times in 4.295571565628052 seconds
The average fit time is 0.4295571565628052 seconds

In [ ]: