``````

In [6]:

%pylab inline

import numpy
import matplotlib.pyplot as plt

def func(v):
    """Scalar-valued test function of a 5-vector.

    Parameters
    ----------
    v : sequence of 5 numbers
        Unpacked as (a, b, c, d, e).

    Returns
    -------
    float
        (a**2 + b/2.0) * sin(c) + d*e

    Notes
    -----
    The original used Python-2 tuple parameter unpacking in the
    signature (``def func((a,b,c,d,e))``), which PEP 3113 removed in
    Python 3; unpacking inside the body keeps the same calling
    convention (a single sequence argument) in both versions.
    """
    a, b, c, d, e = v
    x = (a**2 + b / 2.0) * numpy.sin(c) + (d * e)
    return x

vec = numpy.random.uniform(2, 10, size=5)

``````
``````

Populating the interactive namespace from numpy and matplotlib
3.94096364509

``````
``````

In [18]:

def approx_jacobian(x, func, epsilon, *args):
    """Approximate the Jacobian matrix of callable function func.

    Parameters
    ----------
    x : array_like
        The state vector at which the Jacobian matrix is desired.
    func : callable
        A scalar- or vector-valued function of the form f(x, *args).
    epsilon : float
        The perturbation used to determine the partial derivatives.
    *args
        Additional arguments passed to func.

    Returns
    -------
    numpy.ndarray
        An array of shape (lenf, lenx) where lenf is the length of the
        output of func and lenx is the number of elements in x.

    Notes
    -----
    The approximation is done using forward differences.
    """
    # numpy.asfarray was removed in NumPy 2.0; asarray(..., dtype=float)
    # is the supported equivalent.
    x0 = numpy.asarray(x, dtype=float)
    # Pass *args to the baseline evaluation too, so it matches the
    # perturbed evaluations below (the original omitted them here).
    # atleast_1d lets scalar-valued funcs share the vector-valued path.
    f0 = numpy.atleast_1d(func(*((x0,) + args)))
    # Built transposed — one row per input component — then flipped at
    # the end to the documented (lenf, lenx) orientation.
    jac = numpy.zeros([len(x0), len(f0)])
    dx = numpy.zeros(len(x0))
    for i in range(len(x0)):
        dx[i] = epsilon
        # Forward difference along the i-th coordinate.
        jac[i] = (numpy.atleast_1d(func(*((x0 + dx,) + args))) - f0) / epsilon
        dx[i] = 0.0
    return jac.transpose()

def rosen_der(x):
    """Analytic gradient of the Rosenbrock function at x.

    Parameters
    ----------
    x : numpy.ndarray
        1-D point (float dtype) at which to evaluate the gradient.

    Returns
    -------
    numpy.ndarray
        Gradient vector with the same shape as x.
    """
    xm = x[1:-1]      # interior components
    xm_m1 = x[:-2]    # left neighbors
    xm_p1 = x[2:]     # right neighbors
    # Use the module's explicit `numpy` import; the original relied on
    # `np`, which only exists because %pylab injects it into the namespace.
    der = numpy.zeros_like(x)
    der[1:-1] = 200 * (xm - xm_m1**2) - 400 * (xm_p1 - xm**2) * xm - 2 * (1 - xm)
    # Endpoints have only one neighbor each, so they get their own terms.
    der[0] = -400 * x[0] * (x[1] - x[0]**2) - 2 * (1 - x[0])
    der[-1] = 200 * (x[-1] - x[-2]**2)
    return der

# Spot-check the analytic Rosenbrock gradient at a random 10-D point.
# approx_jacobian(vec, func, 10**-3)
print(rosen_der(numpy.random.uniform(-10, 10, 10)))

``````
``````

[    813.87642749   26173.46749188 -220878.89736476  -81492.20903774
-90800.89917906   -6008.74806583  315852.335482   -340412.45883999
-17409.54332278    -669.0726721 ]

``````