ForwardDiff.jl is one of several AD tools in the JuliaDiff project, which also provides reverse-accumulation and dual-number packages. ForwardDiff uses dual numbers internally to accumulate derivatives, rather than parsing a function's source code.
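To make the dual-number idea concrete, here is a minimal hand-rolled sketch (MyDual and g are made-up names for illustration only; ForwardDiff's internal Dual type is far more general). A dual number carries a value together with a derivative coefficient, and each elementary operation propagates both, so evaluating g on a dual input yields g and g' at once without ever inspecting g's source code:

struct MyDual
    val::Float64   # primal value
    der::Float64   # derivative coefficient
end
Base.:+(a::MyDual, b::MyDual) = MyDual(a.val + b.val, a.der + b.der)
Base.:*(a::MyDual, b::MyDual) = MyDual(a.val * b.val, a.val * b.der + a.der * b.val)
Base.sin(a::MyDual) = MyDual(sin(a.val), cos(a.val) * a.der)

g(x) = x*x + sin(x)
g(MyDual(1.0, 1.0))   # .val == g(1), .der == g'(1) = 2 + cos(1)

ForwardDiff automates exactly this bookkeeping, with a richer Dual type and many more operator rules.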
In [1]:
using ForwardDiff
using PyPlot
In [2]:
f1(x) = x[1]*x[2] + sin(x[1]);
In [3]:
f1([1;2])
Out[3]:
Using ForwardDiff.jl, it's super simple to get a gradient function and evaluate it; Df1_check below is the gradient of f1 worked out by hand, for comparison:
In [4]:
Df1 = x -> ForwardDiff.gradient(f1, x);
Df1_check(x) = [x[2] + cos(x[1]); x[1]];
In [5]:
Df1([1;2])
Out[5]:
In [6]:
Df1_check([1;2])
Out[6]:
In [7]:
function f2(x)
    # Broadcast exp so the helper works elementwise on a vector argument
    function localfun(x)
        return exp.(x)
    end
    # Branching is fine: whichever branch actually executes is differentiated
    if x[3] > 0
        y = x[3] * x[1:2]
    else
        y = localfun(x[1:2])
    end
    return y
end
Out[7]:
In [8]:
Df2 = x -> ForwardDiff.jacobian(f2, x);
In [9]:
Df2([1;2;0])   # x[3] = 0, so the exp branch is taken
Out[9]:
In [10]:
Df2([1;2;pi])  # x[3] = pi > 0, so the scaling branch is taken
Out[10]:
In [31]:
using Random                    # provides seed! (srand in older Julia)

function f3(x)
    Random.seed!(271828)        # fix the RNG seed so the random-length loop is repeatable
    y = zero(x)                 # start from zero of the same type as x so dual numbers propagate
    while randn() < 2
        y += x^randn()
    end
    return y
end
Out[31]:
In [32]:
Df3 = n -> ForwardDiff.derivative(f3, n);
In [33]:
f3(10.)
Out[33]:
In [34]:
Df3(10.)
Out[34]:
In [26]:
using Statistics                # provides mean

function f4(n)
    function collatz(n)
        # https://en.wikipedia.org/wiki/Collatz_conjecture
        # even test, equivalent to n % 2 == 0
        if round(n/2) == n/2
            return n/2
        else
            return 3*n + 1
        end
    end
    seq = [n]
    while n > 1
        n = collatz(n)
        push!(seq, n)
    end
    return mean(seq)            # average of the Collatz sequence starting at n
end
Out[26]:
In [27]:
Df4 = n -> ForwardDiff.derivative(f4, n);
In [28]:
nvec = 1:50;
f4vec = [f4(n) for n in nvec];
plot(nvec, f4vec)
Out[28]:
In [29]:
f4(27)
Out[29]:
In [30]:
Df4(27)
Out[30]: