In [12]:
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
from math import sqrt, pi
import scipy.optimize
In [13]:
def non_convex(x):
    """Non-convex 1-D test function: cos(14.5x - 0.3) + x^2 + 0.2x.

    The cosine term creates many local minima on top of a shallow parabola.
    Works elementwise on numpy arrays.
    """
    oscillation = np.cos(14.5 * x - 0.3)
    parabola = x * (x + 0.2)
    return oscillation + parabola
# Visualize the objective on [-1, 1]; note the several local minima.
x = np.linspace(-1,1, 100)
plt.plot(x, non_convex(x))
plt.show()
In [14]:
# Global optimization by basin hopping: repeated local minimizations from
# randomly perturbed start points. niter sets how many hops to attempt.
ret = scipy.optimize.basinhopping(non_convex, x0=1, niter=1000)
print(ret)
Notice that you have to specify how long to try: basin hopping has no convergence criterion, so it cannot tell on its own when it is finished. Try a few different iteration counts and see whether the answer changes.
In [15]:
# Re-plot the objective and mark the minimum that basin hopping found
# ('rp' = red pentagon marker).
x = np.linspace(-1,1, 100)
plt.plot(x, non_convex(x))
plt.plot(ret.x, ret.fun, 'rp')
plt.show()
In [103]:
# Brent's method root-finding; sin changes sign on [-pi/2, pi/2], as required.
scipy.optimize.brentq(np.sin, a=-pi / 2, b=pi / 2)
Out[103]:
Let's try with $\sin(x)$
In [104]:
# This raises ValueError on purpose: brentq requires f(a) and f(b) to have
# opposite signs, but sin is positive at both 0.25 and 0.5.
scipy.optimize.brentq(lambda x: np.sin(x), a=0.25, b = 0.5)
Must have different signs!
In [2]:
# sin changes sign across 0, so the bracket is valid and brentq returns the root x = 0.
scipy.optimize.brentq(np.sin, a=-0.5, b=0.5)
Out[2]:
In [107]:
# Weak-acid equilibrium: find the extent of reaction x with K = x^2 / (1 - x).
k = 32.5

def Q(x):
    """Reaction quotient x^2 / (1 - x); singular as x -> 1."""
    return x * x / (1 - x)

def obj(x):
    """Residual Q(x) - k; zero at the equilibrium extent of reaction."""
    return Q(x) - k

scipy.optimize.root(obj, x0=0)
Out[107]:
In [108]:
# Deliberate failure: obj(1) divides by zero (Q is singular at x = 1),
# so brentq cannot evaluate the right endpoint.
scipy.optimize.brentq(obj, a=0, b=1)
In [109]:
# Pull the right endpoint just inside the singularity so both endpoints are
# finite; brentq then finds the equilibrium extent of reaction.
scipy.optimize.brentq(obj, a=0, b=1 - 10**-9)
Out[109]:
So the equilibrium concentration of the acid is $1 - x = 0.03\textrm{M}$
Let's try minimization with $\sin x$ bound on the interval $[-\pi, \pi]$
Type: Bounded or Bounded and Constrained Minimization
Discrete/Continuous: Continuous
Dimensions: N
Derivative: optional
Convex: yes
Python: minimize with bounds argument
Notes: bounds look like bounds=[(min_1, max_1), (min_2, max_2), ...]. One for each dimension
In [105]:
# Bounded 1-D minimization: sin attains its minimum of -1 at x = -pi/2,
# which lies inside the bounds [-pi, pi].
scipy.optimize.minimize(np.sin, x0=[0], bounds=[(-pi, pi)])
Out[105]:
Minimize $\sin x\cos y$ with both $x$ and $y$ bounded to the interval $[-\pi, \pi]$
In [106]:
# 2-D bounded minimization of sin(x)cos(y): one (min, max) pair per dimension.
def sin_cos(v):
    """Objective sin(v[0]) * cos(v[1]) for the 2-D bounded example."""
    return np.sin(v[0]) * np.cos(v[1])

scipy.optimize.minimize(sin_cos, x0=[0, 0], bounds=[(-pi, pi), (-pi, pi)])
Out[106]:
We're saying we know all but one parameter, $p$, so we'll find a $p$ such that entropy is maximized. This is a common technique in statistical learning, machine learning, and statistical mechanics. Much of my own research uses this idea of solving underdetermined problems by maximizing entropy.
In [4]:
from scipy.special import comb
import numpy as np

def binomial(n, p, N):
    """P(n) for a binomial(N, p) distribution; vectorized in n via numpy."""
    failures = (1 - p) ** (N - n)
    successes = p ** n
    return comb(N, n) * failures * successes

# Sanity check: the function must work elementwise on numpy arrays.
test = np.arange(3)
print(binomial(test, 0.2, 5))
In [6]:
# Entropy maximization is phrased as *minimizing* the negative entropy.
def negative_entropy(x):
    """Negative Shannon entropy of a binomial(N=5, p=x) distribution.

    Returns sum_n P(n) log P(n); x must be strictly inside (0, 1) so that
    no probability is exactly zero (log(0) would give nan).
    """
    N = 5
    probs = binomial(np.arange(N + 1), x, N)
    return probs @ np.log(probs)

negative_entropy(0.2)
Out[6]:
In [7]:
# Maximize entropy over p; the bounds keep p strictly inside (0, 1) so that
# log(0) never occurs. The optimum is p = 0.5, the most uncertain distribution.
scipy.optimize.minimize(negative_entropy, x0=0.5, bounds=[(10**-9, 1 - 10**-9)])
Out[7]:
In [11]:
# np.frompyfunc wraps the scalar-only negative_entropy so it applies elementwise.
np_negative_entropy = np.frompyfunc(negative_entropy, 1, 1)
# 25 sample values of p strictly inside (0, 1).
x = np.linspace(0.01, 0.99, 25)
plt.plot(x, np_negative_entropy(x))
plt.xlabel('$p$')
plt.ylabel('S')
plt.show()
So each $p$ value corresponds to an entire binomial distribution. Let's see how to plot each of them
In [9]:
import matplotlib.cm as cm  # import some colors

def entropy_to_01(x):
    """Rescale an entropy value from the range [0.1, 1.5] onto [0, 1] for coloring."""
    return (x - 0.1) / (1.5 - 0.1)

color_grad = cm.jet  # my color scheme
n = np.linspace(0, 5, 6)
# Plot the binomial for different p values, colored by their entropy.
# BUG FIX: the loop variable was named `pi`, which shadowed `pi` imported from
# math and left pi == 0.99 afterwards, silently corrupting every later cell
# that computes 2 * pi. Renamed to `p`.
for p in np.linspace(0.01, 0.99, 25):
    plt.plot(n, binomial(n, p, 5), lw=1.5, color=color_grad(entropy_to_01(-negative_entropy(p))))
plt.show()
Consider $x$, $y$ which lie on the circle $x^2 + y^2 = 1$. Find the $x$, $y$ which is as close as possible to the point $(3,4)$.
In [16]:
# Draw the unit circle and the external target point (3, 4).
x = np.linspace(-1, 1, 100)
plt.figure(figsize=(6, 6))  # square figure so the circle looks round
# The circle is drawn as its upper and lower semicircles.
upper = np.sqrt(1 - x**2)
plt.plot(x, upper, color='blue')
plt.plot(x, -upper, color='blue')
plt.plot(3, 4, 'ro')  # the target point
plt.xlim(-5, 5)
plt.ylim(-5, 5)
plt.show()
In [17]:
from math import sqrt

def dist(x1, x2):
    """Euclidean distance between two points given as numpy arrays."""
    diff = x1 - x2
    return sqrt((diff * diff).sum())

# Quick sanity check on two sample points.
x1 = np.array([1, 5])
x2 = np.array([-2, 4])
print(dist(x1, x2))
In [18]:
def circle(x):
    """Unit-circle constraint: zero when the point x = (x[0], x[1]) lies on the circle."""
    return x[0] * x[0] + x[1] * x[1] - 1

print(circle([0, 1]))
In [19]:
# NOTE: this redefines `obj` from the acid-equilibrium section above.
def obj(x):
    """Distance from point x to the fixed target (3, 4) — the quantity to minimize."""
    target = np.array([3, 4])
    return dist(x, target)

print(obj(np.array([2, 4])))
In [20]:
# Equality constraint: the solution must satisfy circle(x) == 0,
# i.e. lie exactly on the unit circle.
my_constraints = {'type': 'eq', 'fun': circle}
result = scipy.optimize.minimize(obj, x0=[0,1], constraints=my_constraints)
print(result)
In [21]:
# Show the circle, the target point (red), and the constrained optimum (green).
x = np.linspace(-1, 1, 100)
plt.figure(figsize=(6, 6))  # square figure
upper_half = np.sqrt(1 - x**2)
plt.plot(x, upper_half, color='blue')
plt.plot(x, -upper_half, color='blue')
plt.plot(3, 4, 'ro')
plt.plot(result.x[0], result.x[1], 'go')
plt.xlim(-5, 5)
plt.ylim(-5, 5)
In [22]:
# A dictionary stores several named values under one variable.
date = dict(year=2015, month='October', day=3)
print(date['year'])
In [23]:
# Values are retrieved by key.
print(date['month'])
In [24]:
# Dictionary entries can be reassigned in place.
date['month'] = 'Footober'
print(date['month'])
Python dictionaries are used when you want to store multiple named values together.
In [25]:
# 'eq' means the constraint function must equal zero at the solution.
my_constraints = {'type': 'eq', 'fun': circle}
In [26]:
my_constraints = {'type': 'ineq', 'fun': circle} #An inequality constraint: circle(x) >= 0 at the solution
Inequality means the constraint function's value is required to be non-negative at the solution.
In [27]:
from math import atan, sqrt, sin

# Plot the "flower" r(theta) = 1 + 0.3 sin(7 (theta + 4)) and the target point.
x = np.linspace(-1, 1, 100)
plt.figure(figsize=(8, 8))  # square figure, polar axes
ax = plt.subplot(111, polar=True)
# Use np.pi explicitly: the bare name `pi` can be clobbered by an earlier
# loop variable, which would silently shrink the angular range.
theta = np.linspace(0, 2 * np.pi, 500)
r = 1 + 0.3 * np.sin(7 * (theta + 4))
ax.plot(theta, r)
# FIX: the prose below and the matching later figure both call this "the red
# dot" and use 'ro'; the marker here was 'bo' (blue). Made it red for consistency.
ax.plot(atan(2 / 1.), sqrt(2**2 + 1**2), 'ro')
ax.set_rmax(3)
plt.show()
We want to find a point on the flower that is as close as possible to the red dot. This is non-convex because there are many local minima.
In [28]:
def circle_constraint(s):
    """Radial residual between point s = (x, y) and the flower curve.

    Returns r_flower(theta) - r_point: zero exactly when s lies on the flower
    r(theta) = 1 + 0.3 sin(7 (theta + 4)).
    """
    x = s[0]
    y = s[1]
    r = sqrt(x**2 + y**2)
    # BUG FIX: atan(y / x) gives the wrong quadrant for x < 0 and divides by
    # zero at x = 0; arctan2 is correct for every quadrant. r(theta) has
    # period 2*pi/7 in a way that is consistent over the full circle, so any
    # quadrant-correct branch of theta yields the right radius.
    theta = np.arctan2(y, x)
    return 1 + 0.3 * np.sin(7 * (theta + 4)) - r

print(circle_constraint(np.array([1,1])))
In [29]:
def dist(x1, x2):
    """Euclidean distance between two points (numpy arrays).

    NOTE(review): duplicate of the earlier `dist` definition — same behavior.
    """
    delta = x1 - x2
    return sqrt(np.sum(delta * delta))
In [30]:
# NOTE: redefines `obj` once more, now targeting the point (1, 2).
def obj(x):
    """Distance from point x to the fixed target (1, 2)."""
    target = np.array([1, 2])
    return dist(x, target)

print(obj(np.array([0, 1])))
Basin-hopping is like a wrapper around one of the minimization techniques we've learned. So to communicate with the technique which basin-hopping is using, we have to wrap our constraint dictionary in another dictionary.
In [31]:
# Basin hopping wraps a local minimizer; the constraint dict must be passed
# through to that inner minimizer via minimizer_kwargs.
my_constraints = {'type':'eq', 'fun':circle_constraint}
minimizer_args = {'constraints':my_constraints}
ret = scipy.optimize.basinhopping(obj, x0=[1,1], niter=1000, minimizer_kwargs=minimizer_args)
In [32]:
# Inspect the solution; the constraint value should be ~0 (point on the flower).
print(ret)
print(circle_constraint(ret.x))
print(ret.x)
In [33]:
# Final figure: the flower, the target point (red), and the basin-hopping optimum.
x = np.linspace(-1, 1, 100)
plt.figure(figsize=(8, 8))  # square figure, polar axes
ax = plt.subplot(111, polar=True)
# Use np.pi, not the bare `pi` name, which an earlier loop variable may have shadowed.
theta = np.linspace(0, 2 * np.pi, 1500)
r = 1 + 0.3 * np.sin(7 * (theta + 4))
ax.plot(theta, r)
ax.plot(atan(2. / 1.), sqrt(2**2 + 1**2), 'ro')
# BUG FIX: atan(y / x) is quadrant-wrong for x < 0 and fails at x = 0;
# arctan2 places the found point at the correct angle.
ax.plot(np.arctan2(ret.x[1], ret.x[0]), sqrt(ret.x[0] ** 2 + ret.x[1] ** 2), 'o')
ax.set_rmax(3)
plt.show()