In [1]:
import sys

sys.path.append('../../..')

In [2]:
import numpy
import scipy.stats

from lib.msra_loss import MSRALossFunctionAbs

In [3]:
class MCLossFunction(MSRALossFunctionAbs):
    """Monte Carlo estimator of the shortfall risk for the loss
    l(x) = sum_i x_i + 0.5 * sum_i ((x_i)^+)^2."""

    def shortfall_risk(self, m=None):
        # Estimate E[l(X - m)] by averaging over the sample rows of self.x.
        m = self._check_argument(m)
        x_minus_m = numpy.subtract(self.x, m)

        # First term: E[sum_i (X_i - m_i)].
        mean_sum = numpy.mean(x_minus_m.sum(axis=1))

        # Second term's expectation: E[sum_i ((X_i - m_i)^+)^2].
        pos_part_squared = numpy.square(numpy.maximum(x_minus_m, 0.))
        mean_sum_sq = numpy.mean(pos_part_squared.sum(axis=1))

        return mean_sum + 0.5 * mean_sum_sq

    def shortfall_risk_jac(self, m):
        # Componentwise E[(X_i - m_i)^+] + 1. Note the sign: this equals
        # minus the gradient of shortfall_risk with respect to m, which is
        # apparently the convention MSRALossFunctionAbs expects for the
        # inequality-constraint machinery used below.
        m = self._check_argument(m)
        x_minus_m = numpy.subtract(self.x, m)

        pos_part = numpy.maximum(x_minus_m, 0.)
        mean_pos_part = numpy.mean(pos_part, axis=0)

        return mean_pos_part + 1.

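The class above is a plain Monte Carlo estimator of the shortfall risk E[l(X - m)] for the loss l(x) = sum_i x_i + 0.5 * sum_i ((x_i)^+)^2, averaging over the rows of the sample matrix self.x. For a standard normal component the positive-part moments have closed forms, E[(Z - m)^+] = phi(m) - m*(1 - Phi(m)) and E[((Z - m)^+)^2] = (1 + m^2)*(1 - Phi(m)) - m*phi(m), which gives a self-contained one-dimensional sanity check of the estimator. The cell below is an added illustration (the point m1 and the sample size are arbitrary), not part of the original run.

In [ ]:
# Illustrative sanity check: Monte Carlo vs. closed form in one dimension.
m1 = 0.3
phi, Phi = scipy.stats.norm.pdf, scipy.stats.norm.cdf

# Exact E[Z - m] + 0.5 * E[((Z - m)^+)^2] for Z ~ N(0, 1).
second_moment = (1. + m1 ** 2) * (1. - Phi(m1)) - m1 * phi(m1)
sr_exact = -m1 + 0.5 * second_moment

z = numpy.random.randn(100000)
sr_mc = numpy.mean(z - m1) + 0.5 * numpy.mean(numpy.maximum(z - m1, 0.) ** 2)

print(sr_exact, sr_mc)  # should agree to roughly 1e-2
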
In [4]:
M = 100000  # number of Monte Carlo samples

mu = [0., 0.]
sigma = [[1., 0.], [0., 1.]]  # two i.i.d. standard normal components

rv = scipy.stats.multivariate_normal(mean=mu, cov=sigma, allow_singular=True)
X = rv.rvs(size=M)  # sample matrix of shape (M, 2)

c = 1.  # risk level c appearing in the shortfall constraint

loss = MCLossFunction(X, c)
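
With X bivariate standard normal, the shortfall risk at m = 0 is exactly 0.5, since E[sum_i X_i] = 0 and E[((Z)^+)^2] = 1/2 per component. The gradient identity noted in the class comment can also be confirmed by central differences. The cell below is an added check, assuming shortfall_risk accepts an array argument as in the class definition above.

In [ ]:
# Plausibility check (illustrative): at m = 0 the exact risk is
# 0 + 0.5 * 2 * 0.5 = 0.5.
print(loss.shortfall_risk(numpy.zeros(loss.dim)))  # expect ~0.5

# Central differences should reproduce -shortfall_risk_jac, since the
# sample average is a smooth function of m for this quadratic loss.
m0 = numpy.array([0.2, -0.1])
h = 1e-5
fd_grad = numpy.array([(loss.shortfall_risk(m0 + h * e)
                        - loss.shortfall_risk(m0 - h * e)) / (2. * h)
                       for e in numpy.eye(loss.dim)])
print(-fd_grad, loss.shortfall_risk_jac(m0))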

In [5]:
from scipy.optimize import minimize

maxiter = 3500

In [6]:
guess = 1000. * numpy.ones(loss.dim)
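
The starting point is deliberately far out: with every component of m at 1000 the estimated shortfall risk is far below the level c, so the inequality constraint holds strictly at the guess (assuming, as the results below suggest, that ineq_constraint(m) is c - shortfall_risk(m)). A quick added feasibility check:

In [ ]:
# Illustrative check: a strictly positive value means the initial
# guess is feasible for SLSQP's 'ineq' convention fun(x) >= 0.
print(loss.ineq_constraint(guess))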

In [7]:
cons = ({'type': 'ineq',
         'fun' : lambda x: loss.ineq_constraint(x),
         'jac' : lambda x: loss.ineq_constraint_jac(x)})
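
SLSQP treats an 'ineq' constraint as satisfied when fun(x) >= 0. The lambda wrappers above are harmless but unnecessary, since minimize simply calls fun and jac with the current iterate; the bound methods can be passed directly. An equivalent, slightly leaner form:

In [ ]:
cons = ({'type': 'ineq',
         'fun' : loss.ineq_constraint,
         'jac' : loss.ineq_constraint_jac})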

In [8]:
res = minimize(loss.objective, guess, 
               jac=loss.objective_jac, 
               constraints=cons, 
               method='SLSQP',
               options={'maxiter': maxiter})
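
SLSQP can also terminate on the iteration cap or a failed line search, so it is prudent to guard on res.success before reusing res.x. A minimal added check:

In [ ]:
# Fail loudly if the optimizer did not actually converge.
assert res.success, res.message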

In [9]:
%%timeit

res = minimize(loss.objective, guess, 
               jac=loss.objective_jac, 
               constraints=cons, 
               method='SLSQP',
               options={'maxiter': maxiter})


10 loops, best of 3: 107 ms per loop

In [10]:
print(res)
print()
print(loss.ineq_constraint(res.x))
print(numpy.mean(res.x))


     fun: -0.34487032385756555
     jac: array([ 1.,  1.,  0.])
 message: 'Optimization terminated successfully.'
    nfev: 10
     nit: 10
    njev: 10
  status: 0
 success: True
       x: array([-0.17141535, -0.17345498])

-3.9881090208e-08
-0.172435161929
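
The two components of res.x agree up to Monte Carlo noise, as they must for i.i.d. margins, and the constraint value of about -4e-08 shows the constraint is active, i.e. shortfall_risk(res.x) sits on the level c. For this symmetric Gaussian case the optimum can also be pinned down analytically: with m_1 = m_2 = m*, the active constraint reads 2 * (-m + 0.5 * ((1 + m^2) * (1 - Phi(m)) - m * phi(m))) = c, a scalar equation that a bracketing root finder solves directly. The cross-check below is an added illustration using this closed form.

In [ ]:
from scipy.optimize import brentq
from scipy.stats import norm

def symmetric_constraint(m, c=1.):
    # shortfall_risk((m, m)) - c in closed form for two i.i.d. N(0, 1) margins.
    second_moment = (1. + m ** 2) * (1. - norm.cdf(m)) - m * norm.pdf(m)
    return 2. * (-m + 0.5 * second_moment) - c

m_star = brentq(symmetric_constraint, -5., 5.)
print(m_star)  # ~ -0.173, matching res.x componentwise up to MC error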