In [ ]:
    
# initial package installation (one-time setup)
# NOTE(review): on Julia >= 0.7 a `using Pkg` is required before calling Pkg.add —
# TODO confirm the Julia version this notebook targets.
Pkg.add("Convex")   # disciplined convex programming modeling layer
Pkg.add("SCS")      # open-source first-order conic solver backend
Pkg.add("Gadfly")   # plotting
Pkg.add("Interact") # interactive widgets (@manipulate)
    
In [28]:
    
# Make the Convex.jl module available
using Convex
using SCS # first order splitting conic solver [O'Donoghue et al., 2014]
set_default_solver(SCSSolver(verbose=0)) # could also use Gurobi, Mosek, CPLEX, ...
# Generate random problem data
# m < n: the linear system is underdetermined, so regularization is needed
# to recover the sparse nonnegative signal.
m = 50;  n = 100
A = randn(m, n)
x♮ = sprand(n, 1, .5) # true (sparse nonnegative) parameter vector; ~50% of entries nonzero
noise = .1*randn(m)    # gaussian noise
b = A*x♮ + noise      # noisy linear observations
# Create a (column vector) variable of size n.
x = Variable(n)
# nonnegative elastic net with regularization:
# least squares fit + ridge (λ‖x‖²) + lasso (μ‖x‖₁) penalties, with x ≥ 0
λ = 1
μ = 1
problem = minimize(norm(A * x - b)^2 + λ*norm(x)^2 + μ*norm(x, 1), 
                   x >= 0)
# Solve the problem by calling solve!
solve!(problem)
println("problem status is ", problem.status) # :Optimal, :Infeasible, :Unbounded etc.
println("optimal value is ", problem.optval)
    
    
In [29]:
    
using Gadfly, Interact
# Interactively explore how the two regularization weights shape the solution.
# BUG FIX: the second slider was named `mu`, but the objective below reads `μ`,
# so moving the `mu` slider had no effect (the stale global μ = 1 was used).
# The loop variable must be μ so the slider value reaches the objective.
@manipulate for λ=0:.1:5, μ=0:.1:5
    problem = minimize(norm(A * x - b)^2 + λ*norm(x)^2 + μ*norm(x, 1), 
                   x >= 0)
    solve!(problem)
    # histogram of the recovered coefficients for the current (λ, μ)
    plot(x=x.value, Geom.histogram(minbincount = 20), 
             Scale.x_continuous(minvalue=0, maxvalue=3.5))#, Scale.y_continuous(minvalue=0, maxvalue=6))
end
    
    
    
    Out[29]:
In [30]:
    
# Scalar variable (an optimization variable of size 1x1)
x = Variable()
    
    Out[30]:
In [31]:
    
# (Column) vector variable with 4 entries
y = Variable(4)
    
    Out[31]:
In [32]:
    
# Matrix variable of size 4x4
Z = Variable(4, 4)
    
    Out[32]:
Convex.jl allows you to use a wide variety of functions on variables and on expressions to form new expressions.
In [34]:
    
x + 2x
    
    Out[34]:
In [35]:
    
e = y[1] + logdet(Z) + sqrt(x) + minimum(y)
    
    Out[35]:
In [38]:
    
e.children[2]
    
    Out[38]:
In [39]:
    
x <= 0
    
    Out[39]:
In [40]:
    
x^2 <= sum(y)
    
    Out[40]:
In [42]:
    
# build a matrix expression affine in Z and in the entries of y ...
M = Z 
for i = 1:length(y)
    M += rand(size(Z))*y[i] # random fixed coefficient matrix times scalar y[i]
end
# ... and constrain it to be positive semidefinite (⪰ is the PSD cone order)
M ⪰ 0
    
    Out[42]:
In [12]:
    
# assemble a small problem: convex objective, one constraint
x = Variable()
y = Variable(4)
objective = 2*x + 1 - sqrt(sum(y))  # convex (affine minus concave sqrt)
constraint = x >= maximum(y)        # convex: affine >= convex is valid DCP
p = minimize(objective, constraint)
    
    Out[12]:
In [43]:
    
# solve the problem
solve!(p)
p.status
    
    Out[43]:
In [44]:
    
x.value
    
In [45]:
    
# can evaluate expressions directly (plugs in the variables' current values)
evaluate(objective)
    
    Out[45]:
Convex.jl can call any MathProgBase-compatible solver suited to your problem class.
To solve the problem with a different solver, just import the solver package and pass a solver instance to the `solve!` method, e.g.:
using Mosek
solve!(p, MosekSolver())
In [23]:
    
# Generate random problem data
m = 50;  n = 100
A = randn(m, n)
x♮ = sprand(n, 1, .5) # true (sparse nonnegative) parameter vector
noise = .1*randn(m)    # gaussian noise
b = A*x♮ + noise      # noisy linear observations
# Create a (column vector) variable of size n.
x = Variable(n)
# nonnegative elastic net with regularization
λ = 1
μ = 1
problem = minimize(norm(A * x - b)^2 + λ*norm(x)^2 + μ*norm(x, 1), 
                   x >= 0)
@time solve!(problem)
# BUG FIX: λ is a plain number, so its value (1) was baked into `problem`'s
# expression tree at construction time — reassigning λ alone does not change
# the problem, and the second solve would just re-solve the identical problem.
# Rebuild the problem with the new weight; the variable x still holds its last
# value, so the warmstarted solve starts from the previous solution.
λ = 1.5
problem = minimize(norm(A * x - b)^2 + λ*norm(x)^2 + μ*norm(x, 1), 
                   x >= 0)
@time solve!(problem, warmstart = true)
    
    
In [17]:
    
# affine expressions: sums and indexing of variables
x = Variable(4)
# BUG FIX: `Variable (2)` had a space before the parenthesis, which Julia
# rejects ("space before \"(\" not allowed in function call syntax").
y = Variable(2)
sum(x) + y[2]
    
    Out[17]:
In [18]:
    
2*maximum(x) + 4*sum(y) - sqrt(y[1] + x[1]) - 7 * minimum(x[2:4])
    
    Out[18]:
In [19]:
    
# not dcp compliant: concave (log) + convex (square) has unknown curvature,
# so Convex.jl cannot verify convexity — this is shown deliberately as a warning example
log(x) + x^2
    
    Out[19]:
    
In [20]:
    
# composition rule: $f$ is convex increasing and $g$ is convex => f(g(x)) convex
square(pos(x))
    
    Out[20]:
In [21]:
    
# composition rule: $f$ is convex decreasing and $g$ is concave => f(g(x)) convex
invpos(sqrt(x))
    
    Out[21]:
In [22]:
    
# composition rule: $f$ is concave increasing and $g$ is concave => f(g(x)) concave
sqrt(sqrt(x))
    
    Out[22]: