Setup

The following few cells set up the code we'll need.

In [1]:
using ApproxFun # if this package isn't installed, run Pkg.add("ApproxFun")
In [2]:
include("simple_gradient_descent.jl")
Out[2]:
gradient_descent_2
In [3]:
using Optim
Optim.UnconstrainedProblems.examples["Rosenbrock"]
Out[3]:
Optim.UnconstrainedProblems.OptimizationProblem("Rosenbrock",Optim.UnconstrainedProblems.rosenbrock,Optim.UnconstrainedProblems.rosenbrock_gradient!,Optim.UnconstrainedProblems.rosenbrock_hessian!,[0.0,0.0],[1.0,1.0],true,true)
In [4]:
# These functions combine f and g! into a single function that returns
# both the objective value and the gradient at x.
function opt_combine(x, f, g!)
    g = Vector{Float64}(length(x))  # allocate storage for the gradient
    g!(x, g)                        # fill g in place
    return (f(x), g)
end
function opt_problem(p::Optim.UnconstrainedProblems.OptimizationProblem)
    return x -> opt_combine(x, p.f, p.g!)
end
# convenience method: look up a named example problem
opt_problem(s::AbstractString) = opt_problem(
    Optim.UnconstrainedProblems.examples[s])

# Here's an example 
fg = opt_problem("Rosenbrock")
f, g = fg([0.0,0.0]) # show the function and gradient
Out[4]:
(1.0,[-2.0,0.0])
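
As a quick sanity check (not part of the original notebook), the gradient returned by fg can be compared against a centered finite-difference approximation of the objective at the same point; the fd_gradient helper below is hypothetical and only assumes the fg closure defined above.

# Hypothetical helper: centered finite-difference gradient of the objective
# value (the first component returned by fg)
function fd_gradient(fg, x; h=1e-6)
    g = similar(x)
    for i in eachindex(x)
        e = zeros(length(x)); e[i] = h
        # difference quotient of the objective along coordinate i
        g[i] = (fg(x + e)[1] - fg(x - e)[1]) / (2h)
    end
    return g
end
fd_gradient(fg, [0.0, 0.0])  # should be close to the analytic gradient [-2.0, 0.0]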
In [9]:
using Plots
plotlyjs()
ezcontour(x, y, f) = begin
    # build the grid of evaluation points: x varies across columns, y across rows
    X = repmat(x', length(y), 1)
    Y = repmat(y, 1, length(x))
    # evaluate f at each grid point (x, y)
    Z = map((xi, yi) -> f([xi, yi]), X, Y)
    plot(x, y, Z, st=:contour)
end
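
For example (the box and grid spacing below are just illustrative choices), the Rosenbrock objective from the problem above can be plotted over a region containing its minimizer at [1.0, 1.0]:

# Illustrative use of ezcontour; the ranges are arbitrary choices
rosen = Optim.UnconstrainedProblems.examples["Rosenbrock"].f
ezcontour(collect(-1.5:0.05:1.5), collect(-0.5:0.05:2.0), rosen)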