Skip to content

Commit

Permalink
Loosen Vector to Array to allow optimization of functions of Arrays
Browse files Browse the repository at this point in the history
  • Loading branch information
timholy committed Mar 7, 2014
1 parent 007dc77 commit 47a39d2
Show file tree
Hide file tree
Showing 5 changed files with 39 additions and 23 deletions.
26 changes: 13 additions & 13 deletions src/linesearch/hz_linesearch.jl
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ const DEFAULTSIGMA = 0.9
# Generate initial guess for step size (HZ, stage I0)
function alphainit{T}(alpha::Real,
x::Array{T},
gr::Vector, # Could loosen to Array
gr::Array,
f_x::Real,
psi0::T = 0.01)
if isnan(alpha)
Expand All @@ -92,10 +92,10 @@ end
function alphatry{T}(alpha::T,
d::Union(DifferentiableFunction,
TwiceDifferentiableFunction),
x::Vector,
s::Vector,
xtmp::Vector,
gtmp::Vector,
x::Array,
s::Array,
xtmp::Array,
gtmp::Array,
lsr::LineSearchResults,
psi1::Real = 0.2,
psi2::Real = 2,
Expand Down Expand Up @@ -167,10 +167,10 @@ end

function hz_linesearch!{T}(df::Union(DifferentiableFunction,
TwiceDifferentiableFunction),
x::Vector{T},
s::Vector,
xtmp::Vector,
g::Vector,
x::Array{T},
s::Array,
xtmp::Array,
g::Array,
lsr::LineSearchResults{T},
c::Real,
mayterminate::Bool,
Expand Down Expand Up @@ -605,11 +605,11 @@ end
# Define one-parameter function for line searches
function linefunc!(df::Union(DifferentiableFunction,
TwiceDifferentiableFunction),
x::Vector,
s::Vector,
x::Array,
s::Array,
alpha::Real,
xtmp::Vector,
g::Vector,
xtmp::Array,
g::Array,
calc_grad::Bool)
f_calls = 0
g_calls = 0
Expand Down
12 changes: 6 additions & 6 deletions src/types.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ OptimizationTrace() = OptimizationTrace(Array(OptimizationState, 0))

abstract OptimizationResults

type MultivariateOptimizationResults{T} <: OptimizationResults
type MultivariateOptimizationResults{T,N} <: OptimizationResults
method::ASCIIString
initial_x::Vector{T}
minimum::Vector{T}
initial_x::Array{T,N}
minimum::Array{T,N}
f_minimum::Float64
iterations::Int
iteration_converged::Bool
Expand Down Expand Up @@ -104,19 +104,19 @@ end

# TODO: Expose ability to do forward and backward differencing
function DifferentiableFunction(f::Function)
function g!(x::Vector, storage::Vector)
function g!(x::Array, storage::Array)
Calculus.finite_difference!(f, x, storage, :central)
return
end
function fg!(x::Vector, storage::Vector)
function fg!(x::Array, storage::Array)
g!(x, storage)
return f(x)
end
return DifferentiableFunction(f, g!, fg!)
end

function DifferentiableFunction(f::Function, g!::Function)
function fg!(x::Vector, storage::Vector)
function fg!(x::Array, storage::Array)
g!(x, storage)
return f(x)
end
Expand Down
6 changes: 3 additions & 3 deletions src/utilities/assess_convergence.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
function assess_convergence(x::Vector,
x_previous::Vector,
function assess_convergence(x::Array,
x_previous::Array,
f_x::Real,
f_x_previous::Real,
gr::Vector,
gr::Array,
xtol::Real,
ftol::Real,
grtol::Real)
Expand Down
2 changes: 1 addition & 1 deletion src/utilities/maxdiff.jl
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
function maxdiff(x::Vector, y::Vector)
function maxdiff(x::Array, y::Array)
res = 0.0
for i in 1:length(x)
delta = abs(x[i] - y[i])
Expand Down
16 changes: 16 additions & 0 deletions test/cg.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,19 @@ for (name, prob) in Optim.UnconstrainedProblems.examples
end
end
end

let
    # Regression test for optimizing a function of a 2-d Array (not a Vector):
    # minimize half the squared Frobenius distance between X and a fixed
    # random 2x2 target matrix, starting cg from a random 2x2 point.
    target = rand(2, 2)

    # Objective: half the sum of squared elementwise differences from target.
    sqdist(X) = sum((X .- target).^2) / 2

    # In-place gradient of sqdist with respect to X (simply X - target).
    function sqdist_gradient!(X, G)
        for k = 1:length(G)
            G[k] = X[k] - target[k]
        end
    end

    df = Optim.DifferentiableFunction(sqdist, sqdist_gradient!)
    results = Optim.cg(df, rand(2, 2))
    @assert Optim.converged(results)
    @assert results.f_minimum < 1e-8
end

0 comments on commit 47a39d2

Please sign in to comment.