Merged. Changes from 1 commit (of 31 commits).

Commits:
fb74645
Initial commit and skeleton for NonlinearMinimizer
Jan 30, 2015
a7ee059
Merge branch 'qp' of https://github.com/debasish83/breeze into nlqp
Jan 31, 2015
679bb5f
Skeleton for approximate eigen value calculation
Feb 2, 2015
3d80b31
Copyright message for NonlinearMinimizer
Feb 2, 2015
3781e37
merge with qp branch; NOTICE file updated
Feb 3, 2015
536886d
Initial checkin for PowerMethod and PowerMethodTest;Eigen value extra…
Feb 3, 2015
f98bd80
Compilation fixes to LBFGS eigenMin and eigenMax
Feb 4, 2015
ae795b6
Power Method merged; NonlinearMinimizer now supports preserving histo…
Feb 11, 2015
51b4224
Generating PQN.CompactHessian from BFGS.ApproximateInverseHessian not…
Feb 11, 2015
ee697bf
Linear Regression formulation added for comparisons
Feb 11, 2015
ce8638f
Fixed LBFGS.maxEigen using power law on CompactHessian
Feb 12, 2015
bbc3edd
Merge branch 'qp' of https://github.com/debasish83/breeze into nlqp
Feb 17, 2015
f85ff86
Merge branch 'qp' of https://github.com/debasish83/breeze into nlqp
Feb 22, 2015
e3a61a9
Added a proximal interface to ProjectQuasiNewton solver; Added projec…
Feb 23, 2015
928de32
probability simplex benchmark
Feb 24, 2015
91f2e17
After experimentation NonlinearMinimizer now uses PQN/OWLQN and supp…
Feb 28, 2015
33d28ff
Add testcases for Least square variants
Mar 1, 2015
6cba897
merge with upstream
Mar 1, 2015
9bef354
I dunno.
dlwh Mar 1, 2015
18c7789
PQN fixes from David's fix_pqn branch; added strong wolfe line search…
Mar 2, 2015
43794c0
Unused import from FirstOrderMinimizer; PQN migrated to Strong Wolfe …
Mar 5, 2015
e2c1db8
Used BacktrackingLineSearch in SPG and PQN; Updated NonlinearMinimize…
Mar 5, 2015
defaff5
NonlinearMinimizer println changed to nl from pqn
Mar 5, 2015
610027f
Updated with cforRange in proximal operations
Mar 7, 2015
8c6a6c8
BacktrackingLineSearch takes an initfval;OWLQN, PQN and SPG updated t…
Mar 7, 2015
b4d86e8
Merge branch 'master' of https://github.com/scalanlp/breeze into nlqp
Mar 7, 2015
3a6fc97
infiniteIteration API in FirstOrderMinimizer takes initialState;PQN b…
Mar 11, 2015
8533ada
migrate LBFGS Eigen calculation to https://github.com/debasish83/bree…
Mar 11, 2015
a0bbd33
cleaned up minEigen call from QuadraticMinimizer
Mar 11, 2015
40a45a8
NonlinearMinimizer inner iterations through BFGS cleaned
Mar 12, 2015
7308c7a
Updated contributions in README.md
Mar 12, 2015
commit 18c7789e6fb43af4e6d75b18b019b1f4137be493
Debasish Das committed Mar 2, 2015

PQN fixes from David's fix_pqn branch; added strong wolfe line search to PQN for BFGS equivalency; cleaned up complex tests from SPG; complex function test now passes with PQN
58 changes: 19 additions & 39 deletions math/src/main/scala/breeze/optimize/ProjectedQuasiNewton.scala
@@ -62,8 +62,6 @@ class CompactHessian(M: DenseMatrix[Double], Y: RingBuffer[DenseVector[Double]],
lazy val N = DenseMatrix.horzcat(collectionOfVectorsToMatrix(S).t * sigma, collectionOfVectorsToMatrix(Y).t)
}
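For context, N assembles the [σS Y] block of the compact limited-memory BFGS representation of Byrd, Nocedal, and Schnabel. Assuming M here is the usual middle matrix of that representation (it is built outside this hunk), the implied Hessian approximation is

B = \sigma I - N M^{-1} N^T, \quad N = [\sigma S \;\; Y]

where S and Y collect the stored iterate and gradient differences.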

//TO DO:
//1. Modify SpectralProjectedGradient to implement SpaRSA so that it can handle proximal algorithms
class ProjectedQuasiNewton(tolerance: Double = 1e-6,
val m: Int = 10,
val initFeas: Boolean = false,
@@ -109,45 +107,27 @@ class ProjectedQuasiNewton(tolerance: Double = 1e-6,
}
}

protected def determineStepSize(state: State, fn: DiffFunction[DenseVector[Double]], dir: DenseVector[Double]): Double = {
if (state.iter == 0)
return scala.math.min(1.0, 1.0 / norm(state.grad,1.0))
val dirnorm = norm(dir, Double.PositiveInfinity)
if(dirnorm < 1E-10) return 0.0
import state._
// Backtracking line-search
var accepted = false
var lambda = 1.0
val gTd = grad dot dir
var srchit = 0

do {
val candx = x + dir * lambda
val candf = fn.valueAt(candx)
val suffdec = gamma * lambda * gTd

if (testOpt && srchit > 0) {
logger.debug(f"PQN: SrchIt $srchit%4d: f $candf%-10.4f t $lambda%-10.4f\n")
}

if (candf < state.adjustedValue + suffdec) {
accepted = true
} else if (srchit >= maxSrchIt) {
accepted = true
} else {
lambda *= 0.5
srchit = srchit + 1
}
} while (!accepted)

if (srchit >= maxSrchIt) {
logger.info("PQN: Line search cannot make further progress")
throw new LineSearchFailed(norm(state.grad,Double.PositiveInfinity), norm(dir, Double.PositiveInfinity))
}
lambda
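For reference, the removed loop above is plain backtracking under the Armijo sufficient-decrease test: λ is halved until

f(x + \lambda d) < f(x) + \gamma \lambda \, \nabla f(x)^T d

which is exactly `suffdec = gamma * lambda * gTd` (with f(x) taken as the state's adjusted value). The replacement below moves to a Strong Wolfe search, which additionally requires the directional derivative at the accepted point to shrink, matching the commit's goal of BFGS equivalency.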
/**
* Given a direction, perform a Strong Wolfe Line Search
*
* @param state the current state
* @param f The objective
* @param dir The step direction
* @return stepSize
*/
protected def determineStepSize(state: State, f: DiffFunction[DenseVector[Double]], dir: DenseVector[Double]) = {
val x = state.x
val grad = state.grad

val ff = LineSearch.functionFromSearchDirection(f, x, dir)
Member: do we not need to project inside the line search?

Contributor Author: Matlab code and the paper did not project inside PQN line search...

Member: ah. Right, alpha is probably always <= 1, so it's safe.

val search = new StrongWolfeLineSearch(maxZoomIter = 10, maxLineSearchIter = maxSrchIt) // TODO: Need good default values here.
val alpha = search.minimize(ff, if(state.iter == 0) min(1.0, 1.0/norm(dir)) else 1.0)

if(alpha * norm(grad) < 1E-10)
throw new StepSizeUnderflow
alpha
}
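As a standalone illustration of the same pattern (a hypothetical snippet, not part of this diff), one can restrict a DiffFunction to the ray x0 + alpha * dir and minimize over alpha with StrongWolfeLineSearch:

import breeze.linalg._
import breeze.optimize._

// Simple strongly convex objective: f(x) = (x dot x) / 2, gradient x.
val f = new DiffFunction[DenseVector[Double]] {
  def calculate(x: DenseVector[Double]) = ((x dot x) / 2.0, x)
}
val x0 = DenseVector(1.0, -2.0, 3.0)
val dir = -f.gradientAt(x0) // steepest-descent direction
// One-dimensional restriction: alpha => f(x0 + alpha * dir)
val ff = LineSearch.functionFromSearchDirection(f, x0, dir)
val search = new StrongWolfeLineSearch(maxZoomIter = 10, maxLineSearchIter = 50)
val alpha = search.minimize(ff, 1.0) // the exact minimizer along this ray is alpha = 1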


protected def takeStep(state: State, dir: DenseVector[Double], stepSize: Double): DenseVector[Double] = {
projection(state.x + dir * stepSize)
}
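A minimal end-to-end sketch of where this projection comes from (hypothetical driver code; it assumes projection is the constructor parameter this class exposes): minimize a smooth objective over the nonnegative orthant, with takeStep mapping every trial iterate back onto the feasible set.

import breeze.linalg._
import breeze.optimize._

// f(x) = ||x - 3||^2 with gradient 2(x - 3); the unconstrained minimum x = 3
// already lies in { x >= 0 }, so the projected solution is x = 3 as well.
val f = new DiffFunction[DenseVector[Double]] {
  def calculate(x: DenseVector[Double]) = {
    val r = x - 3.0
    (r dot r, r * 2.0)
  }
}
// Euclidean projection onto the nonnegative orthant.
val nonneg = (x: DenseVector[Double]) => x.map(v => math.max(v, 0.0))
val pqn = new ProjectedQuasiNewton(projection = nonneg)
val xmin = pqn.minimize(f, DenseVector.zeros[Double](5))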
@@ -76,42 +76,6 @@ class SpectralProjectedGradientTest extends PropSpec with PropertyChecks with Op
}
}


property("optimize a simple multivariate gaussian with l2 regularization") {
val optimizer = new SpectralProjectedGradient[DenseVector[Double]](tolerance = 1.0E-5)

forAll { init: DenseVector[Double] =>
val f = new DiffFunction[DenseVector[Double]] {
def calculate(x: DenseVector[Double]) = {
(norm((x - 3.0) :^ 2.0, 1), (x * 2.0) - 6.0)
}
}

val targetValue = 3 / (1.0 / 2 + 1)
val result = optimizer.minimize(DiffFunction.withL2Regularization(f, 1.0), init)
result should beSimilarTo(DenseVector.ones[Double](init.size) * targetValue, allowedDeviation = 3E-3 * result.size)
}
}

property("optimize a complicated function without projection") {
val optimizer = new SpectralProjectedGradient[DenseVector[Double]](tolerance = 1.0E-5)

forAll { a: DenseVector[Double] =>
whenever(min(a) >= -3.0 && max(a) <= 3.0) {
val init = DenseVector.rand(a.size)
val f = new DiffFunction[DenseVector[Double]] {
def calculate(x: DenseVector[Double]) = {
(sum(exp((x :^ 2.0) :- (a :* x))), (x * 2.0 :- a) :* exp(x :^ 2.0 :- a :* x))
}
}

val result = optimizer.minimize(f, init)
val minimum = f(a / 2.0)
f(result) should be(minimum +- abs(minimum) * 1E-2)
}
}
}

property("simple linear solve without projection") {
val n = 5
val H = new DenseMatrix(n, n, Array(1.8984250861699135,0.5955576666769438,-1.484430453342902,-1.0434994471390804,-3.675310432634351,0.5955576666769438,0.9090751938470876,-2.146380947361661,-0.13037609428980368,-0.40639564652095117,-1.484430453342902,-2.146380947361661,10.262733520770384,-6.097698907163584,2.29625304115155,-1.0434994471390804,-0.13037609428980368,-6.097698907163584,27.775920405610677,-5.574220233644466,-3.675310432634351,-0.40639564652095117,2.29625304115155,-5.574220233644466,12.21329172136971))