Skip to content

Commit

Permalink
Merge pull request #24 from fides-dev/develop
Browse files Browse the repository at this point in the history
Fides 0.3.0
  • Loading branch information
FFroehlich authored Mar 5, 2021
2 parents b3fdfc4 + 443f618 commit fdf90de
Show file tree
Hide file tree
Showing 6 changed files with 60 additions and 13 deletions.
3 changes: 2 additions & 1 deletion fides/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@

# flake8: noqa
from .minimize import Optimizer
from .hessian_approximation import SR1, BFGS, DFP, HessianApproximation
from .hessian_approximation import SR1, BFGS, DFP, HessianApproximation, \
HybridUpdate
from .logging import create_logger
from .version import __version__
from .constants import Options, SubSpaceDim, StepBackStrategy, ExitFlag
39 changes: 38 additions & 1 deletion fides/hessian_approximation.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class HessianApproximation:
"""
def __init__(self, hess_init: Optional[np.ndarray] = None):
"""
Creata Hessian update strategy instance
Create a Hessian update strategy instance
:param hess_init:
Inital guess for the Hessian, if empty Identity matrix will be used
Expand Down Expand Up @@ -107,3 +107,40 @@ def update(self, s, y):
mat2 = np.eye(self._hess.shape[0]) - np.outer(s, y.T) / curv

self._hess = mat1.dot(self._hess).dot(mat2) + np.outer(y, y.T)/curv


class HybridUpdate(HessianApproximation):
    """
    Hybrid Hessian update strategy: the wrapped approximation is updated
    from the start of the optimization, but the optimizer only switches to
    it after ``switch_iteration`` iterations (see usage in
    :py:func:`Optimizer.minimize`).
    """
    def __init__(self,
                 happ: Optional[HessianApproximation] = None,
                 hess_init: Optional[np.ndarray] = None,
                 switch_iteration: Optional[int] = None):
        """
        Create a Hybrid Hessian update strategy which is generated from the
        start but only applied after a certain iteration

        :param happ:
            Hessian Update Strategy (default: BFGS)
        :param switch_iteration:
            Iteration after which this approximation is used (default: 5*dim)
        :param hess_init:
            Initial guess for the Hessian. (default: eye)
        """
        if happ is None:
            happ = BFGS()
        self.hessian_update = happ
        self.switch_iteration = switch_iteration

        # NOTE(review): hess_init is only passed to the base class here; it
        # is not forwarded to the wrapped ``happ`` — confirm this is intended.
        super().__init__(hess_init)

    def init_mat(self, dim: int):
        """
        Resolve the default switch iteration and initialize the wrapped
        approximation's matrix.

        :param dim:
            dimension of the optimization problem
        """
        if self.switch_iteration is None:
            # default heuristic: switch after 5 iterations per dimension
            self.switch_iteration = 5 * dim
        self.hessian_update.init_mat(dim)

    def update(self, s, y):
        """Forward the secant update to the wrapped approximation."""
        self.hessian_update.update(s, y)

    def get_mat(self) -> np.ndarray:
        """Return the wrapped approximation's current Hessian matrix."""
        return self.hessian_update.get_mat()
18 changes: 13 additions & 5 deletions fides/minimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from numpy.linalg import norm
from scipy.sparse import csc_matrix
from .trust_region import trust_region, Step
from .hessian_approximation import HessianApproximation
from .hessian_approximation import HessianApproximation, HybridUpdate
from .constants import Options, ExitFlag, DEFAULT_OPTIONS
from .logging import create_logger

Expand Down Expand Up @@ -165,8 +165,11 @@ def minimize(self, x0: np.ndarray):
'update strategy is used), but returned '
f'{funout}')

if self.hessian_update is None:
if self.hessian_update is None or isinstance(self.hessian_update,
HybridUpdate):
self.fval, self.grad, self.hess = funout
if isinstance(self.hessian_update, HybridUpdate):
self.hessian_update.init_mat(len(self.x))
else:
if len(funout) == 3:
raise ValueError('Cannot use Hessian update with a '
Expand Down Expand Up @@ -243,7 +246,8 @@ def minimize(self, x0: np.ndarray):

funout = self.fun(x_new, **self.funargs)

if self.hessian_update is None:
if self.hessian_update is None or isinstance(self.hessian_update,
HybridUpdate):
fval_new, grad_new, hess_new = funout
else:
fval_new, grad_new = funout
Expand Down Expand Up @@ -308,9 +312,13 @@ def update(self,
if self.hessian_update is not None:
self.hessian_update.update(step.s + step.s0,
grad_new - self.grad)
self.hess = self.hessian_update.get_mat()
else:

if self.hessian_update is None or \
(isinstance(self.hessian_update, HybridUpdate) and
self.iteration < self.hessian_update.switch_iteration):
self.hess = hess_new
else:
self.hess = self.hessian_update.get_mat()
self.check_in_bounds(x_new)
self.fval = fval_new
self.x = x_new
Expand Down
7 changes: 3 additions & 4 deletions fides/steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,8 +261,8 @@ def __init__(self, x, sg, hess, scaling, g_dscaling, delta, theta,
ub, lb, logger)
n = len(sg)

s_newt = - linalg.lstsq(scaling*hess*scaling + g_dscaling, sg)[0]
posdef = s_newt.dot(hess.dot(s_newt)) > 0
s_newt = - linalg.lstsq(self.shess, sg)[0]
posdef = s_newt.dot(self.shess.dot(s_newt)) > 0
normalize(s_newt)

if n > 1:
Expand All @@ -285,8 +285,7 @@ def __init__(self, x, sg, hess, scaling, g_dscaling, delta, theta,
self.subspace = np.vstack([s_newt, s_grad]).T
return
else:
logger.debug('Singular subspace, continuing with 1D '
'subspace.')
logger.debug('Singular subspace, continuing with 1D subspace.')

self.subspace = np.expand_dims(s_newt, 1)

Expand Down
2 changes: 1 addition & 1 deletion fides/version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.2.5"
__version__ = "0.3.0"
4 changes: 3 additions & 1 deletion tests/test_minimize.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from fides import Optimizer, BFGS, SR1, DFP, SubSpaceDim, StepBackStrategy
from fides import Optimizer, BFGS, SR1, DFP, HybridUpdate, SubSpaceDim, \
StepBackStrategy
import numpy as np

import logging
Expand Down Expand Up @@ -119,6 +120,7 @@ def unbounded_and_init():
(rosengrad, SR1()),
(rosengrad, BFGS()),
(rosengrad, DFP()),
(rosenboth, HybridUpdate(BFGS())),
])
def test_minimize_hess_approx(bounds_and_init, fun, happ, subspace_dim,
stepback, refine):
Expand Down

0 comments on commit fdf90de

Please sign in to comment.