
0.5 release
fabianp committed Oct 4, 2018
1 parent 460ad89 commit 5b4fb28
Showing 4 changed files with 22 additions and 28 deletions.
copt/__init__.py: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-__version__ = '0.4.0-dev'
+__version__ = '0.5.0'
 
 from .proxgrad import minimize_PGD, minimize_APGD
 from .splitting import minimize_TOS, minimize_PDHG
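
A minimal post-release sanity check, assuming the package is installed as `copt`; the solver names are exactly the ones re-exported in the __init__.py above.

import copt

assert copt.__version__ == '0.5.0'
# The four solvers re-exported above should be importable from the top level.
print(copt.minimize_PGD, copt.minimize_APGD, copt.minimize_TOS, copt.minimize_PDHG)
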
copt/datasets.py: 1 addition & 1 deletion
@@ -294,7 +294,7 @@ def load_kdd12(md5_check=True, verbose=0):
     if not os.path.exists(DATA_DIR):
         os.makedirs(DATA_DIR)
     if not os.path.exists(file_path):
-        print('URL dataset is not present in data folder. Downloading it ...')
+        print('KDD12 dataset is not present in data folder. Downloading it ...')
         url = 'https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/kdd12.bz2'
         urllib.request.urlretrieve(url, file_path)
         print('Finished downloading')
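
For context, a hedged usage sketch of the function this message lives in. `load_kdd12` and its keyword arguments come from the hunk header above; the (data, target) return value is an assumption, not something visible in this diff.

from copt import datasets

# Downloads kdd12.bz2 into the data folder on the first call, then loads it.
X, y = datasets.load_kdd12(md5_check=True, verbose=1)  # return shape assumed
print(X.shape, y.shape)
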
copt/frank_wolfe.py: 13 additions & 22 deletions
@@ -8,25 +8,6 @@
 from . import utils
 
 
-def _backtrack(
-        f_t, f_grad, x_t, d_t, g_t, L_t,
-        gamma_max=1, ratio_increase=2., ratio_decrease=0.999,
-        max_iter=100):
-    # could be included inside minimize_FW
-    d2_t = splinalg.norm(d_t) ** 2
-    for i in range(max_iter):
-        step_size = min(g_t / (d2_t * L_t), gamma_max)
-        rhs = f_t - step_size * g_t + 0.5 * (step_size**2) * L_t * d2_t
-        f_next, grad_next = f_grad(x_t + step_size * d_t)
-        if f_next <= rhs:
-            if i == 0:
-                L_t *= ratio_decrease
-            break
-        else:
-            L_t *= ratio_increase
-    return step_size, L_t, f_next, grad_next
-
-
 def minimize_FW(
         f_grad, lmo, x0, L=None, max_iter=1000, tol=1e-12,
         backtracking=True, callback=None, verbose=0):
@@ -55,11 +36,21 @@ def minimize_FW(
             g_t = g_t[0]
         if g_t <= tol:
             break
+        d2_t = splinalg.norm(d_t) ** 2
         if backtracking:
-            step_size, L_t, f_next, grad_next = _backtrack(
-                f_t, f_grad, x_t, d_t, g_t, L_t)
+            ratio_decrease = 0.999
+            ratio_increase = 2
+            for i in range(max_iter):
+                step_size = min(g_t / (d2_t * L_t), 1)
+                rhs = f_t - step_size * g_t + 0.5 * (step_size**2) * L_t * d2_t
+                f_next, grad_next = f_grad(x_t + step_size * d_t)
+                if f_next <= rhs + 1e-6:
+                    if i == 0:
+                        L_t *= ratio_decrease
+                    break
+                else:
+                    L_t *= ratio_increase
         else:
-            d2_t = splinalg.norm(d_t) ** 2
             step_size = min(g_t / (d2_t * L_t), 1)
             f_next, grad_next = f_grad(x_t + step_size * d_t)
         x_t += step_size * d_t
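
For readers who only skim the diff: the loop inlined above is the same adaptive step-size rule as the deleted `_backtrack`. Below is a standalone sketch for dense arrays; the helper name `backtrack_step` is illustrative, while the 1e-6 slack and the 0.999 / 2 factors are the values visible in the hunk.

import numpy as np


def backtrack_step(f_t, f_grad, x_t, d_t, g_t, L_t, max_iter=100):
    """Sketch of the adaptive step-size rule inlined in minimize_FW above.

    Tries gamma = min(g_t / (L_t * ||d_t||^2), 1) against the quadratic upper
    bound f(x_t + gamma d_t) <= f_t - gamma g_t + 0.5 gamma^2 L_t ||d_t||^2,
    doubling L_t on failure and shrinking it slightly on first-try success.
    """
    d2_t = np.dot(d_t, d_t)
    for i in range(max_iter):
        step_size = min(g_t / (d2_t * L_t), 1)
        rhs = f_t - step_size * g_t + 0.5 * (step_size ** 2) * L_t * d2_t
        f_next, grad_next = f_grad(x_t + step_size * d_t)
        if f_next <= rhs + 1e-6:
            if i == 0:
                L_t *= 0.999
            break
        L_t *= 2
    return step_size, L_t, f_next, grad_next

The small shrink factor on first-try success lets the local Lipschitz estimate decrease again over the course of the outer Frank-Wolfe loop instead of only ever growing.
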
copt/utils.py: 7 additions & 4 deletions
@@ -32,13 +32,15 @@ def __call__(self, x):
 def init_lipschitz(f_grad, x0):
     L0 = 1e-3
     f0, grad0 = f_grad(x0)
-    if sparse.issparse(grad0):
+    if sparse.issparse(grad0) and not sparse.issparse(x0):
         x0 = sparse.csc_matrix(x0).T
-    elif sparse.issparse(x0):
+    elif sparse.issparse(x0) and not sparse.issparse(grad0):
         grad0 = sparse.csc_matrix(grad0).T
     x_tilde = x0 - (1./L0)*grad0
     f_tilde = f_grad(x_tilde)[0]
-    while f_tilde > f0:
+    for _ in range(100):
+        if f_tilde <= f0:
+            break
         L0 *= 10
         x_tilde = x0 - (1./L0)*grad0
         f_tilde = f_grad(x_tilde)[0]
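
To see what the now-bounded loop does, a tiny self-contained example; the function name `estimate_lipschitz` and the quadratic objective are illustrative, and only the doubling scheme mirrors `init_lipschitz` above (dense inputs only, sparse handling omitted).

import numpy as np


def estimate_lipschitz(f_grad, x0, L0=1e-3, max_iter=100):
    # Same doubling scheme as init_lipschitz above, without the sparse-input handling.
    f0, grad0 = f_grad(x0)
    x_tilde = x0 - (1. / L0) * grad0
    f_tilde = f_grad(x_tilde)[0]
    for _ in range(max_iter):
        if f_tilde <= f0:
            break
        L0 *= 10
        x_tilde = x0 - (1. / L0) * grad0
        f_tilde = f_grad(x_tilde)[0]
    return L0


# f(x) = 50 * ||x||^2 has a gradient with Lipschitz constant 100.
f_grad = lambda x: (50 * np.dot(x, x), 100 * x)
print(estimate_lipschitz(f_grad, np.ones(5)))  # 100.0: first power of ten that yields descent
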
@@ -182,6 +184,7 @@ def lipschitz(self):
         s = splinalg.svds(self.A, k=1, return_singular_vectors=False)[0]
         return (s * s) / self.A.shape[0] + self.alpha
 
+
 class HuberLoss:
     """Huber loss"""
     def __init__(self, A, b, alpha=0, delta=1):
@@ -205,7 +208,7 @@ def f_grad(self, x, return_gradient=True):
         grad = self.A[idx].T.dot(z[idx]) / self.A.shape[0] + self.alpha * x.T
         grad = np.asarray(grad)
         grad += self.A[~idx].T.dot(self.delta * np.sign(z[~idx]))/ self.A.shape[0]
-        return loss, grad
+        return loss, np.asarray(grad).ravel()
 
     def lipschitz(self):
         s = splinalg.svds(self.A, k=1, return_singular_vectors=False)[0]
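
A hedged usage sketch of the changed return value: with the constructor and `f_grad` signatures shown in this file, the gradient should now come back as a flat 1-D array; the random data below is purely illustrative.

import numpy as np
from copt.utils import HuberLoss

rng = np.random.RandomState(0)
A = rng.randn(20, 5)
b = rng.randn(20)

loss, grad = HuberLoss(A, b, alpha=0.1, delta=1).f_grad(np.zeros(5))
print(grad.shape)  # expected (5,): the gradient is returned already raveled
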
