Benchmark of Pairwise Frank-Wolfe variants for sparse logistic regression

Comparison of the speed of convergence of different pairwise Frank-Wolfe variants on several datasets, with a logistic regression loss (copt.utils.LogLoss) and an L1 ball constraint (copt.utils.L1Ball).
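
Frank-Wolfe methods access the constraint set only through a linear minimization oracle (LMO). For an L1 ball of radius alpha, the LMO picks the coordinate with the largest absolute gradient entry and returns the corresponding signed vertex. A minimal NumPy sketch of this idea (for illustration only; the benchmark below uses copt's built-in pairwise oracle l1_ball.lmo_pairwise):

import numpy as np

def l1_ball_lmo(grad, alpha):
    # vertex s of {x : ||x||_1 <= alpha} minimizing <grad, s>
    idx = np.argmax(np.abs(grad))
    s = np.zeros_like(grad)
    s[idx] = -alpha * np.sign(grad[idx])
    return s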

import matplotlib.pyplot as plt
import numpy as np
import copt as cp

# .. datasets and their loading functions ..
# .. alpha is the regularization parameter (the radius of the L1 ball), ..
# .. chosen so that roughly 10% of the features are nonzero at the solution ..
datasets = (
    {
        "name": "RCV1",
        "loader": cp.datasets.load_rcv1,
        "alpha": 1e3,
        "max_iter": 5000,
        "f_star": 0.3114744279728717,
    },
    {
        "name": "gisette",
        "loader": cp.datasets.load_gisette,
        "alpha": 1e4,
        "max_iter": 5000,
        "f_star": 2.293654421822428,
    },
    {
        "name": "madelon",
        "loader": cp.datasets.load_madelon,
        "alpha": 1e4,
        "max_iter": 5000,
        "f_star": 0.0,
    },
    {
        "name": "covtype",
        "loader": cp.datasets.load_covtype,
        "alpha": 1e4,
        "max_iter": 5000,
        "f_star": 0,
    },
)
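
# .. the f_star values above are precomputed (approximate) optimal objective
# .. values; they are used only to plot the suboptimality f(x_t) - f_star ..
# .. hypothetical helper (not part of the original example) sketching how such
# .. values could be re-estimated, assuming a long high-precision run of the
# .. same solver gets close enough to the optimum ..
def estimate_f_star(loss, lmo, x0, lipschitz, max_iter=50000):
    trace = cp.utils.Trace(loss)
    cp.minimize_frank_wolfe(
        loss.f_grad,
        x0,
        lmo,
        callback=trace,
        step="backtracking",
        lipschitz=lipschitz,
        max_iter=max_iter,
        tol=0,
    )
    return min(trace.trace_fx)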


variants_fw = [
    ["adaptive", "adaptive step-size", "s"],
    ["DR", "Lipschitz step-size", "<"],
]
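
# .. "backtracking" adapts the step size with a local line-search, while "DR"
# .. (Demyanov-Rubinov) derives it from a Lipschitz estimate of the gradient ..
# .. minimal sketch of the Lipschitz-based step, assuming the standard form
# .. gamma = min(<-grad, d> / (L * ||d||^2), gamma_max) for update direction d
# .. (illustration only, not the copt implementation) ..
def dr_step_size(grad, d, lipschitz, gamma_max):
    gamma = -grad.dot(d) / (lipschitz * d.dot(d))
    return max(0.0, min(gamma, gamma_max))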

for d in datasets:
    plt.figure()
    print("Running on the %s dataset" % d["name"])

    X, y = d["loader"]()
    print(X.shape)
    n_samples, n_features = X.shape

    l1_ball = cp.utils.L1Ball(d["alpha"])
    f = cp.utils.LogLoss(X, y)
    x0 = np.zeros(n_features)
    x0[0] = d["alpha"]  # start from a (random) vertex

    for step, label, marker in variants_fw:

        cb = cp.utils.Trace(f)
        sol = cp.minimize_frank_wolfe(
            f.f_grad,
            x0,
            l1_ball.lmo_pairwise,
            callback=cb,
            step=step,
            lipschitz=f.lipschitz,
            max_iter=d["max_iter"],
            verbose=True,
            tol=0,
        )

        plt.plot(
            cb.trace_time,
            np.array(cb.trace_fx) - d["f_star"],
            label=label,
            marker=marker,
            markevery=10,
        )

    print("Sparsity of solution: %s" % np.mean(np.abs(sol.x) > 1e-8))
    print(f(sol.x))
    plt.legend()
    plt.xlabel("Time (in seconds)")
    plt.ylabel("Objective function")
    plt.title(d["name"])
    plt.tight_layout()  # otherwise the right y-label is slightly clipped
    #    plt.xlim((0, 0.7 * cb.trace_time[-1]))  # for aesthetics
    plt.grid()
    plt.show()
