Recent Publications

 author = {Q. Bertrand and Q. Klopfenstein and M. Massias and M. Blondel and S. Vaiter and A. Gramfort and J. Salmon},
 journal = {J. Mach. Learn. Res.},
 pdf = {},
 title = {{Implicit differentiation for fast hyperparameter selection in non-smooth convex learning}},
 year = {2022}

 author = {J.-A. Chevalier and T. B. Nguyen and B. Thirion and J. Salmon},
 journal = {Statistics and Computing},
 pdf = {},
 title = {Spatially relaxed inference on high-dimensional linear models},
 year = {2022}

 author = {C. Garcin and M. Servajean and A. Joly and J. Salmon},
 booktitle = {ICML},
 comment = { [Code] },
 pdf = {},
 title = {Stochastic smoothing of the top-K calibrated hinge loss for deep imbalanced classification},
 year = {2022}

 author = {P. Mangold and A. Bellet and J. Salmon and M. Tommasi},
 booktitle = {ICML},
 pdf = {},
 title = {Differentially Private Coordinate Descent for Composite Empirical Risk Minimization},
 year = {2022}

 author = {T. Moreau and M. Massias and A. Gramfort and P. Ablin and B. Charlier and P.-A. Bannier and M. Dagréou and T. Dupré la Tour and G. Durif and C. F. Dantas and Q. Klopfenstein and J. Larsson and E. Lai and T. Lefort and B. Malézieux and B. Moufad and T. B. Nguyen and A. Rakotomamonjy and Z. Ramzi and J. Salmon and S. Vaiter},
 booktitle = {NeurIPS},
 keywords = {Machine Learning (cs.LG), Optimization and Control (math.OC), Machine Learning (stat.ML), FOS: Computer and information sciences, FOS: Computer and information sciences, FOS: Mathematics, FOS: Mathematics},
 title = {Benchopt: Reproducible, efficient and collaborative optimization benchmarks},
 url = {},
 year = {2022}

 author = {A. Rakotomamonjy and R. Flamary and G. Gasso and J. Salmon},
 booktitle = {AISTATS},
 pdf = {},
 title = {Provably Convergent Working Set Algorithm for Non-Convex Regularized Regression},
 year = {2022}

 author = {K. Šehić and A. Gramfort and J. Salmon and L. Nardi},
 booktitle = {International Conference on
Automated Machine Learning},
 pdf = {},
 title = {LassoBench: A High-Dimensional Hyperparameter Optimization Benchmark Suite for Lasso},
 year = {2022}

Full list of publications



Address: IMAG, c.c. 051
Université de Montpellier
Place Eugène Bataillon
34095 Montpellier Cedex 5
(office 415, building 9)

Phone: +33 4 67 14 35 19



Ph.D. Students


Short Bio

Since 2018, I have been a full professor at Université de Montpellier and an associate member of the INRIA Parietal team. During the spring and summer quarters of 2018, I was a visiting assistant professor in the Statistics Department at the University of Washington (UW). From 2012 to 2018, I was an assistant professor at Télécom ParisTech. In 2011 and 2012, I was a post-doctoral associate at Duke University, working with Rebecca Willett.

In 2010, I completed my Ph.D. in statistics and image processing under the supervision of Dominique Picard and Erwan Le Pennec at the Laboratoire de Probabilités et de Modélisation Aléatoire (now LPSM) at Université Paris Diderot.


  • BenchOpt: package to simplify, make more transparent and more reproducible comparisons between optimization algorithms

  • Celer: a fast Lasso solver, code of the associated ICML2018 paper "Dual Extrapolation for Faster Lasso Solvers" (pdf, slides)

  • sparse-ho: a fast hyper-parameter package to select the best Lasso parameter efficiently, code of the associated ICML2020 paper "Implicit differentiation of Lasso-type models for hyperparameter optimization" (pdf)
  • matlab toolboxes for statistics and image processing (this is legacy)

More on my Github Page

Joining? Open positions in my lab

I am always looking for outstanding and highly motivated people to join my team as interns, Ph.D. students, post-doctoral researchers or research engineers in the following areas:

  • optimization for machine learning (including federated learning, privacy, etc.)
  • high dimensional and robust statistics

I always have open positions for outstanding applicants (post-doc, Ph.D. thesis, internship).

The application process is light:

  1. Email me your CV and a transcript of your most recent grades (for interns and Ph.D. students), and explain in a paragraph why you are interested in joining my group.
  2. After preliminary feedback on my side, I will ask you to secure two reference letters (one is enough for interns or Ph.D. students) to be sent directly to me.
  3. At this stage an interview (possibly online) will be arranged to double-check your skills and profile compatibility.

Teaching / courses

Full list of courses


Full list of talks, with slides and possibly videos when recorded.


Here (Miscellaneous), you will find some (math)art and other distractions.