{ "id": "1709.08242", "version": "v1", "published": "2017-09-24T19:15:20.000Z", "updated": "2017-09-24T19:15:20.000Z", "title": "Best practices for comparing optimization algorithms", "authors": [ "Vahid Beiranvand", "Warren Hare", "Yves Lucet" ], "comment": "Optim Eng (2017)", "doi": "10.1007/s11081-017-9366-1", "categories": [ "math.OC" ], "abstract": "Comparing, or benchmarking, of optimization algorithms is a complicated task that involves many subtle considerations to yield a fair and unbiased evaluation. In this paper, we systematically review the benchmarking process of optimization algorithms, and discuss the challenges of fair comparison. We provide suggestions for each step of the comparison process and highlight the pitfalls to avoid when evaluating the performance of optimization algorithms. We also discuss various methods of reporting the benchmarking results. Finally, some suggestions for future research are presented to improve the current benchmarking process.", "revisions": [ { "version": "v1", "updated": "2017-09-24T19:15:20.000Z" } ], "analyses": { "subjects": [ "65K05", "F.2.1", "G.1.6" ], "keywords": [ "optimization algorithms", "best practices", "suggestions", "subtle considerations", "fair comparison" ], "tags": [ "journal article" ], "publication": { "publisher": "Springer" }, "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }