%Aigaion2 BibTeX export from Knowledge Engineering Publications
%Friday 17 December 2021 11:56:05 PM

@ARTICLE{wever22mlclosses,
  author = {H{\"{u}}llermeier, Eyke and Wever, Marcel and Loza Menc{\'{\i}}a, Eneldo and F{\"{u}}rnkranz, Johannes and Rapp, Michael},
  title = {A Flexible Class of Dependence-sensitive Multi-label Loss Functions},
  journal = {Machine Learning},
  volume = {111},
  number = {2},
  pages = {713--737},
  year = {2022},
  url = {https://arxiv.org/abs/2011.00792},
  doi = {10.1007/s10994-021-06107-2},
  abstract = {Multi-label classification is the task of assigning a subset of labels to a given query instance. For evaluating such predictions, the set of predicted labels needs to be compared to the ground-truth label set associated with that instance, and various loss functions have been proposed for this purpose. In addition to assessing predictive accuracy, a key concern in this regard is to foster and to analyze a learner's ability to capture label dependencies. In this paper, we introduce a new class of loss functions for multi-label classification, which overcome disadvantages of commonly used losses such as Hamming and subset 0/1. To this end, we leverage the mathematical framework of non-additive measures and integrals. Roughly speaking, a non-additive measure allows for modeling the importance of correct predictions of label subsets (instead of single labels), and thereby their impact on the overall evaluation, in a flexible way - by giving full importance to single labels and the entire label set, respectively, Hamming and subset 0/1 are rather extreme in this regard. We present concrete instantiations of this class, which comprise Hamming and subset 0/1 as special cases, and which appear to be especially appealing from a modeling perspective. The assessment of multi-label classifiers in terms of these losses is illustrated in an empirical study.}
}
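
% A minimal illustrative sketch, inferred only from the abstract above (not taken from the
% paper's actual definitions): write $y, \hat{y} \in \{0,1\}^{K}$ for the ground-truth and
% predicted label vectors and $C(y, \hat{y}) = \{k : \hat{y}_{k} = y_{k}\}$ for the set of
% correctly predicted labels. A non-additive measure (capacity)
% $\mu : 2^{\{1,\dots,K\}} \to [0,1]$ with $\mu(\emptyset) = 0$ and $\mu(\{1,\dots,K\}) = 1$
% can induce a loss of the form
% \[ \ell_{\mu}(y, \hat{y}) = 1 - \mu\bigl(C(y, \hat{y})\bigr). \]
% The additive choice $\mu(A) = |A| / K$ recovers the Hamming loss, while $\mu(A) = 1$ iff
% $A = \{1,\dots,K\}$ (and $0$ otherwise) recovers the subset 0/1 loss; the abstract states
% that the paper's concrete instantiations comprise both as special cases.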