%Aigaion2 BibTeX export from Knowledge Engineering Publications
%Friday 17 December 2021 11:56:24 PM

@INPROCEEDINGS{yk:Relaxed-Pruning,
     author = {Klein, Yannik and Rapp, Michael and Loza Menc{\'{\i}}a, Eneldo},
     editor = {Kralj Novak, Petra and {\v S}muc, Tomislav and D{\v z}eroski, Sa{\v s}o},
   keywords = {Label Dependencies, Multi-label Classification, Rule Learning},
      month = oct,
      title = {Efficient Discovery of Expressive Multi-label Rules using Relaxed Pruning},
  booktitle = {Discovery Science},
       year = {2019},
      pages = {367--382},
  publisher = {Springer International Publishing},
       note = {Best Student Paper Award},
       isbn = {978-3-030-33778-0},
        url = {https://arxiv.org/abs/1908.06874},
        doi = {10.1007/978-3-030-33778-0_28},
   abstract = {Being able to model correlations between labels is considered crucial in multi-label classification. Rule-based models make it possible to expose such dependencies, e.g., implications, subsumptions, or exclusions, in an interpretable and human-comprehensible manner. Although the number of possible label combinations increases exponentially with the number of available labels, it has been shown that rules with multiple labels in their heads, which are a natural form to model local label dependencies, can be induced efficiently by exploiting certain properties of rule evaluation measures and pruning the label search space accordingly. However, experiments have revealed that multi-label heads are unlikely to be learned by existing methods due to their restrictiveness. To overcome this limitation, we propose a plug-in approach that relaxes the search space pruning used by existing methods in order to introduce a bias towards larger multi-label heads, resulting in more expressive rules. We further demonstrate the effectiveness of our approach empirically and show that it does not come with drawbacks in terms of training time or predictive performance.}
}