%Aigaion2 BibTeX export from Knowledge Engineering Publications
%Friday 17 December 2021 11:58:32 PM

@inproceedings{jf:ECML-07-QWeighted,
     author = {Park, Sang-Hyeun and F{\"u}rnkranz, Johannes},
     editor = {Kok, Joost N. and Koronacki, Jacek and L{\'o}pez de M{\'a}ntaras, Ramon and Matwin, Stan and Mladeni{\'c}, Dunja and Skowron, Andrzej},
      title = {Efficient Pairwise Classification},
  booktitle = {Proceedings of the 18th European Conference on Machine Learning ({ECML} 2007, Warsaw, Poland)},
     series = {Lecture Notes in Computer Science},
     volume = {4701},
       year = {2007},
      pages = {658--665},
  publisher = {Springer-Verlag},
        url = {/~juffi/publications/ecml-07-EfficientPairwise.pdf},
        doi = {10.1007/978-3-540-74958-5_65},
  internal-note = {NOTE(review): url is site-relative (Aigaion export) and only resolves on the originating KE publications server -- prefer the doi; editor given names expanded from the LNCS 4701 front matter, verify if critical},
   abstract = {Pairwise classification is a class binarization procedure that converts a multi-class problem into a series of two-class problems, one problem for each pair of classes. While it can be shown that for training, this procedure is more efficient than the more commonly used one-against-all approach, it still has to evaluate a quadratic number of classifiers when computing the predicted class for a given example. In this paper, we propose a method that allows a faster computation of the predicted class when weighted or unweighted voting are used for combining the predictions of the individual classifiers. While its worst-case complexity is still quadratic in the number of classes, we show that even in the case of completely random base classifiers, our method still outperforms the conventional pairwise classifier. For the more practical case of well-trained base classifiers, its asymptotic computational complexity seems to be almost linear.}
}