%Aigaion2 BibTeX export from Knowledge Engineering Publications
%Saturday 18 December 2021 12:00:01 AM

@INPROCEEDINGS{loza16medsim,
     author = {Loza Menc{\'{\i}}a, Eneldo and de Melo, Gerard and Nam, Jinseok},
      month = may,
      title = {Medical Concept Embeddings via Labeled Background Corpora},
  booktitle = {Proceedings of the Tenth International Conference on Language Resources and Evaluation (LREC 2016)},
       year = {2016},
      pages = {4629--4636},
  publisher = {European Language Resources Association (ELRA)},
    address = {Paris, France},
       isbn = {978-2-9517408-9-1},
        url = {http://www.lrec-conf.org/proceedings/lrec2016/pdf/1190_Paper.pdf},
   abstract = {In recent years, we have seen an increasing amount of interest in low-dimensional vector representations of words. Among other things,
these facilitate computing word similarity and relatedness scores. The most well-known examples of algorithms that produce representations
of this sort are the word2vec approaches. In this paper, we investigate a new model to induce such vector spaces for medical concepts,
based on a joint objective that exploits not only word co-occurrences but also manually labeled documents, as available from sources such
as PubMed. Our extensive experimental analysis shows that our embeddings lead to significantly higher correlations with human similarity
and relatedness assessments than previous work. Due to the simplicity and versatility of vector representations, these findings suggest that
our resource can easily be used as a drop-in replacement to improve any system relying on medical concept similarity measures.}
}