@article{6378cfda19b049c8aec6ddcbe77b00f3,
  title     = {{medBERT.de}: A comprehensive {German} {BERT} model for the medical domain},
  abstract  = {This paper presents medBERT.de, a pre-trained German BERT model specifically designed for the German medical domain. The model has been trained on a large corpus of 4.7 Million German medical documents and has been shown to achieve new state-of-the-art performance on eight different medical benchmarks covering a wide range of disciplines and medical document types. In addition to evaluating the overall performance of the model, this paper also conducts a more in-depth analysis of its capabilities. We investigate the impact of data deduplication on the model's performance, as well as the potential benefits of using more efficient tokenization methods. Our results indicate that domain-specific models such as medBERT.de are particularly useful for longer texts, and that deduplication of training data does not necessarily lead to improved performance. Furthermore, we found that efficient tokenization plays only a minor role in improving model performance, and attribute most of the improved performance to the large amount of training data. To encourage further research, the pre-trained model weights and new benchmarks based on radiological data are made publicly available for use by the scientific community.},
  keywords  = {BERT, Medicine, Natural language processing},
  author    = {Bressem, Keno K. and Papaioannou, Jens Michalis and Grundmann, Paul and Borchert, Florian and Adams, Lisa C. and Liu, Leonhard and Busch, Felix and Xu, Lina and Loyen, Jan P. and Niehues, Stefan M. and Augustin, Moritz and Grosser, Lennart and Makowski, Marcus R. and Aerts, Hugo J. W. L. and L{\"o}ser, Alexander},
  note      = {Funding Information: We would like to thank DocCheck AG and Thieme Medical Publishers for their assistance with data collection. We would also like to thank Manjil Shrestha and Rolf Becker for providing additional training data from their German MedBERT model. We also thank the Scientific Computing of the IT Division at the Charit{\'e} -- Universit{\"a}tsmedizin Berlin for providing computational resources that contributed to the research results reported in this paper. KKB is grateful for his participation in the BIH Charit{\'e} Digital Clinician Scientist Program funded by Charit{\'e}-Universit{\"a}tsmedizin Berlin and the Berlin Institute of Health. Publisher Copyright: {\textcopyright} 2023 Elsevier Ltd},
  year      = {2023},
  month     = sep,
  day       = {14},
  doi       = {10.1016/j.eswa.2023.121598},
  language  = {English},
  volume    = {237},
  journal   = {Expert Systems with Applications},
  issn      = {0957-4174},
  publisher = {Elsevier Science},
}