@inproceedings{be5f079252a54a09933173daa11147ff,
  title     = {Pre-text Representation Transfer for Deep Learning with Limited and Imbalanced Data: Application to {CT}-Based {COVID-19} Detection},
  abstract  = {Annotating medical images for disease detection is often tedious and expensive. Moreover, the available training samples for a given task are generally scarce and imbalanced. These conditions are not conducive for learning effective deep neural models. Hence, it is common to {\textquoteleft}transfer{\textquoteright} neural networks trained on natural images to the medical image domain. However, this paradigm lacks in performance due to the large domain gap between the natural and medical image data. To address that, we propose a novel concept of Pre-text Representation Transfer (PRT). In contrast to the conventional transfer learning, which fine-tunes a source model after replacing its classification layers, PRT retains the original classification layers and updates the representation layers through an unsupervised pre-text task. The task is performed with (original, not synthetic) medical images, without utilizing any annotations. This enables representation transfer with a large amount of training data. This high-fidelity representation transfer allows us to use the resulting model as a more effective feature extractor. Moreover, we can also subsequently perform the traditional transfer learning with this model. We devise a collaborative representation based classification layer for the case when we leverage the model as a feature extractor. We fuse the output of this layer with the predictions of a model induced with the traditional transfer learning performed over our pre-text transferred model. The utility of our technique for limited and imbalanced data classification problem is demonstrated with an extensive five-fold evaluation for three large-scale models, tested for five different class-imbalance ratios for CT based COVID-19 detection. Our results show a consistent gain over the conventional transfer learning with the proposed method.},
  keywords  = {COVID-19, Imbalanced data, Transfer learning},
  author    = {Altaf, Fouzia and Islam, Syed M. S. and Janjua, Naeem K. and Akhtar, Naveed},
  year      = {2023},
  doi       = {10.1007/978-3-031-25825-1_9},
  language  = {English},
  isbn      = {9783031258244},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  pages     = {119--130},
  editor    = {Yan, Wei Qi and Nguyen, Minh and Stommel, Martin},
  booktitle = {Image and Vision Computing - 37th International Conference, {IVCNZ} 2022, Revised Selected Papers},
  note      = {37th International Conference on Image and Vision Computing New Zealand, IVCNZ 2022 ; Conference date: 24-11-2022 Through 25-11-2022},
}