Citation

BibTeX format

@article{Rohanian:2024:10.1017/S1351324923000542,
author = {Rohanian, O and Nouriborji, M and Jauncey, H and Kouchaki, S and Nooralahzadeh, F and Clifton, L and Merson, L and Clifton, DA and Abbas, A and Abdukahil, SA and Abdulkadir, NN and Abe, R and Abel, L and Abrous, A and Absil, L and Jabal, KA and Salah, NA and Acharya, S and Acker, A and Adachi, S and Adam, E and Adewhajah, F and Adriano, E and Adrião, D and Al Ageel, S and Ahmed, S and Aiello, M and Ainscough, K and Airlangga, E and Aisa, T and Hssain, AA and Tamlihat, YA and Akimoto, T and Akmal, E and Al Qasim, E and Alalqam, R and Alameen, AM and Alberti, A and Al-Dabbous, T and Alegesan, S and Alegre, C and Alessi, M and Alex, B and Alexandre, K and Al-Fares, A and Alfoudri, H and Ali, A and Ali, I and Shah, NA and Alidjnou, KE and Aliudin, J and Alkhafajee, Q and Allavena, C and Allou, N and Altaf, A and Alves, J and Alves, R and Cabrita, JA and Amaral, M and Amira, N and Ammerlaan, H and Ampaw, P and Andini, R and Andréjak, C and Angheben, A and Angoulvant, F and Ankrah, S and Ansart, S and Anthonidass, S and Antonelli, M and de Brito, CAA and Anwar, KR and Apriyana, A and Arabi, Y and Aragao, I and Arancibia, F and Araujo, C and Arcadipane, A and Archambault, P and Arenz, L and Arlet, JB and Arnold-Day, C and Aroca, A and Arora, L and Arora, R and Artaud-Macari, E and Aryal, D and Asaki, M and Asensio, A and Ashley, EA and Ashraf, M and Asif, N and Asim, M and Assie, JB and Asyraf, A and Atif, M and Atique, A and Attanyake, AMUL and Auchabie, J},
doi = {10.1017/S1351324923000542},
journal = {Natural Language Engineering},
pages = {887--914},
title = {Lightweight transformers for clinical natural language processing},
url = {http://dx.doi.org/10.1017/S1351324923000542},
volume = {30},
year = {2024}
}

RIS format (EndNote, RefMan)

TY  - JOUR
AB - Specialised pre-trained language models are becoming increasingly common in Natural Language Processing (NLP) since they can potentially outperform models trained on generic texts. BioBERT (Lee et al., BioBERT: a pre-trained biomedical language representation model for biomedical text mining. Bioinformatics 36(4), pp. 1234–1240, 2020) and BioClinicalBERT (Alsentzer et al., Publicly available clinical BERT embeddings. In Proceedings of the 2nd Clinical Natural Language Processing Workshop, pp. 72–78, 2019) are two examples of such models that have shown promise in medical NLP tasks. Many of these models are overparametrised and resource-intensive, but thanks to techniques like knowledge distillation, it is possible to create smaller versions that perform almost as well as their larger counterparts. In this work, we specifically focus on the development of compact language models for processing clinical texts (e.g. progress notes, discharge summaries, etc.). We developed a number of efficient lightweight clinical transformers using knowledge distillation and continual learning, with the number of parameters ranging from 15 million to 65 million. These models performed comparably to larger models such as BioBERT and BioClinicalBERT and significantly outperformed other compact models trained on general or biomedical data. Our extensive evaluation was done across several standard datasets and covered a wide range of clinical text-mining tasks, including natural language inference, relation extraction, named entity recognition and sequence classification. To our knowledge, this is the first comprehensive study specifically focused on creating efficient and compact transformers for clinical NLP tasks. The models and code used in this study can be found on our Hugging Face profile at https://huggingface.co/nlpie and GitHub page at https://github.com/nlpie-research/Lightweight-Clinical-Transformers, respectively, promoting reproducibility of our results.
AU - Rohanian,O
AU - Nouriborji,M
AU - Jauncey,H
AU - Kouchaki,S
AU - Nooralahzadeh,F
AU - Clifton,L
AU - Merson,L
AU - Clifton,DA
AU - Abbas,A
AU - Abdukahil,SA
AU - Abdulkadir,NN
AU - Abe,R
AU - Abel,L
AU - Abrous,A
AU - Absil,L
AU - Jabal,KA
AU - Salah,NA
AU - Acharya,S
AU - Acker,A
AU - Adachi,S
AU - Adam,E
AU - Adewhajah,F
AU - Adriano,E
AU - Adrião,D
AU - Al Ageel,S
AU - Ahmed,S
AU - Aiello,M
AU - Ainscough,K
AU - Airlangga,E
AU - Aisa,T
AU - Hssain,AA
AU - Tamlihat,YA
AU - Akimoto,T
AU - Akmal,E
AU - Al Qasim,E
AU - Alalqam,R
AU - Alameen,AM
AU - Alberti,A
AU - Al-Dabbous,T
AU - Alegesan,S
AU - Alegre,C
AU - Alessi,M
AU - Alex,B
AU - Alexandre,K
AU - Al-Fares,A
AU - Alfoudri,H
AU - Ali,A
AU - Ali,I
AU - Shah,NA
AU - Alidjnou,KE
AU - Aliudin,J
AU - Alkhafajee,Q
AU - Allavena,C
AU - Allou,N
AU - Altaf,A
AU - Alves,J
AU - Alves,R
AU - Cabrita,JA
AU - Amaral,M
AU - Amira,N
AU - Ammerlaan,H
AU - Ampaw,P
AU - Andini,R
AU - Andréjak,C
AU - Angheben,A
AU - Angoulvant,F
AU - Ankrah,S
AU - Ansart,S
AU - Anthonidass,S
AU - Antonelli,M
AU - de Brito,CAA
AU - Anwar,KR
AU - Apriyana,A
AU - Arabi,Y
AU - Aragao,I
AU - Arancibia,F
AU - Araujo,C
AU - Arcadipane,A
AU - Archambault,P
AU - Arenz,L
AU - Arlet,JB
AU - Arnold-Day,C
AU - Aroca,A
AU - Arora,L
AU - Arora,R
AU - Artaud-Macari,E
AU - Aryal,D
AU - Asaki,M
AU - Asensio,A
AU - Ashley,EA
AU - Ashraf,M
AU - Asif,N
AU - Asim,M
AU - Assie,JB
AU - Asyraf,A
AU - Atif,M
AU - Atique,A
AU - Attanyake,AMUL
AU - Auchabie,J
DO - 10.1017/S1351324923000542
EP - 914
PY - 2024///
SN - 1351-3249
SP - 887
TI - Lightweight transformers for clinical natural language processing
T2 - Natural Language Engineering
UR - http://dx.doi.org/10.1017/S1351324923000542
VL - 30
ER -
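
The abstract notes that the trained models are published on the authors' Hugging Face profile. Below is a minimal sketch of loading one of the compact clinical transformers with the Hugging Face transformers library; the model ID used here is a hypothetical placeholder, so consult https://huggingface.co/nlpie for the actual released model names.

# Minimal loading sketch (assumes the `transformers` and `torch`
# packages are installed). The model ID below is an assumption for
# illustration only; see https://huggingface.co/nlpie for real names.
from transformers import AutoModel, AutoTokenizer

model_id = "nlpie/tiny-clinicalbert"  # hypothetical placeholder ID

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

# Encode a short clinical note and run it through the compact encoder.
text = "Patient was discharged on day three with stable vital signs."
inputs = tokenizer(text, return_tensors="pt")
outputs = model(**inputs)

# The contextual embeddings can feed a downstream task head for NER,
# relation extraction, sequence classification or natural language
# inference, the tasks evaluated in the paper.
print(outputs.last_hidden_state.shape)  # (1, sequence_length, hidden_size)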

Contact us

For any enquiries related to the MRC Centre, please contact:

Scientific Manager
Susannah Fisher
mrc.gida@imperial.ac.uk

External Relationships and Communications Manager
Dr Sabine van Elsland
s.van-elsland@imperial.ac.uk