BibTeX format

@inproceedings{seitzer2018adversarial,
  author = {Seitzer, M. and Yang, G. and Schlemper, J. and Oktay, O. and W{\"u}rfl, T. and Christlein, V. and Wong, T. and Mohiaddin, R. and Firmin, D. and Keegan, J. and Rueckert, D. and Maier, A.},
  title  = {Adversarial and Perceptual Refinement for Compressed Sensing {MRI} Reconstruction},
  doi    = {10.1007/978-3-030-00928-1_27},
  pages  = {232--240},
  year   = {2018},
}

RIS format (EndNote, RefMan)

TY  - CONF
AB  - © Springer Nature Switzerland AG 2018. Deep learning approaches have shown promising performance for compressed sensing-based Magnetic Resonance Imaging. While deep neural networks trained with mean squared error (MSE) loss functions can achieve high peak signal to noise ratio, the reconstructed images are often blurry and lack sharp details, especially for higher undersampling rates. Recently, adversarial and perceptual loss functions have been shown to achieve more visually appealing results. However, it remains an open question how to (1) optimally combine these loss functions with the MSE loss function and (2) evaluate such a perceptual enhancement. In this work, we propose a hybrid method, in which a visual refinement component is learnt on top of an MSE loss-based reconstruction network. In addition, we introduce a semantic interpretability score, measuring the visibility of the region of interest in both ground truth and reconstructed images, which allows us to objectively quantify the usefulness of the image quality for image post-processing and analysis. Applied on a large cardiac MRI dataset simulated with 8-fold undersampling, we demonstrate significant improvements (p<0.01) over the state-of-the-art in both a human observer study and the semantic interpretability score.
AU - Seitzer,M
AU - Yang,G
AU - Schlemper,J
AU - Oktay,O
AU - Würfl,T
AU - Christlein,V
AU - Wong,T
AU - Mohiaddin,R
AU - Firmin,D
AU - Keegan,J
AU - Rueckert,D
AU - Maier,A
DO - 10.1007/978-3-030-00928-1_27
EP - 240
PY - 2018///
SN - 0302-9743
SP - 232
TI - Adversarial and perceptual refinement for compressed sensing MRI reconstruction
UR  - https://doi.org/10.1007/978-3-030-00928-1_27
ER  -