Citation

BibTeX format

@inproceedings{Bai:2018:10.1007/978-3-030-00937-3_67,
author = {Bai, W and Suzuki, H and Qin, C and Tarroni, G and Oktay, O and Matthews, PM and Rueckert, D},
doi = {10.1007/978-3-030-00937-3_67},
title = {Recurrent neural networks for aortic image sequence segmentation with sparse annotations},
url = {http://dx.doi.org/10.1007/978-3-030-00937-3_67},
year = {2018}
}

RIS format (EndNote, RefMan)

TY  - CPAPER
AB  - Segmentation of image sequences is an important task in medical image analysis, which enables clinicians to assess the anatomy and function of moving organs. However, direct application of a segmentation algorithm to each time frame of a sequence may ignore the temporal continuity inherent in the sequence. In this work, we propose an image sequence segmentation algorithm by combining a fully convolutional network with a recurrent neural network, which incorporates both spatial and temporal information into the segmentation task. A key challenge in training this network is that the available manual annotations are temporally sparse, which forbids end-to-end training. We address this challenge by performing non-rigid label propagation on the annotations and introducing an exponentially weighted loss function for training. Experiments on aortic MR image sequences demonstrate that the proposed method significantly improves both accuracy and temporal smoothness of segmentation, compared to a baseline method that utilises spatial information only. It achieves an average Dice metric of 0.960 for the ascending aorta and 0.953 for the descending aorta.
AU  - Bai, W
AU  - Suzuki, H
AU  - Qin, C
AU  - Tarroni, G
AU  - Oktay, O
AU  - Matthews, PM
AU  - Rueckert, D
DO  - 10.1007/978-3-030-00937-3_67
PY  - 2018///
SN  - 0302-9743
TI  - Recurrent neural networks for aortic image sequence segmentation with sparse annotations
UR  - http://dx.doi.org/10.1007/978-3-030-00937-3_67
UR  - http://hdl.handle.net/10044/1/64136
ER  -
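
For readers who want a feel for the training scheme summarised in the abstract, the sketch below illustrates one way an exponentially weighted loss over a temporally sparse sequence could look: frames further from the manually annotated frame, whose propagated labels are less reliable, contribute less to the total loss. This is not the authors' code; the function name, the decay parameter, and the exact weighting form are illustrative assumptions only.

import numpy as np

# Hypothetical sketch, not the authors' implementation: weight each frame's
# segmentation loss by exp(-decay * |t - annotated_frame|), so frames whose
# labels were propagated further from the manual annotation count for less.
def exponentially_weighted_loss(per_frame_losses, annotated_frame, decay=0.1):
    t = np.arange(len(per_frame_losses))              # frame indices in the sequence
    weights = np.exp(-decay * np.abs(t - annotated_frame))
    return float(np.sum(weights * per_frame_losses) / np.sum(weights))

# Example: per-frame losses for a 5-frame sequence, with frame 0 manually annotated.
print(exponentially_weighted_loss(np.array([0.2, 0.3, 0.5, 0.6, 0.8]), annotated_frame=0))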