Citation

BibTeX format

@article{Rajchl:2016:10.1109/TMI.2016.2621185,
  author  = {Rajchl, M. and Lee, M. C. H. and Oktay, O. and Kamnitsas, K. and Passerat-Palmbach, J. and Bai, W. and Damodaram, M. and Rutherford, M. A. and Hajnal, J. V. and Kainz, B. and Rueckert, D.},
  title   = {{DeepCut}: Object Segmentation from Bounding Box Annotations Using Convolutional Neural Networks},
  journal = {IEEE Transactions on Medical Imaging},
  volume  = {36},
  pages   = {674--683},
  year    = {2016},
  issn    = {0278-0062},
  doi     = {10.1109/TMI.2016.2621185},
  url     = {http://dx.doi.org/10.1109/TMI.2016.2621185},
}

RIS format (EndNote, RefMan)

TY  - JOUR
AB  - In this paper, we propose DeepCut, a method to obtain pixelwise object segmentations given an image dataset labelled with weak annotations, in our case bounding boxes. It extends the approach of the well-known GrabCut [1] method to include machine learning by training a neural network classifier from bounding box annotations. We formulate the problem as an energy minimisation problem over a densely-connected conditional random field and iteratively update the training targets to obtain pixelwise object segmentations. Additionally, we propose variants of the DeepCut method and compare those to a naive approach to CNN training under weak supervision. We test its applicability to solve brain and lung segmentation problems on a challenging fetal magnetic resonance dataset and obtain encouraging results in terms of accuracy.
AU  - Rajchl, M
AU  - Lee, MCH
AU  - Oktay, O
AU  - Kamnitsas, K
AU  - Passerat-Palmbach, J
AU  - Bai, W
AU  - Damodaram, M
AU  - Rutherford, MA
AU  - Hajnal, JV
AU  - Kainz, B
AU  - Rueckert, D
DO  - 10.1109/TMI.2016.2621185
EP  - 683
PY  - 2016///
SN  - 0278-0062
SP  - 674
TI  - DeepCut: object segmentation from bounding box annotations using convolutional neural networks
T2  - IEEE Transactions on Medical Imaging
UR  - http://dx.doi.org/10.1109/TMI.2016.2621185
UR  - http://gateway.webofknowledge.com/gateway/Gateway.cgi?GWVersion=2&SrcApp=PARTNER_APP&SrcAuth=LinksAMR&KeyUT=WOS:000396115800030&DestLinkType=FullRecord&DestApp=ALL_WOS&UsrCustomerID=1ba7043ffcc86c417c072aa74d649202
UR  - http://hdl.handle.net/10044/1/45459
VL  - 36
ER  -