@inproceedings{55ccf076bcfd443c9d94f22f212545e3,
  title     = {Automatic multi-organ segmentation in thorax {CT} images using {U-Net-GAN}},
  abstract  = {We propose a method to automatically segment multiple organs at risk (OARs) from routinely-acquired thorax CT images using generative adversarial network (GAN). Multi-label U-Net was introduced in generator to enable end-to-end segmentation. Esophagus and spinal cord location information were used to train the GAN in specific regions of interest (ROI). The probability maps of new CT thorax multi-organ were generated by the well-trained network and fused to reconstruct the final contour. This proposed algorithm was evaluated using 20 patients' data with thorax CT images and manual contours. The mean Dice similarity coefficient (DSC) for esophagus, heart, left lung, right lung and spinal cord was 0.73$\pm$0.04, 0.85$\pm$0.02, 0.96$\pm$0.01, 0.97$\pm$0.02 and 0.88$\pm$0.03. This novel deep-learning-based approach with the GAN strategy can automatically and accurately segment multiple OARs in thorax CT images, which could be a useful tool to improve the efficiency of the lung radiotherapy treatment planning.},
  keywords  = {CT thorax multi-organ, End-to-end, Generative adversarial network, U-Net},
  author    = {Lei, Yang and Liu, Yingzi and Dong, Xue and Tian, Sibo and Wang, Tonghe and Jiang, Xiaojun and Higgins, Kristin and Beitler, Jonathan J. and Yu, David S. and Liu, Tian and Curran, Walter J. and Fang, Yi and Yang, Xiaofeng},
  note      = {Publisher Copyright: {\textcopyright} 2019 SPIE.; Medical Imaging 2019: Computer-Aided Diagnosis ; Conference date: 17-02-2019 Through 20-02-2019},
  year      = {2019},
  doi       = {10.1117/12.2512552},
  language  = {English (US)},
  series    = {Progress in Biomedical Optics and Imaging - Proceedings of SPIE},
  publisher = {SPIE},
  editor    = {Mori, Kensaku and Hahn, Horst K.},
  booktitle = {Medical Imaging 2019},
}