% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
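%
% A minimal sketch (not part of this record) of how a UTF-8 bibliography like
% this one could be consumed via biblatex with the biber backend; the file name
% "references.bib" and the chosen biblatex options are illustrative assumptions,
% and a natbib + bibtex8 workflow would serve equally well:
%
%   \documentclass{article}
%   \usepackage[utf8]{inputenc}            % default on current LaTeX; harmless otherwise
%   \usepackage[style=numeric]{biblatex}   % biber is the default backend
%   \addbibresource{references.bib}        % this file, saved under an assumed name
%   \begin{document}
%   Deep learning-based contour propagation for MR-guided
%   radiotherapy of lung cancer patients \cite{Wei:302286}.
%   \printbibliography
%   \end{document}
%
% Typical compile sequence: pdflatex, then biber, then pdflatex again.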
@ARTICLE{Wei:302286,
author = {C. Wei and C. Eze and R. Klaar and D. Thorwarth and C.
Warda and J. Taugner and J. Hörner-Rieber and S. Regnery
and O. Jäkel$^*$ and F. Weykamp$^*$ and M. A. Palacios and
S. Marschner and S. Corradini and C. Belka$^*$ and C. Kurz
and G. Landry and M. Rabe},
title = {{D}eep learning-based contour propagation in magnetic
resonance imaging-guided radiotherapy of lung cancer
patients.},
journal = {Physics in Medicine and Biology},
volume = {70},
issn = {0031-9155},
address = {Bristol},
publisher = {IOP Publ.},
reportid = {DKFZ-2025-01305},
pages = {145018},
year = {2025},
note = {Phys. Med. Biol. 70 145018},
abstract = {Objective: Fast and accurate organ-at-risk (OAR) and gross tumor
volume (GTV) contour propagation methods are needed to
improve the efficiency of magnetic resonance (MR)
imaging-guided radiotherapy. We trained deformable image
registration networks to accurately propagate contours from
planning to fraction MR images. Approach: Data from 140
stage 1-2 lung cancer patients treated at a 0.35 T MR-Linac
were split into 102/17/21 for training/validation/testing.
Additionally, 18 central lung tumor patients, treated externally
at a 0.35 T MR-Linac, and 14 stage 3 lung cancer patients from a
phase 1 clinical trial, treated at 0.35 T or 1.5 T MR-Linacs at
three institutions, were used for external testing. Planning and
fraction images were paired (490
pairs) for training. Two hybrid transformer-convolutional
neural network (TransMorph) models with mean squared error
(MSE), Dice similarity coefficient (DSC), and regularization
losses ($TM_{MSE+Dice}$) or with MSE and regularization losses
($TM_{MSE}$) were trained to deformably register planning to
fraction images. The TransMorph models predicted
diffeomorphic dense displacement fields. Multi-label images
including seven thoracic OARs and the GTV were propagated to
generate fraction segmentations. Model predictions were
compared with contours obtained through B-spline registration,
the vendor's registration, and the auto-segmentation method
nnU-Net.
Evaluation metrics included the DSC and Hausdorff distance
percentiles (50th and 95th) against clinical contours. Main
results: $TM_{MSE+Dice}$ and $TM_{MSE}$ achieved mean OAR/GTV
DSCs of 0.90/0.82 and 0.90/0.79 for the internal test data and
of 0.84/0.77 and 0.85/0.76 for the central lung tumor external
test data. On stage 3 data, $TM_{MSE+Dice}$ achieved mean
OAR/GTV DSCs of 0.87/0.79 and 0.83/0.78 for
the 0.35 T MR-Linac datasets, and 0.87/0.75 for the 1.5 T
MR-Linac dataset. $TM_{MSE+Dice}$ and $TM_{MSE}$ had
significantly higher geometric accuracy than the other methods
on the external data. No significant difference between
$TM_{MSE+Dice}$ and $TM_{MSE}$ was found. Significance:
The TransMorph models achieved time-efficient segmentation of
fraction MRIs with high geometric accuracy and accurately
segmented images obtained at different field strengths.},
keywords = {MR-linac (Other) / MRgRT (Other) / PUMA (Other) /
TransMorph (Other) / deep learning (Other) / image
registration (Other) / lung cancer (Other)},
cin = {E040 / MU01 / E050},
ddc = {530},
cid = {I:(DE-He78)E040-20160331 / I:(DE-He78)MU01-20160331 /
I:(DE-He78)E050-20160331},
pnm = {315 - Bildgebung und Radioonkologie (POF4-315)},
pid = {G:(DE-HGF)POF4-315},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:40570891},
doi = {10.1088/1361-6560/ade8d0},
url = {https://inrepo02.dkfz.de/record/302286},
}
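% A brief aside, not part of the record above: the Dice similarity coefficient
% (DSC) reported in the abstract is the standard volumetric overlap between a
% propagated contour $A$ and the clinical reference contour $B$,
%
%   $\mathrm{DSC}(A, B) = \frac{2\,|A \cap B|}{|A| + |B|}$,
%
% ranging from 0 (no overlap) to 1 (perfect agreement); the 50th and 95th
% percentile Hausdorff distances quoted alongside it summarize surface
% distances between the same contour pairs.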