% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Starke:284753,
author = {S. Starke and A. A. Zwanenburg-Bezemer and K. Leger and
          F. Lohaus and A. Linge and G. Kalinauskaite and
          I. Tinhofer and N. Guberina and M. Guberina and
          P. Balermpas and J. v. d. Grün and U. Ganswindt and
          C. Belka and J. C. Peeken and S. E. Combs and S. Boeke and
          D. Zips and C. Richter and E. Troost and M. Krause and
          M. Baumann and S. Löck},
title = {{Multitask Learning with Convolutional Neural Networks and
         Vision Transformers Can Improve Outcome Prediction for Head
         and Neck Cancer Patients}},
journal = {Cancers},
volume = {15},
number = {19},
issn = {2072-6694},
address = {Basel},
publisher = {MDPI},
reportid = {DKFZ-2023-02064},
pages = {4897},
year = {2023},
abstract = {Neural-network-based outcome predictions may enable further
treatment personalization of patients with head and neck
cancer. The development of neural networks can prove
challenging when a limited number of cases is available.
Therefore, we investigated whether multitask learning
strategies, implemented through the simultaneous
optimization of two distinct outcome objectives
(multi-outcome) and combined with a tumor segmentation task,
can lead to improved performance of convolutional neural
networks (CNNs) and vision transformers (ViTs). Model
training was conducted on two distinct multicenter datasets
for the endpoints loco-regional control (LRC) and
progression-free survival (PFS), respectively. The first
dataset consisted of pre-treatment computed tomography (CT)
imaging for 290 patients and the second dataset contained
combined positron emission tomography (PET)/CT data of 224
patients. Discriminative performance was assessed by the
concordance index (C-index). Risk stratification was
evaluated using log-rank tests. Across both datasets, CNN
and ViT model ensembles achieved similar results. Multitask
approaches showed favorable performance in most
investigations. Multi-outcome CNN models trained with
segmentation loss were identified as the optimal strategy
across cohorts. On the PET/CT dataset, an ensemble of
multi-outcome CNNs trained with segmentation loss achieved
the best discrimination (C-index: 0.29, $95\%$ confidence
interval (CI): 0.22-0.36) and successfully stratified
patients into groups with low and high risk of disease
progression (p=0.003). On the CT dataset, ensembles of
multi-outcome CNNs and of single-outcome ViTs trained with
segmentation loss performed best (C-index: 0.26 and 0.26,
CI: 0.18-0.34 and 0.18-0.35, respectively), both with
significant risk stratification for LRC in independent
validation (p=0.002 and p=0.011). Further validation of the
developed multitask-learning models is planned based on a
prospective validation study, which has recently completed
recruitment.},
keywords = {Cox proportional hazards / convolutional neural network /
            discrete-time survival models / head and neck cancer /
            loco-regional control / multitask learning /
            progression-free survival / survival analysis /
            tumor segmentation / vision transformer},
cin = {DD01 / BE01 / E220 / ED01 / FM01 / MU01 / TU01 / HD01},
ddc = {610},
cid = {I:(DE-He78)DD01-20160331 / I:(DE-He78)BE01-20160331 /
I:(DE-He78)E220-20160331 / I:(DE-He78)ED01-20160331 /
I:(DE-He78)FM01-20160331 / I:(DE-He78)MU01-20160331 /
I:(DE-He78)TU01-20160331 / I:(DE-He78)HD01-20160331},
pnm = {315 - Bildgebung und Radioonkologie (POF4-315)},
pid = {G:(DE-HGF)POF4-315},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:37835591},
pmc = {pmc:PMC10571894},
doi = {10.3390/cancers15194897},
url = {https://inrepo02.dkfz.de/record/284753},
}
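% Note: the abstract above reports discrimination via the concordance index
% (C-index) and risk stratification via log-rank tests. The lines below are a
% minimal, hypothetical sketch (Python, using numpy and the lifelines package,
% kept as comments so this file remains valid BibTeX) of how such an
% evaluation could look for a validation cohort. The synthetic data and
% variable names are illustrative assumptions and are not taken from the
% paper's code or results.
%
%   import numpy as np
%   from lifelines.utils import concordance_index
%   from lifelines.statistics import logrank_test
%
%   # Synthetic validation cohort: higher predicted risk should correspond
%   # to earlier events (illustrative data only).
%   rng = np.random.default_rng(0)
%   n = 200
%   risk_scores = rng.normal(size=n)                      # model outputs
%   times = rng.exponential(scale=np.exp(-risk_scores))   # time to event/censoring
%   events = rng.integers(0, 2, size=n).astype(bool)      # True = event observed
%
%   # C-index: lifelines treats larger scores as longer survival, so the
%   # predicted risk is negated before computing concordance.
%   cindex = concordance_index(times, -risk_scores, event_observed=events)
%
%   # Risk stratification: split at the median predicted risk and compare
%   # the low- and high-risk groups with a log-rank test, analogous to the
%   # grouping described in the abstract.
%   high = risk_scores > np.median(risk_scores)
%   result = logrank_test(times[high], times[~high],
%                         event_observed_A=events[high],
%                         event_observed_B=events[~high])
%   print(f"C-index: {cindex:.2f}, log-rank p-value: {result.p_value:.3f}")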