% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{StudierFischer:180692,
author = {A. Studier-Fischer and S. Seidlitz$^*$ and J. Sellner$^*$
and B. Özdemir and M. Wiesenfarth$^*$ and L. Ayala$^*$ and
J. Odenthal and S. Knödler and K. F. Kowalewski and C. M.
Haney and I. Camplisson and M. Dietrich and K. Schmidt and
G. A. Salg and H. G. Kenngott and T. Adler$^*$ and N.
Schreck$^*$ and A. Kopp-Schneider$^*$ and K. Maier-Hein$^*$
and L. Maier-Hein$^*$ and B. P. Müller-Stich and F. Nickel},
title = {{S}pectral organ fingerprints for machine learning-based
intraoperative tissue classification with hyperspectral
imaging in a porcine model.},
journal = {Scientific Reports},
volume = {12},
number = {1},
issn = {2045-2322},
address = {London},
publisher = {Macmillan Publishers Limited, part of Springer Nature},
reportid = {DKFZ-2022-01488},
pages = {11028},
year = {2022},
abstract = {Visual discrimination of tissue during surgery can be
challenging since different tissues appear similar to the
human eye. Hyperspectral imaging (HSI) removes this
limitation by associating each pixel with high-dimensional
spectral information. While previous work has shown its
general potential to discriminate tissue, clinical
translation has been limited due to the method's current
lack of robustness and generalizability. Specifically, the
scientific community is lacking a comprehensive spectral
tissue atlas, and it is unknown whether variability in
spectral reflectance is primarily explained by tissue type
rather than the recorded individual or specific acquisition
conditions. The contribution of this work is threefold: (1)
Based on an annotated medical HSI data set (9059 images from
46 pigs), we present a tissue atlas featuring spectral
fingerprints of 20 different porcine organs and tissue
types. (2) Using the principle of mixed model analysis, we
show that the greatest source of variability related to HSI
images is the organ under observation. (3) We show that
HSI-based fully-automatic tissue differentiation of 20 organ
classes with deep neural networks is possible with high
accuracy ($>95\%$). We conclude from our study that
automatic tissue discrimination based on HSI data is
feasible and could thus aid in intraoperative decision-making
and pave the way for context-aware computer-assisted surgery
systems and autonomous robotics.},
keywords = {Animals / Hyperspectral Imaging / Machine Learning / Neural
Networks, Computer / Swine},
cin = {E130 / E230 / C060},
ddc = {600},
cid = {I:(DE-He78)E130-20160331 / I:(DE-He78)E230-20160331 /
I:(DE-He78)C060-20160331},
pnm = {315 - Bildgebung und Radioonkologie (POF4-315)},
pid = {G:(DE-HGF)POF4-315},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:35773276},
pmc = {pmc:PMC9247052},
doi = {10.1038/s41598-022-15040-w},
url = {https://inrepo02.dkfz.de/record/180692},
}