@Article{duepublico_mods_00071902,
  author    = {Goebel, Juliane and Stenzel, Elena and Zuelow, Stefan and Kleinschnitz, Christoph and Forsting, Michael and Moenninghoff, Christoph and Radbruch, Alexander},
  title     = {Computer aided diagnosis for ASPECT rating: initial experiences with the Frontier ASPECT Score software},
  journal   = {Acta Radiologica},
  year      = {2019},
  month     = {Apr},
  day       = {24},
  volume    = {60},
  number    = {12},
  pages     = {1673--1679},
  publisher = {SAGE},
  keywords  = {Computer-aided diagnosis; early ischemic change detection; brain computed tomography; ASPECT score},
  abstract  = {Background: Computer-aided diagnosis is increasingly used in radiology and may support not only inexperienced readers but also senior radiologists. It appears promising, especially for the sometimes challenging detection of early ischemic changes in stroke CT. Purpose: To compare the new post-processing software prototype Frontier{\_}ASPECTS against two senior radiologists in ASPECTS evaluation. Material and Methods: Pre-interventional CTs of 100 patients who underwent endovascular revascularization for acute middle cerebral artery ischemia were retrospectively and blindly re-analyzed with respect to ASPECTS by two neuroradiologists (separately and in consensus) and with Frontier{\_}ASPECTS. In addition to a fully automatic Frontier{\_}ASPECTS reading (Frontier{\_}1), Frontier{\_}ASPECTS readings subsequently corrected manually for old cerebral defects (Frontier{\_}2a), for the affected hemisphere (known from CT angiography, Frontier{\_}2b), and for both (Frontier{\_}3) were assessed. Statistical analysis was performed with intraclass correlation and Bland--Altman analysis. Results: Median ASPECTS was 10 for Frontier{\_}3 (range{\thinspace}={\thinspace}5--10), 10 for radiologist{\_}1 (range{\thinspace}={\thinspace}4--10), 9 for radiologist{\_}2 (range{\thinspace}={\thinspace}2--10), and 10 for the consensus reading (range{\thinspace}={\thinspace}2--10). All Frontier{\_}ASPECTS variants correlated only weakly with the consensus reading (Frontier{\_}1, r{\thinspace}={\thinspace}0.281; Frontier{\_}2a, r{\thinspace}={\thinspace}0.357; Frontier{\_}2b, r{\thinspace}={\thinspace}0.333; Frontier{\_}3, r{\thinspace}={\thinspace}0.350; always P{\thinspace}<{\thinspace}0.01), whereas both radiologists correlated strongly with the consensus reading (radiologist{\_}1, r{\thinspace}={\thinspace}0.817; radiologist{\_}2, r{\thinspace}={\thinspace}0.951; always P{\thinspace}<{\thinspace}0.001). Bland--Altman analysis confirmed worse agreement between Frontier{\_}3 and the consensus reading than between either radiologist and the consensus reading. Conclusion: We found only low agreement between the post-processing software Frontier{\_}ASPECTS and the expert consensus reading in ASPECTS evaluation. Notably, the performance of Frontier{\_}ASPECTS improved with simple manual corrections but remains, at its current development status, inferior to the performance of senior radiologists.},
  note      = {This publication is freely accessible with the permission of the rights owner under an Alliance licence or a national licence funded by the DFG (German Research Foundation). Published citation: Goebel J, Stenzel E, Zuelow S, et al. Computer aided diagnosis for ASPECT rating: initial experiences with the Frontier ASPECT Score software. Acta Radiologica. 2019;60(12):1673-1679. Article first published online: April 24, 2019.},
  doi       = {10.1177/0284185119842465},
  url       = {https://duepublico2.uni-due.de/receive/duepublico_mods_00071902},
  file      = {:https://duepublico2.uni-due.de/servlets/MCRFileNodeServlet/duepublico_derivate_00071734/Goebel_et_al_2019_ASPECT_rating.pdf:PDF},
  language  = {en},
}