{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,3,24]],"date-time":"2025-03-24T09:09:57Z","timestamp":1742807397375},"reference-count":151,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2024,2,1]],"date-time":"2024-02-01T00:00:00Z","timestamp":1706745600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2024,2,1]],"date-time":"2024-02-01T00:00:00Z","timestamp":1706745600000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2024,1,5]],"date-time":"2024-01-05T00:00:00Z","timestamp":1704412800000},"content-version":"vor","delay-in-days":0,"URL":"http:\/\/creativecommons.org\/licenses\/by-nc-nd\/4.0\/"}],"content-domain":{"domain":["clinicalkey.fr","clinicalkey.jp","clinicalkey.com.au","clinicalkey.es","clinicalkey.com","elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Computers in Biology and Medicine"],"published-print":{"date-parts":[[2024,2]]},"DOI":"10.1016\/j.compbiomed.2024.107929","type":"journal-article","created":{"date-parts":[[2024,1,4]],"date-time":"2024-01-04T17:58:00Z","timestamp":1704391080000},"page":"107929","update-policy":"http:\/\/dx.doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":7,"special_numbering":"C","title":["Methods and datasets for segmentation of minimally invasive surgical instruments in endoscopic images and videos: A review of the state of the art"],"prefix":"10.1016","volume":"169","author":[{"ORCID":"http:\/\/orcid.org\/0009-0005-0628-1979","authenticated-orcid":false,"given":"Tobias","family":"Rueckert","sequence":"first","affiliation":[]},{"ORCID":"http:\/\/orcid.org\/0000-0002-5683-5889","authenticated-orcid":false,"given":"Daniel","family":"Rueckert","sequence":"additional","affiliation":[]},{"ORCID":"http:\/\/orcid.org\/0000-0001-9468-2871","authenticated-orcid":false,"given":"Christoph","family":"Palm","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"issue":"7328","key":"10.1016\/j.compbiomed.2024.107929_b1","doi-asserted-by":"crossref","first-page":"31","DOI":"10.1136\/bmj.324.7328.31","article-title":"Recent advances in minimal access surgery","volume":"324","author":"Darzi","year":"2002","journal-title":"BMJ"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b2","doi-asserted-by":"crossref","DOI":"10.1186\/s13018-019-1266-y","article-title":"Open versus minimally invasive TLIF: literature review and meta-analysis","volume":"14","author":"Hammad","year":"2019","journal-title":"J. Orthop. Surg. Res."},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b3","first-page":"S293","article-title":"Minimally invasive versus open distal pancreatectomy (LEOPARD): a multicenter patient-blinded randomized controlled trial","volume":"20","author":"de Rooij","year":"2018","journal-title":"Ann. Surg."},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b4","doi-asserted-by":"crossref","first-page":"621","DOI":"10.1097\/SLA.0000000000003031","article-title":"Robot-assisted minimally invasive thoracolaparoscopic esophagectomy versus open transthoracic esophagectomy for resectable esophageal cancer: A randomized controlled trial","volume":"269","author":"van der Sluis","year":"2019","journal-title":"Ann. 
Surg."},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b5","doi-asserted-by":"crossref","first-page":"154","DOI":"10.1055\/s-2002-19857","article-title":"Minimally invasive surgery","volume":"34","author":"Fuchs","year":"2002","journal-title":"Endoscopy"},{"issue":"7","key":"10.1016\/j.compbiomed.2024.107929_b6","doi-asserted-by":"crossref","first-page":"835","DOI":"10.1109\/JPROC.2022.3180350","article-title":"Robot-assisted minimally invasive surgery \u2013 surgical robotics in the data age","volume":"110","author":"Haidegger","year":"2022","journal-title":"Proc. IEEE"},{"key":"10.1016\/j.compbiomed.2024.107929_b7","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2021.102306","article-title":"Surgical data science \u2013 from concepts toward clinical translation","volume":"76","author":"Maier-Hein","year":"2022","journal-title":"Med. Image Anal."},{"issue":"3","key":"10.1016\/j.compbiomed.2024.107929_b8","doi-asserted-by":"crossref","first-page":"129","DOI":"10.3109\/13645706.2011.580764","article-title":"In-vivo real-time tracking of surgical instruments in endoscopic video","volume":"21","author":"Bouarfa","year":"2011","journal-title":"Minim. Invasive Ther. Allied Technol."},{"key":"10.1016\/j.compbiomed.2024.107929_b9","series-title":"2017 4th International Conference on Augmented Reality, Virtual Reality, and Computer Graphics","first-page":"129","article-title":"Robust laparoscopic instruments tracking using colored strips","volume":"vol. 10325","author":"Mamone","year":"2017"},{"key":"10.1016\/j.compbiomed.2024.107929_b10","doi-asserted-by":"crossref","first-page":"212","DOI":"10.1109\/RBME.2019.2939091","article-title":"Optical and electromagnetic tracking systems for biomedical applications: A critical review on potentialities and limitations","volume":"13","author":"Sorriento","year":"2019","journal-title":"IEEE Rev. Biomed. Eng."},{"key":"10.1016\/j.compbiomed.2024.107929_b11","doi-asserted-by":"crossref","DOI":"10.1016\/j.robot.2021.103945","article-title":"Visual detection and tracking algorithms for minimally invasive surgical instruments: A comprehensive review of the state-of-the-art","volume":"149","author":"Wang","year":"2022","journal-title":"Robot. Auton. Syst."},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b12","doi-asserted-by":"crossref","first-page":"1521","DOI":"10.1007\/s00464-020-08168-1","article-title":"Deep learning visual analysis in laparoscopic surgery: a systematic review and diagnostic test accuracy meta-analysis","volume":"35","author":"Anteby","year":"2021","journal-title":"Surg. Endosc."},{"key":"10.1016\/j.compbiomed.2024.107929_b13","doi-asserted-by":"crossref","first-page":"48658","DOI":"10.1109\/ACCESS.2021.3068852","article-title":"A review on deep learning in minimally invasive surgery","volume":"9","author":"Rivas-Blanco","year":"2021","journal-title":"IEEE Access"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b14","doi-asserted-by":"crossref","first-page":"15","DOI":"10.1080\/24699322.2020.1801842","article-title":"Image-based laparoscopic tool detection and tracking using convolutional neural networks: a review of the literature","volume":"25","author":"Yang","year":"2020","journal-title":"Comput. Assist. 
Surg."},{"issue":"6","key":"10.1016\/j.compbiomed.2024.107929_b15","doi-asserted-by":"crossref","first-page":"159","DOI":"10.1049\/htl.2019.0068","article-title":"Real-time surgical instrument tracking in robot-assisted surgery using multi-domain convolutional neural network","volume":"6","author":"Qiu","year":"2019","journal-title":"Healthc. Technol. Lett."},{"issue":"sup1","key":"10.1016\/j.compbiomed.2024.107929_b16","doi-asserted-by":"crossref","first-page":"20","DOI":"10.1080\/24699322.2018.1560097","article-title":"Real-time tracking of surgical instruments based on spatio-temporal context and deep learning","volume":"24","author":"Zhao","year":"2019","journal-title":"Comput. Assist. Surg."},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b17","doi-asserted-by":"crossref","first-page":"335","DOI":"10.1109\/TMRB.2022.3170215","article-title":"Simultaneous depth estimation and surgical tool segmentation in laparoscopic images","volume":"4","author":"Huang","year":"2022","journal-title":"IEEE Trans. Med. Robotics Bionics"},{"key":"10.1016\/j.compbiomed.2024.107929_b18","series-title":"International Conference on Medical Image Computing and Computer-Assisted Intervention","first-page":"412","article-title":"Learning where to look while tracking instruments in robot-assisted surgery","author":"Islam","year":"2019"},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b19","doi-asserted-by":"crossref","first-page":"2188","DOI":"10.1109\/LRA.2019.2900854","article-title":"Real-time instrument segmentation in robotic surgery using auxiliary supervised deep adversarial learning","volume":"4","author":"Islam","year":"2019","journal-title":"IEEE Robot. Autom. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b20","series-title":"2021 IEEE EMBS International Conference on Biomedical and Health Informatics","first-page":"1","article-title":"Exploring deep learning methods for real-time surgical instrument segmentation in laparoscopy","author":"Jha","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b21","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"648","article-title":"Searching for efficient architecture for instrument segmentation in robotic surgery","volume":"vol. 12263","author":"Pakhomov","year":"2020"},{"issue":"9","key":"10.1016\/j.compbiomed.2024.107929_b22","doi-asserted-by":"crossref","first-page":"2222","DOI":"10.1007\/s11263-022-01640-6","article-title":"Surgical tool datasets for machine learning research: A survey","volume":"130","author":"Rodrigues","year":"2022","journal-title":"Int. J. Comput. Vis."},{"key":"10.1016\/j.compbiomed.2024.107929_b23","doi-asserted-by":"crossref","first-page":"633","DOI":"10.1016\/j.media.2016.09.003","article-title":"Vision-based and marker-less surgical tool detection and tracking: a review of the literature","volume":"35","author":"Bouget","year":"2017","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b24","doi-asserted-by":"crossref","DOI":"10.3389\/frobt.2022.1030846","article-title":"Surgical instrument detection and tracking technologies: Automating dataset labeling for surgical skill assessment","volume":"9","author":"Nema","year":"2022","journal-title":"Front. 
Robotics AI"},{"key":"10.1016\/j.compbiomed.2024.107929_b25","series-title":"2017 Robotic instrument segmentation challenge","author":"Allan","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b26","series-title":"Comparative evaluation of instrument segmentation and tracking methods in minimally invasive surgery","author":"Bodenstedt","year":"2018"},{"key":"10.1016\/j.compbiomed.2024.107929_b27","series-title":"2018 Robotic scene segmentation challenge","author":"Allan","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b28","series-title":"2021 27th MultiMedia Modeling International Conference","first-page":"218","article-title":"Kvasir-instrument: Diagnostic and therapeutic tool segmentation dataset in gastrointestinal endoscopy","volume":"vol. 12573","author":"Jha","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b29","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2020.101920","article-title":"Comparative validation of multi-instance instrument segmentation in endoscopy: Results of the ROBUST-MIS 2019 challenge","volume":"70","author":"Ro\u00df","year":"2021","journal-title":"Med. Image Anal."},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b30","doi-asserted-by":"crossref","first-page":"6639","DOI":"10.1109\/LRA.2020.3009073","article-title":"Towards better surgical instrument segmentation in endoscopic vision: Multi-angle feature aggregationand contour supervision","volume":"5","author":"Qin","year":"2020","journal-title":"IEEE Robot. Autom. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b31","series-title":"2020 IEEE\/RSJ International Conference on Intelligent Robots and Systems","first-page":"2914","article-title":"LC-GAN: image-to-image translation based on generative adversarial network for endoscopic images","author":"Lin","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b32","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"119","article-title":"Generating large labeled data sets for laparoscopic image processing tasks using unpaired image-to-image translation","volume":"vol. 11768","author":"Pfeiffer","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b33","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"700","article-title":"Synthetic and real inputs for tool segmentation in robotic surgery","volume":"vol. 12263","author":"Colleoni","year":"2020"},{"issue":"5","key":"10.1016\/j.compbiomed.2024.107929_b34","doi-asserted-by":"crossref","first-page":"1450","DOI":"10.1109\/TMI.2021.3057884","article-title":"Image compositing for segmentation of surgical tools without manual annotations","volume":"40","author":"Garc\u00eda-Peraza-Herrera","year":"2021","journal-title":"IEEE Trans. Med. Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b35","series-title":"Medical Image Computing and Computer-Assisted Intervention","first-page":"438","article-title":"Can masses of non-experts train highly accurate image classifiers? \u2013 A crowdsourcing approach to instrument segmentation in laparoscopic images","volume":"vol. 8674","author":"Maier-Hein","year":"2014"},{"issue":"12","key":"10.1016\/j.compbiomed.2024.107929_b36","doi-asserted-by":"crossref","first-page":"2603","DOI":"10.1109\/TMI.2015.2450831","article-title":"Detecting surgical tools by modelling local appearance and global shape","volume":"34","author":"Bouget","year":"2015","journal-title":"IEEE Trans. Med. 
Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b37","series-title":"CholecSeg8k: A semantic segmentation dataset for laparoscopic cholecystectomy based on cholec80","author":"Hong","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b38","series-title":"Surgical workflow analysis and full scene segmentation","author":"HeiSurf","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b39","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2021.101994","article-title":"Detection, segmentation, and 3D pose estimation of surgical tools using convolutional neural networks and algebraic geometry","volume":"70","author":"Hasan","year":"2021","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b40","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2021.102053","article-title":"CaDIS: Cataract dataset for surgical RGB-image segmentation","volume":"71","author":"Grammatikopoulou","year":"2021","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b41","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"486","article-title":"AutoLaparo: A new dataset of integrated multi-tasks for image-guided surgical automation in laparoscopic hysterectomy","volume":"vol. 13437","author":"Wang","year":"2022"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b42","doi-asserted-by":"crossref","first-page":"86","DOI":"10.1109\/TMI.2016.2593957","article-title":"EndoNet: A deep architecture for recognition tasks on laparoscopic videos","volume":"36","author":"Twinanda","year":"2017","journal-title":"IEEE Trans. Med. Imaging"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b43","doi-asserted-by":"crossref","first-page":"14","DOI":"10.5617\/nmi.9122","article-title":"EM-Net: An efficient M-Net for segmentation of surgical instruments in colonoscopy frames","volume":"1","author":"Banik","year":"2021","journal-title":"Nord. Mach. Intell."},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b44","doi-asserted-by":"crossref","first-page":"17","DOI":"10.5617\/nmi.9125","article-title":"Automatic polyp and instrument segmentation in MedAI-2021","volume":"1","author":"Chou","year":"2021","journal-title":"Nord. Mach. Intell."},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b45","doi-asserted-by":"crossref","first-page":"66","DOI":"10.32996\/jcsts.2022.4.1.7","article-title":"Multi-scale dilated fusion network (MSDFN) for automatic instrument segmentation","volume":"4","author":"Devi","year":"2022","journal-title":"J. Comput. Sci. Technol. Stud."},{"key":"10.1016\/j.compbiomed.2024.107929_b46","doi-asserted-by":"crossref","DOI":"10.1088\/1742-6596\/2003\/1\/012006","article-title":"Semantic segmentation of surgical instruments based on enhanced multi-scale receptive field","volume":"2003","author":"Dong","year":"2021","journal-title":"J. Phys. Conf. Ser."},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b47","doi-asserted-by":"crossref","first-page":"5","DOI":"10.5617\/nmi.9107","article-title":"Polyp and surgical instrument segmentation with double encoder-decoder networks","volume":"1","author":"Galdran","year":"2021","journal-title":"Nord. Mach. 
Intell."},{"key":"10.1016\/j.compbiomed.2024.107929_b48","series-title":"CAIBDA 2022; 2nd International Conference on Artificial Intelligence, Big Data and Algorithms","first-page":"1","article-title":"Conditional relativistic GAN for fast part segmentation of surgical instruments","author":"Guo","year":"2022"},{"key":"10.1016\/j.compbiomed.2024.107929_b49","series-title":"Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling, Vol. 11598","first-page":"55","article-title":"Segmentation and removal of surgical instruments for background SceneVisualization from endoscopic \/ laparoscopic video","author":"Hasan","year":"2021"},{"issue":"13","key":"10.1016\/j.compbiomed.2024.107929_b50","doi-asserted-by":"crossref","first-page":"3215","DOI":"10.1049\/iet-ipr.2020.0320","article-title":"Multiscale matters for part segmentation of instruments in robotic surgery","volume":"14","author":"He","year":"2020","journal-title":"IET Image Process."},{"key":"10.1016\/j.compbiomed.2024.107929_b51","series-title":"2019 41st Annual International Conference of the IEEE Engineering in Medicine and Biology Society","first-page":"7205","article-title":"U-NetPlus: A modified encoder-decoder U-net architecture for semantic and instance segmentation of surgical instruments from laparoscopic images","author":"Kamrul Hasan","year":"2019"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b52","doi-asserted-by":"crossref","first-page":"26","DOI":"10.5617\/nmi.9130","article-title":"Kvasir-instruments and polyp segmentation using UNet","volume":"1","author":"Keprate","year":"2021","journal-title":"Nord. Mach. Intell."},{"key":"10.1016\/j.compbiomed.2024.107929_b53","series-title":"Medical Image Computing and Computer-Assisted Intervention","first-page":"664","article-title":"Concurrent segmentation and localization for tracking of surgical instruments","author":"Laina","year":"2017"},{"key":"10.1016\/j.compbiomed.2024.107929_b54","series-title":"2019 41st Annual International Conference of the IEEE Engineering in Medicine and Biology Society","first-page":"5735","article-title":"RASNet: Segmentation for tracking surgical instruments in surgical videos using refined attention segmentation network","author":"Ni","year":"2019"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b55","doi-asserted-by":"crossref","first-page":"44","DOI":"10.5617\/nmi.9145","article-title":"Segmentation of polyp instruments using UNet based deep learning model","volume":"1","author":"Rajak","year":"2021","journal-title":"Nord. Mach. Intell."},{"key":"10.1016\/j.compbiomed.2024.107929_b56","doi-asserted-by":"crossref","DOI":"10.1109\/TMI.2023.3288127","article-title":"Branch aggregation attention network for robotic surgical instrument segmentation","author":"Shen","year":"2023","journal-title":"IEEE Trans. Med. Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b57","series-title":"Proceedings of the 2019 International Conference on Intelligent Medicine and Image Processing","first-page":"21","article-title":"Depth estimation for instrument segmentation from a single laparoscopic video toward laparoscopic surgery","author":"Suzuki","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b58","unstructured":"V. Vishal, C. 
Udaya Kumar, Robotic surgical instrument segmentation using Dual Global Attention Upsample, in: 2018 32nd Conference on Neural Information Processing Systems, NIPS, 2018."},{"issue":"12","key":"10.1016\/j.compbiomed.2024.107929_b59","doi-asserted-by":"crossref","first-page":"2959","DOI":"10.1049\/ipr2.12283","article-title":"PaI-Net: A modified U-Net of reducing semantic gap for surgical instrument segmentation","volume":"15","author":"Wang","year":"2021","journal-title":"IET Image Process."},{"key":"10.1016\/j.compbiomed.2024.107929_b60","series-title":"2021 43rd Annual International Conference of the IEEE Engineering in Medicine & Biology Society","first-page":"2672","article-title":"Surgical instrument segmentation based on multi-scale and multi-level feature network","author":"Wang","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b61","doi-asserted-by":"crossref","DOI":"10.1007\/s11548-023-02906-1","article-title":"CGBA-net: context-guided bidirectional attention network for surgical instrument segmentation","author":"Wang","year":"2023","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"issue":"Part","key":"10.1016\/j.compbiomed.2024.107929_b62","article-title":"An attention-guided network for surgical instrument segmentation from endoscopic images","volume":"151","author":"Yang","year":"2022","journal-title":"Comput. Biol. Med."},{"key":"10.1016\/j.compbiomed.2024.107929_b63","first-page":"1","article-title":"TMF-Net: A transformer-based multiscale fusion network for surgical instrument segmentation from endoscopic images","volume":"72","author":"Yang","year":"2022","journal-title":"IEEE Trans. Instrum. Meas."},{"key":"10.1016\/j.compbiomed.2024.107929_b64","doi-asserted-by":"crossref","DOI":"10.1016\/j.bspc.2023.104912","article-title":"MAF-Net: A multi-scale attention fusion network for automatic surgical instrument segmentation","volume":"85","author":"Yang","year":"2023","journal-title":"Biomed. Signal Process. Control"},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b65","doi-asserted-by":"crossref","first-page":"323","DOI":"10.1109\/TMRB.2023.3269856","article-title":"TMA-Net: A transformer-based multi-scale attention network for surgical instrument segmentation","volume":"5","author":"Yang","year":"2023","journal-title":"IEEE Trans. Med. Robotics Bionics"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b66","doi-asserted-by":"crossref","first-page":"47","DOI":"10.5617\/nmi.9157","article-title":"Attention U-Net ensemble for interpretable polyp and instrument segmentation","volume":"1","author":"Yeung","year":"2021","journal-title":"Nord. Mach. Intell."},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b67","doi-asserted-by":"crossref","first-page":"341","DOI":"10.1007\/s10278-019-00277-1","article-title":"A holistically-nested U-net: Surgical instrument segmentation basedon convolutional neural network","volume":"33","author":"Yu","year":"2020","journal-title":"J. Digit. 
Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b68","series-title":"2021 43rd Annual International Conference of the IEEE Engineering in Medicine & Biology Society","first-page":"3061","article-title":"Hierarchical attentional feature fusion for surgical instrument segmentation","author":"Zhou","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b69","series-title":"2020 IEEE International Conference on Robotics and Automation","first-page":"8433","article-title":"AP-MTL: Attention pruned multi-task learning model for real-time instrument detection and segmentation in robot-assisted surgery","author":"Islam","year":"2020"},{"issue":"11","key":"10.1016\/j.compbiomed.2024.107929_b70","doi-asserted-by":"crossref","first-page":"3218","DOI":"10.1109\/TMI.2022.3181229","article-title":"MSDESIS: Multi-task stereo disparity estimation and surgical instrument segmentation","volume":"41","author":"Psychogyios","year":"2022","journal-title":"IEEE Trans. Med. Imaging"},{"issue":"02n03","key":"10.1016\/j.compbiomed.2024.107929_b71","doi-asserted-by":"crossref","DOI":"10.1142\/S2424905X22410033","article-title":"Surgical tool segmentation with pose-informed morphological polar transform of endoscopic images","volume":"7","author":"Huang","year":"2022","journal-title":"J. Med. Robotics Res."},{"key":"10.1016\/j.compbiomed.2024.107929_b72","series-title":"International Conference on Image and Graphics","first-page":"618","article-title":"CEID: Benchmark dataset for designing segmentation algorithms of instruments used in colorectal endoscopy","author":"Han","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b73","series-title":"2019 IEEE\/CVF Conference on Computer Vision and Pattern Recognition Workshops","first-page":"514","article-title":"Autonomous neurosurgical instrument segmentation using end-to-end learning","author":"Kalavakonda","year":"2019"},{"issue":"5","key":"10.1016\/j.compbiomed.2024.107929_b74","doi-asserted-by":"crossref","first-page":"849","DOI":"10.1007\/s11548-021-02383-4","article-title":"Simulation-to-real domain adaptation with teacher-student learning for endoscopic instrument segmentation","volume":"16","author":"Sahu","year":"2021","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b75","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"501","article-title":"Scalable joint detection and segmentation of surgical instruments with weak supervision","volume":"vol. 12902","author":"Sanchez-Matilla","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b76","doi-asserted-by":"crossref","DOI":"10.1109\/TMI.2023.3266137","article-title":"Min-max similarity: A contrastive semi-supervised deep learning network for surgical tools segmentation","author":"Lou","year":"2023","journal-title":"IEEE Trans. Med. 
Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b77","series-title":"2019 International Conference on Robotics and Automation","first-page":"8720","article-title":"Self-supervised surgical tool segmentation using kinematic information","author":"da Costa Rocha","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b78","series-title":"2017 IEEE\/RSJ International Conference on Intelligent Robots and Systems","first-page":"5717","article-title":"ToolNet: Holistically-nested real-time segmentation of robotic surgical tools","author":"Garcia-Peraza-Herrera","year":"2017"},{"issue":"5","key":"10.1016\/j.compbiomed.2024.107929_b79","doi-asserted-by":"crossref","first-page":"1450","DOI":"10.1109\/TMI.2021.3057884","article-title":"Image compositing for segmentation of surgical tools without manual annotations","volume":"40","author":"Garc\u00eda-Peraza-Herrera","year":"2021","journal-title":"IEEE Trans. Med. Imaging"},{"issue":"8","key":"10.1016\/j.compbiomed.2024.107929_b80","doi-asserted-by":"crossref","first-page":"1257","DOI":"10.1007\/s11548-020-02185-0","article-title":"The effects of different levels of realism on the training of CNNs with only synthetic images for the semantic segmentation of robotic instruments in a head phantom","volume":"15","author":"P\u00e9rez","year":"2020","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b81","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"403","article-title":"Co-generation and segmentation for generalized surgical instrument segmentation on unlabelled data","volume":"vol. 12904","author":"Kalia","year":"2021"},{"issue":"6","key":"10.1016\/j.compbiomed.2024.107929_b82","doi-asserted-by":"crossref","first-page":"231","DOI":"10.1049\/htl.2019.0083","article-title":"Weakly supervised segmentation for real-time surgical tool tracking","volume":"6","author":"Lee","year":"2019","journal-title":"Healthc. Technol. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b83","series-title":"Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, Vol. 
10951","first-page":"461","article-title":"Segmentation of surgical instruments in laparoscopic videos: training dataset generation and deep-learning-based framework","author":"Lee","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b84","series-title":"2022 26th International Conference on Pattern Recognition","first-page":"5096","article-title":"Pixel-accurate segmentation of surgical tools based on bounding box annotations","author":"Leifman","year":"2022"},{"key":"10.1016\/j.compbiomed.2024.107929_b85","series-title":"2020 IEEE\/RSJ International Conference on Intelligent Robots and Systems","first-page":"8499","article-title":"Towards unsupervised learning for instrument segmentation in robotic surgery with cycle-consistent adversarial networks","author":"Pakhomov","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b86","series-title":"2022 IEEE 10th Jubilee International Conference on Computational Cybernetics and Cyber-Medical Systems","first-page":"000049","article-title":"Surgical tool segmentation on the JIGSAWS dataset for autonomous image-based skill assessment","author":"Papp","year":"2022"},{"key":"10.1016\/j.compbiomed.2024.107929_b87","series-title":"2019 International Conference on Robotics and Automation","first-page":"9821","article-title":"Surgical instrument segmentation for endoscopic vision with data fusion of CNN prediction and kinematic pose","author":"Qin","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b88","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"784","article-title":"Endo-Sim2Real: Consistency learning-based domain adaptation for instrument segmentation","volume":"vol. 12263","author":"Sahu","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b89","series-title":"2018 IEEE\/RSJ International Conference on Intelligent Robots and Systems","first-page":"4411","article-title":"Comparison of 3D surgical tool segmentation procedures with robot kinematics prior","author":"Su","year":"2018"},{"key":"10.1016\/j.compbiomed.2024.107929_b90","series-title":"2018 International Symposium on Medical Robotics","article-title":"Real-time vision-based surgical tool segmentation with robot kinematics prior","author":"Su","year":"2018"},{"key":"10.1016\/j.compbiomed.2024.107929_b91","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"355","article-title":"Rethinking surgical instrument segmentation: A background image can be all you need","volume":"vol. 13437","author":"Wang","year":"2022"},{"key":"10.1016\/j.compbiomed.2024.107929_b92","series-title":"Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling, Vol. 12034","first-page":"412","article-title":"A weakly supervised learning approach for surgical instrument segmentation from laparoscopic video sequences","author":"Yang","year":"2022"},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b93","doi-asserted-by":"crossref","first-page":"6266","DOI":"10.1109\/LRA.2021.3092302","article-title":"Surgical tool segmentation using generative adversarial networks with unpaired training data","volume":"6","author":"Zhang","year":"2021","journal-title":"IEEE Robotics Autom. 
Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b94","series-title":"Proceedings of the IEEE\/CVF International Conference on Computer Vision","first-page":"7015","article-title":"The surprising impact of mask-head architecture on novel class segmentation","author":"Birodkar","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b95","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"272","article-title":"Prototypical interaction graph for unsupervised domain adaptation in surgical instrument segmentation","volume":"vol. 12903","author":"Liu","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b96","series-title":"2020 IEEE International Conference on Robotics and Automation","first-page":"9939","article-title":"Attention-guided lightweight network for real-time segmentation of robotic surgical instruments","author":"Ni","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b97","series-title":"Proceedings of the AAAI Conference on Artificial Intelligence","first-page":"11782","article-title":"Pyramid attention aggregation network for semantic segmentation of surgical instruments","author":"Ni","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b98","series-title":"Proceedings of the Twenty-Ninth International Joint Conference on Artificial Intelligence","first-page":"832","article-title":"BARNet: Bilinear attention network with adaptive receptive fields for surgical instrument segmentation","author":"Ni","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b99","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2021.102310","article-title":"SurgiNet: Pyramid attention aggregation and class-wise self-distillation for surgical instrument segmentation","volume":"76","author":"Ni","year":"2022","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b100","series-title":"2021 6th International Symposium on Computer and Information Processing Technology","first-page":"744","article-title":"Surgical instrument segmentation method based on improved MobileNetV2 network","author":"Xue","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b101","series-title":"2021 20th International Conference on Advanced Robotics","first-page":"148","article-title":"Real-time segmentation of surgical tools and needle using a mobile-U-Net","author":"Andersen","year":"2021"},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b102","doi-asserted-by":"crossref","DOI":"10.1002\/rcs.2514","article-title":"Unpaired deep adversarial learning for multi-class segmentation of instruments in robot-assisted surgical videos","volume":"19","author":"Nema","year":"2023","journal-title":"Int. J. Med. Robotics Comput. Assist. Surg."},{"issue":"3","key":"10.1016\/j.compbiomed.2024.107929_b103","doi-asserted-by":"crossref","first-page":"225","DOI":"10.1080\/21681163.2020.1835560","article-title":"Synthetic laparoscopic video generation for machine learning-based surgical instrument segmentation from real laparoscopic video and virtual surgical instruments","volume":"9","author":"Ozawa","year":"2021","journal-title":"Comput. Methods Biomech. Biomed. Eng. Imaging Vis."},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b104","doi-asserted-by":"crossref","first-page":"3870","DOI":"10.1109\/LRA.2021.3066956","article-title":"Lightweight deep neural network for real-time instrument semantic segmentation in robot assisted minimally invasive surgery","volume":"6","author":"Sun","year":"2021","journal-title":"IEEE Robotics Autom. 
Lett."},{"issue":"11","key":"10.1016\/j.compbiomed.2024.107929_b105","doi-asserted-by":"crossref","first-page":"3074","DOI":"10.1109\/TMI.2022.3178549","article-title":"SSIS-Seg: Simulation-supervised image synthesis for surgical instrument segmentation","volume":"41","author":"Colleoni","year":"2022","journal-title":"IEEE Trans. Med. Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b106","doi-asserted-by":"crossref","DOI":"10.1016\/j.eswa.2022.117420","article-title":"DSRD-Net: Dual-stream residual dense network for semantic segmentation of instruments in robot-assisted surgery","volume":"202","author":"Mahmood","year":"2022","journal-title":"Expert Syst. Appl."},{"key":"10.1016\/j.compbiomed.2024.107929_b107","series-title":"2019 10th International Machine Learning in Medical Imaging (MLMI) Workshop","first-page":"566","article-title":"Deep residual learning for instrument segmentation in robotic surgery","volume":"vol. 11861","author":"Pakhomov","year":"2019"},{"issue":"3","key":"10.1016\/j.compbiomed.2024.107929_b108","doi-asserted-by":"crossref","first-page":"715","DOI":"10.1109\/TMI.2021.3121138","article-title":"Graph-based surgical instrument adaptive segmentation via domain-common knowledge","volume":"41","author":"Liu","year":"2022","journal-title":"IEEE Trans. Med. Imaging"},{"key":"10.1016\/j.compbiomed.2024.107929_b109","series-title":"2018 17th IEEE International Conference on Machine Mearning and Applications","first-page":"624","article-title":"Automatic instrument segmentation in robot-assisted surgery using deep learning","author":"Shvets","year":"2018"},{"key":"10.1016\/j.compbiomed.2024.107929_b110","series-title":"2017 IEEE International Conference on Computer Vision","first-page":"2242","article-title":"Unpaired image-to-image translation using cycle-consistent adversarial networks","author":"Zhu","year":"2017"},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b111","doi-asserted-by":"crossref","first-page":"935","DOI":"10.1109\/LRA.2021.3056354","article-title":"Robotic instrument segmentation with image-to-image translation","volume":"6","author":"Colleoni","year":"2021","journal-title":"IEEE Robotics Autom. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b112","series-title":"Computer-Assisted and Robotic Endoscopy","first-page":"90","article-title":"2D\/3D real-time tracking of surgical instruments based on endoscopic image processing","author":"Agustinos","year":"2015"},{"issue":"3","key":"10.1016\/j.compbiomed.2024.107929_b113","doi-asserted-by":"crossref","first-page":"121","DOI":"10.3109\/13645706.2016.1141101","article-title":"Automatic tracking of laparoscopic instruments for autonomous control of a cameraman robot","volume":"25","author":"Amini Khoiy","year":"2016","journal-title":"Minim. Invasive Ther. Allied Technol."},{"key":"10.1016\/j.compbiomed.2024.107929_b114","series-title":"2017 IEEE International Conference on Systems, Man, and Cybernetics","first-page":"3373","article-title":"Surgical tool segmentation using a hybrid deep CNN-RNN auto encoder-decoder","author":"Attia","year":"2017"},{"issue":"6","key":"10.1016\/j.compbiomed.2024.107929_b115","doi-asserted-by":"crossref","first-page":"1109","DOI":"10.1007\/s11548-016-1393-4","article-title":"Combined 2D and 3D tracking of surgical instrumentsfor minimally invasive and robotic-assisted surgery","volume":"11","author":"Du","year":"2016","journal-title":"Int. J. Comput. Assist. Radiol. 
Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b116","series-title":"Computer-Assisted and Robotic Endoscopy, Vol. 10170","first-page":"84","article-title":"Real-time segmentation of non-rigid surgical tools based on deep learning and tracking","author":"Garc\u00eda-Peraza-Herrera","year":"2016"},{"key":"10.1016\/j.compbiomed.2024.107929_b117","series-title":"2019 First International Multiscale Multimodal Medical Imaging (MMMI) Workshop","first-page":"93","article-title":"Automatic sinus surgery skill assessment based on instrument segmentation and tracking in endoscopic video","volume":"vol. 11977","author":"Lin","year":"2019"},{"issue":"4","key":"10.1016\/j.compbiomed.2024.107929_b118","doi-asserted-by":"crossref","first-page":"6773","DOI":"10.1109\/LRA.2021.3096156","article-title":"Multi-frame feature aggregation for real-time instrument segmentation in endoscopic video","volume":"6","author":"Lin","year":"2021","journal-title":"IEEE Robotics Autom. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b119","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"657","article-title":"Unsupervised surgical instrument segmentation via anchor generation and semantic diffusion","author":"Liu","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b120","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2023.102751","article-title":"FUN-SIS: A fully unsupervised approach for surgical instrument segmentation","volume":"85","author":"Sestini","year":"2023","journal-title":"Med. Image Anal."},{"issue":"3","key":"10.1016\/j.compbiomed.2024.107929_b121","doi-asserted-by":"crossref","first-page":"696","DOI":"10.1109\/TMRB.2022.3193420","article-title":"DRR-Net: A dense-connected residual recurrent convolutional network for surgical instrument segmentation from endoscopic images","volume":"4","author":"Yang","year":"2022","journal-title":"IEEE Trans. Med. Robotics Bionics"},{"key":"10.1016\/j.compbiomed.2024.107929_b122","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2021.102240","article-title":"Anchor-guided online meta adaptation for fast one-shot instrument segmentation from robotic surgical videos","volume":"74","author":"Zhao","year":"2021","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b123","series-title":"2021 IEEE International Conference on Robotics and Automation","first-page":"13553","article-title":"One to many: Adaptive instrument segmentation via meta learning and dynamic online adaptation in robotic surgical video","author":"Zhao","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b124","series-title":"2021 3rd International Conference on Intelligent Medicine and Image Processing","first-page":"78","article-title":"Preserving the temporal consistency of video sequences for surgical instruments segmentation","author":"Li","year":"2021"},{"issue":"8","key":"10.1016\/j.compbiomed.2024.107929_b125","doi-asserted-by":"crossref","first-page":"1335","DOI":"10.1007\/s11548-020-02214-y","article-title":"Object extraction via deep learning-based marker-free tracking framework of surgical instruments for laparoscope-holder robots","volume":"15","author":"Zhang","year":"2020","journal-title":"Int. J. Comput. Assist. Radiol. 
Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b126","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2020.101837","article-title":"ST-MTL: Spatio-temporal multitask learning model to predict scanpath while tracking instruments in robotic surgery","volume":"67","author":"Islam","year":"2021","journal-title":"Med. Image Anal."},{"key":"10.1016\/j.compbiomed.2024.107929_b127","series-title":"International Conference on Medical Image Computing and Computer-Assisted Intervention","first-page":"341","article-title":"Efficient global-local memory for real-time instrument segmentation ofrobotic surgical video","author":"Wang","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b128","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"440","article-title":"Incorporating temporal prior from motion flow for instrument segmentation in minimally invasive surgery video","volume":"11768","author":"Jin","year":"2019"},{"key":"10.1016\/j.compbiomed.2024.107929_b129","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"679","article-title":"Learning motion flows for semi-supervisedinstrument segmentation fromrobotic surgical video","author":"Zhao","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b130","series-title":"2021 Innovations in Power and Advanced Computing Technologies","article-title":"Voice based segmentation of laparoscopic surgical tools and its image enhancement","author":"Shimgekar","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b131","series-title":"Proceedings of the IEEE\/CVF Winter Conference on Applications of Computer Vision","first-page":"6191","article-title":"From forks to forceps: A new framework for instance segmentation of surgical instruments","author":"Baby","year":"2023"},{"key":"10.1016\/j.compbiomed.2024.107929_b132","series-title":"2021 43rd Annual International Conference of the IEEE Engineering in Medicine & Biology Society","first-page":"1824","article-title":"Assessing YOLACT++ for real time and robust instance segmentation of medical instruments in endoscopic procedures","author":"Cer\u00f3n","year":"2021"},{"key":"10.1016\/j.compbiomed.2024.107929_b133","doi-asserted-by":"crossref","DOI":"10.1016\/j.media.2022.102569","article-title":"Real-time instance segmentation of surgical instruments using attention and multi-scale feature fusion","volume":"81","author":"Cer\u00f3n","year":"2022","journal-title":"Med. Image Anal."},{"issue":"8","key":"10.1016\/j.compbiomed.2024.107929_b134","doi-asserted-by":"crossref","first-page":"e2226265","DOI":"10.1001\/jamanetworkopen.2022.26265","article-title":"Development and validation of a model for laparoscopic colorectal surgical instrument recognition using convolutional neural network\u2013based instance segmentation and videos of laparoscopic procedures","volume":"5","author":"Kitaguchi","year":"2022","journal-title":"JAMA Netw. Open"},{"issue":"1","key":"10.1016\/j.compbiomed.2024.107929_b135","doi-asserted-by":"crossref","first-page":"12575","DOI":"10.1038\/s41598-022-16923-8","article-title":"Limited generalizability of single deep neural network for surgical instrument segmentation in different surgical environments","volume":"12","author":"Kitaguchi","year":"2022","journal-title":"Sci. 
Rep."},{"issue":"7","key":"10.1016\/j.compbiomed.2024.107929_b136","doi-asserted-by":"crossref","first-page":"1227","DOI":"10.1007\/s11548-021-02404-2","article-title":"Mask then classify: multi-instance segmentation for surgical instruments","volume":"16","author":"Kurmann","year":"2021","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b137","series-title":"2019 International Conference on Content-Based Multimedia Indexing","first-page":"1","article-title":"Identifying surgical instruments in laparoscopy using deep learning instance segmentation","author":"Kletz","year":"2019"},{"issue":"9","key":"10.1016\/j.compbiomed.2024.107929_b138","doi-asserted-by":"crossref","first-page":"1607","DOI":"10.1007\/s11548-021-02438-6","article-title":"Accurate instance segmentation of surgical instruments in robotic surgery: model refinement and cross-dataset evaluation","volume":"16","author":"Kong","year":"2021","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b139","first-page":"1","article-title":"A parallel network utilizing local features and global representations for segmentation of surgical instruments","author":"Sun","year":"2022","journal-title":"Int. J. Comput. Assist. Radiol. Surg."},{"key":"10.1016\/j.compbiomed.2024.107929_b140","series-title":"Medical Image Computing and Computer Assisted Intervention","first-page":"595","article-title":"ISINet: An instance-based approachfor surgical instrument segmentation","author":"Gonz\u00e1lez","year":"2020"},{"issue":"6","key":"10.1016\/j.compbiomed.2024.107929_b141","doi-asserted-by":"crossref","first-page":"1964","DOI":"10.3390\/jcm9061964","article-title":"Evaluation of surgical skills during robotic surgery by deep learning-based multiple surgical instrument tracking in training and actual operations","volume":"9","author":"Lee","year":"2020","journal-title":"J. Clin. Med."},{"key":"10.1016\/j.compbiomed.2024.107929_b142","series-title":"2020 42nd Annual International Conferences of the IEEE Engineering in Medicine and Biology Society","first-page":"1658","article-title":"Surgical tool segmentation and localization using spatio-temporal deep network","author":"Kanakatte","year":"2020"},{"key":"10.1016\/j.compbiomed.2024.107929_b143","series-title":"2022 International Conference on Robotics and Automation","first-page":"11186","article-title":"TraSeTR: track-to-segment transformer with contrastive query for instance-level instrument segmentation in robotic surgery","author":"Zhao","year":"2022"},{"key":"10.1016\/j.compbiomed.2024.107929_b144","article-title":"Visual servo control of endoscope-holding robot based on multi-objective optimization: System modeling and instrument tracking","volume":"211","author":"Zhang","year":"2023","journal-title":"SSRN Electron. J."},{"key":"10.1016\/j.compbiomed.2024.107929_b145","doi-asserted-by":"crossref","DOI":"10.3389\/frobt.2022.832208","article-title":"Robotic endoscope control via autonomous instrument tracking","volume":"9","author":"Gruijthuijsen","year":"2022","journal-title":"Front. Robotics AI"},{"issue":"7","key":"10.1016\/j.compbiomed.2024.107929_b146","doi-asserted-by":"crossref","first-page":"4291","DOI":"10.1109\/LRA.2023.3281934","article-title":"An autonomous surgical instrument tracking framework with a binocular camera for a robotic flexible laparoscope","volume":"8","author":"Li","year":"2023","journal-title":"IEEE Robot. Autom. 
Lett."},{"issue":"2","key":"10.1016\/j.compbiomed.2024.107929_b147","doi-asserted-by":"crossref","first-page":"3979","DOI":"10.1109\/LRA.2021.3066834","article-title":"Deep learning assisted robotic magnetic anchored and guided endoscope for real-time instrument tracking","volume":"6","author":"Cheng","year":"2021","journal-title":"IEEE Robotics Autom. Lett."},{"key":"10.1016\/j.compbiomed.2024.107929_b148","doi-asserted-by":"crossref","first-page":"72614","DOI":"10.1109\/ACCESS.2021.3079427","article-title":"Autonomous endoscope robot positioning using instrument segmentation with virtual reality visualization","volume":"9","author":"Zinchenko","year":"2021","journal-title":"IEEE Access"},{"key":"10.1016\/j.compbiomed.2024.107929_b149","doi-asserted-by":"crossref","DOI":"10.1016\/j.compbiomed.2023.106585","article-title":"Error-correcting mean-teacher: Corrections instead of consistency-targets applied to semi-supervised medical image segmentation","volume":"154","author":"Mendel","year":"2023","journal-title":"Comput. Biol. Med."},{"issue":"7","key":"10.1016\/j.compbiomed.2024.107929_b150","doi-asserted-by":"crossref","first-page":"3209","DOI":"10.1109\/JBHI.2022.3154925","article-title":"Space squeeze reasoning and low-rank bilinear feature fusion for surgical image segmentation","volume":"26","author":"Ni","year":"2022","journal-title":"IEEE J. Biomed. Health Inf."},{"key":"10.1016\/j.compbiomed.2024.107929_b151","doi-asserted-by":"crossref","first-page":"46810","DOI":"10.1109\/ACCESS.2021.3067928","article-title":"Noisy-LSTM: Improving temporal awareness for video semantic segmentation","volume":"9","author":"Wang","year":"2021","journal-title":"IEEE Access"}],"updated-by":[{"updated":{"date-parts":[[2024,3,1]],"date-time":"2024-03-01T00:00:00Z","timestamp":1709251200000},"DOI":"10.1016\/j.compbiomed.2024.108027","type":"erratum","source":"publisher","label":"Erratum"}],"container-title":["Computers in Biology and Medicine"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0010482524000131?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0010482524000131?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2024,7,10]],"date-time":"2024-07-10T01:04:11Z","timestamp":1720573451000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S0010482524000131"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,2]]},"references-count":151,"alternative-id":["S0010482524000131"],"URL":"https:\/\/doi.org\/10.1016\/j.compbiomed.2024.107929","relation":{},"ISSN":["0010-4825"],"issn-type":[{"value":"0010-4825","type":"print"}],"subject":[],"published":{"date-parts":[[2024,2]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"Methods and datasets for segmentation of minimally invasive surgical instruments in endoscopic images and videos: A review of the state of the art","name":"articletitle","label":"Article Title"},{"value":"Computers in Biology and Medicine","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/doi.org\/10.1016\/j.compbiomed.2024.107929","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2024 The 
Authors. Published by Elsevier Ltd.","name":"copyright","label":"Copyright"}],"article-number":"107929"}}