{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,9,13]],"date-time":"2024-09-13T09:12:57Z","timestamp":1726218777668},"reference-count":26,"publisher":"Springer Science and Business Media LLC","issue":"7","license":[{"start":{"date-parts":[[2020,5,5]],"date-time":"2020-05-05T00:00:00Z","timestamp":1588636800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"},{"start":{"date-parts":[[2020,5,5]],"date-time":"2020-05-05T00:00:00Z","timestamp":1588636800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Int J CARS"],"published-print":{"date-parts":[[2020,7]]},"DOI":"10.1007\/s11548-020-02151-w","type":"journal-article","created":{"date-parts":[[2020,5,5]],"date-time":"2020-05-05T18:02:53Z","timestamp":1588701773000},"page":"1177-1186","update-policy":"http:\/\/dx.doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":11,"title":["Detecting the occluding contours of the uterus to automatise augmented laparoscopy: score, loss, dataset, evaluation and user study"],"prefix":"10.1007","volume":"15","author":[{"given":"Tom","family":"Fran\u00e7ois","sequence":"first","affiliation":[]},{"given":"Lilian","family":"Calvet","sequence":"additional","affiliation":[]},{"given":"Sabrina","family":"Madad\u00a0Zadeh","sequence":"additional","affiliation":[]},{"given":"Damien","family":"Saboul","sequence":"additional","affiliation":[]},{"given":"Simone","family":"Gasparini","sequence":"additional","affiliation":[]},{"given":"Prasad","family":"Samarakoon","sequence":"additional","affiliation":[]},{"given":"Nicolas","family":"Bourdel","sequence":"additional","affiliation":[]},{"given":"Adrien","family":"Bartoli","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2020,5,5]]},"reference":[{"key":"2151_CR1","doi-asserted-by":"crossref","unstructured":"Acuna D, Kar A, Fidler S (2019) Devil is in the edges: learning semantic boundaries from noisy annotations. In: CVPR","DOI":"10.1109\/CVPR.2019.01133"},{"key":"2151_CR2","doi-asserted-by":"crossref","unstructured":"Adagolodjo Y, Trivisonne R, Haouchine N, Cotin S, Courtecuisse H (2017) Silhouette-based pose estimation for deformable organs application to surgical augmented reality. In: IROS","DOI":"10.1109\/IROS.2017.8202205"},{"issue":"6","key":"2151_CR3","doi-asserted-by":"publisher","first-page":"679","DOI":"10.1109\/TPAMI.1986.4767851","volume":"8","author":"JF Canny","year":"1986","unstructured":"Canny JF (1986) A computational approach to edge detection. TPAMI 8(6):679\u2013698","journal-title":"TPAMI"},{"key":"2151_CR4","doi-asserted-by":"crossref","unstructured":"Collins T, Pizarro D, Bartoli A, Canis M, Bourdel N (2014) Computer-assisted laparoscopic myomectomy by augmenting the uterus with pre-operative mri data. In: ISMAR","DOI":"10.1109\/ISMAR.2014.6948434"},{"key":"2151_CR5","doi-asserted-by":"crossref","unstructured":"Deng R, Shen C, Liu S, Wang H, Liu X (2018) Learning to predict crisp boundaries. In: ECCV","DOI":"10.1007\/978-3-030-01231-1_35"},{"key":"2151_CR6","unstructured":"Dubuisson M, Jain A (1994) A modified hausdorff distance for object matching. 
In: ICPR"},{"key":"2151_CR7","unstructured":"Grard M, Chen L, Dellandr\u00e9a E (2019) Bicameral structuring and synthetic imagery for jointly predicting instance boundaries and nearby occlusions from a single image. arXiv"},{"key":"2151_CR8","doi-asserted-by":"crossref","unstructured":"Hariharan B, Arbel\u00e1ez P, Bourdev L, Maji S, Malik J (2011) Semantic contours from inverse detectors. In: ICCV","DOI":"10.1109\/ICCV.2011.6126343"},{"key":"2151_CR9","unstructured":"ISCAS: Miccai endoscopic vision challenges (2019). https:\/\/endovis.grand-challenge.org"},{"key":"2151_CR10","doi-asserted-by":"crossref","unstructured":"Koo B, Ozgur E, Roy BL, Buc E, Bartoli A (2017) Deformable registration of a preoperative 3d liver volume to a laparoscopy image using contour and shading cues. In: MICCAI","DOI":"10.1007\/978-3-319-66182-7_38"},{"key":"2151_CR11","doi-asserted-by":"crossref","unstructured":"Leibetseder A, Petscharnig S, Primus MJ, Kletz S, M\u00fcnzer B, Schoeffmann K, Keckstein J (2018) Lapgyn4: a dataset for 4 automatic content analysis problems in the domain of laparoscopic gynecology. In: Proceedings of the 9th ACM multimedia systems conference, MMSys, pp 357\u2013362","DOI":"10.1145\/3204949.3208127"},{"issue":"8","key":"2151_CR12","doi-asserted-by":"publisher","first-page":"1939","DOI":"10.1109\/TPAMI.2018.2878849","volume":"41","author":"Y Liu","year":"2019","unstructured":"Liu Y, Cheng M, Hu X, Bian J, Zhang L, Bai X, Tang J (2019) Richer convolutional features for edge detection. TPAMI 41(8):1939\u20131946","journal-title":"TPAMI"},{"issue":"4","key":"2151_CR13","doi-asserted-by":"publisher","first-page":"1125","DOI":"10.1016\/j.patcog.2012.10.027","volume":"46","author":"C Lopez-Molina","year":"2013","unstructured":"Lopez-Molina C, Baets BD, Sola HB (2013) Quantitative error measures for edge detection. Pattern Recognit. 46(4):1125\u20131139","journal-title":"Pattern Recognit."},{"issue":"6","key":"2151_CR14","doi-asserted-by":"publisher","first-page":"74","DOI":"10.3390\/jimaging4060074","volume":"4","author":"B Magnier","year":"2018","unstructured":"Magnier B, Abdulrahman H, Montesinos P (2018) A review of supervised edge detection evaluation methods and an objective comparison of filtering gradient computations using hysteresis thresholds. J. Imaging 4(6):74","journal-title":"J. Imaging"},{"issue":"5","key":"2151_CR15","doi-asserted-by":"publisher","first-page":"530","DOI":"10.1109\/TPAMI.2004.1273918","volume":"26","author":"DR Martin","year":"2004","unstructured":"Martin DR, Fowlkes CC, Malik J (2004) Learning to detect natural image boundaries using local brightness, color, and texture cues. PAMI 26(5):530\u2013549","journal-title":"PAMI"},{"key":"2151_CR16","doi-asserted-by":"crossref","unstructured":"Ramamonjisoa M, Lepetit V (2019) Sharpnet: Fast and accurate recovery of occluding contours in monocular depth estimation. arXiv","DOI":"10.1109\/ICCVW.2019.00266"},{"key":"2151_CR17","doi-asserted-by":"crossref","unstructured":"Ronneberger O, Fischer P, Brox T (2015) U-net: Convolutional networks for biomedical image segmentation. In: MICCAI","DOI":"10.1007\/978-3-319-24574-4_28"},{"key":"2151_CR18","doi-asserted-by":"crossref","unstructured":"Stauder R, Ostler D, Kranzfelder M, Koller S, Feu\u00dfner H, Navab N (2016) The TUM lapchole dataset for the M2CAI 2016 workflow challenge. arXiv","DOI":"10.1515\/iss-2017-0035"},{"key":"2151_CR19","unstructured":"Supervisely. 
https:\/\/supervise.ly\/"},{"issue":"6","key":"2151_CR20","doi-asserted-by":"publisher","first-page":"615","DOI":"10.1159\/000490563","volume":"83","author":"P T\u00f6r\u00f6k","year":"2018","unstructured":"T\u00f6r\u00f6k P, Harangi B (2018) Digital image analysis with fully connected convolutional neural network to facilitate hysteroscopic fibroid resection. Gynecol. Obstet. Investig. 83(6):615\u2013619","journal-title":"Gynecol. Obstet. Investig."},{"issue":"1","key":"2151_CR21","doi-asserted-by":"publisher","first-page":"86","DOI":"10.1109\/TMI.2016.2593957","volume":"36","author":"AP Twinanda","year":"2017","unstructured":"Twinanda AP, Shehata S, Mutter D, Marescaux J, de Mathelin M, Padoy N (2017) Endonet: a deep architecture for recognition tasks on laparoscopic videos. IEEE Trans Med Imaging 36(1):86\u201397","journal-title":"IEEE Trans Med Imaging"},{"key":"2151_CR22","unstructured":"Wang G, Wang X, Li FWB, Liang X (2018) Doobnet: deep object occlusion boundary detection from an image. In: ACCV"},{"key":"2151_CR23","doi-asserted-by":"crossref","unstructured":"Wang P, Yuille AL (2016) DOC: deep occlusion estimation from a single image. In: ECCV","DOI":"10.1007\/978-3-319-46448-0_33"},{"key":"2151_CR24","doi-asserted-by":"crossref","unstructured":"Yang J, Price BL, Cohen S, Lee H, Yang M (2016) Object contour detection with a fully convolutional encoder-decoder network. In: CVPR","DOI":"10.1109\/CVPR.2016.28"},{"key":"2151_CR25","doi-asserted-by":"crossref","unstructured":"Yu Z, Liu W, Zou Y, Feng C, Ramalingam S, Kumar BVKV, Kautz J (2018) Simultaneous edge alignment and learning. In: ECCV","DOI":"10.1007\/978-3-030-01219-9_24"},{"key":"2151_CR26","doi-asserted-by":"crossref","unstructured":"Yu Z, Feng C, Liu M, Ramalingam S (2017) Casenet: deep category-aware semantic edge detection. 
In: CVPR","DOI":"10.1109\/CVPR.2017.191"}],"container-title":["International Journal of Computer Assisted Radiology and Surgery"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11548-020-02151-w.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11548-020-02151-w\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11548-020-02151-w.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2021,5,4]],"date-time":"2021-05-04T23:11:20Z","timestamp":1620169880000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11548-020-02151-w"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2020,5,5]]},"references-count":26,"journal-issue":{"issue":"7","published-print":{"date-parts":[[2020,7]]}},"alternative-id":["2151"],"URL":"https:\/\/doi.org\/10.1007\/s11548-020-02151-w","relation":{},"ISSN":["1861-6410","1861-6429"],"issn-type":[{"value":"1861-6410","type":"print"},{"value":"1861-6429","type":"electronic"}],"subject":[],"published":{"date-parts":[[2020,5,5]]},"assertion":[{"value":"21 November 2019","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"31 March 2020","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"5 May 2020","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Compliance with ethical standards"}},{"value":"The authors declare that they have no conflict of interest.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}},{"value":"Informed consent was obtained from all individual participants included in the study. This article does not contain any studies with animals performed by any of the authors.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Informed consent"}}]}}