{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,11,15]],"date-time":"2024-11-15T05:19:55Z","timestamp":1731647995674,"version":"3.28.0"},"publisher-location":"Cham","reference-count":24,"publisher":"Springer Nature Switzerland","isbn-type":[{"type":"print","value":"9783031550140"},{"type":"electronic","value":"9783031550157"}],"license":[{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":[],"published-print":{"date-parts":[[2024]]},"DOI":"10.1007\/978-3-031-55015-7_28","type":"book-chapter","created":{"date-parts":[[2024,3,13]],"date-time":"2024-03-13T20:57:33Z","timestamp":1710363453000},"page":"337-349","update-policy":"http:\/\/dx.doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":0,"title":["RoboCup 2023 Humanoid AdultSize Winner NimbRo: NimbRoNet3 Visual Perception and\u00a0Responsive Gait with\u00a0Waveform In-Walk 
Kicks"],"prefix":"10.1007","author":[{"given":"Dmytro","family":"Pavlichenko","sequence":"first","affiliation":[]},{"given":"Grzegorz","family":"Ficht","sequence":"additional","affiliation":[]},{"given":"Angel","family":"Villar-Corrales","sequence":"additional","affiliation":[]},{"given":"Luis","family":"Denninger","sequence":"additional","affiliation":[]},{"given":"Julia","family":"Brocker","sequence":"additional","affiliation":[]},{"given":"Tim","family":"Sinen","sequence":"additional","affiliation":[]},{"given":"Michael","family":"Schreiber","sequence":"additional","affiliation":[]},{"given":"Sven","family":"Behnke","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2024,3,14]]},"reference":[{"key":"28_CR1","doi-asserted-by":"crossref","unstructured":"Allgeuer, P., Behnke, S.: Fused angles: a representation of body orientation for balance. In: IEEE\/RSJ International Conference on Intelligent Robots and Systems (IROS), pp. 366\u2013373 (2015)","DOI":"10.1109\/IROS.2015.7353399"},{"key":"28_CR2","doi-asserted-by":"crossref","unstructured":"Allgeuer, P., Behnke, S.: Omnidirectional bipedal walking with direct fused angle feedback mechanisms. In: 16th IEEE-RAS International Conference on Humanoid Robots (Humanoids), pp. 834\u2013841 (2016)","DOI":"10.1109\/HUMANOIDS.2016.7803370"},{"key":"28_CR3","series-title":"Lecture Notes in Computer Science (Lecture Notes in Artificial Intelligence)","doi-asserted-by":"publisher","first-page":"91","DOI":"10.1007\/978-3-030-98682-7_8","volume-title":"RoboCup 2021: Robot World Cup XXIV","author":"A Amini","year":"2022","unstructured":"Amini, A., Farazi, H., Behnke, S.: Real-time pose estimation from\u00a0images for\u00a0multiple humanoid robots. In: Alami, R., Biswas, J., Cakmak, M., Obst, O. (eds.) RoboCup 2021. LNCS (LNAI), vol. 13132, pp. 91\u2013102. Springer, Cham (2022). 
https:\/\/doi.org\/10.1007\/978-3-030-98682-7_8"},{"key":"28_CR4","doi-asserted-by":"crossref","unstructured":"Behnke, S.: Online trajectory generation for omnidirectional biped walking. In: IEEE International Conference on Robotics and Automation (ICRA), pp. 1597\u20131603 (2006)","DOI":"10.1109\/ROBOT.2006.1641935"},{"key":"28_CR5","doi-asserted-by":"crossref","unstructured":"Cao, Z., Simon, T., Wei, S.E., Sheikh, Y.: Realtime multi-person 2D pose estimation using part affinity fields. In: IEEE\/CVF Conference on Computer Vision and Pattern Recognition (CVPR), pp. 7291\u20137299 (2017)","DOI":"10.1109\/CVPR.2017.143"},{"key":"28_CR6","doi-asserted-by":"crossref","unstructured":"Cheng, B., Xiao, B., Wang, J., Shi, H., Huang, T.S., Zhang, L.: HigherHRNet: scale-aware representation learning for bottom-up human pose estimation. In: IEEE\/CVF Conference on Computer Vision and Pattern Recognition (CVPR), pp. 5386\u20135395 (2020)","DOI":"10.1109\/CVPR42600.2020.00543"},{"issue":"6","key":"28_CR7","doi-asserted-by":"publisher","first-page":"1285","DOI":"10.1109\/TRO.2007.904907","volume":"23","author":"Y Choi","year":"2007","unstructured":"Choi, Y., Kim, D., Oh, Y., You, B.J.: Posture\/walking control for humanoid robot based on kinematic resolution of CoM Jacobian with embedded motion. IEEE Trans. Rob. 23(6), 1285\u20131293 (2007)","journal-title":"IEEE Trans. Rob."},{"key":"28_CR8","doi-asserted-by":"crossref","unstructured":"Duan, H., Zhao, Y., Chen, K., Lin, D., Dai, B.: Revisiting skeleton-based action recognition. In: IEEE\/CVF Conference on Computer Vision and Pattern Recognition (CVPR), pp. 2969\u20132978 (2022)","DOI":"10.1109\/CVPR52688.2022.00298"},{"key":"28_CR9","doi-asserted-by":"crossref","unstructured":"Englsberger, J., Mesesan, G., Werner, A., Ott, C.: Torque-based dynamic walking-a long way from simulation to experiment. In: IEEE International Conference on Robotics and Automation (ICRA), pp. 
440\u2013447 (2018)","DOI":"10.1109\/ICRA.2018.8462862"},{"issue":"2","key":"28_CR10","doi-asserted-by":"publisher","first-page":"495","DOI":"10.1007\/s10514-016-9574-0","volume":"41","author":"ML Felis","year":"2017","unstructured":"Felis, M.L.: RBDL: an efficient rigid-body dynamics library using recursive algorithms. Auton. Robot. 41(2), 495\u2013511 (2017)","journal-title":"Auton. Robot."},{"key":"28_CR11","doi-asserted-by":"crossref","unstructured":"Ficht, G., Allgeuer, P., Farazi, H., Behnke, S.: NimbRo-OP2: grown-up 3D printed open humanoid platform for research. In: 17th IEEE-RAS International Conference on Humanoid Robots (Humanoids), pp. 669\u2013675 (2017)","DOI":"10.1109\/HUMANOIDS.2017.8246944"},{"key":"28_CR12","doi-asserted-by":"crossref","unstructured":"Ficht, G., Behnke, S.: Fast whole-body motion control of humanoid robots with inertia constraints. In: IEEE International Conference on Robotics and Automation (ICRA), pp. 6597\u20136603 (2020)","DOI":"10.1109\/ICRA40945.2020.9197322"},{"issue":"2","key":"28_CR13","doi-asserted-by":"publisher","first-page":"201","DOI":"10.1007\/s43154-021-00050-9","volume":"2","author":"G Ficht","year":"2021","unstructured":"Ficht, G., Behnke, S.: Bipedal humanoid hardware design: a technology review. Curr. Rob. Rep. 2(2), 201\u2013210 (2021)","journal-title":"Curr. Rob. Rep."},{"key":"28_CR14","doi-asserted-by":"crossref","unstructured":"Ficht, G., Behnke, S.: Direct centroidal control for balanced humanoid locomotion. In: 25th International Conference on Climbing and Walking Robots (CLAWAR), pp. 242\u2013255 (2022)","DOI":"10.1007\/978-3-031-15226-9_24"},{"key":"28_CR15","doi-asserted-by":"crossref","unstructured":"Ficht, G., Behnke, S.: Centroidal state estimation and control for hardware-constrained humanoid robots. 
In: 22nd IEEE-RAS International Conference on Humanoid Robots (Humanoids) (2023)","DOI":"10.1109\/Humanoids57100.2023.10375142"},{"key":"28_CR16","doi-asserted-by":"crossref","unstructured":"Ficht, G., et al.: NimbRo-OP2X: adult-sized open-source 3D printed humanoid robot. In: 18th IEEE-RAS International Conference on Humanoid Robots (Humanoids), pp. 1\u20139 (2018)","DOI":"10.1109\/HUMANOIDS.2018.8625038"},{"key":"28_CR17","doi-asserted-by":"crossref","unstructured":"Ficht, G., et al.: NimbRo-OP2X: affordable adult-sized 3D-printed open-source humanoid robot for research. Int. J. Humanoid Rob. 17(05), 2050021:1\u20132050021:35 (2020)","DOI":"10.1142\/S0219843620500218"},{"key":"28_CR18","series-title":"Lecture Notes in Computer Science (Lecture Notes in Artificial Intelligence)","doi-asserted-by":"publisher","first-page":"448","DOI":"10.1007\/978-3-030-00308-1_37","volume-title":"RoboCup 2017: Robot World Cup XXI","author":"G Ficht","year":"2018","unstructured":"Ficht, G., et al.: Grown-Up NimbRo robots winning RoboCup 2017 humanoid AdultSize soccer competitions. In: Akiyama, H., Obst, O., Sammut, C., Tonidandel, F. (eds.) RoboCup 2017. LNCS (LNAI), vol. 11175, pp. 448\u2013460. Springer, Cham (2018). https:\/\/doi.org\/10.1007\/978-3-030-00308-1_37"},{"key":"28_CR19","doi-asserted-by":"crossref","unstructured":"He, K., Zhang, X., Ren, S., Sun, J.: Deep residual learning for image recognition. In: IEEE\/CVF Conference on Computer Vision and Pattern Recognition (CVPR), pp. 770\u2013778 (2016)","DOI":"10.1109\/CVPR.2016.90"},{"key":"28_CR20","doi-asserted-by":"crossref","unstructured":"Missura, M., Bennewitz, M., Behnke, S.: Capture steps: robust walking for humanoid robots. Int. J. Humanoid Rob. 16(6), 1950032:1\u20131950032:28 (2019)","DOI":"10.1142\/S0219843619500324"},{"key":"28_CR21","doi-asserted-by":"publisher","unstructured":"Pavlichenko, D., et al.: RoboCup 2022 AdultSize winner NimbRo: upgraded perception, capture steps gait and phase-based in-walk kicks. 
In: Eguchi, A., Lau, N., Paetzel-Prusmann, M., Wanichanon, T. (eds.) RoboCup 2022: Robot World Cup XXV. Lecture Notes in Computer Science, vol. 13561, pp. 240\u2013252. Springer, Heidelberg (2023). https:\/\/doi.org\/10.1007\/978-3-031-28469-4_20","DOI":"10.1007\/978-3-031-28469-4_20"},{"key":"28_CR22","series-title":"Lecture Notes in Computer Science (Lecture Notes in Artificial Intelligence)","doi-asserted-by":"publisher","first-page":"631","DOI":"10.1007\/978-3-030-35699-6_51","volume-title":"RoboCup 2019: Robot World Cup XXIII","author":"D Rodriguez","year":"2019","unstructured":"Rodriguez, D., et al.: RoboCup 2019 AdultSize winner NimbRo: deep learning perception, in-walk kick, push recovery, and team play capabilities. In: Chalup, S., Niemueller, T., Suthakorn, J., Williams, M.-A. (eds.) RoboCup 2019. LNCS (LNAI), vol. 11531, pp. 631\u2013645. Springer, Cham (2019). https:\/\/doi.org\/10.1007\/978-3-030-35699-6_51"},{"key":"28_CR23","series-title":"Lecture Notes in Computer Science","doi-asserted-by":"publisher","first-page":"234","DOI":"10.1007\/978-3-319-24574-4_28","volume-title":"Medical Image Computing and Computer-Assisted Intervention \u2013 MICCAI 2015","author":"O Ronneberger","year":"2015","unstructured":"Ronneberger, O., Fischer, P., Brox, T.: U-Net: convolutional networks for biomedical image segmentation. In: Navab, N., Hornegger, J., Wells, W.M., Frangi, A.F. (eds.) MICCAI 2015. LNCS, vol. 9351, pp. 234\u2013241. Springer, Cham (2015). https:\/\/doi.org\/10.1007\/978-3-319-24574-4_28"},{"key":"28_CR24","series-title":"Lecture Notes in Computer Science (Lecture Notes in Artificial Intelligence)","doi-asserted-by":"publisher","first-page":"207","DOI":"10.1007\/978-3-662-44468-9_19","volume-title":"RoboCup 2013: Robot World Cup XVII","author":"M Schwarz","year":"2014","unstructured":"Schwarz, M., Behnke, S.: Compliant robot behavior using servo actuator models identified by iterative learning control. 
In: Behnke, S., Veloso, M., Visser, A., Xiong, R. (eds.) RoboCup 2013. LNCS (LNAI), vol. 8371, pp. 207\u2013218. Springer, Heidelberg (2014). https:\/\/doi.org\/10.1007\/978-3-662-44468-9_19"}],"container-title":["Lecture Notes in Computer Science","RoboCup 2023: Robot World Cup XXVI"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/978-3-031-55015-7_28","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,11,14]],"date-time":"2024-11-14T07:51:32Z","timestamp":1731570692000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/978-3-031-55015-7_28"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024]]},"ISBN":["9783031550140","9783031550157"],"references-count":24,"URL":"https:\/\/doi.org\/10.1007\/978-3-031-55015-7_28","relation":{},"ISSN":["0302-9743","1611-3349"],"issn-type":[{"type":"print","value":"0302-9743"},{"type":"electronic","value":"1611-3349"}],"subject":[],"published":{"date-parts":[[2024]]},"assertion":[{"value":"14 March 2024","order":1,"name":"first_online","label":"First Online","group":{"name":"ChapterHistory","label":"Chapter History"}},{"value":"RoboCup","order":1,"name":"conference_acronym","label":"Conference Acronym","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Robot World Cup","order":2,"name":"conference_name","label":"Conference Name","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Bordeaux","order":3,"name":"conference_city","label":"Conference City","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"France","order":4,"name":"conference_country","label":"Conference Country","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"2023","order":5,"name":"conference_year","label":"Conference Year","group":{"name":"ConferenceInfo","label":"Conference 
Information"}},{"value":"4 July 2023","order":7,"name":"conference_start_date","label":"Conference Start Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"10 July 2023","order":8,"name":"conference_end_date","label":"Conference End Date","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"robocup2023","order":10,"name":"conference_id","label":"Conference ID","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"https:\/\/2023.robocup.org\/en\/home\/","order":11,"name":"conference_url","label":"Conference URL","group":{"name":"ConferenceInfo","label":"Conference Information"}},{"value":"Single-blind","order":1,"name":"type","label":"Type","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"easychair","order":2,"name":"conference_management_system","label":"Conference Management System","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"59","order":3,"name":"number_of_submissions_sent_for_review","label":"Number of Submissions Sent for Review","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"36","order":4,"name":"number_of_full_papers_accepted","label":"Number of Full Papers Accepted","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"0","order":5,"name":"number_of_short_papers_accepted","label":"Number of Short Papers Accepted","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"61% - The value is computed by the equation \"Number of Full Papers Accepted \/ Number of Submissions Sent for Review * 100\" and then rounded to a whole 
number.","order":6,"name":"acceptance_rate_of_full_papers","label":"Acceptance Rate of Full Papers","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"3","order":7,"name":"average_number_of_reviews_per_paper","label":"Average Number of Reviews per Paper","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"3","order":8,"name":"average_number_of_papers_per_reviewer","label":"Average Number of Papers per Reviewer","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}},{"value":"Yes","order":9,"name":"external_reviewers_involved","label":"External Reviewers Involved","group":{"name":"ConfEventPeerReviewInformation","label":"Peer Review Information (provided by the conference organizers)"}}]}}