{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,10,30]],"date-time":"2024-10-30T22:39:23Z","timestamp":1730327963747,"version":"3.28.0"},"publisher-location":"New York, NY, USA","reference-count":44,"publisher":"ACM","funder":[{"name":"Information Technology Lab of National Institute of Standards and Technology","award":["70NANB21H092"]}],"content-domain":{"domain":["dl.acm.org"],"crossmark-restriction":true},"short-container-title":[],"published-print":{"date-parts":[[2024,4,14]]},"DOI":"10.1145\/3644815.3644941","type":"proceedings-article","created":{"date-parts":[[2024,6,11]],"date-time":"2024-06-11T17:28:38Z","timestamp":1718126918000},"page":"140-149","update-policy":"http:\/\/dx.doi.org\/10.1145\/crossmark-policy","source":"Crossref","is-referenced-by-count":0,"title":["A Combinatorial Approach to Hyperparameter Optimization"],"prefix":"10.1145","author":[{"ORCID":"http:\/\/orcid.org\/0009-0000-0672-5107","authenticated-orcid":false,"given":"Krishna","family":"Khadka","sequence":"first","affiliation":[{"name":"University of Texas at Arlington, Arlington, Texas, USA"}]},{"ORCID":"http:\/\/orcid.org\/0000-0001-8694-4296","authenticated-orcid":false,"given":"Jaganmohan","family":"Chandrasekaran","sequence":"additional","affiliation":[{"name":"Virginia Tech, Blacksburg, Virginia, USA"}]},{"ORCID":"http:\/\/orcid.org\/0000-0002-1069-5980","authenticated-orcid":false,"given":"Yu","family":"Lei","sequence":"additional","affiliation":[{"name":"University of Texas at Arlington, Arlington, Texas, USA"}]},{"ORCID":"http:\/\/orcid.org\/0000-0002-7666-3391","authenticated-orcid":false,"given":"Raghu N.","family":"Kacker","sequence":"additional","affiliation":[{"name":"National Institute of Standards and Technology, Gaithersburg, Maryland, USA"}]},{"ORCID":"http:\/\/orcid.org\/0000-0003-0050-1596","authenticated-orcid":false,"given":"D. Richard","family":"Kuhn","sequence":"additional","affiliation":[{"name":"National Institute of Standards and Technology, Gaithersburg, Maryland, USA"}]}],"member":"320","published-online":{"date-parts":[[2024,6,11]]},"reference":[{"volume-title":"Hyperparameter Tuning for Machine and Deep Learning with R: A Practical Guide","author":"Bartz-Beielstein Thomas","unstructured":"Thomas Bartz-Beielstein, Sowmya Chandrasekaran, and Frederik Rehbach. 2023. Case study II: tuning of gradient boosting (xgboost). In Hyperparameter Tuning for Machine and Deep Learning with R: A Practical Guide. Springer, 221--234.","key":"e_1_3_2_1_1_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_2_1","DOI":"10.24432\/C5XW20"},{"volume-title":"Gradient-based optimization of hyperparameters. Neural computation 12, 8","year":"2000","author":"Bengio Yoshua","unstructured":"Yoshua Bengio. 2000. Gradient-based optimization of hyperparameters. Neural computation 12, 8 (2000), 1889--1900.","key":"e_1_3_2_1_3_1"},{"volume-title":"Algorithms for hyper-parameter optimization. Advances in neural information processing systems 24","year":"2011","author":"Bergstra James","unstructured":"James Bergstra, R\u00e9mi Bardenet, Yoshua Bengio, and Bal\u00e1zs K\u00e9gl. 2011. Algorithms for hyper-parameter optimization. Advances in neural information processing systems 24 (2011).","key":"e_1_3_2_1_4_1"},{"key":"e_1_3_2_1_5_1","article-title":"Random search for hyper-parameter optimization","volume":"13","author":"Bergstra James","year":"2012","unstructured":"James Bergstra and Yoshua Bengio. 2012. Random search for hyper-parameter optimization. 
Journal of machine learning research 13, 2 (2012).","journal-title":"Journal of machine learning research"},{"unstructured":"Iain Johnstone Robert Tibshirani Bradley Efron Trevor Hastie. 2004. Diabetes Data Analysis - SAS and R Code with Description. https:\/\/www4.stat.ncsu.edu\/~boos\/var.select\/diabetes.html. Accessed: [2023].","key":"e_1_3_2_1_6_1"},{"volume-title":"Random forests. Machine learning 45","year":"2001","author":"Breiman Leo","unstructured":"Leo Breiman. 2001. Random forests. Machine learning 45 (2001), 5--32.","key":"e_1_3_2_1_7_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_8_1","DOI":"10.1145\/2939672.2939785"},{"volume-title":"Hyperparameter search in machine learning. arXiv preprint arXiv:1502.02127","year":"2015","author":"Claesen Marc","unstructured":"Marc Claesen and Bart De Moor. 2015. Hyperparameter search in machine learning. arXiv preprint arXiv:1502.02127 (2015).","key":"e_1_3_2_1_9_1"},{"volume-title":"Easy hyperparameter search using optunity. arXiv preprint arXiv:1412.1114","year":"2014","author":"Claesen Marc","unstructured":"Marc Claesen, Jaak Simm, Dusan Popovic, Yves Moreau, and Bart De Moor. 2014. Easy hyperparameter search using optunity. arXiv preprint arXiv:1412.1114 (2014).","key":"e_1_3_2_1_10_1"},{"key":"e_1_3_2_1_11_1","volume-title":"NIPS workshop on Bayesian Optimization in Theory and Practice","volume":"10","author":"Eggensperger Katharina","year":"2013","unstructured":"Katharina Eggensperger, Matthias Feurer, Frank Hutter, James Bergstra, Jasper Snoek, Holger Hoos, Kevin Leyton-Brown, et al. 2013. Towards an empirical foundation for assessing bayesian optimization of hyperparameters. In NIPS workshop on Bayesian Optimization in Theory and Practice, Vol. 10."},{"key":"e_1_3_2_1_12_1","first-page":"2825","article-title":"Scikit-learn: Machine learning in Python","volume":"12","author":"Fabian Pedregosa","year":"2011","unstructured":"Pedregosa Fabian. 2011. Scikit-learn: Machine learning in Python. Journal of machine learning research 12 (2011), 2825.","journal-title":"Journal of machine learning research"},{"volume-title":"Generative adversarial nets. Advances in neural information processing systems 27","year":"2014","author":"Goodfellow Ian","unstructured":"Ian Goodfellow, Jean Pouget-Abadie, Mehdi Mirza, Bing Xu, David Warde-Farley, Sherjil Ozair, Aaron Courville, and Yoshua Bengio. 2014. Generative adversarial nets. Advances in neural information processing systems 27 (2014).","key":"e_1_3_2_1_13_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_14_1","DOI":"10.1016\/j.procs.2023.01.104"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_15_1","DOI":"10.1080\/10556788.2020.1808977"},{"volume-title":"Hyperparameter optimization: A spectral approach. arXiv preprint arXiv:1706.00764","year":"2017","author":"Hazan Elad","unstructured":"Elad Hazan, Adam Klivans, and Yang Yuan. 2017. Hyperparameter optimization: A spectral approach. arXiv preprint arXiv:1706.00764 (2017).","key":"e_1_3_2_1_16_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_17_1","DOI":"10.1109\/CVPR.2016.90"},{"volume-title":"Gaussian processes for big data. arXiv preprint arXiv:1309.6835","year":"2013","author":"Hensman James","unstructured":"James Hensman, Nicolo Fusi, and Neil D Lawrence. 2013. Gaussian processes for big data. arXiv preprint arXiv:1309.6835 (2013).","key":"e_1_3_2_1_18_1"},{"volume-title":"Automated machine learning: methods, systems, challenges","author":"Hutter Frank","unstructured":"Frank Hutter, Lars Kotthoff, and Joaquin Vanschoren. 2019. 
Automated machine learning: methods, systems, challenges. Springer Nature.","key":"e_1_3_2_1_19_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_20_1","DOI":"10.1007\/s13218-015-0381-0"},{"volume-title":"A Simple and Fast Baseline for Tuning Large XGBoost Models. arXiv preprint arXiv:2111.06924","year":"2021","author":"Kapoor Sanyam","unstructured":"Sanyam Kapoor and Valerio Perrone. 2021. A Simple and Fast Baseline for Tuning Large XGBoost Models. arXiv preprint arXiv:2111.06924 (2021).","key":"e_1_3_2_1_21_1"},{"volume-title":"Introduction to combinatorial testing","author":"Kuhn D Richard","unstructured":"D Richard Kuhn, Raghu N Kacker, and Yu Lei. 2013. Introduction to combinatorial testing. CRC press.","key":"e_1_3_2_1_22_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_23_1","DOI":"10.1109\/MITP.2008.54"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_24_1","DOI":"10.1007\/978-3-031-21750-0_23"},{"volume-title":"Deep learning. nature 521, 7553","year":"2015","author":"LeCun Yann","unstructured":"Yann LeCun, Yoshua Bengio, and Geoffrey Hinton. 2015. Deep learning. nature 521, 7553 (2015), 436--444.","key":"e_1_3_2_1_25_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_26_1","DOI":"10.1109\/ECBS.2007.47"},{"volume-title":"Hyperband: A novel bandit-based approach to hyperparameter optimization. The journal of machine learning research 18, 1","year":"2017","author":"Li Lisha","unstructured":"Lisha Li, Kevin Jamieson, Giulia DeSalvo, Afshin Rostamizadeh, and Ameet Talwalkar. 2017. Hyperband: A novel bandit-based approach to hyperparameter optimization. The journal of machine learning research 18, 1 (2017), 6765--6816.","key":"e_1_3_2_1_27_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_28_1","DOI":"10.1007\/s13721-016-0125-6"},{"volume-title":"International conference on machine learning. PMLR, 2113--2122","year":"2015","author":"Maclaurin Dougal","unstructured":"Dougal Maclaurin, David Duvenaud, and Ryan Adams. 2015. Gradient-based hyperparameter optimization through reversible learning. In International conference on machine learning. PMLR, 2113--2122.","key":"e_1_3_2_1_29_1"},{"volume-title":"Sparse spatial autoregressions. https:\/\/www.dcc.fc.up.pt\/~ltorgo\/Regression\/cal_housing.html. (1997). Accessed","year":"2023","author":"Kelley Pace R.","unstructured":"R. Kelley Pace and Ronald Barry. 1997. Sparse spatial autoregressions. https:\/\/www.dcc.fc.up.pt\/~ltorgo\/Regression\/cal_housing.html. (1997). Accessed: 2023.","key":"e_1_3_2_1_30_1"},{"unstructured":"A Paszke S Gross F Massa A Lerer Jea PyTorch Bradbury G Chanan T Killeen Z Lin N Gimelshein L Antiga et al. [n. d.]. An imperative style high-performance deep learning library. Adv. Neural Inf. Process. Syst 32 ([n. d.]) 8026.","key":"e_1_3_2_1_31_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_32_1","DOI":"10.5555\/3322706.3361994"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_33_1","DOI":"10.1002\/widm.1301"},{"volume-title":"GWO-XGBoost and BO-XGBoost models to predict blast-induced ground vibration. Engineering with Computers","year":"2021","author":"Qiu Yingui","unstructured":"Yingui Qiu, Jian Zhou, Manoj Khandelwal, Haitao Yang, Peixi Yang, and Chuanqi Li. 2021. Performance evaluation of hybrid WOA-XGBoost, GWO-XGBoost and BO-XGBoost models to predict blast-induced ground vibration. 
Engineering with Computers (2021), 1--18.","key":"e_1_3_2_1_34_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_35_1","DOI":"10.3390\/jpm11100978"},{"volume-title":"Rajat Kumar Pal, and Samir Kumar Bandyopadhyay","year":"2023","author":"Roy Sunita","unstructured":"Sunita Roy, Ranjan Mehera, Rajat Kumar Pal, and Samir Kumar Bandyopadhyay. 2023. Hyperparameter Optimization for Deep NeuralNetwork Models: A Comprehensive Study onMethods and Techniques. (2023).","key":"e_1_3_2_1_36_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_37_1","DOI":"10.1177\/1536867X20909688"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_38_1","DOI":"10.1109\/ICCMC.2019.8819775"},{"volume-title":"Practical bayesian optimization of machine learning algorithms. Advances in neural information processing systems 25","year":"2012","author":"Snoek Jasper","unstructured":"Jasper Snoek, Hugo Larochelle, and Ryan P Adams. 2012. Practical bayesian optimization of machine learning algorithms. Advances in neural information processing systems 25 (2012).","key":"e_1_3_2_1_39_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_40_1","DOI":"10.1038\/s41598-023-32027-3"},{"volume-title":"Breast cancer Wisconsin (diagnostic) UCI machine learning repository","year":"1995","author":"Wolberg W","unstructured":"W Wolberg, W Street, and O Mangasarian. 1995. Breast cancer Wisconsin (diagnostic) UCI machine learning repository. Irvine, CA, USA (1995).","key":"e_1_3_2_1_41_1"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_42_1","DOI":"10.1016\/j.neucom.2020.07.061"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_43_1","DOI":"10.1109\/ICST.2013.52"},{"doi-asserted-by":"publisher","key":"e_1_3_2_1_44_1","DOI":"10.1613\/jair.1.11854"}],"event":{"sponsor":["SIGSOFT ACM Special Interest Group on Software Engineering"],"acronym":"CAIN 2024","name":"CAIN 2024: IEEE\/ACM 3rd International Conference on AI Engineering - Software Engineering for AI","location":"Lisbon Portugal"},"container-title":["Proceedings of the IEEE\/ACM 3rd International Conference on AI Engineering - Software Engineering for AI"],"original-title":[],"link":[{"URL":"https:\/\/dl.acm.org\/doi\/pdf\/10.1145\/3644815.3644941","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,6,12]],"date-time":"2024-06-12T17:48:20Z","timestamp":1718214500000},"score":1,"resource":{"primary":{"URL":"https:\/\/dl.acm.org\/doi\/10.1145\/3644815.3644941"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,4,14]]},"references-count":44,"alternative-id":["10.1145\/3644815.3644941","10.1145\/3644815"],"URL":"https:\/\/doi.org\/10.1145\/3644815.3644941","relation":{},"subject":[],"published":{"date-parts":[[2024,4,14]]},"assertion":[{"value":"2024-06-11","order":2,"name":"published","label":"Published","group":{"name":"publication_history","label":"Publication History"}}]}}
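
The record above is a Crossref REST API "work" message. The following is a minimal sketch, not part of the record itself, showing how a record with this structure can be fetched and summarized from the public Crossref API using the DOI that appears in the metadata (10.1145/3644815.3644941); the field names used ("message", "title", "author", "container-title", "DOI") are taken directly from the record, while the helper names and the use of the requests library are illustrative assumptions.

import requests

DOI = "10.1145/3644815.3644941"  # DOI taken from the record above
URL = f"https://api.crossref.org/works/{DOI}"

def fetch_work(url: str) -> dict:
    # The Crossref works endpoint returns JSON shaped like the record above:
    # {"status": "ok", "message-type": "work", "message": {...}}
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    return resp.json()

def summarize(record: dict) -> str:
    # Pull a few fields present in the record: authors, title, venue, DOI.
    msg = record["message"]
    authors = ", ".join(
        f'{a.get("given", "")} {a.get("family", "")}'.strip()
        for a in msg.get("author", [])
    )
    title = msg.get("title", [""])[0]
    venue = msg.get("container-title", [""])[0]
    return f"{authors}. {title}. {venue}. https://doi.org/{msg['DOI']}"

if __name__ == "__main__":
    work = fetch_work(URL)
    print(summarize(work))

Running this sketch would print a one-line citation built from the same fields shown in the record (authors Khadka, Chandrasekaran, Lei, Kacker, and Kuhn; the CAIN 2024 proceedings as the container title; and the DOI URL).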