{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,8,8]],"date-time":"2024-08-08T19:15:14Z","timestamp":1723144514604},"reference-count":44,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2016,9,1]],"date-time":"2016-09-01T00:00:00Z","timestamp":1472688000000},"content-version":"tdm","delay-in-days":0,"URL":"http:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["61533002"],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"National Science Fund","award":["61225016"]},{"DOI":"10.13039\/501100002858","name":"China Postdoctoral Science Foundation","doi-asserted-by":"publisher","award":["2015M570911"],"id":[{"id":"10.13039\/501100002858","id-type":"DOI","asserted-by":"publisher"}]},{"name":"ChaoYang District Postdoctoral Research Foundation","award":["2015ZZ-6"]},{"name":"Basic Research Foundation Project of Beijing University of Technology","award":["002000514315501"]}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Neurocomputing"],"published-print":{"date-parts":[[2016,9]]},"DOI":"10.1016\/j.neucom.2016.05.054","type":"journal-article","created":{"date-parts":[[2016,6,4]],"date-time":"2016-06-04T23:14:06Z","timestamp":1465082046000},"page":"676-683","update-policy":"http:\/\/dx.doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":33,"special_numbering":"C","title":["Mutual information based weight initialization method for sigmoidal feedforward neural networks"],"prefix":"10.1016","volume":"207","author":[{"given":"Junfei","family":"Qiao","sequence":"first","affiliation":[]},{"given":"Sanyi","family":"Li","sequence":"additional","affiliation":[]},{"given":"Wenjing","family":"Li","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.neucom.2016.05.054_bib1","doi-asserted-by":"crossref","first-page":"160","DOI":"10.1016\/j.neucom.2009.02.023","article-title":"A novel weight initialization method for the random neural network","volume":"73","author":"Timotheou","year":"2009","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neucom.2016.05.054_bib2","doi-asserted-by":"crossref","first-page":"70","DOI":"10.1016\/j.neunet.2016.01.001","article-title":"State estimation for a class of artificial neural networks with stochastically corrupted measurements under Round-Robin protocol","volume":"77","author":"Luo","year":"2016","journal-title":"Neural Netw."},{"issue":"2","key":"10.1016\/j.neucom.2016.05.054_bib3","doi-asserted-by":"crossref","first-page":"325","DOI":"10.1109\/TNN.2004.841777","article-title":"Linear-Least-Squares initialization of multilayer perceptrons through backpropagation of the desired response","volume":"16","author":"Erdogmus","year":"2005","journal-title":"IEEE Trans. Neural Netw."},{"key":"10.1016\/j.neucom.2016.05.054_bib4","doi-asserted-by":"crossref","first-page":"4458","DOI":"10.1016\/j.asoc.2011.05.017","article-title":"A self-organizing map-based initialization for hybrid training of feedforward neural networks","volume":"11","author":"Nasr","year":"2011","journal-title":"Appl. Soft Comput."},{"key":"10.1016\/j.neucom.2016.05.054_bib5","doi-asserted-by":"crossref","unstructured":"Y.K. Kim, J. B. Ra, Weight value initialization for improving training speed in the backpropagation network, in: Proceedings of the 1991 IEEE International Joint Conference on Neural Networks, IEEE, 1991, pp. 2396\u20132401.","DOI":"10.1109\/IJCNN.1991.170747"},{"issue":"2","key":"10.1016\/j.neucom.2016.05.054_bib6","doi-asserted-by":"crossref","first-page":"13","DOI":"10.1007\/BF02312350","article-title":"Determining initial weights of feedforward neural networks based on least squares method","volume":"2","author":"Yam","year":"1995","journal-title":"Neural Process. Lett."},{"key":"10.1016\/j.neucom.2016.05.054_bib7","doi-asserted-by":"crossref","first-page":"26","DOI":"10.1016\/j.chemolab.2006.01.008","article-title":"Automatical initialization of RBF neural networks","volume":"87","author":"Ros","year":"2007","journal-title":"Chemom. Intell. Lab. Syst."},{"key":"10.1016\/j.neucom.2016.05.054_bib8","doi-asserted-by":"crossref","first-page":"2491","DOI":"10.1016\/j.neucom.2010.11.033","article-title":"A new robust training algorithm for a class of single-hidden layer feedforward neural networks","volume":"74","author":"Man","year":"2011","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neucom.2016.05.054_bib9","doi-asserted-by":"crossref","first-page":"23","DOI":"10.1016\/S0925-2312(96)00058-6","article-title":"A new method in determining initial weights of feedforward neural networks for training enhancement","volume":"16","author":"Yam","year":"1997","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neucom.2016.05.054_bib10","doi-asserted-by":"crossref","unstructured":"D. Erdogmus, O. F. Romero, J. C. Principe, A. A. Betanzos, E. Castillo, R. Jenssen, Accurate initialization of neural network weights by backpropagation of the desired response, in: Proceedings of the International Joint Conference on Neural Networks, IEEE, vol. 3, 2003, pp. 2005\u20132010.","DOI":"10.1109\/IJCNN.2003.1223715"},{"key":"10.1016\/j.neucom.2016.05.054_bib11","doi-asserted-by":"crossref","unstructured":"Y. Liu, Y. W. Chen, Weight initialization of feedforward neural networks by means of partial least squares, in: Proceedings of the 2006 International Conference on Machine Learning and Cybernetics, IEEE, 2006, pp. 3119-3122.","DOI":"10.1109\/ICMLC.2006.258402"},{"key":"10.1016\/j.neucom.2016.05.054_bib12","doi-asserted-by":"crossref","first-page":"199","DOI":"10.1016\/j.jhydrol.2011.06.015","article-title":"Impact of EMD decomposition and random initialisation of weights in ANN hindcasting of daily stream flow series: an empirical examination","volume":"406","author":"Napolitano","year":"2011","journal-title":"J. Hydrol."},{"issue":"12","key":"10.1016\/j.neucom.2016.05.054_bib13","doi-asserted-by":"crossref","first-page":"2460","DOI":"10.1109\/TNN.2011.2168423","article-title":"Robust initialization of a jordan network with recurrent constrained learning","volume":"22","author":"Song","year":"2011","journal-title":"IEEE Trans. Neural Netw."},{"issue":"6","key":"10.1016\/j.neucom.2016.05.054_bib14","doi-asserted-by":"crossref","first-page":"899","DOI":"10.1109\/72.165592","article-title":"Avoiding false local initialization of minima by proper connections","volume":"3","author":"Wessels","year":"1992","journal-title":"IEEE Trans. Neural Netw."},{"issue":"2","key":"10.1016\/j.neucom.2016.05.054_bib15","doi-asserted-by":"crossref","first-page":"349","DOI":"10.1109\/72.557673","article-title":"High-order and multilayer perceptron initialization","volume":"8","author":"Thimm","year":"1997","journal-title":"IEEE Trans. Neural Netw."},{"issue":"3","key":"10.1016\/j.neucom.2016.05.054_bib16","doi-asserted-by":"crossref","first-page":"313","DOI":"10.1049\/el:19930214","article-title":"New approach to selection of initial values of weights in neural function approximation","volume":"29","author":"Osowski","year":"1993","journal-title":"Electron. Lett."},{"key":"10.1016\/j.neucom.2016.05.054_bib17","doi-asserted-by":"crossref","first-page":"119","DOI":"10.1016\/j.amc.2015.04.123","article-title":"An efficient initialization mechanism of neurons for winner takes all neural network implemented in the CMOS technology","volume":"267","author":"Talaska","year":"2015","journal-title":"Appl. Math. Comput."},{"issue":"1\u20134","key":"10.1016\/j.neucom.2016.05.054_bib18","doi-asserted-by":"crossref","first-page":"219","DOI":"10.1016\/S0925-2312(99)00127-7","article-title":"A weight initialization method for improving training speed in feedforward neural network","volume":"30","author":"Yam","year":"2000","journal-title":"Neurocomputing"},{"issue":"2","key":"10.1016\/j.neucom.2016.05.054_bib19","doi-asserted-by":"crossref","first-page":"430","DOI":"10.1109\/72.914538","article-title":"Feedforward networks training speed enhancement by optimal initialization of the synaptic coefficients","volume":"12","author":"Yam","year":"2001","journal-title":"IEEE Trans. Neural Netw."},{"issue":"4","key":"10.1016\/j.neucom.2016.05.054_bib20","doi-asserted-by":"crossref","first-page":"627","DOI":"10.1109\/72.143378","article-title":"Statistically controlled activation weight initialization (SCAWI)","volume":"3","author":"Drago","year":"1992","journal-title":"IEEE Trans. Neural Netw."},{"key":"10.1016\/j.neucom.2016.05.054_bib21","doi-asserted-by":"crossref","first-page":"17","DOI":"10.1016\/j.neunet.2014.02.006","article-title":"Solving the linear interval tolerance problem for weight initialization of neural networks","volume":"54","author":"Adam","year":"2014","journal-title":"Neural Netw."},{"issue":"9","key":"10.1016\/j.neucom.2016.05.054_bib22","doi-asserted-by":"crossref","first-page":"1564","DOI":"10.1109\/TNN.2008.2000805","article-title":"Analysis of the initial values in split-complex backpropagation algorithm","volume":"19","author":"Yang","year":"2008","journal-title":"IEEE Trans. Neural Netw."},{"issue":"8","key":"10.1016\/j.neucom.2016.05.054_bib23","doi-asserted-by":"crossref","first-page":"1729","DOI":"10.1016\/j.automatica.2012.05.034","article-title":"Identification and modeling of nonlinear dynamical systems using a novel self-organizing RBF-based approach","volume":"48","author":"Qiao","year":"2012","journal-title":"Automatica"},{"key":"10.1016\/j.neucom.2016.05.054_bib24","doi-asserted-by":"crossref","unstructured":"Z.Z. Zhang, Q.L. Chen, J.F. Qiao, A merging and splitting algorithm based on mutual information for design neural networks, in: Proceedings of the 2010 IEEE Fifth International Conference on Bio-Inspired Computing: Theories and Applications (BIC-TA), (IEEE, 2010), pp. 1268\u20131272","DOI":"10.1109\/BICTA.2010.5645080"},{"issue":"8","key":"10.1016\/j.neucom.2016.05.054_bib25","doi-asserted-by":"crossref","first-page":"1226","DOI":"10.1109\/TPAMI.2005.159","article-title":"Feature selection based on mutual information: criteria of max-dependency, max-relevance, and min-redundancy","volume":"27","author":"Peng","year":"2005","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"issue":"6","key":"10.1016\/j.neucom.2016.05.054_bib26","doi-asserted-by":"crossref","first-page":"1177","DOI":"10.1109\/TNNLS.2014.2334599","article-title":"Optimization of a multilayer neural network by using minimal redundancy maximal relevance-partial mutual information clustering with least square regression","volume":"26","author":"Chen","year":"2015","journal-title":"IEEE Trans. Neural Netw. Learn. Syst."},{"key":"10.1016\/j.neucom.2016.05.054_bib27","doi-asserted-by":"crossref","first-page":"118","DOI":"10.1016\/j.neucom.2015.12.004","article-title":"Forecasting electricity load with advanced wavelet neural networks","volume":"182","author":"Rana","year":"2016","journal-title":"Neurocomputing"},{"issue":"10\u201311","key":"10.1016\/j.neucom.2016.05.054_bib28","doi-asserted-by":"crossref","first-page":"1289","DOI":"10.1016\/j.envsoft.2008.03.008","article-title":"Application of partial mutual information variable selection to ANN forecasting of water quality in water distribution systems","volume":"23","author":"May","year":"2008","journal-title":"Environ. Model. Softw."},{"key":"10.1016\/j.neucom.2016.05.054_bib29","doi-asserted-by":"crossref","first-page":"126","DOI":"10.1016\/j.neucom.2012.05.037","article-title":"Nonlinear dynamics characterization of emotional speech","volume":"132","author":"Henr\u00edquez","year":"2014","journal-title":"Neurocomputing"},{"issue":"5","key":"10.1016\/j.neucom.2016.05.054_bib30","doi-asserted-by":"crossref","first-page":"359","DOI":"10.1016\/0893-6080(89)90020-8","article-title":"Multilayer feedforward networks are universal approximators","volume":"2","author":"Hornik","year":"1989","journal-title":"Neural Netw."},{"issue":"3","key":"10.1016\/j.neucom.2016.05.054_bib31","doi-asserted-by":"crossref","first-page":"183","DOI":"10.1016\/0893-6080(89)90003-8","article-title":"On the approximate realization of continuous mappings by neural networks","volume":"2","author":"Funahashi","year":"1989","journal-title":"Neural Netw."},{"key":"10.1016\/j.neucom.2016.05.054_bib32","doi-asserted-by":"crossref","first-page":"47","DOI":"10.1016\/j.neucom.2015.06.016","article-title":"Global mutual information-based feature selection approach using single-objective and multi-objective optimization","volume":"168","author":"Han","year":"2015","journal-title":"Neurocomputing"},{"issue":"12","key":"10.1016\/j.neucom.2016.05.054_bib33","doi-asserted-by":"crossref","first-page":"1667","DOI":"10.1109\/TPAMI.2002.1114861","article-title":"Input feature selection by mutual information based on Parzen window","volume":"24","author":"Kwak","year":"2002","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"10.1016\/j.neucom.2016.05.054_bib34","doi-asserted-by":"crossref","first-page":"250","DOI":"10.1016\/j.engappai.2014.08.011","article-title":"Joint mutual information-based input variable selection for multivariate time series modeling","volume":"37","author":"Han","year":"2015","journal-title":"Eng. Appl. Artif. Intell."},{"issue":"10","key":"10.1016\/j.neucom.2016.05.054_bib35","doi-asserted-by":"crossref","first-page":"1302","DOI":"10.1016\/j.patrec.2012.02.022","article-title":"Low bias histogram-based estimation of mutual information for feature selection","volume":"33","author":"Hacine-Gharbi","year":"2012","journal-title":"Pattern Recognit. Lett."},{"key":"10.1016\/j.neucom.2016.05.054_bib36","doi-asserted-by":"crossref","first-page":"706","DOI":"10.1016\/j.neucom.2015.05.053","article-title":"Heterogeneous feature subset selection using mutual information-based feature transformation","volume":"168","author":"Wei","year":"2015","journal-title":"Neurocomputing"},{"key":"10.1016\/j.neucom.2016.05.054_bib37","unstructured":"M. Fern\u00e1ndez-Redondo, C. Hern\u00e1ndez-Espinosa, Weight initialization methods for multilayer feedforward, in: Proceedings of the European symposium on artificial neural networks (ESANN), 2001, pp. 119\u2013124."},{"key":"10.1016\/j.neucom.2016.05.054_bib38","unstructured":"H. Shimodaira, A weight value initialization method for improved learning performance of the back propagation algorithm in neural networks, in: Proceedings of the 1994 the 6th International Conference on Tools with Artificial Intelligence, IEEE, 1994, pp. 672-675."},{"key":"10.1016\/j.neucom.2016.05.054_bib39","series-title":"An Empirical Study of Learning Speed in Back-propagation Networks, Technical Report CMU-CS-88-162, School of Computer Science","author":"Fahlman","year":"1988"},{"key":"10.1016\/j.neucom.2016.05.054_bib40","doi-asserted-by":"crossref","unstructured":"A. Mittal, P. Chandra, A. P. Singh, A statistically resilient method of weight initialization for SFANN, in: Proceedings of the International Conference on Advances in Computing, Communications and Informatics (ICACCI), IEEE, 2015, pp. 1371-1376","DOI":"10.1109\/ICACCI.2015.7275804"},{"key":"10.1016\/j.neucom.2016.05.054_bib41","doi-asserted-by":"crossref","first-page":"365","DOI":"10.1016\/j.bej.2007.01.033","article-title":"Monitoring of sequencing batch reactor for nitrogen and phosphorus removal using neural networks","volume":"35","author":"Hong","year":"2007","journal-title":"Biochem. Eng. J."},{"key":"10.1016\/j.neucom.2016.05.054_bib42","doi-asserted-by":"crossref","first-page":"465","DOI":"10.1016\/j.compchemeng.2008.10.018","article-title":"A methodology for sequencing batch reactor identification with artificial neural networks: a case study","volume":"33","author":"Aguado","year":"2009","journal-title":"Comput. Chem. Eng."},{"key":"10.1016\/j.neucom.2016.05.054_bib43","doi-asserted-by":"crossref","first-page":"111","DOI":"10.1016\/j.psep.2014.04.006","article-title":"Modeling of a sequencing batch reactor treatingmunicipal wastewater using multi-layer perceptronand radial basis function artificial neural networks","volume":"93","author":"Bagheri","year":"2015","journal-title":"Process. Saf. Environ. Prot."},{"key":"10.1016\/j.neucom.2016.05.054_bib44","doi-asserted-by":"crossref","first-page":"14","DOI":"10.1016\/j.neucom.2011.05.025","article-title":"A constructive algorithm to synthesize arbitrarily connected feedforward neural networks","volume":"75","author":"Puma-Villanueva","year":"2012","journal-title":"Neurocomputing"}],"container-title":["Neurocomputing"],"original-title":[],"language":"en","link":[{"URL":"http:\/\/api.elsevier.com\/content\/article\/PII:S092523121630491X?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"http:\/\/api.elsevier.com\/content\/article\/PII:S092523121630491X?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2024,6,17]],"date-time":"2024-06-17T10:41:25Z","timestamp":1718620885000},"score":1,"resource":{"primary":{"URL":"http:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S092523121630491X"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2016,9]]},"references-count":44,"alternative-id":["S092523121630491X"],"URL":"https:\/\/doi.org\/10.1016\/j.neucom.2016.05.054","relation":{},"ISSN":["0925-2312"],"issn-type":[{"value":"0925-2312","type":"print"}],"subject":[],"published":{"date-parts":[[2016,9]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"Mutual information based weight initialization method for sigmoidal feedforward neural networks","name":"articletitle","label":"Article Title"},{"value":"Neurocomputing","name":"journaltitle","label":"Journal Title"},{"value":"http:\/\/dx.doi.org\/10.1016\/j.neucom.2016.05.054","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2016 Elsevier B.V. All rights reserved.","name":"copyright","label":"Copyright"}]}}