@Techreport{MR11tech,
  AUTHOR      = {Iveta Mr{\'a}zov{\'a} and Zuzana Reitermanov{\'a}},
  TITLE       = {A New Sensitivity-Based Feature Selection Technique for Feed-Forward Neural Networks That Improves Generalization},
  INSTITUTION = {Charles University, Faculty of Mathematics and Physics},
  YEAR        = {2011},
  ADDRESS     = {Prague},
  URL         = {http://popelka.ms.mff.cuni.cz/cerno/files/mrazova_reitermanova_feature_selection.pdf},
  ABSTRACT    = {Multi-layer neural networks of the back-propagation type have already become a well-established tool used successfully in various application areas. Efficient solutions to the complex tasks dealt with today require sufficient generalization capabilities of the formed networks and an easy interpretation of their function. For this reason, we introduce here a new feature selection technique called SCGSIR, inspired by the fast method of scaled conjugate gradients (SCG) and by sensitivity analysis. An enforced internal knowledge representation supports an easy interpretation of the formed network structure. Network sensitivity inhibited during training also supports successful pruning of input neurons and optimization of the network structure. Experiments performed so far on the problem of binary addition and on real data obtained from the World Bank yield promising results: the new technique outperforms the reference techniques regarding both its ability to find networks with optimum architectures and the generalization capabilities of the trained networks.},
  KEYWORDS    = {neural networks, feature selection},
}