<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.0 20120330//EN" "JATS-journalpublishing1.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article"><front><journal-meta><journal-id journal-id-type="publisher-id">INFORMATICA</journal-id><journal-title-group><journal-title>Informatica</journal-title></journal-title-group><issn pub-type="epub">0868-4952</issn><issn pub-type="ppub">0868-4952</issn><publisher><publisher-name>VU</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">INF3301</article-id><article-id pub-id-type="doi">10.3233/INF-1992-3301</article-id><article-categories><subj-group subj-group-type="heading"><subject>Research article</subject></subj-group></article-categories><title-group><article-title>On training sample size and complexity of artificial neural net classifier</article-title></title-group><contrib-group><contrib contrib-type="Author"><name><surname>Jain</surname><given-names>Anil</given-names></name><xref ref-type="aff" rid="j_INFORMATICA_aff_000"/></contrib><contrib contrib-type="Author"><name><surname>Raudys</surname><given-names>Šarūnas</given-names></name><xref ref-type="aff" rid="j_INFORMATICA_aff_001"/></contrib><aff id="j_INFORMATICA_aff_000">Department of Computer Science, Michigan State University, East Lansing, MI 48824, USA</aff><aff id="j_INFORMATICA_aff_001">Department of Data Analysis, Institute of Mathematics and Informatics, 2600 Vilnius, Akademijos St.4, Lithuania</aff></contrib-group><pub-date pub-type="epub"><day>01</day><month>01</month><year>1992</year></pub-date><volume>3</volume><issue>3</issue><fpage>301</fpage><lpage>337</lpage><abstract><p>Small training sample effects common in statistical classification and artificial neural network classifier design are discussed. 
A review of known small sample results is presented, and peaking phenomena related to the increase in the number of features and the number of neurons are discussed.</p></abstract><kwd-group><label>Keywords</label><kwd>feed forward neural nets</kwd><kwd>classification error</kwd><kwd>training samples</kwd><kwd>dimensionality</kwd><kwd>complexity</kwd><kwd>peaking phenomenon</kwd></kwd-group></article-meta></front></article>