Conclude AI chapter
@@ -561,7 +561,7 @@
   year = 2010,
   series = {Prentice Hall Series in Artificial Intelligence},
   edition = {3rd},
-  pages = {38-45, 55-56}
+  pages = {38-45, 48-49, 55-56}
 }

 @article{McCarthy_Minsky_Rochester_Shannon_2006,
@@ -599,3 +599,66 @@
   edition = 2,
   pages = {39-40}
 }
+
+@Inbook{Zou2009,
+  author = "Zou, Jinming and Han, Yi and So, Sung-Sau",
+  editor = "Livingstone, David J.",
+  title = "Overview of Artificial Neural Networks",
+  bookTitle = "Artificial Neural Networks: Methods and Applications",
+  year = 2009,
+  publisher = "Humana Press",
+  address = "Totowa, NJ",
+  pages = "14--22",
+  abstract = "The artificial neural network (ANN), or simply neural
+    network, is a machine learning method evolved from the idea of
+    simulating the human brain. The data explosion in modern drug
+    discovery research requires sophisticated analysis methods to
+    uncover the hidden causal relationships between single or
+    multiple responses and a large set of properties. The ANN is
+    one of many versatile tools to meet the demand in drug
+    discovery modeling. Compared to a traditional regression
+    approach, the ANN is capable of modeling complex nonlinear
+    relationships. The ANN also has excellent fault tolerance and
+    is fast and highly scalable with parallel processing. This
+    chapter introduces the background of ANN development and
+    outlines the basic concepts crucially important for
+    understanding more sophisticated ANN. Several commonly used
+    learning methods and network setups are discussed briefly at
+    the end of the chapter.",
+  isbn = "978-1-60327-101-1",
+  doi = "10.1007/978-1-60327-101-1_2",
+  url = "https://doi.org/10.1007/978-1-60327-101-1_2"
+}
+
+@book{book:2610592,
+  title = {Principles of artificial neural networks},
+  author = {Graupe, Daniel},
+  publisher = {World Scientific Publ},
+  isbn = {9789814522731,9814522732},
+  year = 2013,
+  edition = {3. ed},
+  pages = {28-31}
+}
+
+@Article{Cireşan2010,
+  author = {Cire{\c{s}}an, Dan Claudiu and Meier, Ueli and Gambardella,
+    Luca Maria and Schmidhuber, J{\"u}rgen},
+  title = {Deep, Big, Simple Neural Nets for Handwritten Digit
+    Recognition},
+  journal = {Neural Computation},
+  year = 2010,
+  month = {Dec},
+  day = 01,
+  volume = 22,
+  number = 12,
+  pages = {3207-3220},
+  abstract = {Good old online backpropagation for plain multilayer
+    perceptrons yields a very low 0.35{\%} error rate on the MNIST
+    handwritten digits benchmark. All we need to achieve this best
+    result so far are many hidden layers, many neurons per layer,
+    numerous deformed training images to avoid overfitting, and
+    graphics cards to greatly speed up learning.},
+  issn = {0899-7667},
+  doi = {10.1162/NECO_a_00052},
+  url = {https://doi.org/10.1162/NECO_a_00052}
+}
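
For reference, a minimal sketch of how the chapter text can pick up the newly added entries, assuming a plain BibTeX workflow and a bibliography file named references.bib (the file name and document class are assumptions; neither appears in this diff):

\documentclass{article}

\begin{document}

% Cite the entries added in this commit; the keys match the diff above.
% Note: the Cireşan2010 key contains a non-ASCII character, which works
% with a UTF-8 aware toolchain (e.g. recent pdflatex, or biblatex with
% biber); with classic bibtex the key may need to be renamed to ASCII.
Artificial neural networks are introduced by Zou, Han, and
So~\cite{Zou2009} and treated at book length by
Graupe~\cite{book:2610592}; the MNIST handwritten-digit result is due
to Cire\c{s}an et al.~\cite{Cireşan2010}.

\bibliographystyle{plain}
\bibliography{references} % assumed name of the .bib file in this commit

\end{document}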