@article{Asaad_Ali_2019,
  title        = {Back Propagation Neural Network ({BPNN}) and Sigmoid Activation Function in Multi-Layer Networks},
  author       = {Asaad, Renas Rajab and Ali, Rasan Ismail},
  journal      = {Academic Journal of Nawroz University},
  volume       = {8},
  number       = {4},
  pages        = {216--221},
  year         = {2019},
  month        = nov,
  doi          = {10.25007/ajnu.v8n4a464},
  url          = {https://journals.nawroz.edu.krd/index.php/ajnu/article/view/464},
  abstractNote = {Back propagation neural network are known for computing the problems that cannot
                  easily be computed (huge datasets analysis or training) in artificial neural
                  networks. The main idea of this paper is to implement XOR logic gate by ANNs using
                  back propagation neural network for back propagation of errors, and sigmoid
                  activation function. This neural network to map non-linear threshold gate. The
                  non-linear used to classify binary inputs ($x_1$, $x_2$) and passing it through
                  hidden layer for computing coefficient\_errors and gradient\_errors
                  ($C_{errors}$, $G_{errors}$), after computing errors by
                  ($e_i = \mathrm{Output}_{desired} - \mathrm{Output}_{actual}$) the weights and
                  thetas ($\Delta W_{ji} = (\alpha)(X_j)(g_i)$,
                  $\Delta\theta_j = (\alpha)(-1)(g_i)$) are changing according to errors. Sigmoid
                  activation function is $\mathrm{sig}(x) = 1/(1+e^{-x})$ and Derivation of sigmoid
                  is $\mathrm{dsig}(x) = \mathrm{sig}(x)(1-\mathrm{sig}(x))$. The $\mathrm{sig}(x)$
                  and $\mathrm{dsig}(x)$ is between 1 to 0.},
}