<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="research-article" dtd-version="1.2" xml:lang="en"><front><journal-meta><journal-id journal-id-type="publisher-id">Discrete and Continuous Models and Applied Computational Science</journal-id><journal-title-group><journal-title xml:lang="en">Discrete and Continuous Models and Applied Computational Science</journal-title><trans-title-group xml:lang="ru"><trans-title>Discrete and Continuous Models and Applied Computational Science</trans-title></trans-title-group></journal-title-group><issn publication-format="print">2658-4670</issn><issn publication-format="electronic">2658-7149</issn><publisher><publisher-name xml:lang="en">Peoples' Friendship University of Russia named after Patrice Lumumba (RUDN University)</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">22913</article-id><article-id pub-id-type="doi">10.22363/2658-4670-2019-27-4-305-315</article-id><article-categories><subj-group subj-group-type="toc-heading" xml:lang="en"><subject>Computer Science</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="ru"><subject>Информатика и вычислительная техника</subject></subj-group><subj-group subj-group-type="article-type"><subject>Research Article</subject></subj-group></article-categories><title-group><article-title xml:lang="en">Review and comparative analysis of machine learning libraries for machine learning</article-title><trans-title-group xml:lang="ru"><trans-title>Обзор и сравнительный анализ библиотек машинного обучения для построения нейронных сетей</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author"><name-alternatives><name xml:lang="en"><surname>Gevorkyan</surname><given-names>Migran N.</given-names></name><name 
xml:lang="ru"><surname>Геворкян</surname><given-names>М. Н.</given-names></name></name-alternatives><bio xml:lang="en"><p>Candidate of Physical and Mathematical Sciences, assistant professor of Department of Applied Probability and Informatics</p></bio><bio xml:lang="ru"><p>Кафедра прикладной информатики и теории вероятностей</p></bio><email>gevorkyan-mn@rudn.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><name-alternatives><name xml:lang="en"><surname>Demidova</surname><given-names>Anastasia V.</given-names></name><name xml:lang="ru"><surname>Демидова</surname><given-names>А. В.</given-names></name></name-alternatives><bio xml:lang="en"><p>Candidate of Physical and Mathematical Sciences, assistant professor of Department of Applied Probability and Informatics</p></bio><bio xml:lang="ru"><p>Кафедра прикладной информатики и теории вероятностей</p></bio><email>demidova-av@rudn.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><name-alternatives><name xml:lang="en"><surname>Demidova</surname><given-names>Tatiana S.</given-names></name><name xml:lang="ru"><surname>Демидова</surname><given-names>Т. С.</given-names></name></name-alternatives><bio xml:lang="en"><p>student of Department of Applied Probability and Informatics</p></bio><bio xml:lang="ru"><p>Кафедра прикладной информатики и теории вероятностей</p></bio><email>1032152607@pfur.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><name-alternatives><name xml:lang="en"><surname>Sobolev</surname><given-names>Anton A.</given-names></name><name xml:lang="ru"><surname>Соболев</surname><given-names>А. 
А.</given-names></name></name-alternatives><bio xml:lang="en"><p>student of Department of Applied Probability and Informatics</p></bio><bio xml:lang="ru"><p>Кафедра прикладной информатики и теории вероятностей</p></bio><email>1032152618@pfur.ru</email><xref ref-type="aff" rid="aff1"/></contrib></contrib-group><aff-alternatives id="aff1"><aff><institution xml:lang="en">Peoples’ Friendship University of Russia</institution></aff><aff><institution xml:lang="ru">Российский университет дружбы народов</institution></aff></aff-alternatives><pub-date date-type="pub" iso-8601-date="2019-12-15" publication-format="electronic"><day>15</day><month>12</month><year>2019</year></pub-date><volume>27</volume><issue>4</issue><issue-title xml:lang="en">VOL 27, NO4 (2019)</issue-title><issue-title xml:lang="ru">ТОМ 27, №4 (2019)</issue-title><fpage>305</fpage><lpage>315</lpage><history><date date-type="received" iso-8601-date="2020-02-19"><day>19</day><month>02</month><year>2020</year></date></history><permissions><copyright-statement xml:lang="en">Copyright © 2019, Gevorkyan M.N., Demidova A.V., Demidova T.S., Sobolev A.A.</copyright-statement><copyright-statement xml:lang="ru">Copyright © 2019, Геворкян М.Н., Демидова А.В., Демидова Т.С., Соболев А.А.</copyright-statement><copyright-year>2019</copyright-year><copyright-holder xml:lang="en">Gevorkyan M.N., Demidova A.V., Demidova T.S., Sobolev A.A.</copyright-holder><copyright-holder xml:lang="ru">Геворкян М.Н., Демидова А.В., Демидова Т.С., Соболев А.А.</copyright-holder><ali:free_to_read xmlns:ali="http://www.niso.org/schemas/ali/1.0/"/><license><ali:license_ref xmlns:ali="http://www.niso.org/schemas/ali/1.0/">http://creativecommons.org/licenses/by/4.0</ali:license_ref></license></permissions><self-uri xlink:href="https://journals.rudn.ru/miph/article/view/22913">https://journals.rudn.ru/miph/article/view/22913</self-uri><abstract xml:lang="en"><p>The article is an overview. 
We carry out the comparison of actual machine learning libraries that can be used for neural networks development. The first part of the article gives a brief description of TensorFlow, PyTorch, Theano, Keras, SciKit Learn libraries, SciPy library stack. An overview of the scope of these libraries and the main technical characteristics, such as performance, supported programming languages, the current state of development is given. In the second part of the article, a comparison of five libraries is carried out on the example of a multilayer perceptron, which is applied to the problem of recognizing handwritten digits. This problem is well known and well suited for testing different types of neural networks. The training time is compared depending on the number of epochs and the accuracy of the classifier. The results of the comparison are presented in the form of graphs of training time and accuracy depending on the number of epochs and in tabular form.</p></abstract><trans-abstract xml:lang="ru"><p>Статья носит обзорный характер. В ней проведено сравнение актуальных библиотек машинного обучения, которые могут быть использованы для построения нейронных сетей. В первой части статьи даётся краткое описание библиотек TensorFlow, PyTorch, Theano, Keras, SciKit Learn, стека библиотек SciPy (NumPy, SciPy, Pandas, Matplotlib, Jupyter). Делается обзор области применения перечисленных библиотек и основных технических характеристик, таких как быстродействие, поддерживаемые языки программирования, текущее состояние разработки. Среди рассматриваемых библиотек только PyTorch и TensorFlow непосредственно конкурируют друг с другом. Остальные библиотеки взаимодополняют друг друга и часто используются совместно при построении различных моделей машинного обучения. Во второй части статьи проводится сравнение пяти библиотек на примере многослойного перцептрона, который применяется к задаче распознавания рукописных цифр. 
Данная задача хорошо разработана и является модельной для тестирования различных реализаций нейронных сетей. Сравнивается время обучения в зависимости от количества эпох и точности работы классификатора. Результаты сравнения представлены в виде графиков времени обучения и точности в зависимости от количества эпох и в табличном виде.</p></trans-abstract><kwd-group xml:lang="en"><kwd>machine learning</kwd><kwd>neural networks</kwd><kwd>MNIST</kwd><kwd>TensorFlow</kwd><kwd>PyTorch</kwd></kwd-group><kwd-group xml:lang="ru"><kwd>машинное обучение</kwd><kwd>нейронные сети</kwd></kwd-group><funding-group><funding-statement xml:lang="en">The publication was prepared with the support of the “RUDN University Program 5-100”.</funding-statement><funding-statement xml:lang="ru">Публикация подготовлена при поддержке Программы РУДН «5-100».</funding-statement></funding-group></article-meta></front><body></body><back><ref-list><ref id="B1"><label>1.</label><mixed-citation>G. Van Rossum and F. L. Drake Jr, Python tutorial. Centrum voor Wiskunde en Informatica Amsterdam, The Netherlands, 1995.</mixed-citation></ref><ref id="B2"><label>2.</label><mixed-citation>T. Kluyver et al., “Jupyter Notebooks - a publishing format for reproducible computational workflows,” in Positioning and Power in Academic Publishing: Players, Agents and Agendas, F. Loizides and B. Schmidt, Eds., IOS Press, 2016, pp. 87-90. DOI: 10.3233/978-1-61499-649-1-87.</mixed-citation></ref><ref id="B3"><label>3.</label><mixed-citation>J. D. Hunter, “Matplotlib: A 2D graphics environment,” Computing in Science &amp; Engineering, vol. 9, no. 3, pp. 90-95, 2007. DOI: 10.1109/MCSE.2007.55.</mixed-citation></ref><ref id="B4"><label>4.</label><mixed-citation>F. Pérez and B. E. Granger, “IPython: a system for interactive scientific computing,” Computing in Science and Engineering, vol. 9, no. 3, pp. 21-29, May 2007. 
DOI: 10.1109/MCSE.2007.53.</mixed-citation></ref><ref id="B5"><label>5.</label><mixed-citation>S. Behnel, R. Bradshaw, C. Citro, L. Dalcin, D. S. Seljebotn, and K. Smith, “Cython: the best of both worlds,” Computing in Science &amp; Engineering, vol. 13, no. 2, pp. 31-39, Mar. 2011. DOI: 10.1109/MCSE.2010.118.</mixed-citation></ref><ref id="B6"><label>6.</label><mixed-citation>S. van der Walt, S. C. Colbert, and G. Varoquaux, “The NumPy array: a structure for efficient numerical computation,” Computing in Science Engineering, vol. 13, no. 2, pp. 22-30, Mar. 2011. DOI: 10.1109/MCSE.2011.37.</mixed-citation></ref><ref id="B7"><label>7.</label><mixed-citation>E. Jones, T. Oliphant, P. Peterson, et al. (2001-). SciPy: open source scientific tools for Python, [Online]. Available: http://www.scipy.org/.</mixed-citation></ref><ref id="B8"><label>8.</label><mixed-citation>W. McKinney, “Data structures for statistical computing in Python,” in Proceedings of the 9th Python in Science Conference, S. van der Walt and J. Millman, Eds., 2010, pp. 51-56.</mixed-citation></ref><ref id="B9"><label>9.</label><mixed-citation>Martín Abadi et al. (2015). TensorFlow: large-scale machine learning on heterogeneous systems. Software available from tensorflow.org, [Online]. Available: http://tensorflow.org/.</mixed-citation></ref><ref id="B10"><label>10.</label><mixed-citation>(2019). TensorFlow official repository, [Online]. Available: https://github.com/tensorflow/tensorflow.</mixed-citation></ref><ref id="B11"><label>11.</label><mixed-citation>A. Paszke, S. Gross, S. Chintala, G. Chanan, E. Yang, Z. DeVito, Z. Lin, A. Desmaison, L. Antiga, and A. Lerer, “Automatic differentiation in PyTorch,” in 31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA, 2017.</mixed-citation></ref><ref id="B12"><label>12.</label><mixed-citation>(2019). Torch official repository, [Online]. Available: https://github.com/torch/torch7.</mixed-citation></ref><ref id="B13"><label>13.</label><mixed-citation>Theano Development Team, “Theano: a Python framework for fast computation of mathematical expressions,” May 2016. eprint: arXiv:1605.02688.</mixed-citation></ref><ref id="B14"><label>14.</label><mixed-citation>F. Chollet. (2019). Keras, [Online]. Available: https://keras.io/.</mixed-citation></ref><ref id="B15"><label>15.</label><mixed-citation>(2019). CNTK official repository, [Online]. Available: https://github.com/Microsoft/cntk.</mixed-citation></ref><ref id="B16"><label>16.</label><mixed-citation>F. Pedregosa et al., “Scikit-learn: machine learning in Python,” Journal of Machine Learning Research, vol. 12, pp. 2825-2830, 2011.</mixed-citation></ref><ref id="B17"><label>17.</label><mixed-citation>(2019). MNIST handwritten digit database, Yann LeCun, Corinna Cortes and Chris Burges, [Online]. Available: http://yann.lecun.com/exdb/mnist/.</mixed-citation></ref><ref id="B18"><label>18.</label><mixed-citation>F. Rosenblatt, “The perceptron: a probabilistic model for information storage and organization in the brain,” Psychological review, vol. 65, no. 6, 1958. DOI: 10.1037/h0042519.</mixed-citation></ref><ref id="B19"><label>19.</label><mixed-citation>X. Glorot, A. Bordes, and Y. Bengio, “Deep Sparse Rectifier Neural Networks,” in Proceedings of the Fourteenth International Conference on Artificial Intelligence and Statistics, G. Gordon, D. Dunson, and M. Dudík, Eds., ser. Proceedings of Machine Learning Research, vol. 15, Fort Lauderdale, FL, USA: PMLR, Nov. 2011, pp. 315-323.</mixed-citation></ref><ref id="B20"><label>20.</label><mixed-citation>T. Zhang, “Solving large scale linear prediction problems using stochastic gradient descent algorithms,” in Proceedings of the Twenty-first International Conference on Machine Learning, ser. ICML ’04, Banff, Alberta, Canada: ACM, 2004. DOI: 10.1145/1015330.1015332.</mixed-citation></ref></ref-list></back></article>
