<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="research-article" dtd-version="1.2" xml:lang="en"><front><journal-meta><journal-id journal-id-type="publisher-id">Discrete and Continuous Models and Applied Computational Science</journal-id><journal-title-group><journal-title xml:lang="en">Discrete and Continuous Models and Applied Computational Science</journal-title><trans-title-group xml:lang="ru"><trans-title>Discrete and Continuous Models and Applied Computational Science</trans-title></trans-title-group></journal-title-group><issn publication-format="print">2658-4670</issn><issn publication-format="electronic">2658-7149</issn><publisher><publisher-name xml:lang="en">Peoples' Friendship University of Russia named after Patrice Lumumba (RUDN University)</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">46735</article-id><article-id pub-id-type="doi">10.22363/2658-4670-2025-33-3-242-259</article-id><article-id pub-id-type="edn">HFFBMV</article-id><article-categories><subj-group subj-group-type="toc-heading" xml:lang="en"><subject>Computer Science</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="ru"><subject>Информатика и вычислительная техника</subject></subj-group><subj-group subj-group-type="article-type"><subject>Research Article</subject></subj-group></article-categories><title-group><article-title xml:lang="en">Construction and modeling of the operation of elements of computing technology on fast neurons</article-title><trans-title-group xml:lang="ru"><trans-title>Построение и моделирование работы элементов вычислительной техники на быстрых нейронах</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author"><contrib-id 
contrib-id-type="orcid">https://orcid.org/0000-0001-5117-384X</contrib-id><contrib-id contrib-id-type="scopus">55570238100</contrib-id><name-alternatives><name xml:lang="en"><surname>Khachumov</surname><given-names>Mikhail V.</given-names></name><name xml:lang="ru"><surname>Хачумов</surname><given-names>М. В.</given-names></name></name-alternatives><bio xml:lang="en"><p>Candidate of Physical and Mathematical Sciences, Senior Researcher</p></bio><email>khmike@inbox.ru</email><xref ref-type="aff" rid="aff1"/><xref ref-type="aff" rid="aff2"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-7735-6820</contrib-id><contrib-id contrib-id-type="scopus">57202835704</contrib-id><name-alternatives><name xml:lang="en"><surname>Emelyanova</surname><given-names>Yuliya G.</given-names></name><name xml:lang="ru"><surname>Емельянова</surname><given-names>Ю. Г.</given-names></name></name-alternatives><bio xml:lang="en"><p>Candidate of Technical Sciences, Senior Researcher</p></bio><email>yuliya.emelyanowa2015@yandex.ru</email><xref ref-type="aff" rid="aff3"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-9577-1438</contrib-id><contrib-id contrib-id-type="scopus">56042383100</contrib-id><name-alternatives><name xml:lang="en"><surname>Khachumov</surname><given-names>Vyacheslav M.</given-names></name><name xml:lang="ru"><surname>Хачумов</surname><given-names>В. 
М.</given-names></name></name-alternatives><bio xml:lang="en"><p>Doctor of Technical Sciences, Chief Researcher</p></bio><email>vmh48@mail.ru</email><xref ref-type="aff" rid="aff1"/><xref ref-type="aff" rid="aff2"/><xref ref-type="aff" rid="aff3"/></contrib></contrib-group><aff-alternatives id="aff1"><aff><institution xml:lang="en">RUDN University</institution></aff><aff><institution xml:lang="ru">Российский университет дружбы народов</institution></aff></aff-alternatives><aff-alternatives id="aff2"><aff><institution xml:lang="en">Federal Research Center “Computer Science and Control” of Russian Academy of Sciences</institution></aff><aff><institution xml:lang="ru">Федеральный исследовательский центр «Информатика и управление» Российской академии наук</institution></aff></aff-alternatives><aff-alternatives id="aff3"><aff><institution xml:lang="en">Ailamazyan Program Systems Institute of RAS</institution></aff><aff><institution xml:lang="ru">ИПС им. А. К. Айламазяна РАН</institution></aff></aff-alternatives><pub-date date-type="pub" iso-8601-date="2025-10-15" publication-format="electronic"><day>15</day><month>10</month><year>2025</year></pub-date><volume>33</volume><issue>3</issue><issue-title xml:lang="en">VOL 33, NO3 (2025)</issue-title><issue-title xml:lang="ru">ТОМ 33, №3 (2025)</issue-title><fpage>242</fpage><lpage>259</lpage><history><date date-type="received" iso-8601-date="2025-10-28"><day>28</day><month>10</month><year>2025</year></date></history><permissions><copyright-statement xml:lang="en">Copyright © 2025, Khachumov M.V., Emelyanova Y.G., Khachumov V.M.</copyright-statement><copyright-statement xml:lang="ru">Copyright © 2025, Хачумов М.В., Емельянова Ю.Г., Хачумов В.М.</copyright-statement><copyright-year>2025</copyright-year><copyright-holder xml:lang="en">Khachumov M.V., Emelyanova Y.G., Khachumov V.M.</copyright-holder><copyright-holder xml:lang="ru">Хачумов М.В., Емельянова Ю.Г., Хачумов В.М.</copyright-holder><ali:free_to_read 
xmlns:ali="http://www.niso.org/schemas/ali/1.0/"/><license><ali:license_ref xmlns:ali="http://www.niso.org/schemas/ali/1.0/">https://creativecommons.org/licenses/by-nc/4.0</ali:license_ref></license></permissions><self-uri xlink:href="https://journals.rudn.ru/miph/article/view/46735">https://journals.rudn.ru/miph/article/view/46735</self-uri><abstract xml:lang="en"><p>The article is devoted to the construction of fast neurons and neural networks for the implementation of two complete logical bases and modeling of computing devices on their basis. The main idea is to form a fast activation function based on semi-parabolas and its variations that have effective computational support. The constructed activation functions meet the basic requirements that allow configuring logical circuits using the backpropagation method. The main result is obtaining complete logical bases that open the way to constructing arbitrary logical functions. Models of such elements as a trigger, a half adder, and an adder, which form the basis of various specific computing devices, are presented and tested. It is shown that the new activation functions allow obtaining fast solutions with a slight decrease in quality compared to reference outputs. To standardize the outputs, it is proposed to combine the constructed circuits with a unit jump activation function.</p></abstract><trans-abstract xml:lang="ru"><p>Статья посвящена построению быстрых нейронов и нейронных сетей для реализации двух полных логических базисов и моделирования на их основе устройств вычислительной техники. Основная идея заключается в формировании быстрой функции активации на основе полупарабол и её вариаций, имеющих эффективную вычислительную поддержку. Построенные функции активации отвечают основным требованиям, позволяющим настраивать логические схемы методом обратного распространения ошибки. Основным результатом является получение полных логических базисов, открывающих путь к построению произвольных логических функций. 
Представлены и протестированы модели таких элементов, как триггер, полусумматор, сумматор, составляющих основу различных конкретных вычислительных устройств. Показано, что новые функции активации позволяют получать быстрые решения при небольшом снижении качества по сравнению с эталонными выходами. Для стандартизации выходов предлагается комбинировать построенные схемы с функцией активации типа единичный скачок.</p></trans-abstract><kwd-group xml:lang="en"><kwd>new activation functions</kwd><kwd>parabola</kwd><kwd>full logical basis</kwd><kwd>element models</kwd><kwd>performance</kwd><kwd>experimental studies</kwd></kwd-group><kwd-group xml:lang="ru"><kwd>новые функции активации</kwd><kwd>парабола</kwd><kwd>полный логический базис</kwd><kwd>модели элементов</kwd><kwd>быстродействие</kwd><kwd>экспериментальные исследования</kwd></kwd-group><funding-group><award-group><funding-source><institution-wrap><institution xml:lang="en">This research was funded by Russian Science Foundation, grant number 25-21-00222 (https://rscf.ru/en/project/25-21-00222/).</institution></institution-wrap><institution-wrap><institution xml:lang="ru">This research was funded by Russian Science Foundation, grant number 25-21-00222 (https://rscf.ru/en/project/25-21-00222/).</institution></institution-wrap></funding-source></award-group></funding-group></article-meta><fn-group/></front><body></body><back><ref-list><ref id="B1"><label>1.</label><mixed-citation>Limonova, E., Nikolaev, D. &amp; Alfonso, D. Bipolar morphological neural networks: Gate-efficient architecture for confined environment, 573-580. doi:10.1109/NAECON46414.2019.9058018 (2019).</mixed-citation></ref><ref id="B2"><label>2.</label><mixed-citation>Limonova, E., Nikolaev, D. &amp; Arlazarov, V. Bipolar morphological U-Net for document binarization, 1-9. doi:10.1117/12.2587174 (2021).</mixed-citation></ref><ref id="B3"><label>3.</label><mixed-citation>Limonova, E., Nikolaev, D., Alfonso, D. &amp; Arlazarov, V. 
ResNet-like architecture with low hardware requirements, 6204-6211. doi:10.1109/ICPR48806.2021.9413186 (2021).</mixed-citation></ref><ref id="B4"><label>4.</label><mixed-citation>Dubey, S., Singh, S. &amp; Chaudhuri, B. Activation Functions in Deep Learning: A Comprehensive Survey and Benchmark. Neurocomputing 503, 1-18. doi:10.1016/j.neucom.2022.06.111 (2022).</mixed-citation></ref><ref id="B5"><label>5.</label><mixed-citation>Feng, J. &amp; Lu, S. Performance Analysis of Various Activation Functions in Artificial Neural Networks. Journal of Physics Conference Series, 1-7. doi:10.1088/1742-6596/1237/2/02203 (2019).</mixed-citation></ref><ref id="B6"><label>6.</label><mixed-citation>Akgül, I. Activation functions used in artificial neural networks. In book: Academic Studies in Engineering, 41-58 (Oct. 2023).</mixed-citation></ref><ref id="B7"><label>7.</label><mixed-citation>Arce, F., Zamora, E., Humberto, S. &amp; Barrón, R. Differential evolution training algorithm for dendrite morphological neural networks. Applied Soft Computing 68, 303-313. doi:10.1016/j.asoc.2018.03.033 (2018).</mixed-citation></ref><ref id="B8"><label>8.</label><mixed-citation>Dimitriadis, N. &amp; Maragos, P. Advances in the training, pruning and enforcement of shape constraints of morphological neural networks using tropical algebra, 3825-3829. doi:10.48550/arXiv.2011.07643 (2021).</mixed-citation></ref><ref id="B9"><label>9.</label><mixed-citation>Limonova, E., Nikolaev, D., Alfonso, D. &amp; Arlazarov, V. Bipolar morphological neural networks: gate-efficient architecture for computer vision. IEEE Access 9, 97569-97581. doi:10.1109/ACCESS.2021.3094484 (2021).</mixed-citation></ref><ref id="B10"><label>10.</label><mixed-citation>Galushkin, A., Sudarikov, V. &amp; Shabanov, E. Neuromathematics: the methods of solving problems on neurocomputers. 2, 1179-1188. doi:10.1109/RNNS.1992.268515 (1992).</mixed-citation></ref><ref id="B11"><label>11.</label><mixed-citation>Khachumov, M. 
&amp; Emelyanova, Y. Parabola as an Activation Function of Artificial Neural Networks. Scientific and Technical Information Processing 51, 471-477. doi:10.3103/S0147688224700382 (2024).</mixed-citation></ref><ref id="B12"><label>12.</label><mixed-citation>Khachumov, M., Emelyanova, Y. &amp; Khachumov, V. Parabola-Based Artificial Neural Network Activation Functions, 249-254. doi:10.1109/RusAutoCon58002.2023.10272855 (Sept. 2023).</mixed-citation></ref><ref id="B13"><label>13.</label><mixed-citation>Hanche-Olsen, H. &amp; Holden, H. The Kolmogorov-Riesz compactness theorem. 28, 385-394. doi:10.1016/j.exmath.2010.03.001 (June 2009).</mixed-citation></ref><ref id="B14"><label>14.</label><mixed-citation>Nabavinejad, S., Reda, S. &amp; Ebrahimi, M. Coordinated Batching and DVFS for DNN Inference on GPU Accelerators. 33, 1-12. doi:10.1109/TPDS.2022.3144614 (Oct. 2022).</mixed-citation></ref><ref id="B15"><label>15.</label><mixed-citation>Trusov, A., Limonova, E., Slugin, D., Nikolaev, D. &amp; Arlazarov, V. Fast Implementation of 4-bit Convolutional Neural Networks for Mobile Devices, 9897-9903. doi:10.1109/ICPR48806.2021.9412841 (Sept. 2020).</mixed-citation></ref><ref id="B16"><label>16.</label><mixed-citation>Shashev, D. &amp; Shatravin, V. Implementation of the sigmoid activation function using the reconfigurable computing environments. Russian. Tomsk State University Journal of Control and Computer Science, 117-127. doi:10.17223/19988605/61/12 (2022).</mixed-citation></ref><ref id="B17"><label>17.</label><mixed-citation>Hyyro, H. &amp; Navarro, G. Bit-Parallel Computation of Local Similarity Score Matrices with Unitary Weights. International Journal of Foundations of Computer Science. doi:10.1142/S0129054106004443.</mixed-citation></ref><ref id="B18"><label>18.</label><mixed-citation>Hyyro, H. 
Explaining and extending the bit-parallel approximate string matching algorithm of Myers (2001).</mixed-citation></ref><ref id="B19"><label>19.</label><mixed-citation>Hyyro, H. &amp; Navarro, G. Faster bit-parallel approximate string matching, 203-224 (2002).</mixed-citation></ref><ref id="B20"><label>20.</label><mixed-citation>Hyyro, H. &amp; Navarro, G. Bit-parallel witnesses and their applications to approximate string matching. Algorithmica 41, 203-231 (2005).</mixed-citation></ref><ref id="B21"><label>21.</label><mixed-citation>Zakharov, A. &amp; Khachumov, V. Bit-parallel Representation of Activation Functions for Fast Neural Networks. 2, 568-571 (2014).</mixed-citation></ref><ref id="B22"><label>22.</label><mixed-citation>Volder, J. The Birth of Cordic. The Journal of VLSI Signal Processing-Systems for Signal, Image, and Video Technology 25, 101-105. doi:10.1023/A:1008110704586 (2000).</mixed-citation></ref><ref id="B23"><label>23.</label><mixed-citation>Chetana &amp; Sharmila, K. VLSI Implementation of Coordinate Rotation Based Design Methodology using Verilog HDL. doi:10.1109/ICAIS56108.2023.10073928 (2023).</mixed-citation></ref></ref-list></back></article>
