<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xml:lang="ru"><front><journal-meta><journal-id journal-id-type="publisher-id">madi</journal-id><journal-title-group><journal-title xml:lang="ru">Автомобиль. Дорога. Инфраструктура. = Avtomobil'. Doroga. Infrastruktura.</journal-title><trans-title-group xml:lang="en"><trans-title>Avtomobil'. Doroga. Infrastruktura.</trans-title></trans-title-group></journal-title-group><issn pub-type="epub">2409-7217</issn><publisher><publisher-name>МАДИ</publisher-name></publisher></journal-meta><article-meta><article-id custom-type="elpub" pub-id-type="custom">madi-1547</article-id><article-categories><subj-group subj-group-type="heading"><subject>Research Article</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="ru"><subject>2.5.11. Наземные транспортно-технологические средства и комплексы</subject></subj-group></article-categories><title-group><article-title>Актуальные вопросы совершенствования технического зрения при использовании на автомобилях</article-title><trans-title-group xml:lang="en"><trans-title>Current issues in improving technical vision for use in vehicles</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Тыняный</surname><given-names>Александр Георгиевич</given-names></name><name name-style="western" xml:lang="en"><surname>Tynyanyy</surname><given-names>Aleksandr G.</given-names></name></name-alternatives><bio xml:lang="ru"><p>аспирант кафедры «Автомобили»</p></bio><bio xml:lang="en"><p>postgraduate student of the Department of Automobiles</p></bio><email xlink:type="simple">in.a1ex.ph@gmail.com</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><name-alternatives><name 
name-style="eastern" xml:lang="ru"><surname>Кристальный</surname><given-names>Сергей Робертович</given-names></name><name name-style="western" xml:lang="en"><surname>Kristalniy</surname><given-names>Sergey R.</given-names></name></name-alternatives><bio xml:lang="ru"><p>канд. техн. наук, доцент кафедры «Автомобили»</p></bio><bio xml:lang="en"><p>Candidate of Sciences (Technical), associate professor of the Department of Automobiles</p></bio><email xlink:type="simple">sporauto@mail.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Красавин</surname><given-names>Павел Александрович</given-names></name><name name-style="western" xml:lang="en"><surname>Krasavin</surname><given-names>Pavel A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>канд. техн. наук, доцент кафедры «Автомобили»</p></bio><bio xml:lang="en"><p>Candidate of Sciences (Technical), associate professor of the Department of Automobiles</p></bio><email xlink:type="simple">krasavin.madi@yandex.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Топорков</surname><given-names>Максим Алексеевич</given-names></name><name name-style="western" xml:lang="en"><surname>Toporkov</surname><given-names>Maksim A.</given-names></name></name-alternatives><bio xml:lang="ru"><p>канд. техн. 
наук, доцент кафедры «Автомобили»</p></bio><bio xml:lang="en"><p>Candidate of Sciences (Technical), associate professor of the Department of Automobiles</p></bio><email xlink:type="simple">makstoporkov@rambler.ru</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Андреев</surname><given-names>Алексей Николаевич</given-names></name><name name-style="western" xml:lang="en"><surname>Andreev</surname><given-names>Aleksey N.</given-names></name></name-alternatives><bio xml:lang="ru"><p>старший преподаватель кафедры «Автомобили»</p></bio><bio xml:lang="en"><p>senior lecturer of the Department of Automobiles</p></bio><email xlink:type="simple">andr_aleksei@mail.ru</email><xref ref-type="aff" rid="aff-1"/></contrib></contrib-group><aff-alternatives id="aff-1"><aff xml:lang="ru">МАДИ<country>Россия</country></aff><aff xml:lang="en">MADI<country>Russian Federation</country></aff></aff-alternatives><pub-date pub-type="collection"><year>2026</year></pub-date><pub-date pub-type="epub"><day>31</day><month>03</month><year>2026</year></pub-date><volume>0</volume><issue>1(47)</issue><fpage>6</fpage><lpage>6</lpage><permissions><copyright-statement>Copyright &#x00A9; Тыняный А.Г., Кристальный С.Р., Красавин П.А., Топорков М.А., Андреев А.Н., 2026</copyright-statement><copyright-year>2026</copyright-year><copyright-holder xml:lang="ru">Тыняный А.Г., Кристальный С.Р., Красавин П.А., Топорков М.А., Андреев А.Н.</copyright-holder><copyright-holder xml:lang="en">Tynyanyy A.G., Kristalniy S.R., Krasavin P.A., Toporkov M.A., Andreev A.N.</copyright-holder><license license-type="creative-commons-attribution" xlink:href="https://creativecommons.org/licenses/by/4.0/" xlink:type="simple"><license-p>This work is licensed under a Creative Commons Attribution 4.0 License.</license-p></license></permissions><self-uri 
xlink:href="https://www.adi-madi.ru/madi/article/view/1547">https://www.adi-madi.ru/madi/article/view/1547</self-uri><abstract><p>Системы технического зрения активно используются на автомобильном транспорте. Развитие этого направления приводит к увеличению числа бортовых камер, установленных на транспортном средстве. При помощи различных алгоритмов анализа изображений, с применением нейронных сетей можно получать информацию об окружающем пространстве автомобиля и свойствах рядом расположенных объектов. Важной особенностью является позиционирование объектов в пространстве и определение расстояния до них. Такие функции используются для определения безопасной дистанции и с целью построения оптимальной траектории движения автомобиля в заданном пространстве. В данной статье рассматриваются различные способы анализа изображений с целью применения на автомобилях в системах помощи водителю или в беспилотных автомобилях.</p></abstract><trans-abstract xml:lang="en"><p>Technical vision systems are actively used in automotive transport. The development of this area leads to an increase in the number of onboard cameras installed on vehicles. Using various image analysis algorithms and neural networks, it is possible to obtain information about the vehicle's surroundings and the properties of nearby objects. An important feature is the positioning of objects in space and the determination of their distance. These functions are used to determine a safe distance and to create an optimal vehicle trajectory in a given space. 
This article discusses various methods of image analysis for use in car assistance systems or even in self-driving cars.</p></trans-abstract><kwd-group xml:lang="ru"><kwd>техническое зрение</kwd><kwd>обнаружение окружающего пространства</kwd><kwd>определение расстояния по изображению</kwd><kwd>нейронная сеть</kwd><kwd>системы помощи водителю</kwd></kwd-group><kwd-group xml:lang="en"><kwd>technical vision</kwd><kwd>environmental detection</kwd><kwd>image-based distance measurement</kwd><kwd>neural networks</kwd><kwd>driver assistance systems</kwd></kwd-group></article-meta></front><back><ref-list><title>References</title><ref id="cit1"><label>1</label><citation-alternatives><mixed-citation xml:lang="ru">Эволюция архитектур нейросетей в компьютерном зрении: сегментация изображений. – URL: https://habr.com/ru/companies/slsoft/articles/864994/?ysclid=mi7e2qugg9143782482 (дата обращения: 24.10.2025).</mixed-citation><mixed-citation xml:lang="en">Evolution of Neural Network Architectures in Computer Vision: Image Segmentation. Available at: https://habr.com/ru/companies/slsoft/articles/864994/?ysclid=mi7e2qugg9143782482.</mixed-citation></citation-alternatives></ref><ref id="cit2"><label>2</label><citation-alternatives><mixed-citation xml:lang="ru">Semantic Segmentation Using Deep Learning. – MATLAB &amp; Simulink. – URL: https://www.mathworks.com/help/vision/ug/semantic-segmentation-using-deep-learning.html#d119e211 (дата обращения: 20.12.2024).</mixed-citation><mixed-citation xml:lang="en">Semantic Segmentation Using Deep Learning - MATLAB &amp; Simulink. Available at:  https://www.mathworks.com/help/vision/ug/semantic-segmentation-using-deep-learning.html#d119e211.</mixed-citation></citation-alternatives></ref><ref id="cit3"><label>3</label><citation-alternatives><mixed-citation xml:lang="ru">Semantic Segmentation Using Deep Learning. – MATLAB &amp; Simulink. – URL: https://www.mathworks.com/help/vision/ug/semantic-segmentation-using-deep-learning.html#d119e211 (дата обращения: 20.12.2024).</mixed-citation><mixed-citation xml:lang="en">A. Karpathy. PyTorch at Tesla. Scientific and technical report and presentation, PyTorch DEVCON-19, 06.11.2019. 
Available at: https://www.youtube.com/watch?v=oBklltKXtDE&amp;ab_channel=PyTorch.</mixed-citation></citation-alternatives></ref><ref id="cit4"><label>4</label><citation-alternatives><mixed-citation xml:lang="ru">Karpathy, A. PyTorch at Tesla / A. Karpathy // Научно-технический доклад и презентация, PyTorch DEVCON-19, 6 ноября 2019 г. – URL: https://www.youtube.com/watch?v=oBklltKXtDE&amp;ab_channel=PyTorch (дата обращения: 15.07.2025).</mixed-citation><mixed-citation xml:lang="en">Goodfellow I., Bengio Y., Courville A. Deep Learning. Massachusetts: MIT Press. 2016.</mixed-citation></citation-alternatives></ref><ref id="cit5"><label>5</label><citation-alternatives><mixed-citation xml:lang="ru">Goodfellow, I. Deep Learning / I. Goodfellow, Y. Bengio, A. Courville. – Massachusetts: MIT Press, 2016. – 800 p. – ISBN 0262035618.</mixed-citation><mixed-citation xml:lang="en">Rumelhart, D., Hinton, G. &amp; Williams, R. Learning representations by back-propagating errors // Nature. 1986. pp. 533-536</mixed-citation></citation-alternatives></ref><ref id="cit6"><label>6</label><citation-alternatives><mixed-citation xml:lang="ru">Kingma, Diederik P. Adam: A Method for Stochastic Optimization / Diederik P. Kingma, J. Ba. – URL: https://arxiv.org/abs/1412.6980 (дата обращения: 15.07.2025).</mixed-citation><mixed-citation xml:lang="en">Diederik P. Kingma, Ba J. Adam: A Method for Stochastic Optimization. URL: https://arxiv.org/abs/1412.6980.</mixed-citation></citation-alternatives></ref><ref id="cit7"><label>7</label><citation-alternatives><mixed-citation xml:lang="ru">Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks / S. Ren, K. He, R. Girshick, J. Sun // IEEE transactions on pattern analysis and machine intelligence. – 2017. – Vol. 39, No. 6. – P. 1137-1149. – DOI 10.1109/TPAMI.2016.2577031.</mixed-citation><mixed-citation xml:lang="en">Ren S. Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks // IEEE Trans. 
Pattern Anal. Mach. Intell. 2017. Т. 39. № 6. С. 1137–1149.</mixed-citation></citation-alternatives></ref><ref id="cit8"><label>8</label><citation-alternatives><mixed-citation xml:lang="ru">Region Proposal by Guided Anchoring / J. Wang, K. Chen, Sh. Yang, Ch.Ch. Loy // Proceedings of the IEEE/CVF conference on computer vision and pattern recognition. – 2019. – P. 2965-2974. – DOI 10.1109/cvpr.2019.00308.</mixed-citation><mixed-citation xml:lang="en">Wang J. Region Proposal by Guided Anchoring // 2019 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR). 2019.</mixed-citation></citation-alternatives></ref><ref id="cit9"><label>9</label><citation-alternatives><mixed-citation xml:lang="ru">Region-Based Convolutional Networks for Accurate Object Detection and Segmentation / R. Girshick, J. Donahue, T. Darrell, J. Malik // IEEE transactions on pattern analysis and machine intelligence. – 2016. – Vol. 38, No. 1. – P. 142-158. – DOI 10.1109/TPAMI.2015.2437384.</mixed-citation><mixed-citation xml:lang="en">Girshick R. Region-Based Convolutional Networks for Accurate Object Detection and  Segmentation // IEEE Trans. Pattern Anal. Mach. Intell. 2016. Т. 38. № 1. С. 142–158.</mixed-citation></citation-alternatives></ref><ref id="cit10"><label>10</label><citation-alternatives><mixed-citation xml:lang="ru">Семантическая сегментация с высоты птичьего полета для автономного вождения. – URL: https://readmedium.com/monocular-birds-eye-view-semantic-segmentation-for-autonomous-driving-ee2f771afb59 (дата обращения: 05.10.2025).</mixed-citation><mixed-citation xml:lang="en">Monocular Bird’s-Eye-View Semantic Segmentation for Autonomous Driving. Available at: https://readmedium.com/monocular-birds-eye-view-semantic-segmentation-for-autonomous-driving-ee2f771afb59.</mixed-citation></citation-alternatives></ref><ref id="cit11"><label>11</label><citation-alternatives><mixed-citation xml:lang="ru">Обратное преобразование перспективы с помощью ручной точки схода. 
– URL: https://github.com/osvaldlaszlo/inverse-perspective-mapping?tab=readme-ov-file (дата обращения: 05.10.2025).</mixed-citation><mixed-citation xml:lang="en">Reverse perspective transformation using a manual vanishing point. Available at: https://github.com/osvaldlaszlo/inverse-perspective-mapping?tab=readme-ov-file.</mixed-citation></citation-alternatives></ref><ref id="cit12"><label>12</label><citation-alternatives><mixed-citation xml:lang="ru">DisNet: a novel method for distance estimation from monocular camera / MA. Haseeb, J. Guan, D. Ristic-Durrant, A. Gräser // IEEE/RSJ international conference on intelligent robots and systems – IROS 2018, 10th workshop on planning, perception and navigation for intelligent vehicles (PPNIV). – 2018.</mixed-citation><mixed-citation xml:lang="en">Haseeb MA, Guan J, Ristić-Durrant D, et al. DisNet: a novel method for distance estimation from monocular camera. // IEEE/RSJ international conference on intelligent robots and systems – IROS 2018, 10th workshop on planning, perception and navigation for intelligent vehicles (PPNIV). 2018</mixed-citation></citation-alternatives></ref><ref id="cit13"><label>13</label><citation-alternatives><mixed-citation xml:lang="ru">Nair, R.S. Robotic Path Planning Using Recurrent Neural Networks / R.S. Nair, P. Supriya // 2020 11th International Conference on Computing, Communication and Networking Technologies (ICCCNT). – IEEE, 2020. – P. 1-5. – DOI 10.1109/ICCCNT49239.2020.9225479.</mixed-citation><mixed-citation xml:lang="en">Nair R. S., Supriya P. Robotic Path Planning Using Recurrent Neural Networks // 2020 11th International Conference on Computing, Communication and Networking Technologies (ICCCNT). 2020.</mixed-citation></citation-alternatives></ref><ref id="cit14"><label>14</label><citation-alternatives><mixed-citation xml:lang="ru">Multimodal End-to-End Autonomous Driving / Yi. Xiao, F. Codevilla, A. Gurram [et al.] // IEEE Transactions on Intelligent Transportation Systems. – 2022. 
– Vol. 23, No. 1. – P. 537-547. – DOI 10.1109/tits.2020.3013234. – EDN AVEFER.</mixed-citation><mixed-citation xml:lang="en">Xiao Y. и др. Multimodal End-to-End Autonomous Driving // IEEE Trans. Intell. Transport. Syst. 2022. Т. 23. № 1. С. 537–547.</mixed-citation></citation-alternatives></ref><ref id="cit15"><label>15</label><citation-alternatives><mixed-citation xml:lang="ru">Behavior-based neuro-fuzzy controller for mobile robot navigation / P. Rusu, E.M. Petriu, T.E. Whalen, A. Cornell, H.J.W. Spoelder // IEEE Transactions on Instrumentation and Measurement. – 2003. – Vol. 52, No. 4. – P. 1335-1340. – DOI 10.1109/TIM.2003.816846.</mixed-citation><mixed-citation xml:lang="en">Rusu P. и др. Behavior-based neuro-fuzzy controller for mobile robot navigation // IEEE Trans. Instrum. Meas. 2003. Т. 52. № 4. С. 1335–1340.</mixed-citation></citation-alternatives></ref><ref id="cit16"><label>16</label><citation-alternatives><mixed-citation xml:lang="ru">End-to-End Deep Neural Network Design for Short-term Path Planning / M. Dao, D. Lanza, V. Fremont. – URL: https://hal.archives-ouvertes.fr/hal-02266802 (дата обращения: 15.07.2025).</mixed-citation><mixed-citation xml:lang="en">Dao M., Lanza D., Fremont V., End-to-End Deep Neural Network Design for Short-term Path Planning. Available at: https://hal.archives-ouvertes.fr/hal-02266802.</mixed-citation></citation-alternatives></ref></ref-list><fn-group><fn fn-type="conflict"><p>The authors declare that there are no conflicts of interest present.</p></fn></fn-group></back></article>
