Publication:
Visual Object Detection System for Autonomous Vehicles in Smart Factories

dc.contributor.author: GENGEC, Nazlican
dc.contributor.author: CEVIKALP, Hakan
dc.contributor.author: Yavuz, Hasan Serhan
dc.contributor.author: Yazic, Ahmet
dc.date.accessioned: 2026-01-04T13:29:07Z
dc.date.issued: 2019-10-01
dc.description.abstract: Autonomous transport vehicles are very important for smart factories. Computer vision studies for autonomous vehicles in industrial environments are considerably fewer than those for outdoor applications. Recognition of safety signs plays an important role in the safe movement of vehicles and the safety of humans in factories. In this study, we built a test environment for smart factories and collected a visual dataset including some important safety signs for the safe and comfortable movement of vehicles in smart factories. We then developed a visual object detection system using the YOLOv3 deep learning model and tested it with autonomous robots. In our tests, a mean average precision (mAP) of 76.14% was obtained on the dataset we collected.
dc.description.uri: https://doi.org/10.1109/asyu48272.2019.8946370
dc.description.uri: https://dx.doi.org/10.1109/asyu48272.2019.8946370
dc.identifier.doi: 10.1109/asyu48272.2019.8946370
dc.identifier.endpage: 5
dc.identifier.openaire: doi_dedup___::f2b04d7340c231fcf07aecb1ad8e9af0
dc.identifier.scopus: 2-s2.0-85078507634
dc.identifier.startpage: 1
dc.identifier.uri: https://hdl.handle.net/20.500.12597/37524
dc.identifier.wos: 000631252400010
dc.publisher: IEEE
dc.relation.ispartof: 2019 Innovations in Intelligent Systems and Applications Conference (ASYU)
dc.rights: CLOSED
dc.subject.sdg: 11. Sustainability
dc.title: Visual Object Detection System for Autonomous Vehicles in Smart Factories
dc.type: Article
dspace.entity.type: Publication
local.api.response{"authors":[{"fullName":"Nazlican GENGEC","name":"Nazlican","surname":"GENGEC","rank":1,"pid":null},{"fullName":"Hakan CEVIKALP","name":"Hakan","surname":"CEVIKALP","rank":2,"pid":null},{"fullName":"Hasan Serhan Yavuz","name":"Hasan Serhan","surname":"Yavuz","rank":3,"pid":null},{"fullName":"Ahmet Yazic","name":"Ahmet","surname":"Yazic","rank":4,"pid":null}],"openAccessColor":null,"publiclyFunded":false,"type":"publication","language":{"code":"und","label":"Undetermined"},"countries":null,"subjects":[{"subject":{"scheme":"SDG","value":"11. Sustainability"},"provenance":null},{"subject":{"scheme":"FOS","value":"0202 electrical engineering, electronic engineering, information engineering"},"provenance":null},{"subject":{"scheme":"FOS","value":"02 engineering and technology"},"provenance":null}],"mainTitle":"Visual Object Detection System for Autonomous Vehicles in Smart Factories","subTitle":null,"descriptions":["Autonomous transport vehicles are very important for smart factories. Computer vision studies for autonomous vehicles in industrial environments are considerably less than that of outdoor applications. Recognition of safety signs has an important place in safe movement of vehicles and safety of humans in factories. In this study, we built a test environment for smart factories and collected a visual data set including some important safety signs for the safe and comfortable movement of the vehicles in smart factories. Then, we developed a visual object detection system using YOLOv3 deep learning model and tested it by using autonomous robots. In our tests, an accuracy of 76.14% mAP (mean average precision) score was obtained in the dataset we collected."],"publicationDate":"2019-10-01","publisher":"IEEE","embargoEndDate":null,"sources":["Crossref"],"formats":null,"contributors":null,"coverages":null,"bestAccessRight":{"code":"c_14cb","label":"CLOSED","scheme":"http://vocabularies.coar-repositories.org/documentation/access_rights/"},"container":{"name":"2019 Innovations in Intelligent Systems and Applications Conference (ASYU)","issnPrinted":null,"issnOnline":null,"issnLinking":null,"ep":"5","iss":null,"sp":"1","vol":null,"edition":null,"conferencePlace":null,"conferenceDate":null},"documentationUrls":null,"codeRepositoryUrl":null,"programmingLanguage":null,"contactPeople":null,"contactGroups":null,"tools":null,"size":null,"version":null,"geoLocations":null,"id":"doi_dedup___::f2b04d7340c231fcf07aecb1ad8e9af0","originalIds":["10.1109/asyu48272.2019.8946370","50|doiboost____|f2b04d7340c231fcf07aecb1ad8e9af0","2996904073"],"pids":[{"scheme":"doi","value":"10.1109/asyu48272.2019.8946370"}],"dateOfCollection":null,"lastUpdateTimeStamp":null,"indicators":{"citationImpact":{"citationCount":0,"influence":2.5349236e-9,"popularity":1.2286145e-9,"impulse":0,"citationClass":"C5","influenceClass":"C5","impulseClass":"C5","popularityClass":"C5"}},"instances":[{"pids":[{"scheme":"doi","value":"10.1109/asyu48272.2019.8946370"}],"license":"IEEE Copyright","type":"Article","urls":["https://doi.org/10.1109/asyu48272.2019.8946370"],"publicationDate":"2019-10-01","refereed":"peerReviewed"},{"alternateIdentifiers":[{"scheme":"doi","value":"10.1109/asyu48272.2019.8946370"},{"scheme":"mag_id","value":"2996904073"}],"type":"Other literature type","urls":["https://dx.doi.org/10.1109/asyu48272.2019.8946370"],"refereed":"nonPeerReviewed"}],"isGreen":false,"isInDiamondJournal":false}
local.import.source: OpenAire
local.indexed.at: WOS
local.indexed.at: Scopus
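
The abstract above describes a YOLOv3-based detector for safety signs, evaluated by mean average precision. As a hedged illustration only (this is not the authors' released code; the config, weights, class-name, and image file names below are hypothetical placeholders), a minimal YOLOv3 inference pass with OpenCV's DNN module could look like the following sketch:

# Minimal sketch of YOLOv3 inference using OpenCV's DNN module.
# NOTE: the model and data file names are hypothetical placeholders,
# not artifacts released with this publication.
import cv2
import numpy as np

CONF_THRESH = 0.5   # minimum class confidence to keep a detection
NMS_THRESH = 0.4    # IoU threshold for non-maximum suppression

# Load a Darknet-format YOLOv3 network (assumed fine-tuned on safety signs).
net = cv2.dnn.readNetFromDarknet("yolov3-safety-signs.cfg",
                                 "yolov3-safety-signs.weights")
with open("safety-signs.names") as f:
    class_names = [line.strip() for line in f]

layer_names = net.getLayerNames()
out_layers = [layer_names[i - 1] for i in net.getUnconnectedOutLayers().flatten()]

img = cv2.imread("factory_scene.jpg")
h, w = img.shape[:2]

# YOLOv3 expects a 416x416 RGB blob with pixel values scaled to [0, 1].
blob = cv2.dnn.blobFromImage(img, 1 / 255.0, (416, 416), swapRB=True, crop=False)
net.setInput(blob)
outputs = net.forward(out_layers)

boxes, confidences, class_ids = [], [], []
for output in outputs:
    for det in output:
        scores = det[5:]
        class_id = int(np.argmax(scores))
        conf = float(scores[class_id])
        if conf > CONF_THRESH:
            # Detections are (cx, cy, bw, bh), normalized to the image size.
            cx, cy, bw, bh = det[0] * w, det[1] * h, det[2] * w, det[3] * h
            boxes.append([int(cx - bw / 2), int(cy - bh / 2), int(bw), int(bh)])
            confidences.append(conf)
            class_ids.append(class_id)

# Non-maximum suppression drops overlapping duplicate boxes.
keep = cv2.dnn.NMSBoxes(boxes, confidences, CONF_THRESH, NMS_THRESH)
for i in np.array(keep).flatten():
    x, y, bw, bh = boxes[i]
    print(f"{class_names[class_ids[i]]}: {confidences[i]:.2f} at ({x}, {y}, {bw}, {bh})")

A score such as the reported 76.14% mAP would then be computed offline by comparing detections like these against ground-truth boxes over the whole collected dataset.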

Files

Collections