Understanding driving situations regardless the conditions of the traffic scene is a cornerstone on the path towards autonomous vehicles; however, despite common sensor setups already include complementary devices such as LiDAR or radar, most of the research on perception systems has traditionally focused on computer vision. We present a LiDAR-based 3D object detection pipeline entailing three stages. First, laser information is projected into a novel cell encoding for bird's eye view projection. Later, both object location on the plane and its heading are estimated through a convolutional neural network originally designed for image processing. Finally, 3D oriented detections are computed in a post-processing phase. Experiments on KITTI dataset show that the proposed framework achieves state-of-the-art results among comparable methods. Further tests with different LiDAR sensors in real scenarios assess the multi-device capabilities of the approach.
Beschreibung
BirdNet: A 3D Object Detection Framework from LiDAR Information - IEEE Conference Publication
%0 Conference Paper
%1 8569311
%A Beltrán, J.
%A Guindel, C.
%A Moreno, F. M.
%A Cruzado, D.
%A García, F.
%A De La Escalera, A.
%B 2018 21st International Conference on Intelligent Transportation Systems (ITSC)
%D 2018
%K dnn lidar order1 pedestrian
%P 3517-3523
%R 10.1109/ITSC.2018.8569311
%T BirdNet: A 3D Object Detection Framework from LiDAR Information
%U https://ieeexplore.ieee.org/document/8569311
%X Understanding driving situations regardless the conditions of the traffic scene is a cornerstone on the path towards autonomous vehicles; however, despite common sensor setups already include complementary devices such as LiDAR or radar, most of the research on perception systems has traditionally focused on computer vision. We present a LiDAR-based 3D object detection pipeline entailing three stages. First, laser information is projected into a novel cell encoding for bird's eye view projection. Later, both object location on the plane and its heading are estimated through a convolutional neural network originally designed for image processing. Finally, 3D oriented detections are computed in a post-processing phase. Experiments on KITTI dataset show that the proposed framework achieves state-of-the-art results among comparable methods. Further tests with different LiDAR sensors in real scenarios assess the multi-device capabilities of the approach.
@inproceedings{8569311,
  abstract    = {Understanding driving situations regardless the conditions of the traffic scene is a cornerstone on the path towards autonomous vehicles; however, despite common sensor setups already include complementary devices such as LiDAR or radar, most of the research on perception systems has traditionally focused on computer vision. We present a LiDAR-based 3D object detection pipeline entailing three stages. First, laser information is projected into a novel cell encoding for bird's eye view projection. Later, both object location on the plane and its heading are estimated through a convolutional neural network originally designed for image processing. Finally, 3D oriented detections are computed in a post-processing phase. Experiments on KITTI dataset show that the proposed framework achieves state-of-the-art results among comparable methods. Further tests with different LiDAR sensors in real scenarios assess the multi-device capabilities of the approach.},
  added-at    = {2020-05-27T23:32:33.000+0200},
  author      = {Beltrán, J. and Guindel, C. and Moreno, F. M. and Cruzado, D. and García, F. and De La Escalera, A.},
  biburl      = {https://www.bibsonomy.org/bibtex/273207ac2bb375f438bce8569b5a3b32d/sohnki},
  booktitle   = {2018 21st International Conference on Intelligent Transportation Systems ({ITSC})},
  description = {BirdNet: A 3D Object Detection Framework from LiDAR Information - IEEE Conference Publication},
  doi         = {10.1109/ITSC.2018.8569311},
  interhash   = {037961dd783c6d1c30b0f5a83de26a5f},
  intrahash   = {73207ac2bb375f438bce8569b5a3b32d},
  issn        = {2153-0017},
  keywords    = {dnn lidar order1 pedestrian},
  month       = nov,
  pages       = {3517--3523},
  timestamp   = {2020-06-02T20:02:18.000+0200},
  title       = {{BirdNet}: A {3D} Object Detection Framework from {LiDAR} Information},
  url         = {https://ieeexplore.ieee.org/document/8569311},
  year        = {2018},
}