Skip to content

Commit

Permalink
parse and filter all objects from nuscenes
Browse files Browse the repository at this point in the history
  • Loading branch information
EllingtonKirby committed Jun 18, 2024
1 parent 4cf4240 commit 6e13198
Show file tree
Hide file tree
Showing 7 changed files with 76 additions and 19 deletions.
4 changes: 2 additions & 2 deletions lidiff/config/object_generation/config_combined_gen.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
experiment:
id: combined_gen_1
id: all_objects_gen_1

##Data
data:
data_dir: '/home/ekirby/scania/ekirby/datasets/combined_from_nuscenes/combined_from_nuscenes_train_val.json'
data_dir: '/home/ekirby/scania/ekirby/datasets/all_objects_from_nuscenes/all_objects_from_nuscenes_train_val.json'
resolution: 0.05
dataloader: 'nuscenes'
coordinates: 'cylindrical'
Expand Down
4 changes: 4 additions & 0 deletions lidiff/datasets/dataloader/NuscenesObjects.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@
import numpy as np
from nuscenes.utils.geometry_utils import points_in_box
from nuscenes.utils.data_classes import Box, Quaternion
from lidiff.utils import data_map
from lidiff.utils.three_d_helpers import extract_yaw_angle, cartesian_to_spherical
import open3d as o3d
from nuscenes.utils.data_io import load_bin_file

class NuscenesObjectsSet(Dataset):
def __init__(self, data_dir, split, points_per_object=None, volume_expansion=1., recenter=True):
Expand All @@ -29,6 +31,7 @@ def __getitem__(self, index):

class_name = object_json['class']
points = np.fromfile(object_json['lidar_data_filepath'], dtype=np.float32).reshape((-1, 5)) #(x, y, z, intensity, ring index)
labels = np.fromfile(object_json['lidarseg_label_filepath'], dtype=np.uint8).reshape((-1, 1))
center = np.array(object_json['center'])
size = np.array(object_json['size'])
rotation_real = np.array(object_json['rotation_real'])
Expand All @@ -39,6 +42,7 @@ def __getitem__(self, index):

points_from_object = points_in_box(box, points=points[:,:3].T, wlh_factor=self.volume_expansion)
object_points = torch.from_numpy(points[points_from_object])[:,:3]
object_points = object_points[(labels[points_from_object] == data_map.class_mapping[class_name]).flatten()]

if self.points_per_object > 0:
pcd_object = o3d.geometry.PointCloud()
Expand Down
15 changes: 14 additions & 1 deletion lidiff/datasets/dataloader/ShapeNetObjects.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,21 @@ def __getitem__(self, index):
size[i] = np.max(object_points[i]) - np.min(object_points[i])
center = np.zeros(3)
orientation = np.zeros(1)
num_points = object_points.shape[0]
ring_indexes = np.zeros_like(object_points)
class_name = 'vehicle.motorcycle'

if self.points_per_object > 0:
pcd_object = o3d.geometry.PointCloud()
pcd_object.points = o3d.utility.Vector3dVector(object_points)

if object_points.shape[0] > self.points_per_object:
pcd_object = pcd_object.farthest_point_down_sample(self.points_per_object)

object_points = torch.tensor(np.array(pcd_object.points))
concat_part = int(np.ceil(self.points_per_object / object_points.shape[0]) )
object_points = object_points.repeat(concat_part, 1)
object_points = object_points[torch.randperm(object_points.shape[0])][:self.points_per_object]

num_points = object_points.shape[0]

return [object_points, center, torch.from_numpy(size), orientation, num_points, ring_indexes, class_name]
2 changes: 1 addition & 1 deletion lidiff/datasets/datasets_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def __call__(self, data):
orientation = torch.Tensor([[quaternion.yaw_pitch_roll[0]] for quaternion in batch[3]]).float()

class_mapping = torch.tensor([data_map.class_mapping[class_name] for class_name in batch[6]]).reshape(-1, 1)
class_mapping = torch.nn.functional.one_hot(class_mapping, num_classes=3)
class_mapping = torch.nn.functional.one_hot(class_mapping, num_classes=32)

return {'pcd_object': pcd_object,
'center': center,
Expand Down
2 changes: 1 addition & 1 deletion lidiff/models/minkunet_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def __init__(self, **kwargs):
ME.MinkowskiBatchNorm(cs[0]),
ME.MinkowskiReLU(inplace=True)
)
self.class_conditioning = 3 if kwargs.get('class_conditioning', True) else 0
self.class_conditioning = 32 if kwargs.get('class_conditioning', True) else 0
num_conditions = 8
self.num_cyclic_conditions = 2
self.embeddings_type = kwargs.get('embeddings_type','positional')
Expand Down
36 changes: 32 additions & 4 deletions lidiff/utils/data_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,36 @@
}

# Maps every nuScenes annotation class name to its lidarseg label index
# (0-31). Used both to filter object point clouds by semantic label and to
# build the one-hot class-conditioning vector (num_classes=32 downstream).
# NOTE: the diff scrape had left the four removed legacy entries
# ('car'/'vehicle.car'/'vehicle.bicycle'/'vehicle.motorcycle' with old ids)
# inside this dict, creating duplicate keys; only the 32-class mapping is kept.
class_mapping = {
    'noise': 0,
    'animal': 1,
    'human.pedestrian.adult': 2,
    'human.pedestrian.child': 3,
    'human.pedestrian.construction_worker': 4,
    'human.pedestrian.personal_mobility': 5,
    'human.pedestrian.police_officer': 6,
    'human.pedestrian.stroller': 7,
    'human.pedestrian.wheelchair': 8,
    'movable_object.barrier': 9,
    'movable_object.debris': 10,
    'movable_object.pushable_pullable': 11,
    'movable_object.trafficcone': 12,
    'static_object.bicycle_rack': 13,
    'vehicle.bicycle': 14,
    'vehicle.bus.bendy': 15,
    'vehicle.bus.rigid': 16,
    'vehicle.car': 17,
    'vehicle.construction': 18,
    'vehicle.emergency.ambulance': 19,
    'vehicle.emergency.police': 20,
    'vehicle.motorcycle': 21,
    'vehicle.trailer': 22,
    'vehicle.truck': 23,
    'flat.driveable_surface': 24,
    'flat.other': 25,
    'flat.sidewalk': 26,
    'flat.terrain': 27,
    'static.manmade': 28,
    'static.other': 29,
    'static.vegetation': 30,
    'vehicle.ego': 31,
}
32 changes: 22 additions & 10 deletions parse_objects_from_nuscenes.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@
from nuscenes.nuscenes import NuScenes
import nuscenes.utils.splits as splits
from tqdm import tqdm
import lidiff.utils.data_map as data_map
import sys


def parse_objects_from_nuscenes(points_threshold, object_name, object_tag):
def parse_objects_from_nuscenes(points_threshold, object_name, object_tag, range_to_use):
# Path to the dataset
dataroot = '/datasets_local/nuscenes'

Expand All @@ -24,7 +26,8 @@ def parse_objects_from_nuscenes(points_threshold, object_name, object_tag):
train_split = set(splits.train)

object_lidar_data = {'train':[], 'val':[]}
for sample in tqdm(nusc.sample):
for i in tqdm(range_to_use):
sample = nusc.sample[i]
scene_token = sample['scene_token']
sample_token = sample['token']
sample_data_lidar_token = sample['data']['LIDAR_TOP']
Expand All @@ -34,20 +37,27 @@ def parse_objects_from_nuscenes(points_threshold, object_name, object_tag):
objects = nusc.get_sample_data(sample_data_lidar_token)[1]
lidar_data = nusc.get('sample_data', sample_data_lidar_token)
lidar_filepath = os.path.join(dataroot, lidar_data['filename'])

lidarseg_label_filename = os.path.join(nusc.dataroot, nusc.get('lidarseg', sample_data_lidar_token)['filename'])
points = np.fromfile(lidar_filepath, dtype=np.float32).reshape((-1, 5)) #(x, y, z, intensity, ring index)
labels = np.fromfile(lidarseg_label_filename, dtype=np.uint8).reshape((-1, 1))
for object in objects:
if object_tag in object.name:
annotation = nusc.get('sample_annotation', object.token)
num_lidar_points = annotation['num_lidar_pts']
points_from_object = points_in_box(object, points=points[:,:3].T)
object_points = points[points_from_object][:,:3]
object_points = object_points[(labels[points_from_object] == data_map.class_mapping[object.name]).flatten()]

num_lidar_points = len(object_points)
if num_lidar_points < points_threshold:
continue

object_info = {
'instance_token': object.token,
'sample_token': sample_token,
'scene_token': scene_token,
'sample_data_lidar_token': sample_data_lidar_token,
'lidar_data_filepath': lidar_filepath,
'class': object_tag,
'lidarseg_label_filepath': lidarseg_label_filename,
'class': object.name,
'center': object.center.tolist(),
'size': object.wlh.tolist(),
'rotation_real': object.orientation.real.tolist(),
Expand Down Expand Up @@ -77,7 +87,9 @@ def parse_largest_x_from_dataset(output_dir, object_name, object_lidar_data, top
json.dump(reduced_train_val_objects, fp)

if __name__ == '__main__':
    # NOTE: the diff scrape had left the removed pre-change lines (the old
    # pedestrian-only config and a stale 3-argument call that no longer
    # matches the 4-parameter signature) interleaved here; only the new,
    # working entry point is kept.
    # Which shard of the nuScenes sample list to process (0-3), so the
    # parse can be split across four parallel jobs.
    range_index = int(sys.argv[1])
    points_threshold = 20  # skip objects with fewer filtered lidar points
    object_name = f'all_objects_filtered_range_{range_index}'
    object_tag = ''  # empty tag matches every annotation class name
    # Four roughly equal shards over the sample indices [0, 34149).
    ranges = [range(0, 8537), range(8537, 17074), range(17074, 25612), range(25612, 34149)]
    object_lidar_data, output_dir = parse_objects_from_nuscenes(
        points_threshold, object_name, object_tag, ranges[range_index]
    )

0 comments on commit 6e13198

Please sign in to comment.