28 commits
e15fbb3
clean up property in video_contxt
YongmingFrankGe Sep 14, 2021
e90c458
Merge branch 'main' of github.com:apperception-db/apperception into main
YongmingFrankGe Oct 3, 2021
4201843
set up initial test framework for scenic store
YongmingFrankGe Jan 1, 2022
159778c
Use mongoDB for quick query in nuScenes dataset
eeeeenchanted Jan 24, 2022
1b7ea50
Add box class
eeeeenchanted Feb 4, 2022
b4b7dd7
Update scenic_recognize to return two tables as discussed
eeeeenchanted Feb 8, 2022
9f8bef3
scenic_recognize pass in a list of filenames & add frame num
eeeeenchanted Feb 12, 2022
c92d3a1
Save object and camera to database
eeeeenchanted Feb 16, 2022
6aef5da
fix front end metadata view for scenic tables, query for trajectories…
YongmingFrankGe Feb 16, 2022
175c45c
write skeleton for the logic of overlay, most importantly the helper …
YongmingFrankGe Feb 16, 2022
ebfd354
Remove frameId from trajectory table
eeeeenchanted Feb 17, 2022
2b310b8
transform 3d to 2d
eeeeenchanted Feb 23, 2022
760264f
export direct export and import tables from postgres
YongmingFrankGe Feb 23, 2022
d16fd9c
delete contaminated file
YongmingFrankGe Feb 23, 2022
f40f1f6
merged with latest branch
YongmingFrankGe Feb 23, 2022
70e839a
fix a wrong tag and delete mongoclient
jimlinntu Feb 25, 2022
6ea8cab
XinyiJi1: Doverlay, with bug of sql
chanwutk Feb 27, 2022
cfb476a
fix bug
chanwutk Mar 1, 2022
1159a85
Merge pull request #18 from apperception-db/scenic_dev_debug
chanwutk Mar 1, 2022
9174531
remove scenic_* prefix; prepare for combining the original and scenic…
chanwutk Mar 2, 2022
695f034
replace world with scenic_world
chanwutk Mar 2, 2022
28b826d
clean up util
chanwutk Mar 2, 2022
29901ea
clean up video_util.py
chanwutk Mar 2, 2022
0cddf09
Merge pull request #21 from apperception-db/scenic_dev_combine_world
chanwutk Mar 2, 2022
b5d0124
clean up
chanwutk Mar 2, 2022
5749052
add support for directly import all mini dataset, have uploaded the d…
YongmingFrankGe Mar 4, 2022
71de67b
delete jupyter for mere
YongmingFrankGe Mar 4, 2022
c709730
finish merge with latest scenic dev
YongmingFrankGe Mar 4, 2022
1 change: 1 addition & 0 deletions .gitignore
@@ -1,2 +1,3 @@
__MACOSX
__pycache__
.ipynb_checkpoints
104 changes: 104 additions & 0 deletions apperception/box.py
@@ -0,0 +1,104 @@
import numpy as np
from pyquaternion import Quaternion

class Box:
    """ Simple data class representing a 3d box with a center, size and orientation. """

    def __init__(self,
                 center,
                 size,
                 orientation: Quaternion):
        """
        :param center: Center of box given as x, y, z.
        :param size: Size of box in width, length, height.
        :param orientation: Box orientation.
        """
        assert not np.any(np.isnan(center))
        assert not np.any(np.isnan(size))
        assert len(center) == 3
        assert len(size) == 3
        assert type(orientation) == Quaternion

        self.center = np.array(center)
        self.wlh = np.array(size)
        self.orientation = orientation

    @property
    def rotation_matrix(self) -> np.ndarray:
        """
        Return a rotation matrix.
        :return: <np.float: 3, 3>. The box's rotation matrix.
        """
        return self.orientation.rotation_matrix

    def translate(self, x: np.ndarray) -> None:
        """
        Applies a translation.
        :param x: <np.float: 3, 1>. Translation in x, y, z direction.
        """
        self.center += x

    def rotate(self, quaternion: Quaternion) -> None:
        """
        Rotates box.
        :param quaternion: Rotation to apply.
        """
        self.center = np.dot(quaternion.rotation_matrix, self.center)
        self.orientation = quaternion * self.orientation

    def corners(self, wlh_factor: float = 1.0) -> np.ndarray:
        """
        Returns the bounding box corners.
        :param wlh_factor: Multiply w, l, h by a factor to scale the box.
        :return: <np.float: 3, 8>. First four corners are the ones facing forward.
            The last four are the ones facing backwards.
        """
        w, l, h = self.wlh * wlh_factor

        # 3D bounding box corners. (Convention: x points forward, y to the left, z up.)
        x_corners = l / 2 * np.array([1, 1, 1, 1, -1, -1, -1, -1])
        y_corners = w / 2 * np.array([1, -1, -1, 1, 1, -1, -1, 1])
        z_corners = h / 2 * np.array([1, 1, -1, -1, 1, 1, -1, -1])
        corners = np.vstack((x_corners, y_corners, z_corners))

        # Rotate
        corners = np.dot(self.orientation.rotation_matrix, corners)

        # Translate
        x, y, z = self.center
        corners[0, :] = corners[0, :] + x
        corners[1, :] = corners[1, :] + y
        corners[2, :] = corners[2, :] + z

        return corners

    def bottom_corners(self) -> np.ndarray:
        """
        Returns the four bottom corners.
        :return: <np.float: 3, 4>. Bottom corners. First two face forward, last two face backwards.
        """
        return self.corners()[:, [2, 3, 7, 6]]

    def view_points(self, points: np.ndarray, view: np.ndarray, normalize=True) -> np.ndarray:
        viewpad = np.eye(4)
        viewpad[:view.shape[0], :view.shape[1]] = view

        nbr_points = points.shape[1]

        # Do operation in homogenous coordinates.
        points = np.concatenate((points, np.ones((1, nbr_points))))
        points = np.dot(viewpad, points)
        points = points[:3, :]

        if normalize:
            points = points / points[2:3, :].repeat(3, 0).reshape(3, nbr_points)

        return points

    def map_2d(self,
               view: np.ndarray = np.eye(3),
               normalize: bool = True) -> np.ndarray:

        corners = self.view_points(self.corners(), view, normalize)[:2, :]

        return corners
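Note: a minimal usage sketch of the new Box class (not part of the diff). The box values and the 3x3 camera intrinsic matrix below are hypothetical placeholders; only the constructor, corners(), bottom_corners() and map_2d() calls come from the code above, and the sketch assumes apperception.box is importable.

import numpy as np
from pyquaternion import Quaternion
from apperception.box import Box

# Hypothetical camera intrinsics, for illustration only.
camera_intrinsic = np.array([[1266.4, 0.0, 816.3],
                             [0.0, 1266.4, 491.5],
                             [0.0, 0.0, 1.0]])

# Hypothetical box: center x, y, z; size width, length, height; yaw of 0.5 rad.
box = Box(center=[10.0, 2.0, 1.0],
          size=[1.8, 4.5, 1.6],
          orientation=Quaternion(axis=[0, 0, 1], angle=0.5))

corners_3d = box.corners()                       # <3, 8> corner coordinates
footprint = box.bottom_corners()                 # <3, 4> bottom face
corners_2d = box.map_2d(view=camera_intrinsic)   # <2, 8> projected 2d coordinates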
4 changes: 2 additions & 2 deletions apperception/metadata.py
@@ -21,15 +21,15 @@ class TrajectoryView(View):
    object_type = "objectType"
    color = "color"
    trajectory = "trajCentroids"
-    table_name = "Item_General_Trajectory"
+    table_name = "Test_Scenic_Item_General_Trajectory"
    def __init__(self):
        super().__init__(self.table_name)
        self.default = True

class LocationView(View):
    location = "trajBbox"
    timestamp = "timestamp"
-    table_name = "General_Bbox"
+    table_name = "Test_Scenic_General_Bbox"
    def __init__(self):
        super().__init__(self.table_name)
        self.default = True
5 changes: 3 additions & 2 deletions apperception/metadata_context.py
@@ -51,7 +51,8 @@ def aggregate(self, func_name:str, parameters:list=[], special_args=[]):
        return self

    def get_coordinates(self):
-        self.aggregate("asMFJSON", special_args=["coordinates"])
+        # self.aggregate("asMFJSON", special_args=["coordinates"])
+        self.aggregate("asMFJSON")

    def interval(self, starttime, endtime):
        self.aggregate("atPeriodSet", parameters=["\'{[%s, %s)}\'"%(starttime, endtime)])
@@ -64,7 +65,7 @@ def __init__(self, func_name:str, parameters:list=[]):

class asMFJSON(Aggregate):

-    def __init__(self, func_name="asMFJSON", parameters:list=[], interesting_fields = [""]):
+    def __init__(self, func_name="asMFJSON", parameters:list=[], interesting_fields = []):
        super().__init__(func_name, parameters)
        self.interesting_fields = interesting_fields
    # def function_map(self):
3 changes: 2 additions & 1 deletion apperception/metadata_context_executor.py
@@ -117,5 +117,6 @@ def translate_aggregation(aggr_node, aggregated):
    if len(aggr_node.interesting_fields) > 0:
        interesting_field = aggr_node.interesting_fields[0]
        aggregated = aggregated + "::json->" + "\'"+interesting_field+"\'"

    else:
        aggregated = aggregated + "::json"
    return aggregated
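Note: to make the new else branch concrete, a small standalone sketch of the SQL fragment it builds. This mirrors the logic of translate_aggregation above rather than importing the executor (only the function body is shown in the hunk), and the "asMFJSON(trajCentroids)" input string is a hypothetical example of the aggregation text built so far.

def _translate_aggregation_sketch(interesting_fields, aggregated):
    # Mirrors the branch above: index into the JSON output when an
    # interesting field is given, otherwise cast the whole value to json.
    if len(interesting_fields) > 0:
        return aggregated + "::json->" + "'" + interesting_fields[0] + "'"
    return aggregated + "::json"

print(_translate_aggregation_sketch(["coordinates"], "asMFJSON(trajCentroids)"))
# asMFJSON(trajCentroids)::json->'coordinates'
print(_translate_aggregation_sketch([], "asMFJSON(trajCentroids)"))
# asMFJSON(trajCentroids)::json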
87 changes: 87 additions & 0 deletions apperception/scenic_generate_df.py
@@ -0,0 +1,87 @@
import json
import pandas as pd

def scenic_generate_df():
    with open('v1.0-mini/v1.0-mini/attribute.json') as f:
        attribute_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/calibrated_sensor.json') as f:
        calibrated_sensor_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/category.json') as f:
        category_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/sample.json') as f:
        sample_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/sample_data.json') as f:
        sample_data_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/sample_annotation.json') as f:
        sample_annotation_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/instance.json') as f:
        instance_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/scene.json') as f:
        scene_json = json.load(f)

    with open('v1.0-mini/v1.0-mini/ego_pose.json') as f:
        ego_pose_json = json.load(f)

    df_sample_data = pd.DataFrame(sample_data_json)
    df_sample_data = df_sample_data[['token', 'sample_token', 'calibrated_sensor_token', 'ego_pose_token', 'timestamp', 'fileformat', 'filename', 'prev', 'next']]
    df_sample_data = df_sample_data[df_sample_data['fileformat'] == 'jpg']
    df_sample_data.index = range(len(df_sample_data))

    df_sample = pd.DataFrame(sample_json)
    df_sample.columns = ['sample_token', 'timestamp', 'prev_sample', 'next_sample', 'scene_token']
    df_sample = df_sample[['sample_token', 'prev_sample', 'next_sample', 'scene_token']]
    df_sample_data = pd.merge(df_sample_data, df_sample, on="sample_token", how="left")

    df_calibrated_sensor = pd.DataFrame(calibrated_sensor_json)
    df_calibrated_sensor.columns = ['calibrated_sensor_token', 'sensor_token', 'camera_translation', 'camera_rotation', 'camera_intrinsic']
    df_calibrated_sensor = df_calibrated_sensor[['calibrated_sensor_token', 'camera_translation', 'camera_rotation', 'camera_intrinsic']]
    df_sample_data = pd.merge(df_sample_data, df_calibrated_sensor, on="calibrated_sensor_token", how="left")
    df_sample_data = df_sample_data.drop(columns=['calibrated_sensor_token'])

    df_ego_pose = pd.DataFrame(ego_pose_json)
    df_ego_pose.columns = ['ego_pose_token', 'timestamp', 'ego_rotation', 'ego_translation']
    df_ego_pose = df_ego_pose.drop(columns=['timestamp'])
    df_sample_data = pd.merge(df_sample_data, df_ego_pose, on="ego_pose_token", how="left")
    df_sample_data = df_sample_data.drop(columns=['ego_pose_token'])

    df_scene = pd.DataFrame(scene_json)
    df_scene.columns = ['scene_token', 'log_token', 'nbr_samples', 'first_sample_token', 'last_sample_token', 'scene_name', 'description']
    df_scene = df_scene[['scene_token', 'first_sample_token', 'last_sample_token', 'scene_name']]
    df_sample_data = pd.merge(df_sample_data, df_scene, on="scene_token", how="left")
    df_sample_data = df_sample_data.drop(columns=['scene_token'])

    df_sample_data['frame_order'] = 0
    for index, row in df_sample_data.iterrows():
        if len(row['prev']) == 0:
            df_sample_data.loc[index, 'frame_order'] = 1
            i = 2
            next_frame_token = row['next']
            while len(next_frame_token) != 0:
                # print(next_frame_token)
                cur_index = df_sample_data[df_sample_data['token'] == next_frame_token].index.tolist()[0]
                # print(cur_index)
                df_sample_data.loc[cur_index, 'frame_order'] = i
                i += 1
                next_frame_token = list(df_sample_data[df_sample_data['token'] == next_frame_token]['next'])[0]

    df_sample_data = df_sample_data.drop(columns=['fileformat', 'prev', 'next', 'prev_sample', 'next_sample', 'first_sample_token', 'last_sample_token'])

    df_sample_annotation = pd.DataFrame(sample_annotation_json)
    df_sample_annotation = df_sample_annotation[['token', 'sample_token', 'instance_token', 'translation', 'size', 'rotation']]

    df_instance = pd.DataFrame(instance_json)
    df_category = pd.DataFrame(category_json)
    df_category.rename(columns={'token': 'cat_token'}, inplace=True)
    df_instance = pd.merge(df_instance, df_category, left_on="category_token", right_on="cat_token", how="left")
    df_instance = df_instance.drop(columns=['category_token', 'cat_token', 'nbr_annotations', 'first_annotation_token', 'last_annotation_token', 'description'])
    df_instance.columns = ['instance_token', 'category']
    df_sample_annotation = pd.merge(df_sample_annotation, df_instance, on="instance_token", how="left")

    return df_sample_data, df_sample_annotation
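Note: a minimal usage sketch (not part of the diff), assuming the nuScenes v1.0-mini JSON files have been extracted to the hardcoded relative path v1.0-mini/v1.0-mini/ that the function expects, and that the module is importable as apperception.scenic_generate_df. The column names come from the code above.

from apperception.scenic_generate_df import scenic_generate_df

df_sample_data, df_sample_annotation = scenic_generate_df()

# One row per camera frame (jpg sample_data), joined with calibration,
# ego pose and scene metadata, plus the derived frame_order column.
print(df_sample_data[['filename', 'scene_name', 'frame_order']].head())

# One row per 3D annotation, joined with its instance category.
print(df_sample_annotation[['category', 'translation', 'size', 'rotation']].head())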