oscar / jfxmap_python · Commits

Commit 6544daab, authored Feb 08, 2022 by oscar
Commit updates
parent 20e90bcf
Showing 1 changed file with 42 additions and 26 deletions

script/trans_and_export_data_multi_auto.py  +42 -26
@@ -493,7 +493,7 @@ MAX_SAVE_FILE_NUM = 10000
 g_thread_lock = threading.Lock()
 g_map_lock = threading.Lock()
-def Save_Cloud_File(cloud,json):
+def Save_Cloud_File(cloud,json,dir_g):
     global g_saveFileType
     global g_showCloud
     global g_save_count
@@ -545,7 +545,7 @@ def Save_Cloud_File(cloud,json):
     savefile = os.path.join(cloud["path"], cloud["pcdName"])
     cloud["np_pcd"] = cloud["np_pcd"].astype(np.float32)
     cloud["np_pcd"].tofile(savefile)
-    jsn_pcd["fileuri"] = "5-3/pcd/" + cloud["pcdName"]
+    jsn_pcd["fileuri"] = dir_g + "/pcd/" + cloud["pcdName"]
     json.append(jsn_pcd)
     if g_showCloud == 1:
         axis_pcd = o3d.geometry.TriangleMesh.create_coordinate_frame(size=5, origin=[0, 0, 0])
@@ -557,14 +557,14 @@ def Save_Cloud_File(cloud,json):
-def Add_Cloud_box(save_cloud, add_idx, bbox, pickcloud, json):
+def Add_Cloud_box(save_cloud, add_idx, bbox, pickcloud, json, dir_g):
     save_cloud[add_idx]["boxes"].append(bbox)
     delflag = in_hull(save_cloud[add_idx]["np_pcd"], bbox[1])
     save_cloud[add_idx]["np_pcd"] = save_cloud[add_idx]["np_pcd"][~delflag]
     save_cloud[add_idx]["np_pcd"] = np.vstack((save_cloud[add_idx]["np_pcd"], pickcloud))
     if save_cloud[0]["isSave"] == 1:
         if len(save_cloud[0]["boxes"]) >= 10:
-            Save_Cloud_File(save_cloud[0], json)
+            Save_Cloud_File(save_cloud[0], json, dir_g)
             save_cloud.pop(0)

 def Check_Add_Cloud_box(save_cloud, bbox, pcd, pcdName, path, index):
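For orientation, a minimal sketch (not part of the commit) of how the new dir_g argument threads from Add_Cloud_box into Save_Cloud_File, so the exported fileuri uses the directory currently being processed instead of the hard-coded "5-3" prefix. The cloud/json structures below are simplified assumptions:

def Save_Cloud_File_sketch(cloud, json_out, dir_g):
    # per-directory fileuri, e.g. dir_g="5-3", pcdName="000001.bin" -> "5-3/pcd/000001.bin"
    jsn_pcd = {"fileuri": dir_g + "/pcd/" + cloud["pcdName"]}
    json_out.append(jsn_pcd)

def Add_Cloud_box_sketch(save_cloud, add_idx, bbox, pickcloud, json_out, dir_g):
    save_cloud[add_idx]["boxes"].append(bbox)
    if len(save_cloud[0]["boxes"]) >= 10:
        # dir_g is simply forwarded to the save routine
        Save_Cloud_File_sketch(save_cloud[0], json_out, dir_g)
        save_cloud.pop(0)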
@@ -609,7 +609,7 @@ threads = []
 threadID = 1
 class pcdThread (threading.Thread):
-    def __init__(self, threadID, name, start, idx, _dirs, _dir_pcd_list, save_list, jsn_list, trans, kit2o, box_info_count, _dir_g, _m):
+    def __init__(self, threadID, name, start, idx, _dirs, _dir_pcd_list, save_list, jsn_list, trans, kit2o, box_info_count, _dir_g, _m, pcd_path):
         threading.Thread.__init__(self)
         self.threadID = threadID
         self.name = name
@@ -624,6 +624,7 @@ class pcdThread (threading.Thread):
         self.box_info_count = box_info_count
         self.dir_g = _dir_g
         self.m = _m
+        self.pcd_path = pcd_path
     def run(self):
         global g_save_count
         global MAX_SAVE_FILE_NUM
@@ -670,6 +671,7 @@ class pcdThread (threading.Thread):
                 mapInfoExport = get_map_data(exportCenterBL[0], exportCenterBL[1], angle2)
                 g_map_lock.release()
                 # print("call get ex data isInMap = ",mapInfoExport)
+                self.box_info_count["boxes"] += 1
                 if mapInfoExport[0] != 1:
                     continue;
                 laneAngle = mapInfoExport[10]
@@ -691,13 +693,14 @@ class pcdThread (threading.Thread):
                 # the vehicle point cloud meets the criteria and can be added to the new point cloud
                 # f1.write('%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\n'%(exportCenterBL[0],exportCenterBL[1],bbox[4][0], bbox[4][1], bbox[4][2],bbox[6][0], bbox[6][1], bbox[6][2],angle2,laneAngle))
                 self.box_info_count[bbox[0]] += 1
+                self.box_info_count["choose"] += 1
                 name = pcd_file.split("/")[-1]
                 if g_saveFileType == 1:
                     name = name[0:-3] + "bin"
                 ind = 0
                 if index > self.begin:
                     ind = 1
-                idx = Check_Add_Cloud_box(save_cloud_list, bbox, g_converted_pcd, name, generate_pcd_path, ind)
+                idx = Check_Add_Cloud_box(save_cloud_list, bbox, g_converted_pcd, name, self.pcd_path, ind)
                 if idx == -1:
                     continue;
                 flag = in_hull(xyz, bbox[1])
@@ -712,7 +715,7 @@ class pcdThread (threading.Thread):
                 # print(pickcloud)
                 bbox[1] += dz
-                Add_Cloud_box(save_cloud_list, idx, bbox, pickcloud, json_list)
+                Add_Cloud_box(save_cloud_list, idx, bbox, pickcloud, json_list, self.dir_g)
             if isDeal == 1:
                 index += self.idx
@@ -732,6 +735,7 @@ if __name__ == '__main__':
     # origin_root_path = "/media/sf_shared/nodes/" # root directory of the input data
     # generate_root_path = "/media/sf_shared/5-3/"
     origin_root_path = "/host/home/sata2/datasets/N19_annotations/" # root directory of the input data
+    base_root_path = "/host/home/sata2/oscar/jfxmap_python/script/generate/"
     generate_root_path = "/host/home/sata2/oscar/jfxmap_python/script/generate/"
     # generate_car_yaw_cal_angle = 79.89299572540227
     # p_dir = "/home/oscar/ros/git/jfxmap_python/jfxmap/"
@@ -740,6 +744,7 @@ if __name__ == '__main__':
     debug = 0
     if debug == 1:
         origin_root_path = "/media/sf_shared/nodes/"
+        base_root_path = "/home/oscar/ros/git/jfxmap_python/script/generate/"
         generate_root_path = "/media/sf_shared/generate/"
         p_dir = "/home/oscar/ros/git/jfxmap_python/jfxmap/"
@@ -747,10 +752,10 @@ if __name__ == '__main__':
     ret = init_jfxmap(p_dir, f_path)
     print(ret)
-    dirsAll_g = os.listdir(generate_root_path)
+    dirsAll_g = os.listdir(base_root_path)
     dirs_g = []
     for file_g in dirsAll_g:
-        if os.path.isdir(generate_root_path + file_g) == True:
+        if os.path.isdir(base_root_path + file_g) == True:
             dirs_g.append(file_g)
     dir_pcd_list = {}
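This and the surrounding hunks introduce base_root_path as the root that is scanned for input directories, while generate_root_path remains the root where per-directory outputs are written (in this commit both happen to point at the same location). A hedged sketch of the listing logic, written with os.path.join instead of string concatenation; this is an equivalent variant, not the commit's exact code:

import os

def list_input_dirs(base_root_path):
    # keep only subdirectories of the input root, mirroring dirsAll_g/dirs_g above
    return [d for d in os.listdir(base_root_path)
            if os.path.isdir(os.path.join(base_root_path, d))]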
@@ -777,17 +782,25 @@ if __name__ == '__main__':
             dir_pcd_list[dir].append(anno)
     for dir_g in dirs_g:
-        pcd_list = glob.glob(os.path.join(generate_root_path + dir_g, "*.pcd"))
+        base_dir_path = os.path.join(base_root_path, dir_g)
+        pcd_list = glob.glob(os.path.join(base_dir_path, "*.pcd"))
         if len(pcd_list) <= 0:
             continue
-        yaml_list = glob.glob(os.path.join(generate_root_path + dir_g, "*.yaml"))
+        yaml_list = glob.glob(os.path.join(base_dir_path, "*.yaml"))
         if len(yaml_list) <= 0:
             continue
-        plane_list = glob.glob(os.path.join(generate_root_path + dir_g, "*.json"))
+        plane_list = glob.glob(os.path.join(base_dir_path, "*.json"))
         if len(plane_list) <= 0:
             continue
-        generate_pcd_path = os.path.join(generate_root_path + dir_g, "pcd/")
+        generate_dir_path = os.path.join(generate_root_path, dir_g)
+        if os.path.exists(generate_dir_path) == False:
+            os.mkdir(generate_dir_path)
+        if os.path.isdir(generate_dir_path) == False:
+            os.remove(generate_dir_path)
+            os.mkdir(generate_dir_path)
+        generate_pcd_path = os.path.join(generate_dir_path, "pcd/")
         if os.path.exists(generate_pcd_path) == False:
             os.mkdir(generate_pcd_path)
         if os.path.isdir(generate_pcd_path) == False:
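The added block ensures generate_dir_path exists and is a real directory before the pcd/ subfolder is created. A more compact equivalent is sketched below with os.makedirs; this is an alternative with the same intent, not what the commit ships:

import os

def ensure_output_dir(generate_dir_path):
    # if something that is not a directory occupies the path, remove it first (as the diff does)
    if os.path.exists(generate_dir_path) and not os.path.isdir(generate_dir_path):
        os.remove(generate_dir_path)
    # create the directory (and any missing parents) if it is not there yet
    os.makedirs(generate_dir_path, exist_ok=True)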
@@ -796,7 +809,7 @@ if __name__ == '__main__':
         generate_child_dir = dir_g
-        generate_cfg_path = os.path.join(generate_root_path + dir_g, yaml_list[0])
+        generate_cfg_path = os.path.join(base_dir_path, yaml_list[0])
         generate_cfg = read_yaml(generate_cfg_path)
         generate_Trans = generate_cfg.TRACKING.TRANS
         generate_Trans = np.array(generate_Trans)
@@ -805,7 +818,7 @@ if __name__ == '__main__':
-        base_file_path = os.path.join(generate_root_path + dir_g, pcd_list[0])
+        base_file_path = os.path.join(base_dir_path, pcd_list[0])
         g_xyzi, g_pcd, g_converted_pcd = parse_pandarmind_pcd(base_file_path, rotMat[generate_child_dir])
         # compute the ground plane of the point cloud map
         # converted_points = np.array(g_converted_pcd.points)
@@ -815,7 +828,7 @@ if __name__ == '__main__':
         # seg,m = ground_segmentation(data=cropped_cropped[:,:3])
         # m = [ 1.11119401e-03, -8.66741252e-04, 1.60358817e-01, 9.87057781e-01]
-        jsn_file = os.path.join(generate_root_path + dir_g, plane_list[0])
+        jsn_file = os.path.join(base_dir_path, plane_list[0])
         m = []
         with open(jsn_file, 'r', encoding='utf-8', errors='ignore') as fp:
             m = json.load(fp)
@@ -832,9 +845,9 @@ if __name__ == '__main__':
             save_cloud_list.append(list)
             jsn_list = []
             save_json_list.append(jsn_list)
-            box_info_cout = {'big': 0, 'little': 0, 'pedestrian': 0, 'mid': 0, 'cyclist': 0}
+            box_info_cout = {'big': 0, 'little': 0, 'pedestrian': 0, 'mid': 0, 'cyclist': 0, "boxes": 0, "choose": 0}
             cloud_box_count.append(box_info_cout)
-            thread = pcdThread(i, "Thread" + str(i), i, Thread_NUM, dirs, dir_pcd_list, save_cloud_list[i], save_json_list[i], generate_Trans, generate_kitti2origin, cloud_box_count[i], dir_g, m)
+            thread = pcdThread(i, "Thread" + str(i), i, Thread_NUM, dirs, dir_pcd_list, save_cloud_list[i], save_json_list[i], generate_Trans, generate_kitti2origin, cloud_box_count[i], dir_g, m, generate_pcd_path)
             thread.start()
             threads.append(thread)
         for t in threads:
@@ -847,18 +860,21 @@ if __name__ == '__main__':
             for jsn in t_idx:
                 save_json["annotations"].append(jsn)
-        jsn_path = os.path.join(generate_root_path + dir_g, generate_child_dir + ".json")
+        jsn_path = os.path.join(generate_dir_path, generate_child_dir + ".json")
         with open(jsn_path, 'w') as file_obj:
             json.dump(save_json, file_obj, cls=NumpyEncoder, indent=4)
-        totel_count = {'big': 0, 'little': 0, 'pedestrian': 0, 'mid': 0, 'cyclist': 0}
+        totel_count = {'totel_big': 0, 'totel_little': 0, 'totel_pedestrian': 0, 'totel_mid': 0, 'totel_cyclist': 0, "totel_boxes": 0, "totel_choose": 0, "totel_rate": 0.0}
         for count in cloud_box_count:
-            totel_count['big'] += count['big']
-            totel_count['little'] += count['little']
-            totel_count['pedestrian'] += count['pedestrian']
-            totel_count['mid'] += count['mid']
-            totel_count['cyclist'] += count['cyclist']
+            totel_count['totel_big'] += count['big']
+            totel_count['totel_little'] += count['little']
+            totel_count['totel_pedestrian'] += count['pedestrian']
+            totel_count['totel_mid'] += count['mid']
+            totel_count['totel_cyclist'] += count['cyclist']
+            totel_count['totel_boxes'] += count['boxes']
+            totel_count['totel_choose'] += count['choose']
+        totel_count['totel_rate'] = totel_count['totel_choose'] / totel_count['totel_boxes'];
         cloud_box_count.append(totel_count)
-        count_jsn_path = os.path.join(generate_root_path + dir_g, "count.json")
+        count_jsn_path = os.path.join(generate_dir_path, "count.json")
         with open(count_jsn_path, 'w') as file_o:
             json.dump(cloud_box_count, file_o, cls=NumpyEncoder, indent=4)
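The per-thread counters now track every box looked up ("boxes") and every box actually exported ("choose"), and the renamed totel_* totals add a totel_rate = totel_choose / totel_boxes summary written to count.json. A hedged sketch of the same aggregation with a guard against division by zero (the commit itself divides unconditionally):

def aggregate_counts(cloud_box_count):
    # cloud_box_count: per-thread dicts with 'big', 'little', 'pedestrian',
    # 'mid', 'cyclist', 'boxes' and 'choose' counters, as built in the diff above
    keys = ['big', 'little', 'pedestrian', 'mid', 'cyclist', 'boxes', 'choose']
    totals = {'totel_' + k: sum(c[k] for c in cloud_box_count) for k in keys}
    total_boxes = totals['totel_boxes']
    totals['totel_rate'] = totals['totel_choose'] / total_boxes if total_boxes else 0.0
    return totals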