Simulation-Environment-for-Cooperative-Localization

Commit 98dd615d
Authored 7 years ago by Shengkang (William) Chen
Commit message: "hope this works"
Parent: d16025fc

Showing 2 changed files with 283 additions and 65 deletions:
  functions/dataset_manager/existing_dataset.py  (+207, -40)
  functions/test_simulation.py                   (+76, -25)

functions/dataset_manager/existing_dataset.py  (+207, -40)
  # -*- coding: utf-8 -*-
  import numpy as np
  from numpy import random
  import sys
  from math import atan2, sqrt, pi
  '''
  sys.path.insert(0, '/Users/william/Documents/SLAM_Simulation/Simulation-Environment-for-Cooperative-Localization/functions/requests')
  import request_response
  '''

  def find_nearest_time_idx(l, value):
-     array = np.asarray(l)
-     idx = (np.abs(array - value)).argmin()
-     return idx
+     if len(l) != 0:
+         array = np.asarray(l)
+         idx = (np.abs(array - value)).argmin()
+         return idx
+     else:
+         return None
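A quick standalone check of the new guard behaviour (illustration only, not part of the commit; requires only numpy):

import numpy as np

def find_nearest_time_idx(l, value):
    if len(l) != 0:
        array = np.asarray(l)
        idx = (np.abs(array - value)).argmin()
        return idx
    else:
        return None

print(find_nearest_time_idx([0.0, 1.5, 3.0], 1.4))  # 1 (closest timestamp is 1.5)
print(find_nearest_time_idx([], 1.4))               # None instead of a ValueError on an empty list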
@@ -30,13 +35,19 @@ def find_next_time_idx(array, start_idx, value):
              break
      return i

+ def linear_interpolation(end0, end1, x):
+     [x0, y0] = end0
+     [x1, y1] = end1
+     if x1 == x0:
+         y = y0
+     else:
+         y = y0 + (x - x0)*(y1 - y0)/(x1 - x0)
+     return y

  class Dataset:

-     def __init__(self, dataset_name, dataset_path, dataset_labels):
+     def __init__(self, dataset_name):
          self.name = dataset_name
-         self.dataset_path = dataset_path
-         self.dataset_labels = dataset_labels

      def create_landmark_map(self):
          ### Build landmark map ###
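A short worked example of the interpolation helper added above (illustration only, not part of the commit). Each endpoint is a [time, value] pair and x is the query time:

def linear_interpolation(end0, end1, x):
    [x0, y0] = end0
    [x1, y1] = end1
    if x1 == x0:
        y = y0
    else:
        y = y0 + (x - x0)*(y1 - y0)/(x1 - x0)
    return y

print(linear_interpolation([1.0, 0.2], [3.0, 0.6], 2.0))  # 0.4
print(linear_interpolation([1.0, 0.2], [1.0, 0.6], 2.0))  # 0.2 (degenerate interval falls back to y0)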
@@ -54,14 +65,22 @@ class Dataset:

-     def initialization_MRCLAMDatasets(self, duration):
+     def load_MRCLAMDatasets(self, dataset_path, dataset_labels, duration, adding_actifical_dataline = True, delay_start = 10, synthetic = True):
          print('******** Initialization Started ********')
+         self.synthetic = synthetic
+         print('add synthetic data: ', self.synthetic)
+         self.dataset_path = dataset_path
+         self.dataset_labels = dataset_labels
+         self.adding_actifical_dataline = adding_actifical_dataline
          self.create_landmark_map()
          self.num_robots = len(self.dataset_labels)
          self.measurement_data = [[] for i in range(self.num_robots)]
          self.odometry_data = [[] for i in range(self.num_robots)]
+         self.gt_data_odo = [[] for i in range(self.num_robots)]
+         self.gt_data_meas = [[] for i in range(self.num_robots)]
          self.groundtruth_data = [[] for i in range(self.num_robots)]
          #self.groundtruth_time= [[] for i in range(self.num_robots)]
          self.time_arr = {'odometry': [[] for i in range(self.num_robots)], 'measurement': [[] for i in range(self.num_robots)], 'groundtruth': [[] for i in range(self.num_robots)]}
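For context, the call site in functions/test_simulation.py (further down in this commit) now constructs and loads the dataset with the new signature like this:

dataset_path = '../Datasets/MRCLAM_Dataset3/'
dataset_labels = [1, 2, 3]
duration = 200  # duration for the simulation in sec

testing_dataset = Dataset('testing')
start_time, starting_states, dataset_data, time_arr = testing_dataset.load_MRCLAMDatasets(
    dataset_path, dataset_labels, duration, synthetic = False)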
@@ -78,7 +97,7 @@ class Dataset:
                  time = float(line.split()[0])
                  self.start_time_arr.append(time)
                  break
-         self.start_time = max(self.start_time_arr)
+         self.start_time = max(self.start_time_arr) + delay_start
          self.end_time = self.start_time + self.duration
          print('Staring time: ', self.start_time)
          #finding starting states:
@@ -101,17 +120,6 @@ class Dataset:
          for i, label in enumerate(self.dataset_labels):
              robot_num = str(label)
-             meas_path = self.dataset_path + "Robot" + robot_num + "_Measurement_x.dat"
-             with open(meas_path,'r+') as measure_file:
-                 for line in measure_file:
-                     if line[0] != '#' and (self.end_time+2 >= float(line.split()[0]) >= self.start_time):
-                         time = float(line.split()[0])
-                         subject_ID = int(line.split()[1])
-                         measurment_range = float(line.split()[2])
-                         bearing = float(line.split()[3])
-                         meas_info = {'time':time, 'subject_ID':subject_ID, 'measurment_range': measurment_range, 'bearing':bearing}
-                         self.measurement_data[i].append(meas_info)
-                         self.time_arr['measurement'][i].append(time)
              groundtruth_path = self.dataset_path + "Robot" + robot_num + "_Groundtruth.dat"
              with open(groundtruth_path,'r+') as groundtruth_file:
@@ -124,6 +132,27 @@ class Dataset:
                          groundtruth_info = {'time':time, 'x_pos': x_pos, 'y_pos': y_pos, 'orientation': orientation}
                          self.groundtruth_data[i].append(groundtruth_info)
                          self.time_arr['groundtruth'][i].append(time)

+             meas_path = self.dataset_path + "Robot" + robot_num + "_Measurement_x.dat"
+             with open(meas_path,'r+') as measure_file:
+                 for line in measure_file:
+                     if line[0] != '#' and (self.end_time+2 >= float(line.split()[0]) >= self.start_time):
+                         time = float(line.split()[0])
+                         subject_ID = int(line.split()[1])
+                         measurment_range = float(line.split()[2])
+                         bearing = float(line.split()[3])
+                         meas_info = {'time':time, 'subject_ID':subject_ID, 'measurment_range': measurment_range, 'bearing':bearing}
+                         g_idx = find_nearest_time_idx(self.time_arr['groundtruth'][i], time)
+                         gt_x = self.groundtruth_data[i][g_idx]['x_pos']
+                         gt_y = self.groundtruth_data[i][g_idx]['y_pos']
+                         orientation = self.groundtruth_data[i][g_idx]['orientation']
+                         matched_gt_info = {'time':time, 'x_pos': gt_x, 'y_pos': gt_y, 'orientation': orientation}
+                         self.measurement_data[i].append(meas_info)
+                         self.gt_data_meas[i].append(matched_gt_info)
+                         self.time_arr['measurement'][i].append(time)

              odo_path = self.dataset_path + "Robot" + robot_num + "_Odometry.dat"
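For reference, the parsing above assumes whitespace-separated MRCLAM measurement rows of the form time / subject_ID / range / bearing; a minimal sketch with a made-up sample row:

line = "1248272272.841  14  1.9437  0.2072"   # hypothetical row: time [s], subject #, range [m], bearing [rad]

if line[0] != '#':                             # header lines in the .dat files start with '#'
    time = float(line.split()[0])
    subject_ID = int(line.split()[1])
    measurment_range = float(line.split()[2])
    bearing = float(line.split()[3])
    print(time, subject_ID, measurment_range, bearing)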
@@ -135,18 +164,25 @@ class Dataset:
                          velocity = float(line.split()[1])
                          orientation = self.groundtruth_data[i][g_idx]['orientation']
                          odo_info = {'time':time, 'velocity':velocity, 'orientation':orientation}
+                         gt_x = self.groundtruth_data[i][g_idx]['x_pos']
+                         gt_y = self.groundtruth_data[i][g_idx]['y_pos']
+                         matched_gt_info = {'time':time, 'x_pos': gt_x, 'y_pos': gt_y, 'orientation': orientation}
                          self.odometry_data[i].append(odo_info)
+                         self.gt_data_odo[i].append(matched_gt_info)
                          self.time_arr['odometry'][i].append(time)

-         self.data_trackers = {'odometry': np.ones((self.num_robots,), dtype=np.int), 'measurement': np.ones((self.num_robots,), dtype=np.int), 'groundtruth': np.ones((self.num_robots,), dtype=np.int)}
+         self.data_trackers = {'odometry': np.ones((self.num_robots,), dtype=np.int), 'measurement': np.ones((self.num_robots,), dtype=np.int)}
          # tracker for each robot for both each type of data to keep track of their location in the dataset
          self.odo_data_trackers = np.ones((self.num_robots,), dtype=np.int)
          self.dataset_data = {'odometry': self.odometry_data, 'measurement': self.measurement_data, 'groundtruth': self.groundtruth_data}
+         self.dataset_matched_gt_data = {'odometry': self.gt_data_odo, 'measurement': self.gt_data_meas}
          print('******** Initialization Completed ********')
          return self.start_time, self.starting_states, self.dataset_data, self.time_arr

      def dataset_reset(self):
          self.data_trackers = {'odometry': np.ones((self.num_robots,), dtype=np.int), 'measurement': np.ones((self.num_robots,), dtype=np.int), 'groundtruth': np.ones((self.num_robots,), dtype=np.int)}

      def get_start_time(self):
@@ -163,11 +199,13 @@ class Dataset:

      def get_start_moving_times(self):
          start_moving_times = []
+         time_idx = 0
          for i, label in enumerate(self.dataset_labels):
-             start_moving_times.append(self.dataset_data['odometry'][i][0]['time'])
+             start_moving_times.append(self.dataset_data['odometry'][i][time_idx]['time'])
          return start_moving_times

      def get_time_arr(self, data_catagory):
          # retunr an array of time shows time for next dataline for each robot given data catagory
          time_arr = []
          for i, label in enumerate(self.dataset_labels):
              time_idx = self.data_trackers[data_catagory][i]
@@ -189,7 +227,7 @@ class Dataset:

      def trackers_sync(self, current_time):
          #print('sync current time: ', current_time )
-         for catagory in ['odometry', 'measurement', 'groundtruth']:
+         for catagory in ['odometry', 'measurement']:
              for robot_idx in range(self.num_robots):
                  start_idx = self.data_trackers[catagory][robot_idx]
                  if start_idx != -1:
@@ -199,10 +237,11 @@ class Dataset:

      def trackers_update(self, req_type, robot_idx):
          # update corrsponding trackers
          self.data_trackers[req_type][robot_idx] += 1
-         self.data_trackers['groundtruth'][robot_idx] += 1
+         # self.data_trackers['groundtruth'][robot_idx]+=1

      def get_matched_dataline(self, req):
          message = req.get_message()
          if message['robot_index'] == None:
              if req.get_type() == None:
                  odo_time_arr = self.get_time_arr('odometry')
@@ -224,31 +263,158 @@ class Dataset:
          else:
              robot_idx = message['robot_index']
              req_type = req.get_type()
              print('Incomplete for specific robot request')
              if req.get_type() == None:
                  time_idx = self.data_trackers['odometry'][robot_idx]
                  if self.dataset_data['odometry'][robot_idx][time_idx]['time'] > self.dataset_data['measurement'][robot_idx][time_idx]['time']:
                      req_type = 'measurement'
                      req.set_type(req_type)
                  else:
                      req_type = 'odometry'
                      req.set_type(req_type)
              else:
                  req_type = req.get_type()
                  time_idx = self.data_trackers[req_type][robot_idx]
              if self.dataset_data[req_type][robot_idx][time_idx]['time'] > self.end_time:
                  valid_dataline = False
              else:
                  valid_dataline = True
-             return valid_dataline, req_type, robot_idx, time_idx
+             message['data'] = self.dataset_data[req_type][robot_idx][time_idx]
+             message['groundtruth'] = self.dataset_matched_gt_data[req_type][robot_idx][time_idx]
+             return valid_dataline, message, req_type, robot_idx, time_idx
      def create_synthetic_dataline(self, req, time_idx, meas_input_var):
          message = req.get_message()
          req_type = req.get_type()
          robot_idx = message['robot_index']
          req_time = message['time']
          if req_time > self.time_arr[req_type][robot_idx][time_idx]:
              try:
                  prev_time_idx = time_idx
                  post_time_idx = time_idx + 1
                  self.time_arr[req_type][robot_idx][post_time_idx]
              except IndexError:
                  prev_time_idx = time_idx - 1
                  post_time_idx = time_idx
          else:
              prev_time_idx = time_idx - 1
              post_time_idx = time_idx

          t0 = self.time_arr[req_type][robot_idx][prev_time_idx]
          t1 = self.time_arr[req_type][robot_idx][post_time_idx]
          if t1 - t0 > 1:  # time interval is too big for linear iterpolation
              g_idx = find_nearest_time_idx(self.time_arr['groundtruth'][robot_idx], req_time)
              gt_x = self.groundtruth_data[robot_idx][g_idx]['x_pos']
              gt_y = self.groundtruth_data[robot_idx][g_idx]['y_pos']
              gt_orientation = self.groundtruth_data[robot_idx][g_idx]['orientation']
          else:
              #groudtruth = {'time':time, 'x_pos':gt_x, 'y_pos':gt_y, 'orientation':orientation}
              x0 = self.dataset_matched_gt_data[req_type][robot_idx][prev_time_idx]['x_pos']
              x1 = self.dataset_matched_gt_data[req_type][robot_idx][post_time_idx]['x_pos']
              gt_x = linear_interpolation([t0, x0], [t1, x1], req_time)
              y0 = self.dataset_matched_gt_data[req_type][robot_idx][prev_time_idx]['y_pos']
              y1 = self.dataset_matched_gt_data[req_type][robot_idx][post_time_idx]['y_pos']
              gt_y = linear_interpolation([t0, y0], [t1, y1], req_time)
              o0 = self.dataset_matched_gt_data[req_type][robot_idx][prev_time_idx]['orientation']
              o1 = self.dataset_matched_gt_data[req_type][robot_idx][post_time_idx]['orientation']
              gt_orientation = linear_interpolation([t0, o0], [t1, o1], req_time)
          message['groundtruth'] = {'time':req_time, 'x_pos':gt_x, 'y_pos':gt_y, 'orientation':gt_orientation}
          if req_type == 'odometry':
              v0 = self.dataset_data[req_type][robot_idx][prev_time_idx]['velocity']
              v1 = self.dataset_data[req_type][robot_idx][post_time_idx]['velocity']
              velocity = linear_interpolation([t0, v0], [t1, v1], req_time)
              o0 = self.dataset_data[req_type][robot_idx][prev_time_idx]['orientation']
              o1 = self.dataset_data[req_type][robot_idx][post_time_idx]['orientation']
              orientation = linear_interpolation([t0, o0], [t1, o1], req_time)
              message['data'] = {'time':req_time, 'velocity':velocity, 'orientation':orientation}
          else:
              #measurement: meas_info = {'time':time, 'subject_ID':subject_ID, 'measurment_range': measurment_range, 'bearing':bearing}
              pass
              subject_ID = self.dataset_data[req_type][robot_idx][time_idx]['subject_ID']
              #bearing = self.dataset_data[req_type][robot_idx][time_idx]['bearing']
              #print(self.dataset_data[req_type][robot_idx][time_idx])
              var_dis = meas_input_var[0]
              var_phi = meas_input_var[1]
              if subject_ID > 5:  # landmark
                  [lx, ly] = self.landmark_map[subject_ID]
              else:
                  if subject_ID not in self.dataset_labels:
                      obj_index = (robot_idx + 2) % (len(self.dataset_labels))
                      subject_ID = self.dataset_labels[obj_index]
                  obj_index = self.dataset_labels.index(subject_ID)
                  matched_gt_data = self.find_correspoind_ground_truth(obj_index, req_time)
                  lx = matched_gt_data['x_pos']
                  ly = matched_gt_data['y_pos']
              measurment_range = sqrt((lx-gt_x)*(lx-gt_x) + (ly-gt_y)*(ly-gt_y)) + random.normal(0, sqrt(var_dis))
              # bearing not matching with closest dataline....
              bearing = (atan2((ly-gt_y), (lx-gt_x)) - gt_orientation) % pi + random.normal(0, sqrt(var_dis))
              if abs(bearing - pi) < abs(bearing):
                  bearing = bearing - pi
              message['data'] = {'time':req_time, 'subject_ID':subject_ID, 'measurment_range': measurment_range, 'bearing':bearing}
          return message
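A condensed sketch of the range/bearing synthesis used above, with made-up positions and the same noise model (illustration only, not part of the commit):

from math import atan2, sqrt, pi
from numpy import random

gt_x, gt_y, gt_orientation = 1.0, 2.0, 0.3   # interpolated robot pose (hypothetical values)
lx, ly = 4.0, 6.0                            # landmark or observed-robot position (hypothetical values)
var_dis = 0.1                                # distance noise variance, as in meas_input_var[0]

measurment_range = sqrt((lx-gt_x)*(lx-gt_x) + (ly-gt_y)*(ly-gt_y)) + random.normal(0, sqrt(var_dis))
bearing = (atan2((ly-gt_y), (lx-gt_x)) - gt_orientation) % pi + random.normal(0, sqrt(var_dis))
if abs(bearing - pi) < abs(bearing):
    bearing = bearing - pi
print(measurment_range, bearing)   # noise-free values would be 5.0 and about 0.627 rad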
      def load_most_recent_dataline(self, req, time_idx):
          message = req.get_message()
          req_type = req.get_type()
          robot_idx = message['robot_index']
          req_time = message['time']
          message['data'] = self.dataset_data[req_type][robot_idx][time_idx]
          message['groundtruth'] = self.dataset_matched_gt_data[req_type][robot_idx][time_idx]
          return message
      def create_additional_dataline(self, req, time_idx, meas_input_var):
          if self.synthetic:
              message = self.create_synthetic_dataline(req, time_idx, meas_input_var)
          else:
              message = self.load_most_recent_dataline(req, time_idx)
          return message
      def get_dataline_at_specific_time(self, req, time_diff_limit = 0.1):
          message = req.get_message()
          req_type = req.get_type()
          robot_idx = message['robot_index']
          req_time = message['time']
          time_idx = find_nearest_time_idx(self.time_arr[req_type][robot_idx], req_time)
          respond_time = self.time_arr[req_type][robot_idx][time_idx]
          message['data'] = self.dataset_data[req_type][robot_idx][time_idx]
          message['groundtruth'] = self.dataset_matched_gt_data[req_type][robot_idx][time_idx]

          if abs(respond_time - req_time) > time_diff_limit and self.adding_actifical_dataline:
              meas_input_var = [0.1, 0.1]
              message = self.create_additional_dataline(req, time_idx, meas_input_var)

          if req_time > self.end_time:
              valid_dataline = False
          else:
              valid_dataline = True
          return valid_dataline, message, req_type, robot_idx, time_idx
-     def respond(self, req, current_time):
+     def respond(self, req, current_time, need_specific_time = False):
          message = req.get_message()
          self.trackers_sync(current_time)
          #print('trackers: ')
          #print(self.data_trackers)

-         valid_respond, req_type, robot_idx, time_idx = self.get_matched_dataline(req)
+         if need_specific_time:
+             valid_respond, message, req_type, robot_idx, time_idx = self.get_dataline_at_specific_time(req)
+             current_time = message['time']
+         else:
+             valid_respond, message, req_type, robot_idx, time_idx = self.get_matched_dataline(req)
+             current_time = self.dataset_data[req_type][robot_idx][time_idx]['time']

          if valid_respond:
              #load data
-             current_time = self.dataset_data[req_type][robot_idx][time_idx]['time']
-             message['time'] = self.dataset_data[req_type][robot_idx][time_idx]['time']
-             message['data'] = self.dataset_data[req_type][robot_idx][time_idx]
-             #load corresponding groundtruth
-             time = self.dataset_data[req_type][robot_idx][time_idx]['time']
-             matched_gt_data = self.find_correspoind_ground_truth(robot_idx, time)
-             message['groundtruth'] = matched_gt_data
-             self.trackers_update(req_type, robot_idx)
+             message['time'] = current_time
+             self.trackers_update(req_type, robot_idx)

          req.set_message(message)
          return valid_respond, current_time, req

@@ -266,3 +432,4 @@ class Dataset:
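The request objects handled by get_matched_dataline / respond come from the request_response module (its import is commented out at the top of the file). From the calls above, their interface is assumed to look roughly like this hypothetical stub (illustration only, not part of the commit):

class RequestStub:
    # Hypothetical stand-in for the real request_response class, inferred from the
    # methods respond() calls on it: get_message, get_type, set_type, set_message.
    def __init__(self, robot_index=None, req_time=None, req_type=None):
        self._type = req_type
        self._message = {'robot_index': robot_index, 'time': req_time,
                         'data': None, 'groundtruth': None}

    def get_message(self):
        return self._message

    def set_message(self, message):
        self._message = message

    def get_type(self):
        return self._type

    def set_type(self, req_type):
        self._type = req_type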
functions/test_simulation.py  (+76, -25)
@@ -7,7 +7,6 @@ Created on Sun Apr 8 17:55:06 2018
  """
  import os, sys
  # sys.path.insert(0, 'Simulation-Environment-for-Cooperative-Localization/functions')
  sys.path.append(os.path.join(os.path.dirname(__file__), "."))
  from dataset_manager.existing_dataset import Dataset
  from simulation_process.sim_manager import SimulationManager
@@ -17,49 +16,101 @@ from data_analysis.data_analyzer import Analyzer
  from data_analysis.realtime_plot import animate_plot

  # load algorithms
  #sys.path.insert(0, '/Users/william/Documents/SLAM_Simulation/Simulation-Environment-for-Cooperative-Localization/functions/localization_algos')
  sys.path.append(os.path.join(os.path.dirname(__file__), "localization_algos"))
  from centralized_ekf import Centralized_EKF
  from centralized_ekf2 import Centralized_EKF2
  from simple_ekf import Simple_EKF
  from ekf_ls_bda import EKF_LS_BDA
  from ekf_gs_bound import EKF_GS_BOUND
  from ekf_gs_ci2 import EKF_GS_CI2

- #need to verify these
+ #need to verify these algo
  '''
  from ekf_ls_ci import EKF_LS_CI
  from ekf_ls_ci2 import EKF_LS_CI2
  from ekf_gs_ci import EKF_GS_CI
  from simple_ekf import Simple_EKF
  from ekf_gs_sci2 import EKF_GS_SCI2
  '''

  dataset_path = '../Datasets/MRCLAM_Dataset3/'
  dataset_labels = [1,2,3]
  duration = 200 # duration for the simulation in sec
+ testing_dataset = Dataset('testing')
+ start_time, starting_states, dataset_data, time_arr = testing_dataset.load_MRCLAMDatasets(dataset_path, dataset_labels, duration, synthetic = False)

  freqs0 = [[10, 10, 10],[1, 1, 1],[0.5, 0.5, 0.5]]
  freqs1 = [[10, 10, 10],[4, 4, 4],[0.5, 0.5, 0.5]]

  loc_algo = EKF_GS_CI2('algo')
  robot = RobotSystem('robot gs ci', dataset_labels, loc_algo, distr_sys = True)

  sim = SimulationManager('sim')
  state_recorder = StatesRecorder('gs ci schedule freqs0', dataset_labels)
  sim.sim_process_schedule(dataset_labels, testing_dataset, robot, state_recorder, freqs0, simple_plot = True)

  ##############################################################################
  testing_dataset.dataset_reset()
  robot = RobotSystem('robot gs ci', dataset_labels, loc_algo, distr_sys = True)
- dataset_path = '../Datasets/MRCLAM_Dataset7/'
  sim1 = SimulationManager('sim')
  state_recorder1 = StatesRecorder('gs ci schedule freq1', dataset_labels)
  sim1.sim_process_schedule(dataset_labels, testing_dataset, robot, state_recorder1, freqs1, simple_plot = True)

- dataset_labels = [1, 2, 3, 4, 5]
- duration = 100 # duration for the simulation in sec
- testing_dataset = Dataset('testing', dataset_path, dataset_labels)
- start_time, starting_states, dataset_data, time_arr = testing_dataset.initialization_MRCLAMDatasets(duration)

  ##############################################################################
  testing_dataset.dataset_reset()
  robot = RobotSystem('robot gs ci', dataset_labels, loc_algo, distr_sys = True)
  sim_n = SimulationManager('sim')
  state_recorder_n = StatesRecorder('gs ci naive', dataset_labels)
  sim_n.sim_process_naive(dataset_labels, testing_dataset, robot, state_recorder_n, simple_plot = True)

  ##############################################################################
  '''
  loc_algo = EKF_LS_CI('ls ci')
  robot = RobotSystem('r', dataset_labels, loc_algo, distr_sys = False)

  testing_dataset.dataset_reset()
  bound_algo = EKF_GS_BOUND('algo')
  robot_bound = RobotSystem('robot bound', dataset_labels, bound_algo, distr_sys = True)
  sim_b = SimulationManager('sim')
  state_recorder_bound = StatesRecorder('gs ci bound',dataset_labels)
  sim_b.sim_process_schedule(dataset_labels, testing_dataset, robot_bound, state_recorder_bound, freqs)

  ##############################################################################
  testing_dataset.dataset_reset()
  cen_ekf_algo = Centralized_EKF2('algo')
  robot_cen = RobotSystem('robot cen', dataset_labels, cen_ekf_algo, distr_sys = False)
  sim_c = SimulationManager('sim')
  state_recorder_c= StatesRecorder('cen ekf',dataset_labels)
  sim_c.sim_process_naive(dataset_labels, testing_dataset, robot_cen, state_recorder_c)

  ##############################################################################
  testing_dataset.dataset_reset()
  bda_algo = EKF_LS_BDA('algo')
  robot_bda = RobotSystem('robot bda', dataset_labels, bda_algo, distr_sys = False)
  sim_bda = SimulationManager('sim')
  state_recorder_bda= StatesRecorder('bda',dataset_labels)
  sim_bda.sim_process_naive(dataset_labels, testing_dataset, robot_bda, state_recorder_bda)
  '''
- loc_algo = EKF_LS_CI2('algo')
- robot = RobotSystem('robot me', dataset_labels, loc_algo, distr_sys = False)
- sim = SimulationManager('sim')
- state_recorder = StatesRecorder('sr', dataset_labels)

  #ud_t = state_recorder.get_updata_type_in_time_order()
  #data_t = state_recorder.get_data_in_time_order()
  analyzer = Analyzer('analyzer', dataset_labels)
  #loc_err_per_run, trace_per_run, t_arr = analyzer.calculate_loc_err_and_trace_state_variance_per_run(state_recorder_bound)
+ analyzer.algos_comparison([state_recorder, state_recorder1, state_recorder_n], only_trace = ['gs ci bound'])
- sim.sim_process_naive(dataset_labels, testing_dataset, robot, state_recorder, simple_plot = True)
  loc_err_per_run, trace_per_run, t_arr = analyzer.calculate_loc_err_and_trace_state_variance_per_run(state_recorder)
  #robot_location_at_unit_time_interval
  robot_loc_time_unit = analyzer.robot_location_at_unit_time_interval(state_recorder)
- animate_plot(dataset_labels, state_recorder, analyzer)
+ #animate_plot(dataset_labels, state_recorder, analyzer)

@@ -68,6 +119,6 @@ data_sim = state_recorder.get_recorded_data()
  le = state_recorder.get_loc_err_arr()
  ud = state_recorder.get_update_type_arr()
  t_S = state_recorder.get_trace_sigma_s_arr()
  ud_t = state_recorder.get_updata_type_in_time_order()
  data_sim_t = state_recorder.get_data_in_time_order()
  '''
\ No newline at end of file
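Following the same pattern as the runs above, one more recorded run could be appended to the comparison. This is a sketch only, reusing names and call signatures that already appear in this file; the choice of Simple_EKF with distr_sys = False and the recorder name are assumptions for illustration:

testing_dataset.dataset_reset()
simple_algo = Simple_EKF('algo')                       # Simple_EKF is already imported above
robot_simple = RobotSystem('robot simple', dataset_labels, simple_algo, distr_sys = False)
sim_s = SimulationManager('sim')
state_recorder_s = StatesRecorder('simple ekf', dataset_labels)
sim_s.sim_process_naive(dataset_labels, testing_dataset, robot_simple, state_recorder_s, simple_plot = True)
analyzer.algos_comparison([state_recorder, state_recorder1, state_recorder_n, state_recorder_s], only_trace = ['gs ci bound'])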