Mentions légales du service

Skip to content
Snippets Groups Projects
Commit b42d1f86 authored by AUTERNAUD Alex's avatar AUTERNAUD Alex
Browse files

more test cases for test_ari_robot

parent 4281c409
No related branches found
No related tags found
No related merge requests found
Subproject commit aeff32f954b6d8fbd47dd27a6a8f742e8fa069a5
Subproject commit bdad5d0f6f49b1d5ae7f1ed59e6d0d308cad605f
......@@ -8,6 +8,7 @@ import numpy as np
import yaml
from collections import namedtuple
from mpi_sim.utils.box2d_utils import constraint_angle
import cv2
# TODO: need group identification for SocialMPC
......@@ -153,8 +154,8 @@ class ARINavigation(GeneratorComponent):
else:
self.state.robot_state.joint_angles = np.zeros(1)
self.state.robot_state.joint_velocities = np.zeros(1)
self.state.robot_state.global_map = self.ari_mapping.global_map
self.state.robot_state.local_map = self.ari_mapping.local_map
self.state.robot_state.global_map = np.zeros_like(self.ari_mapping.global_map)
self.state.robot_state.local_map = np.zeros_like(self.ari_mapping.local_map)
self.state.robot_state.config = MapConfig(
self.ari_mapping.global_map_area,
......@@ -308,8 +309,11 @@ class ARINavigation(GeneratorComponent):
self.state.robot_state.robot_velocity[1] = (self.object.orientation_velocity)
self.state.robot_state.joint_angles[:] = constraint_angle(-self.object.joints[-1].angle)
self.state.robot_state.joint_velocities[:] = -self.object.joints[-1].speed
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,
(2, 2))
local_map = cv2.dilate(self.ari_mapping.local_map, kernel, iterations=self.mpc_controller.controller_config.dilation_iter)
self.state.robot_state.global_map = np.flip(self.ari_mapping.global_map.T, axis=0)
self.state.robot_state.local_map = np.flip(self.ari_mapping.local_map.T, axis=0)
self.state.robot_state.local_map = np.flip(local_map.T, axis=0)
humans = self.simulation.get_objects_by_type(mpi_sim.objects.Human)
......
......@@ -12,7 +12,7 @@ def test_ari_navigation_go_to_human_simple():
{'type': 'Wall', 'position': [3., 6.0], 'orientation': np.pi / 2, 'height': 6.2}, # south
{'type': 'Wall', 'position': [3., 0.0], 'orientation': np.pi / 2, 'height': 6.2}, # north
],
processes=[{'type': 'GUI'}]
# processes=[{'type': 'GUI'}]
)
human = mpi_sim.objects.Human(position=[1., 1.], orientation=0)
......@@ -50,14 +50,14 @@ def test_ari_navigation_go_to_human_with_obstacle():
# {'type': 'Bench', 'position': [2., 4.7]},
{'type': 'Bench', 'position': [2., 3.7]}
],
processes=[{'type': 'GUI'}]
# processes=[{'type': 'GUI'}]
)
human = mpi_sim.objects.Human(position=[1., 1.], orientation=0.)
simulation.add_object(human)
ari = mpi_sim.objects.ARIRobot(
position=[3., 5.],
position=[5., 5.],
orientation=0., #- 3 / 4 * np.pi,
)
simulation.add_object(ari)
......@@ -86,7 +86,7 @@ def test_ari_navigation_go_to_group():
{'type': 'Wall', 'position': [3., 6.0], 'orientation': np.pi / 2, 'height': 6.2}, # south
{'type': 'Wall', 'position': [3., 0.0], 'orientation': np.pi / 2, 'height': 6.2} # north
],
processes=[{'type': 'GUI'}]
# processes=[{'type': 'GUI'}]
)
# add two humans (paul and irene)
......@@ -128,7 +128,7 @@ def test_ari_navigation_set_goal_position():
{'type': 'Wall', 'position': [3., 6.0], 'orientation': np.pi / 2, 'height': 6.2}, # south
{'type': 'Wall', 'position': [3., 0.0], 'orientation': np.pi / 2, 'height': 6.2}, # north
],
processes=[{'type': 'GUI'}]
# processes=[{'type': 'GUI'}]
)
ari = mpi_sim.objects.ARIRobot(
......@@ -150,3 +150,81 @@ def test_ari_navigation_set_goal_position():
assert is_goal_reached
def test_ari_navigation_go_to_human_with_obstacles():
    """Robot must route around two benches in a walled room and come
    within 1.5 m of a human within 10000 simulation steps."""
    room = [
        {'type': 'Wall', 'position': [6., 3.], 'orientation': 0., 'height': 6.},  # east
        {'type': 'Wall', 'position': [0., 3.], 'orientation': 0., 'height': 6.},  # west
        {'type': 'Wall', 'position': [3., 6.0], 'orientation': np.pi / 2, 'height': 6.2},  # south
        {'type': 'Wall', 'position': [3., 0.0], 'orientation': np.pi / 2, 'height': 6.2},  # north,
        {'type': 'Bench', 'position': [4.7, 2.7], 'orientation': -1.2},
        {'type': 'Bench', 'position': [2., 3.7]},
    ]
    simulation = mpi_sim.Simulation(
        visible_area=((0., 10.), (0., 10.)),
        objects=room,
        # processes=[{'type': 'GUI'}]
    )

    target = mpi_sim.objects.Human(position=[1., 1.], orientation=0.)
    simulation.add_object(target)

    robot = mpi_sim.objects.ARIRobot(
        position=[5., 5.],
        orientation=0.,
    )
    simulation.add_object(robot)
    robot.navigation.set_go_towards_human(target)

    reached = False
    for _ in range(10000):
        simulation.step()
        if mpi_sim.utils.measure_center_distance(robot, target) < 1.5:
            reached = True
            break
    simulation.close()
    assert reached
def test_ari_navigation_go_to_human_with_obstacles_humans():
    """ARI robot navigates towards a target human while avoiding benches
    and two bystander humans standing near the target.

    Passes when the robot gets within 1.7 m (center distance) of the
    target human within 10000 simulation steps.
    """
    simulation = mpi_sim.Simulation(
        visible_area=((0., 10.), (0., 10.)),
        objects=[
            {'type': 'Wall', 'position': [6., 3.], 'orientation': 0., 'height': 6.},  # east
            {'type': 'Wall', 'position': [0., 3.], 'orientation': 0., 'height': 6.},  # west
            {'type': 'Wall', 'position': [3., 6.0], 'orientation': np.pi / 2, 'height': 6.2},  # south
            {'type': 'Wall', 'position': [3., 0.0], 'orientation': np.pi / 2, 'height': 6.2},  # north,
            {'type': 'Bench', 'position': [4.7, 3.2], 'orientation': -1.2},
            {'type': 'Bench', 'position': [2., 4.2]}
        ],
        # GUI disabled so the test runs headless, consistent with the other
        # navigation tests in this file; re-enable locally for debugging.
        # processes=[{'type': 'GUI'}]
    )
    # human1 is the navigation target; human2/human3 are obstacles near it.
    human1 = mpi_sim.objects.Human(position=[1., 1.], orientation=0.)
    human2 = mpi_sim.objects.Human(position=[2.5, 2.], orientation=-np.pi*3/4)
    human3 = mpi_sim.objects.Human(position=[3., 1.2], orientation=np.pi*1/4)
    simulation.add_object([human1, human2, human3])
    ari = mpi_sim.objects.ARIRobot(
        position=[5., 5.],
        orientation=0.,
    )
    simulation.add_object(ari)
    ari.navigation.set_go_towards_human(human1)
    is_human_reached = False
    for step in range(10000):
        simulation.step()
        if mpi_sim.utils.measure_center_distance(ari, human1) < 1.7:
            is_human_reached = True
            break
    simulation.close()
    assert is_human_reached
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment