diff --git a/.gitignore b/.gitignore
index 25233481f4f6b288e32b45b943e38b275b969d49..893a8f6f313b1c093ffd340808bd935c29007613 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,9 +6,10 @@ frames.*
 *.sif
 *.bag
 /maps
-/src/spring_msgs
 /src/image_projection
 /src/ros_openpose
+*doctor*
+*_test.py
 
 .rosinstall
 # Byte-compiled / optimized / DLL files
diff --git a/.gitmodules b/.gitmodules
index 3e5fbf2748abbf67e81b44eefb16c5c689c2203b..ce0c718c04d92827bf9c67012e3f8af19fe3a10e 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,14 +1,28 @@
 [submodule "modules/Motor_Controller"]
 	path = modules/Motor_Controller
 	url = git@gitlab.inria.fr:spring/wp6_robot_behavior/Motor_Controller.git
-[submodule "modules/2D_Simulator"]
-	path = modules/2D_Simulator
-	url = git@gitlab.inria.fr:spring/wp6_robot_behavior/2D_Simulator.git
-	branch = devel
-[submodule "modules/omnicam"]
-	path = modules/omnicam
-	url = git@gitlab.inria.fr:spring/wp3_av_perception/omnicam.git
-[submodule "modules/pygco"]
-	path = modules/pygco
-	url = git@gitlab.inria.fr:spring/wp4_behavior/pygco.git
+	branch = MS5_alex_rm_gcff
+[submodule "src/occupancy_map_republisher"]
+	path = src/occupancy_map_republisher
+	url = git@gitlab.inria.fr:spring/wp2_mapping_localization/occupancy_map_republisher.git
 	branch = main
+[submodule "src/hri_msgs"]
+	path = src/hri_msgs
+	url = https://github.com/ros4hri/hri_msgs.git
+	branch = master
+[submodule "src/py_spring_hri"]
+	path = src/py_spring_hri
+	url = git@gitlab.inria.fr:spring/wp6_robot_behavior/py_spring_hri.git
+	branch = main
+[submodule "src/spring_msgs"]
+	path = src/spring_msgs
+	url = git@gitlab.inria.fr:spring/wp7_ari/spring_msgs.git
+	branch = master
+[submodule "modules/human_aware_navigation_rl"]
+	path = modules/human_aware_navigation_rl
+	url = git@gitlab.inria.fr:robotlearn/human_aware_navigation_rl.git
+	branch = main
+[submodule "src/drl-navigation"]
+	path = src/drl-navigation
+	url = git@gitlab.inria.fr:spring/wp6_robot_behavior/drl-navigation.git
+	branch = master
diff --git a/Dockerfile b/Dockerfile
index a86c73b6c5758e84f924ced597e08672fc5ad08b..89d14624863979719ce75189a07119e550a967ae 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,8 +1,4 @@
-ARG RTABMAP=0
-ARG OPENPOSE=0 
-
-
-FROM nvidia/cuda:11.5.1-cudnn8-devel-ubuntu20.04 as base-openpose-1
+FROM nvidia/cuda:11.5.1-cudnn8-devel-ubuntu20.04 as base-ros-gpu
 SHELL ["/bin/bash", "-c"]
 WORKDIR /
 RUN echo "Setting up timezone..." && \
@@ -14,7 +10,8 @@ RUN echo "Installing Python and Pytorch..." && \
     apt-get install -q -y --no-install-recommends \
         python3-dev \
         python3-pip \
-        tzdata && \
+        tzdata \
+        wget vim tmux unzip && \
     pip3 install torch torchvision torchaudio && \
     rm -rf /var/lib/apt/lists/* && \
     rm -rf /workspace
@@ -33,133 +30,50 @@ RUN echo "Installing ROS Noetic..." && \
     rm -rf /var/lib/apt/lists/* && \
     echo "source /opt/ros/noetic/setup.bash" >> ~/.bashrc
 
-RUN echo "Installing OpenPose..." && \
-    apt-get update && \
-    apt-get install -y --no-install-recommends \
-        build-essential \
-        git \
-        libatlas-base-dev \
-        libboost-all-dev \
-        libgflags-dev \
-        libcanberra-gtk-module \
-        libgoogle-glog-dev \
-        libhdf5-serial-dev \
-        libleveldb-dev \
-        liblmdb-dev \
-        libopencv-dev \
-        libprotobuf-dev \
-        libsnappy-dev \
-        libviennacl-dev \
-        ocl-icd-opencl-dev \
-        opencl-headers \
-        pciutils \
-        protobuf-compiler \
-        python3-setuptools && \
-    pip3 install \
-        numpy \
-        opencv-python \
-        protobuf && \
-    rm -rf /var/lib/apt/lists/* && \
-    cd /home && \
-    git clone --depth 1 --branch 'v1.7.0' \
-        https://github.com/CMU-Perceptual-Computing-Lab/openpose && \
-    mkdir -p /home/openpose/build && cd /home/openpose/build && \
-    cmake -DBUILD_PYTHON=ON .. && \
-    sed -ie 's/set(AMPERE "80 86")/#&/g' ../cmake/Cuda.cmake && \
-    sed -ie 's/set(AMPERE "80 86")/#&/g' ../3rdparty/caffe/cmake/Cuda.cmake && \
-    make -j `nproc` && \
-    make install
-
-
-FROM ros:noetic-ros-base as base-openpose-0
-RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y keyboard-configuration curl
-RUN curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | apt-key add -
-
-
-FROM base-openpose-${OPENPOSE} as ros-common
+FROM base-ros-gpu as ros-common
 RUN apt-get update && apt-get install -y python3-pip \
                                          libeigen3-dev \
                                          python3-rospy \
                                          ros-noetic-tf \
                                          ros-noetic-cv-bridge \
-					                     ros-noetic-perception \
-                                         ros-noetic-image-transport-plugins
-
-
-FROM ros-common as ros-rtabmap-0
-FROM ros-common as ros-rtabmap-1
-RUN apt-get update && apt-get install -y \
-ros-noetic-imu-tools \
-ros-noetic-rtabmap-ros
-
-
-FROM ros-rtabmap-${RTABMAP} as ros-openpose-0
-FROM ros-rtabmap-${RTABMAP} as ros-openpose-1
-RUN apt-get update && apt-get install -y wget vim tmux
-RUN cd /home/openpose/models && bash getModels.sh
-RUN mkdir -p /home/ros/robot_behavior_ws/src
-WORKDIR /home/ros/robot_behavior_ws/
-ADD src/ros_openpose /home/ros/robot_behavior_ws/src/ros_openpose
-RUN cd src/ros_openpose/scripts && chmod +x *
-RUN ln -s /usr/bin/python3 /usr/bin/python
+			                             ros-noetic-perception \
+                                         ros-noetic-image-transport-plugins \
+					                     ros-noetic-audio-common-msgs
 
 
-ARG UID=663486
-FROM ros-openpose-${OPENPOSE} as ros-socialmpc
-RUN pip install mediapipe
-RUN chown -R $UID:$UID /usr/local/lib/python3.8/dist-packages/mediapipe
+FROM ros-common as ros-socialmpc
 RUN mkdir -p /home/ros/robot_behavior_ws
-ADD modules/2D_Simulator /home/ros/robot_behavior_ws/modules/2D_Simulator
-
-WORKDIR /home/ros/robot_behavior_ws/modules/2D_Simulator
-RUN pip3 install -r requirements.txt 
-RUN pip3 install -e .
-
-ADD modules/pygco /home/ros/robot_behavior_ws/modules/pygco
-WORKDIR /home/ros/robot_behavior_ws/modules/pygco
-RUN apt-get update && apt-get install -y wget unzip
-RUN pip3 install  --global-option=build_ext .
-
+# MPC Navigation : Alex
 ADD modules/Motor_Controller /home/ros/robot_behavior_ws/modules/Motor_Controller
 WORKDIR /home/ros/robot_behavior_ws/modules/Motor_Controller
-
 RUN pip3 install pyparsing==2.4.7
 RUN pip3 install -r requirements.txt
 RUN pip3 install -e .
+RUN pip install jaxlib==0.1.73+cuda11.cudnn805 -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html
+# DRL Navigation : Victor Sanchez
+ADD modules/human_aware_navigation_rl/ /home/ros/robot_behavior_ws/modules/human_aware_navigation_rl/
+WORKDIR /home/ros/robot_behavior_ws/modules/human_aware_navigation_rl
+RUN pip3 install -r requirements.txt
+RUN pip3 install -U kaleido
+RUN pip3 install -e .
 
-
-ARG UID=663486
-FROM ros-socialmpc as wp6-ws
-RUN useradd -u $UID ros
+FROM ros-socialmpc as behavior-generator
+RUN groupadd -r ros && useradd -r -g ros ros
 RUN chown -R ros:ros /home/ros
 USER ros
-ADD modules/omnicam /home/ros/robot_behavior_ws/modules/omnicam
-RUN cd /home/ros/robot_behavior_ws/modules/omnicam && /bin/bash -c "pip install --user ."
-
+# MPC Navigation : Alex
 ADD --chown=ros:ros src/robot_behavior /home/ros/robot_behavior_ws/src/robot_behavior
+# DRL Navigation : Victor Sanchez
+ADD --chown=ros:ros src/drl-navigation /home/ros/robot_behavior_ws/src/drl_navigation
 WORKDIR /home/ros/robot_behavior_ws/
-RUN mkdir -p src/spring_msgs
 ADD src/spring_msgs src/spring_msgs
-RUN mkdir -p src/hri_msgs
 ADD src/hri_msgs src/hri_msgs
+ADD src/py_spring_hri src/py_spring_hri
+ADD src/occupancy_map_republisher src/occupancy_map_republisher
 RUN /bin/bash -c "source /opt/ros/noetic/setup.bash && catkin_make"
 
 COPY ./scripts/entrypoint.sh /
-ENTRYPOINT ["/entrypoint.sh", "bash"]
 RUN /bin/bash -c "echo 'source /opt/ros/noetic/setup.bash' >> ~/.bashrc"
 RUN /bin/bash -c "echo 'source /home/ros/robot_behavior_ws/devel/setup.bash' >> ~/.bashrc"
-
-
-FROM ros-rtabmap-1 as wp6-rtabmap
-ARG UID=1000
-RUN useradd -u $UID rtabmap
-USER rtabmap
-COPY ./scripts/entrypoint.sh /
-ENTRYPOINT ["/entrypoint.sh", "roslaunch", "robot_behavior", "stereo_rtabmap_inria.launch"]
-CMD ["localization:=True", "database_name:=inria_office_stereo_6"]
-
-
-FROM wp6-ws as wp6-demo-peirasmos
-#ENTRYPOINT ["bash"]
-ENTRYPOINT ["/entrypoint.sh", "roslaunch", "robot_behavior", "wp6_demo_peirasmos.launch"]
+ENTRYPOINT ["/entrypoint.sh", "roslaunch", "robot_behavior", "main.launch"]
 CMD [""]
diff --git a/README.md b/README.md
index f6efb33a57399f68a62bf74af3b36293777c150a..c8937897d94d973092e0033da3d164d0c7da38ac 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,6 @@ Two dockers:
 - One on another computer running everything else
 
 On the computer which will run the controller, clone the following repositories and get the submodules:
-
 ```
 git clone https://gitlab.inria.fr/spring/wp6_robot_behavior/robot_behavior
 cd robot_behavior/
@@ -32,31 +31,31 @@ cd spring_msgs/
 git checkout master
 git pull
 cd ..
-git clone https://gitlab.inria.fr/robotlearn/ros_openpose.git
-cd ros_openpose
-git checkout master
+git clone https://gitlab.inria.fr/spring/wp6_robot_behavior/py_spring_hri.git
+cd py_spring_hri
+git checkout main
 git pull
 cd ../..
 ```
 
-Build the docker images using buildkit for a more efficient skipping of lines:
-
+Build the *behavior_generator* docker image using buildkit for a more efficient skipping of lines:
 ```
 export DOCKER_BUILDKIT=1
-docker build -t wp2_rtabmap --target wp2-rtabmap --build-arg OPENPOSE=0 --build-arg RTABMAP=1 .
-docker build -t behavior_generator --target wp6-demo-peirasmos --build-arg RTABMAP=0 --build-arg OPENPOSE=1 .
+docker build -t behavior_generator --target behavior-generator .
 ```
-Put the OPENPOSE arg to 1 if you want to use [ros_openpose](https://gitlab.inria.fr/robotlearn/ros_openpose/-/tree/master) to extract to poses. In this case, the image will be built with all the Openpose and CUDA requierements. If the value is put to 0, the built image won't include these requierements and the [Mediapipe](https://google.github.io/mediapipe/solutions/pose) will be used to extract the poses.
 
-Export the **wp2_rtabmap** docker image from your host to ari:
+Download the **wp2_rtabmap** docker images from the [GitLab Container Registry](https://gitlab.inria.fr/spring/dockers/container_registry):
+```
+docker pull registry.gitlab.inria.fr/spring/dockers/wp2_rtabmap:latest
+```
 
+Export the **wp2_rtabmap** docker image from your host to ari:
 ```
 docker save -o wp2_rtabmap.tar wp2_rtabmap
 scp wp2_rtabmap.tar pal@ari-XXc:
 ```
 
 and on ARI:
-
 ```
 docker load -i wp2_rtabmap.tar
 ```
@@ -85,15 +84,10 @@ docker run --rm -it \
     --privileged \
     --env="ROS_MASTER_URI=http://192.168.0.10:11311" \
     --env="ROS_IP=192.168.0.1" \
-    behavior_generator \
-    social_mpc:=True action_servers:=True republish_from_rtabmap:=True republish_from_openpose:=True republish_to_basestation:=True
+    behavior_generator
 ```
 with _192.168.0.10_ the IP address of the ARI robot and _192.168.0.1_ the IP address of the experimental machine (for instance, got through `ifconfig`)
 
-Put _republish_from_openpose_ to False if you don't want to use ros_openpose to extract the pose or/and your **behavior_generator** docker image doesn't contain the Openpose and CUDA requirements. By default, to extract the poses, Mediapipe will be used.
-
-Put _republish_to_basestation_ to True if you want to republish the image topics on the basestation (experimental machine). This will read the compressed image topic of all the cameras on the robot and republish them on the basestation in a raw/compressed/theora topics. This node is to face with the wifi bandwith limitations.
-
 Before running this **behavior_generator** docker image, be sure that the tracker is running. If it's not the case, refer to [Robot-Behavior-Integration](https://gitlab.inria.fr/spring/wp6_robot_behavior/robot_behavior/-/wikis/Robot-Behavior-Integration-Demo)/wp3_tracker docker.
 
 To run the **behavior_generator** docker with other needed containers of **WP6 Robot Behavior** work package, you can use `docker-compose`, see the [Robot-Behavior-Integration](https://gitlab.inria.fr/spring/wp6_robot_behavior/robot_behavior/-/wikis/Robot-Behavior-Integration-Demo) page in the Wiki.
@@ -103,9 +97,33 @@ To run the **behavior_generator** docker with other needed containers of **WP6 R
 x11docker wp6_demo
 ``` -->
 
-Now, all the nodes are running to execute a robot action. For instance, to join a person (with the id _580_), you can use the provided sample action client:
+Now, all the nodes are running to execute a robot action. For instance, to join a person (with the id *00580* and timeout of 100 seconds), you can use the provided sample action client:
 
 ```
 docker exec -it behavior_generator bash
-roslaunch robot_behavior gotowards_action_client.launch goto_target_id:=580 timeout:=100
+roslaunch robot_behavior behavior_go_to_body_action_client.launch body_id:=00580 timeout:=100
 ``` 
+
+You can go to a specific position:
+
+```
+docker exec -it behavior_generator bash
+roslaunch robot_behavior behavior_go_to_position_action_client.launch go_to_target_pose:='[-3.5, -5.5, 1.57, 0, 1]'
+```
+where go_to_target_pose:='[x, y, orientation (rad), global/local map flag (0/1), flag=1]', timeout can also be set.
+
+
+You can look at a specific position once:
+
+```
+docker exec -it behavior_generator bash
+roslaunch robot_behavior behavior_look_at_position_action_client.launch look_at_position:='[-3.5, -5.5, 0, 1]'
+```
+where look_at_position:='[x, y, global/local map flag (0/1), flag=1]', timeout can also be set.
+
+You can look at a specific body once (with the id *00580*), timeout can also be set:
+
+```
+docker exec -it behavior_generator bash
+roslaunch robot_behavior behavior_look_at_body_action_client.launch body_id:=00580
+```
diff --git a/modules/2D_Simulator b/modules/2D_Simulator
deleted file mode 160000
index aa69789e12be3036cd360573f014d16c55d6c160..0000000000000000000000000000000000000000
--- a/modules/2D_Simulator
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit aa69789e12be3036cd360573f014d16c55d6c160
diff --git a/modules/Motor_Controller b/modules/Motor_Controller
index 4cab1e488e80167c73f3141a10c01f50bcefb344..498ebadff0a1bd87f8bdc486dd5c25368265666c 160000
--- a/modules/Motor_Controller
+++ b/modules/Motor_Controller
@@ -1 +1 @@
-Subproject commit 4cab1e488e80167c73f3141a10c01f50bcefb344
+Subproject commit 498ebadff0a1bd87f8bdc486dd5c25368265666c
diff --git a/modules/human_aware_navigation_rl b/modules/human_aware_navigation_rl
new file mode 160000
index 0000000000000000000000000000000000000000..3adf52dfca7bfd8092cdacc48ed547275d9d9ec5
--- /dev/null
+++ b/modules/human_aware_navigation_rl
@@ -0,0 +1 @@
+Subproject commit 3adf52dfca7bfd8092cdacc48ed547275d9d9ec5
diff --git a/modules/omnicam b/modules/omnicam
deleted file mode 160000
index 836ed828b927b5980db99bf83e3be005467c4ebc..0000000000000000000000000000000000000000
--- a/modules/omnicam
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 836ed828b927b5980db99bf83e3be005467c4ebc
diff --git a/modules/pygco b/modules/pygco
deleted file mode 160000
index 31267e74d1d4efab66557fcb595bbd33f7bde045..0000000000000000000000000000000000000000
--- a/modules/pygco
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 31267e74d1d4efab66557fcb595bbd33f7bde045
diff --git a/pull_submodules.sh b/pull_submodules.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d02e2754ae75c73502b884a0cdbcdbbf03346851
--- /dev/null
+++ b/pull_submodules.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# both do the same, but one might sometimes fail
+git submodule update --init --recursive
+git pull --recurse-submodules
+# let's try a third one to get latest versions of submodules
+git submodule update --remote
diff --git a/scripts/entrypoint_emulated_inputs.sh b/scripts/entrypoint_emulated_inputs.sh
deleted file mode 100755
index d02b4799279f1dc733371f8d3cf840670c746ebb..0000000000000000000000000000000000000000
--- a/scripts/entrypoint_emulated_inputs.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-set -e
-
-# setup ros environment
-echo $@
-source /opt/ros/noetic/setup.bash
-source /home/ros/robot_behavior_ws/devel/setup.bash
-roslaunch robot_behavior rtabmap_apriltag.launch $@
diff --git a/scripts/entrypoint_social_mpc.sh b/scripts/entrypoint_social_mpc.sh
deleted file mode 100755
index 542f978afaaaffb031005c823064f729af5da722..0000000000000000000000000000000000000000
--- a/scripts/entrypoint_social_mpc.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-# setup ros environment
-echo $@
-source /opt/ros/noetic/setup.bash
-source /home/ros/robot_behavior_ws/devel/setup.bash
-# export ROS_MASTER_URI=http://ari-10c:11311
-# export ROS_IP=$(ifconfig | grep  10.68| awk '{ print $2 }')
-# export ROS_IP=10.68.0.130
-echo $ROS_MASTER_URI
-echo $ROS_IP
-roslaunch robot_behavior apriltag2tracking_social_mpc_action_servers.launch $@
diff --git a/scripts/entrypoint_social_mpc_sim.sh b/scripts/entrypoint_social_mpc_sim.sh
deleted file mode 100755
index 9a0794d3ffbd35fac025e9fdf981ad9faf0229be..0000000000000000000000000000000000000000
--- a/scripts/entrypoint_social_mpc_sim.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-# setup ros environment
-echo $@
-source /opt/ros/noetic/setup.bash
-source /home/ros/robot_behavior_ws/devel/setup.bash
-# export ROS_MASTER_URI=http://ari-10c:11311
-# export ROS_IP=$(ifconfig | grep  10.68| awk '{ print $2 }')
-# export ROS_IP=10.68.0.130
-# echo $ROS_MASTER_URI
-# echo $ROS_IP
-roslaunch robot_behavior social_mpc_sim_action_servers.launch $@
diff --git a/scripts/run_docker_nvidia.sh b/scripts/run_docker_nvidia.sh
deleted file mode 100755
index 2c0f88c5a1d643e5ae558f3658c9d6cffaa737f9..0000000000000000000000000000000000000000
--- a/scripts/run_docker_nvidia.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-NAME=$1
-echo $NAME
-#CONTAINER_NAME=social_mpc_wp6_sim
-#DOCKER_IMAGE=social_mpc_wp6_sim
-
-XSOCK=/tmp/.X11-unix
-
-XAUTH=/tmp/docker.xauth
-XAUTH_DOCKER=/tmp/.docker.xauth
-
-if [ ! -f $XAUTH ]
-then
-    xauth_list=$(xauth nlist :0 | sed -e 's/^..../ffff/')
-    if [ ! -z "$xauth_list" ]
-    then
-        echo "$xauth_list" | xauth -f $XAUTH nmerge -
-    else
-        touch $XAUTH
-    fi
-    chmod a+r $XAUTH
-fi
-
-docker run --rm -it \
-    --name "$NAME" \
-    --network=host \
-    --privileged \
-    --env="DISPLAY" \
-    --env="QT_X11_NO_MITSHM=1" \
-    --env="ROS_IP=10.68.0.251" \
-    --env="ROS_MASTER_URI=http://192.168.0.10:11311" \
-    --env="XAUTHORITY=$XAUTH_DOCKER" \
-    --volume="$XSOCK:$XSOCK:rw" \
-    --volume="$XAUTH:$XAUTH_DOCKER:rw" \
-    $NAME bash
diff --git a/src/drl-navigation b/src/drl-navigation
new file mode 160000
index 0000000000000000000000000000000000000000..391f08c359e483b251faaf7fa8d53ecd40cb5044
--- /dev/null
+++ b/src/drl-navigation
@@ -0,0 +1 @@
+Subproject commit 391f08c359e483b251faaf7fa8d53ecd40cb5044
diff --git a/src/hri_msgs b/src/hri_msgs
new file mode 160000
index 0000000000000000000000000000000000000000..edba9341a078ca447ae04f243cd9481d4d1b3686
--- /dev/null
+++ b/src/hri_msgs
@@ -0,0 +1 @@
+Subproject commit edba9341a078ca447ae04f243cd9481d4d1b3686
diff --git a/src/occupancy_map_republisher b/src/occupancy_map_republisher
new file mode 160000
index 0000000000000000000000000000000000000000..6d1d822099d9fe0fa068324307a3772f3ed9fb81
--- /dev/null
+++ b/src/occupancy_map_republisher
@@ -0,0 +1 @@
+Subproject commit 6d1d822099d9fe0fa068324307a3772f3ed9fb81
diff --git a/src/py_spring_hri b/src/py_spring_hri
new file mode 160000
index 0000000000000000000000000000000000000000..4e3bf5c19b529125c3d08fe337be875952cde77f
--- /dev/null
+++ b/src/py_spring_hri
@@ -0,0 +1 @@
+Subproject commit 4e3bf5c19b529125c3d08fe337be875952cde77f
diff --git a/src/robot_behavior/CMakeLists.txt b/src/robot_behavior/CMakeLists.txt
index 4d20af8d4c9d247ec256092d3a15ac4d58e04f9b..2da02aee09dd67105b2adde1b384447348ce1131 100644
--- a/src/robot_behavior/CMakeLists.txt
+++ b/src/robot_behavior/CMakeLists.txt
@@ -57,9 +57,6 @@ catkin_python_setup()
 #    FILES
 #    TrackedPerson2d.msg
 #    TrackedPersons2d.msg
-#    GoTowards.msg
-#    LookAt.msg
-#    Navigate.msg
 #    ControllerStatus.msg
 #    BodyPart.msg
 #    Frame.msg
@@ -78,10 +75,6 @@ catkin_python_setup()
 # add_action_files(
 #    DIRECTORY action
 #    FILES
-#    MPC.action
-#    GoTowards.action
-#    LookAt.action
-#    Navigate.action
 # )
 
 ## Generate added messages and services with any dependencies listed here
@@ -179,17 +172,19 @@ include_directories(
 
 ## Mark executable scripts (Python etc.) for installation
 ## in contrast to setup.py, you can choose the destination
-catkin_install_python(PROGRAMS   scripts/sim_2d_main.py
-                                 scripts/controller_main.py
-                                 scripts/navigate_action_server_main.py
-                                 scripts/navigate_action_client_main.py
-                                 scripts/gotowards_action_server_main.py
-                                 scripts/gotowards_action_client_main.py
-                                 scripts/lookat_action_server_main.py
-                                 scripts/lookat_action_client_main.py
-                                 scripts/republish_from_apriltag_main.py
-                                 scripts/republish_from_rtabmap_main.py
-                                 scripts/republish_from_openpose_main.py
+catkin_install_python(PROGRAMS   
+         scripts/behavior_generator_main.py
+				 scripts/behavior_go_to_body_action_client_main.py
+				 scripts/behavior_go_to_body_action_server_main.py
+				 scripts/behavior_go_to_position_action_client_main.py
+				 scripts/behavior_go_to_position_action_server_main.py
+				 scripts/behavior_look_at_body_action_client_main.py
+				 scripts/behavior_look_at_body_action_server_main.py
+				 scripts/behavior_look_at_position_action_client_main.py
+				 scripts/behavior_look_at_position_action_server_main.py
+         scripts/behavior_global_path_planner_main.py
+         scripts/behavior_goal_finder_main.py
+         scripts/behavior_local_path_planner_mpc_main.py
                       DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
 )
 
diff --git a/src/robot_behavior/config/rviz/config_04_04_2023.rviz b/src/robot_behavior/config/rviz/config_04_04_2023.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..077d10f0a218c407899e7b1a5bd5013a047aa95d
--- /dev/null
+++ b/src/robot_behavior/config/rviz/config_04_04_2023.rviz
@@ -0,0 +1,505 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /TF1
+        - /TF1/Frames1
+        - /TF1/Tree1
+      Splitter Ratio: 0.4427312910556793
+    Tree Height: 363
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: false
+      Name: Map
+      Topic: /slam/grid_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: false
+    - Class: rviz/TF
+      Enabled: true
+      Frame Timeout: 15
+      Frames:
+        All Enabled: false
+        arm_left_1_link:
+          Value: false
+        arm_left_2_link:
+          Value: false
+        arm_left_3_link:
+          Value: false
+        arm_left_4_link:
+          Value: false
+        arm_left_base_link:
+          Value: false
+        arm_left_grasping_frame:
+          Value: false
+        arm_left_tool_link:
+          Value: false
+        arm_right_1_link:
+          Value: false
+        arm_right_2_link:
+          Value: false
+        arm_right_3_link:
+          Value: false
+        arm_right_4_link:
+          Value: false
+        arm_right_base_link:
+          Value: false
+        arm_right_grasping_frame:
+          Value: false
+        arm_right_tool_link:
+          Value: false
+        back_led_frame:
+          Value: false
+        base_footprint:
+          Value: true
+        base_link:
+          Value: true
+        body_b0001:
+          Value: true
+        body_b0002:
+          Value: true
+        body_b0004:
+          Value: true
+        caster_left_link:
+          Value: false
+        caster_right_link:
+          Value: false
+        docking_link:
+          Value: false
+        ear_led_left_frame:
+          Value: false
+        ear_led_right_frame:
+          Value: false
+        front_fisheye_camera_link:
+          Value: false
+        front_fisheye_camera_optical_frame:
+          Value: false
+        global_map:
+          Value: true
+        group_grp397:
+          Value: true
+        group_grp398:
+          Value: true
+        group_grp399:
+          Value: true
+        group_grp400:
+          Value: true
+        group_grp401:
+          Value: true
+        group_grp402:
+          Value: true
+        group_grp403:
+          Value: true
+        group_grp404:
+          Value: true
+        group_grp405:
+          Value: true
+        group_grp406:
+          Value: true
+        group_grp407:
+          Value: true
+        hand_left_1_link:
+          Value: false
+        hand_left_2_link:
+          Value: false
+        hand_left_fingers_link:
+          Value: false
+        hand_right_1_link:
+          Value: false
+        hand_right_2_link:
+          Value: false
+        hand_right_fingers_link:
+          Value: false
+        head_1_link:
+          Value: false
+        head_2_link:
+          Value: false
+        head_front_camera_color_optical_frame:
+          Value: false
+        head_front_camera_link:
+          Value: false
+        local_cost_map:
+          Value: true
+        local_map:
+          Value: true
+        map:
+          Value: true
+        odom:
+          Value: true
+        odom_ORB:
+          Value: true
+        pan goal:
+          Value: true
+        rgbd_laser_link:
+          Value: false
+        torso_back_camera_accel_optical_frame:
+          Value: false
+        torso_back_camera_fisheye1_optical_frame:
+          Value: false
+        torso_back_camera_fisheye1_rgb_frame:
+          Value: false
+        torso_back_camera_fisheye2_optical_frame:
+          Value: false
+        torso_back_camera_fisheye2_rgb_frame:
+          Value: false
+        torso_back_camera_gyro_optical_frame:
+          Value: false
+        torso_back_camera_link:
+          Value: false
+        torso_back_camera_pose_frame:
+          Value: false
+        torso_front_camera_accel_frame:
+          Value: false
+        torso_front_camera_accel_optical_frame:
+          Value: false
+        torso_front_camera_aligned_depth_to_color_frame:
+          Value: false
+        torso_front_camera_bottom_screw_frame:
+          Value: false
+        torso_front_camera_color_frame:
+          Value: false
+        torso_front_camera_color_optical_frame:
+          Value: false
+        torso_front_camera_depth_frame:
+          Value: false
+        torso_front_camera_depth_optical_frame:
+          Value: false
+        torso_front_camera_gyro_frame:
+          Value: false
+        torso_front_camera_imu_optical_frame:
+          Value: false
+        torso_front_camera_infra1_frame:
+          Value: false
+        torso_front_camera_infra1_optical_frame:
+          Value: false
+        torso_front_camera_infra2_frame:
+          Value: false
+        torso_front_camera_infra2_optical_frame:
+          Value: false
+        torso_front_camera_link:
+          Value: false
+        torso_mic_front_link:
+          Value: false
+        wheel_left_link:
+          Value: false
+        wheel_right_link:
+          Value: false
+      Marker Alpha: 1
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        map:
+          body_b0001:
+            {}
+          body_b0002:
+            {}
+          body_b0004:
+            {}
+          global_map:
+            {}
+          group_grp397:
+            {}
+          group_grp398:
+            {}
+          group_grp399:
+            {}
+          group_grp400:
+            {}
+          group_grp401:
+            {}
+          group_grp402:
+            {}
+          group_grp403:
+            {}
+          group_grp404:
+            {}
+          group_grp405:
+            {}
+          group_grp406:
+            {}
+          group_grp407:
+            {}
+          local_cost_map:
+            {}
+          local_map:
+            {}
+          odom_ORB:
+            odom:
+              base_footprint:
+                base_link:
+                  arm_left_base_link:
+                    arm_left_1_link:
+                      arm_left_2_link:
+                        arm_left_3_link:
+                          arm_left_4_link:
+                            arm_left_grasping_frame:
+                              {}
+                            arm_left_tool_link:
+                              hand_left_fingers_link:
+                                {}
+                            hand_left_1_link:
+                              hand_left_2_link:
+                                {}
+                  arm_right_base_link:
+                    arm_right_1_link:
+                      arm_right_2_link:
+                        arm_right_3_link:
+                          arm_right_4_link:
+                            arm_right_grasping_frame:
+                              {}
+                            arm_right_tool_link:
+                              hand_right_fingers_link:
+                                {}
+                            hand_right_1_link:
+                              hand_right_2_link:
+                                {}
+                  caster_left_link:
+                    {}
+                  caster_right_link:
+                    {}
+                  docking_link:
+                    {}
+                  front_fisheye_camera_link:
+                    front_fisheye_camera_optical_frame:
+                      {}
+                  head_1_link:
+                    back_led_frame:
+                      {}
+                    ear_led_left_frame:
+                      {}
+                    ear_led_right_frame:
+                      {}
+                    head_2_link:
+                      head_front_camera_link:
+                        head_front_camera_color_optical_frame:
+                          {}
+                  rgbd_laser_link:
+                    {}
+                  torso_back_camera_pose_frame:
+                    torso_back_camera_link:
+                      torso_back_camera_fisheye1_rgb_frame:
+                        torso_back_camera_fisheye1_optical_frame:
+                          {}
+                      torso_back_camera_fisheye2_rgb_frame:
+                        torso_back_camera_fisheye2_optical_frame:
+                          {}
+                      torso_back_camera_gyro_optical_frame:
+                        torso_back_camera_accel_optical_frame:
+                          {}
+                  torso_front_camera_bottom_screw_frame:
+                    torso_front_camera_link:
+                      torso_front_camera_accel_frame:
+                        torso_front_camera_accel_optical_frame:
+                          {}
+                      torso_front_camera_aligned_depth_to_color_frame:
+                        torso_front_camera_color_optical_frame:
+                          {}
+                      torso_front_camera_color_frame:
+                        {}
+                      torso_front_camera_depth_frame:
+                        torso_front_camera_depth_optical_frame:
+                          {}
+                      torso_front_camera_gyro_frame:
+                        torso_front_camera_imu_optical_frame:
+                          {}
+                      torso_front_camera_infra1_frame:
+                        torso_front_camera_infra1_optical_frame:
+                          {}
+                      torso_front_camera_infra2_frame:
+                        torso_front_camera_infra2_optical_frame:
+                          {}
+                  torso_mic_front_link:
+                    {}
+                  wheel_left_link:
+                    {}
+                  wheel_right_link:
+                    {}
+          pan goal:
+            {}
+      Update Interval: 0
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /tracked_pose_2d/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/local_cost_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /torso_front_camera/infra1/image_rect_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /humans/summary_image/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: false
+      Name: Map
+      Topic: /slam/local_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: false
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: false
+      Name: Map
+      Topic: /slam/global_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: false
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 11.571998596191406
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: 1.0960367918014526
+        Y: -0.018212413415312767
+        Z: -1.0568172931671143
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.3197965621948242
+      Target Frame: <Fixed Frame>
+      Yaw: 4.495367050170898
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1385
+  Hide Left Dock: false
+  Hide Right Dock: true
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd0000000400000000000001c80000053afc020000000dfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000014000001a6000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001c00000012d0000001600fffffffb0000000a0049006d00610067006501000002f3000001710000001600fffffffb0000000a0049006d006100670065010000046a000000e40000001600fffffffb0000000a0049006d00610067006501000003700000019a0000000000000000fb0000000a0049006d0061006700650100000474000000da0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d00650100000000000004500000000000000000000003110000053a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1247
+  X: 1313
+  Y: 27
diff --git a/src/robot_behavior/config/rviz/config_05_07_2022.rviz b/src/robot_behavior/config/rviz/config_05_07_2022.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..2fcd879bd9b1c53df16adc36e4027e2f0d4e30d4
--- /dev/null
+++ b/src/robot_behavior/config/rviz/config_05_07_2022.rviz
@@ -0,0 +1,431 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /TF1/Frames1
+        - /TF1/Tree1
+      Splitter Ratio: 0.7894737124443054
+    Tree Height: 249
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/grid_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/TF
+      Enabled: true
+      Frame Timeout: 15
+      Frames:
+        All Enabled: false
+        arm_left_1_link:
+          Value: false
+        arm_left_2_link:
+          Value: false
+        arm_left_3_link:
+          Value: false
+        arm_left_4_link:
+          Value: false
+        arm_left_base_link:
+          Value: false
+        arm_left_grasping_frame:
+          Value: false
+        arm_left_tool_link:
+          Value: false
+        arm_right_1_link:
+          Value: false
+        arm_right_2_link:
+          Value: false
+        arm_right_3_link:
+          Value: false
+        arm_right_4_link:
+          Value: false
+        arm_right_base_link:
+          Value: false
+        arm_right_grasping_frame:
+          Value: false
+        arm_right_tool_link:
+          Value: false
+        back_led_frame:
+          Value: false
+        base_footprint:
+          Value: true
+        base_link:
+          Value: true
+        body_b0082:
+          Value: true
+        caster_left_link:
+          Value: false
+        caster_right_link:
+          Value: false
+        docking_link:
+          Value: false
+        ear_led_left_frame:
+          Value: false
+        ear_led_right_frame:
+          Value: false
+        final goal:
+          Value: true
+        front_fisheye_camera_link:
+          Value: false
+        front_fisheye_camera_optical_frame:
+          Value: false
+        hand_left_1_link:
+          Value: false
+        hand_left_2_link:
+          Value: false
+        hand_left_fingers_link:
+          Value: false
+        hand_right_1_link:
+          Value: false
+        hand_right_2_link:
+          Value: false
+        hand_right_fingers_link:
+          Value: false
+        head_1_link:
+          Value: false
+        head_2_link:
+          Value: false
+        head_front_camera_color_optical_frame:
+          Value: false
+        head_front_camera_link:
+          Value: false
+        human_goal:
+          Value: true
+        intermediate waypoint goal:
+          Value: true
+        local_cost_map:
+          Value: true
+        local_map:
+          Value: true
+        map:
+          Value: true
+        odom:
+          Value: true
+        rear_fisheye_camera_link:
+          Value: false
+        rear_fisheye_camera_optical_frame:
+          Value: false
+        rgbd_laser_link:
+          Value: false
+        torso_back_camera_accel_optical_frame:
+          Value: false
+        torso_back_camera_fisheye1_optical_frame:
+          Value: false
+        torso_back_camera_fisheye1_rgb_frame:
+          Value: false
+        torso_back_camera_fisheye2_optical_frame:
+          Value: false
+        torso_back_camera_fisheye2_rgb_frame:
+          Value: false
+        torso_back_camera_gyro_optical_frame:
+          Value: false
+        torso_back_camera_link:
+          Value: false
+        torso_back_camera_pose_frame:
+          Value: false
+        torso_front_camera_accel_frame:
+          Value: false
+        torso_front_camera_accel_optical_frame:
+          Value: false
+        torso_front_camera_aligned_depth_to_color_frame:
+          Value: false
+        torso_front_camera_bottom_screw_frame:
+          Value: false
+        torso_front_camera_color_frame:
+          Value: false
+        torso_front_camera_color_optical_frame:
+          Value: false
+        torso_front_camera_gyro_frame:
+          Value: false
+        torso_front_camera_imu_optical_frame:
+          Value: false
+        torso_front_camera_infra1_frame:
+          Value: false
+        torso_front_camera_infra1_optical_frame:
+          Value: false
+        torso_front_camera_infra2_frame:
+          Value: false
+        torso_front_camera_infra2_optical_frame:
+          Value: false
+        torso_front_camera_link:
+          Value: false
+        torso_mic_front_link:
+          Value: false
+        wheel_left_link:
+          Value: false
+        wheel_right_link:
+          Value: false
+      Marker Alpha: 1
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        map:
+          body_b0082:
+            {}
+          final goal:
+            {}
+          human_goal:
+            {}
+          local_cost_map:
+            {}
+          local_map:
+            {}
+          odom:
+            base_footprint:
+              base_link:
+                arm_left_base_link:
+                  arm_left_1_link:
+                    arm_left_2_link:
+                      arm_left_3_link:
+                        arm_left_4_link:
+                          arm_left_grasping_frame:
+                            {}
+                          arm_left_tool_link:
+                            hand_left_fingers_link:
+                              {}
+                          hand_left_1_link:
+                            hand_left_2_link:
+                              {}
+                arm_right_base_link:
+                  arm_right_1_link:
+                    arm_right_2_link:
+                      arm_right_3_link:
+                        arm_right_4_link:
+                          arm_right_grasping_frame:
+                            {}
+                          arm_right_tool_link:
+                            hand_right_fingers_link:
+                              {}
+                          hand_right_1_link:
+                            hand_right_2_link:
+                              {}
+                caster_left_link:
+                  {}
+                caster_right_link:
+                  {}
+                docking_link:
+                  {}
+                front_fisheye_camera_link:
+                  front_fisheye_camera_optical_frame:
+                    {}
+                head_1_link:
+                  back_led_frame:
+                    {}
+                  ear_led_left_frame:
+                    {}
+                  ear_led_right_frame:
+                    {}
+                  head_2_link:
+                    head_front_camera_link:
+                      head_front_camera_color_optical_frame:
+                        {}
+                rear_fisheye_camera_link:
+                  rear_fisheye_camera_optical_frame:
+                    {}
+                rgbd_laser_link:
+                  {}
+                torso_back_camera_pose_frame:
+                  torso_back_camera_link:
+                    torso_back_camera_fisheye1_rgb_frame:
+                      torso_back_camera_fisheye1_optical_frame:
+                        {}
+                    torso_back_camera_fisheye2_rgb_frame:
+                      torso_back_camera_fisheye2_optical_frame:
+                        {}
+                    torso_back_camera_gyro_optical_frame:
+                      torso_back_camera_accel_optical_frame:
+                        {}
+                torso_front_camera_bottom_screw_frame:
+                  torso_front_camera_link:
+                    torso_front_camera_accel_frame:
+                      torso_front_camera_accel_optical_frame:
+                        {}
+                    torso_front_camera_aligned_depth_to_color_frame:
+                      torso_front_camera_color_optical_frame:
+                        {}
+                    torso_front_camera_color_frame:
+                      {}
+                    torso_front_camera_gyro_frame:
+                      torso_front_camera_imu_optical_frame:
+                        {}
+                    torso_front_camera_infra1_frame:
+                      torso_front_camera_infra1_optical_frame:
+                        {}
+                    torso_front_camera_infra2_frame:
+                      torso_front_camera_infra2_optical_frame:
+                        {}
+                torso_mic_front_link:
+                  {}
+                wheel_left_link:
+                  {}
+                wheel_right_link:
+                  {}
+              intermediate waypoint goal:
+                {}
+      Update Interval: 0
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /tracked_pose_2d/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /behavior_generator/local_cost_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /behavior_generator/local_occupancy_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: false
+      Image Topic: /torso_front_camera/infra1/image_rect_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: false
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 5.599466323852539
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: 3.0773913860321045
+        Y: -2.183408737182617
+        Z: -2.1287214756011963
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.5397963523864746
+      Target Frame: <Fixed Frame>
+      Yaw: 0.8472102880477905
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1025
+  Hide Left Dock: false
+  Hide Right Dock: true
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd0000000400000000000003b0000003d2fc020000000cfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000001400000134000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650000000107000000db0000001600fffffffb0000000a0049006d006100670065010000014e000002980000001600fffffffb0000000a0049006d0061006700650100000433000000d70000000000000000fb0000000a0049006d00610067006501000003700000019a0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d0065010000000000000450000000000000000000000387000003d200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1853
+  X: 67
+  Y: 27
diff --git a/src/robot_behavior/config/rviz/config_17_03_2023.rviz b/src/robot_behavior/config/rviz/config_17_03_2023.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..e374c15d57a41142f687476426b8d0c140dc63a9
--- /dev/null
+++ b/src/robot_behavior/config/rviz/config_17_03_2023.rviz
@@ -0,0 +1,1533 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /TF1/Frames1
+        - /TF1/Tree1
+      Splitter Ratio: 0.7894737124443054
+    Tree Height: 363
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/grid_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/TF
+      Enabled: true
+      Frame Timeout: 15
+      Frames:
+        All Enabled: false
+        arm_left_1_link:
+          Value: false
+        arm_left_2_link:
+          Value: false
+        arm_left_3_link:
+          Value: false
+        arm_left_4_link:
+          Value: false
+        arm_left_base_link:
+          Value: false
+        arm_left_grasping_frame:
+          Value: false
+        arm_left_tool_link:
+          Value: false
+        arm_right_1_link:
+          Value: false
+        arm_right_2_link:
+          Value: false
+        arm_right_3_link:
+          Value: false
+        arm_right_4_link:
+          Value: false
+        arm_right_base_link:
+          Value: false
+        arm_right_grasping_frame:
+          Value: false
+        arm_right_tool_link:
+          Value: false
+        back_led_frame:
+          Value: false
+        base_footprint:
+          Value: true
+        base_link:
+          Value: true
+        body_b0001:
+          Value: true
+        body_b0002:
+          Value: true
+        body_b0003:
+          Value: true
+        body_b0004:
+          Value: true
+        body_b0005:
+          Value: true
+        body_b0006:
+          Value: true
+        body_b0007:
+          Value: true
+        body_b0008:
+          Value: true
+        body_b0009:
+          Value: true
+        body_b0010:
+          Value: true
+        body_b0011:
+          Value: true
+        body_b0012:
+          Value: true
+        body_b0013:
+          Value: true
+        body_b0014:
+          Value: true
+        body_b0015:
+          Value: true
+        body_b0016:
+          Value: true
+        body_b0019:
+          Value: true
+        body_b0020:
+          Value: true
+        body_b0021:
+          Value: true
+        body_b0022:
+          Value: true
+        body_b0024:
+          Value: true
+        body_b0025:
+          Value: true
+        body_b0027:
+          Value: true
+        body_b0028:
+          Value: true
+        body_b0030:
+          Value: true
+        body_b0032:
+          Value: true
+        body_b0035:
+          Value: true
+        body_b0036:
+          Value: true
+        body_b0037:
+          Value: true
+        body_b0038:
+          Value: true
+        body_b0039:
+          Value: true
+        body_b0040:
+          Value: true
+        body_b0043:
+          Value: true
+        body_b0044:
+          Value: true
+        body_b0045:
+          Value: true
+        body_b0046:
+          Value: true
+        body_b0050:
+          Value: true
+        body_b0054:
+          Value: true
+        body_b0059:
+          Value: true
+        body_b0060:
+          Value: true
+        body_b0061:
+          Value: true
+        body_b0071:
+          Value: true
+        body_b0074:
+          Value: true
+        body_b0075:
+          Value: true
+        body_b0084:
+          Value: true
+        body_b0092:
+          Value: true
+        body_b0096:
+          Value: true
+        body_b0099:
+          Value: true
+        body_b0116:
+          Value: true
+        body_b0119:
+          Value: true
+        body_b0122:
+          Value: true
+        body_b0123:
+          Value: true
+        caster_left_link:
+          Value: false
+        caster_right_link:
+          Value: false
+        docking_link:
+          Value: true
+        ear_led_left_frame:
+          Value: false
+        ear_led_right_frame:
+          Value: false
+        final goal:
+          Value: true
+        front_fisheye_camera_link:
+          Value: true
+        front_fisheye_camera_optical_frame:
+          Value: true
+        group_grp1590:
+          Value: true
+        group_grp1591:
+          Value: true
+        group_grp1592:
+          Value: true
+        group_grp1593:
+          Value: true
+        group_grp1594:
+          Value: true
+        group_grp1595:
+          Value: true
+        group_grp1596:
+          Value: true
+        group_grp1597:
+          Value: true
+        group_grp1598:
+          Value: true
+        group_grp1599:
+          Value: true
+        group_grp1600:
+          Value: true
+        group_grp1601:
+          Value: true
+        group_grp1602:
+          Value: true
+        group_grp1603:
+          Value: true
+        group_grp1604:
+          Value: true
+        group_grp1605:
+          Value: true
+        group_grp1606:
+          Value: true
+        group_grp1607:
+          Value: true
+        group_grp1608:
+          Value: true
+        group_grp1609:
+          Value: true
+        group_grp1610:
+          Value: true
+        group_grp1611:
+          Value: true
+        group_grp1612:
+          Value: true
+        group_grp1613:
+          Value: true
+        group_grp1614:
+          Value: true
+        group_grp1615:
+          Value: true
+        group_grp1616:
+          Value: true
+        group_grp1617:
+          Value: true
+        group_grp1618:
+          Value: true
+        group_grp1619:
+          Value: true
+        group_grp1620:
+          Value: true
+        group_grp1621:
+          Value: true
+        group_grp1622:
+          Value: true
+        group_grp1623:
+          Value: true
+        group_grp1624:
+          Value: true
+        group_grp1625:
+          Value: true
+        group_grp1626:
+          Value: true
+        group_grp1627:
+          Value: true
+        group_grp1628:
+          Value: true
+        group_grp1629:
+          Value: true
+        group_grp1630:
+          Value: true
+        group_grp1631:
+          Value: true
+        group_grp1632:
+          Value: true
+        group_grp1633:
+          Value: true
+        group_grp1634:
+          Value: true
+        group_grp1635:
+          Value: true
+        group_grp1636:
+          Value: true
+        group_grp1637:
+          Value: true
+        group_grp1638:
+          Value: true
+        group_grp1639:
+          Value: true
+        group_grp1640:
+          Value: true
+        group_grp1641:
+          Value: true
+        group_grp1642:
+          Value: true
+        group_grp1643:
+          Value: true
+        group_grp1644:
+          Value: true
+        group_grp1645:
+          Value: true
+        group_grp1646:
+          Value: true
+        group_grp1647:
+          Value: true
+        group_grp1648:
+          Value: true
+        group_grp1649:
+          Value: true
+        group_grp1650:
+          Value: true
+        group_grp1651:
+          Value: true
+        group_grp1652:
+          Value: true
+        group_grp1653:
+          Value: true
+        group_grp1654:
+          Value: true
+        group_grp1655:
+          Value: true
+        group_grp1656:
+          Value: true
+        group_grp1657:
+          Value: true
+        group_grp1658:
+          Value: true
+        group_grp1659:
+          Value: true
+        group_grp1660:
+          Value: true
+        group_grp1661:
+          Value: true
+        group_grp1662:
+          Value: true
+        group_grp1663:
+          Value: true
+        group_grp1664:
+          Value: true
+        group_grp1665:
+          Value: true
+        group_grp1666:
+          Value: true
+        group_grp1667:
+          Value: true
+        group_grp1668:
+          Value: true
+        group_grp1669:
+          Value: true
+        group_grp1670:
+          Value: true
+        group_grp1671:
+          Value: true
+        group_grp1672:
+          Value: true
+        group_grp1673:
+          Value: true
+        group_grp1674:
+          Value: true
+        group_grp1675:
+          Value: true
+        group_grp1676:
+          Value: true
+        group_grp1677:
+          Value: true
+        group_grp1678:
+          Value: true
+        group_grp1679:
+          Value: true
+        group_grp1680:
+          Value: true
+        group_grp1681:
+          Value: true
+        group_grp1682:
+          Value: true
+        group_grp1683:
+          Value: true
+        group_grp1684:
+          Value: true
+        group_grp1685:
+          Value: true
+        group_grp1686:
+          Value: true
+        group_grp1687:
+          Value: true
+        group_grp1688:
+          Value: true
+        group_grp1689:
+          Value: true
+        group_grp1690:
+          Value: true
+        group_grp1691:
+          Value: true
+        group_grp1692:
+          Value: true
+        group_grp1693:
+          Value: true
+        group_grp1694:
+          Value: true
+        group_grp1695:
+          Value: true
+        group_grp1696:
+          Value: true
+        group_grp1697:
+          Value: true
+        group_grp1698:
+          Value: true
+        group_grp1699:
+          Value: true
+        group_grp1700:
+          Value: true
+        group_grp1701:
+          Value: true
+        group_grp1702:
+          Value: true
+        group_grp1703:
+          Value: true
+        group_grp1704:
+          Value: true
+        group_grp1705:
+          Value: true
+        group_grp1706:
+          Value: true
+        group_grp1707:
+          Value: true
+        group_grp1708:
+          Value: true
+        group_grp1709:
+          Value: true
+        group_grp1710:
+          Value: true
+        group_grp1711:
+          Value: true
+        group_grp1712:
+          Value: true
+        group_grp1713:
+          Value: true
+        group_grp1714:
+          Value: true
+        group_grp1715:
+          Value: true
+        group_grp1716:
+          Value: true
+        group_grp1717:
+          Value: true
+        group_grp1718:
+          Value: true
+        group_grp1719:
+          Value: true
+        group_grp1720:
+          Value: true
+        group_grp1721:
+          Value: true
+        group_grp1722:
+          Value: true
+        group_grp1723:
+          Value: true
+        group_grp1724:
+          Value: true
+        group_grp1725:
+          Value: true
+        group_grp1726:
+          Value: true
+        group_grp1727:
+          Value: true
+        group_grp1728:
+          Value: true
+        group_grp1729:
+          Value: true
+        group_grp1730:
+          Value: true
+        group_grp1731:
+          Value: true
+        group_grp1732:
+          Value: true
+        group_grp1733:
+          Value: true
+        group_grp1734:
+          Value: true
+        group_grp1735:
+          Value: true
+        group_grp1736:
+          Value: true
+        group_grp1737:
+          Value: true
+        group_grp1738:
+          Value: true
+        group_grp1739:
+          Value: true
+        group_grp1740:
+          Value: true
+        group_grp1741:
+          Value: true
+        group_grp1742:
+          Value: true
+        group_grp1743:
+          Value: true
+        group_grp1744:
+          Value: true
+        group_grp1745:
+          Value: true
+        group_grp1746:
+          Value: true
+        group_grp1747:
+          Value: true
+        group_grp1748:
+          Value: true
+        group_grp1749:
+          Value: true
+        group_grp1750:
+          Value: true
+        group_grp1751:
+          Value: true
+        group_grp1752:
+          Value: true
+        group_grp1753:
+          Value: true
+        group_grp1754:
+          Value: true
+        group_grp1755:
+          Value: true
+        group_grp1756:
+          Value: true
+        group_grp1757:
+          Value: true
+        group_grp1758:
+          Value: true
+        group_grp1759:
+          Value: true
+        group_grp1760:
+          Value: true
+        group_grp1761:
+          Value: true
+        group_grp1762:
+          Value: true
+        group_grp1763:
+          Value: true
+        group_grp1764:
+          Value: true
+        group_grp1765:
+          Value: true
+        group_grp1766:
+          Value: true
+        group_grp1767:
+          Value: true
+        group_grp1768:
+          Value: true
+        group_grp1769:
+          Value: true
+        group_grp1770:
+          Value: true
+        group_grp1771:
+          Value: true
+        group_grp1772:
+          Value: true
+        group_grp1773:
+          Value: true
+        group_grp1774:
+          Value: true
+        group_grp1775:
+          Value: true
+        group_grp1776:
+          Value: true
+        group_grp1777:
+          Value: true
+        group_grp1778:
+          Value: true
+        group_grp1779:
+          Value: true
+        group_grp1780:
+          Value: true
+        group_grp1781:
+          Value: true
+        group_grp1782:
+          Value: true
+        group_grp1783:
+          Value: true
+        group_grp1784:
+          Value: true
+        group_grp1785:
+          Value: true
+        group_grp1786:
+          Value: true
+        group_grp1787:
+          Value: true
+        group_grp1788:
+          Value: true
+        group_grp1789:
+          Value: true
+        group_grp1790:
+          Value: true
+        group_grp1791:
+          Value: true
+        group_grp1792:
+          Value: true
+        group_grp1793:
+          Value: true
+        group_grp1794:
+          Value: true
+        group_grp1795:
+          Value: true
+        group_grp1796:
+          Value: true
+        group_grp2020:
+          Value: true
+        group_grp2021:
+          Value: true
+        group_grp2022:
+          Value: true
+        group_grp2306:
+          Value: true
+        group_grp2307:
+          Value: true
+        group_grp2308:
+          Value: true
+        group_grp2309:
+          Value: true
+        group_grp2310:
+          Value: true
+        group_grp2311:
+          Value: true
+        group_grp2312:
+          Value: true
+        group_grp2313:
+          Value: true
+        group_grp2314:
+          Value: true
+        group_grp2315:
+          Value: true
+        group_grp2316:
+          Value: true
+        group_grp2317:
+          Value: true
+        group_grp2318:
+          Value: true
+        group_grp2319:
+          Value: true
+        group_grp2320:
+          Value: true
+        group_grp2321:
+          Value: true
+        group_grp2322:
+          Value: true
+        group_grp2323:
+          Value: true
+        group_grp2324:
+          Value: true
+        group_grp2325:
+          Value: true
+        group_grp2326:
+          Value: true
+        group_grp2327:
+          Value: true
+        group_grp2328:
+          Value: true
+        group_grp2329:
+          Value: true
+        group_grp2330:
+          Value: true
+        group_grp2331:
+          Value: true
+        group_grp2332:
+          Value: true
+        group_grp2333:
+          Value: true
+        group_grp2334:
+          Value: true
+        group_grp2335:
+          Value: true
+        group_grp2336:
+          Value: true
+        group_grp2337:
+          Value: true
+        group_grp2338:
+          Value: true
+        group_grp2339:
+          Value: true
+        group_grp2340:
+          Value: true
+        group_grp2341:
+          Value: true
+        group_grp2342:
+          Value: true
+        group_grp2343:
+          Value: true
+        group_grp2344:
+          Value: true
+        group_grp2345:
+          Value: true
+        group_grp2346:
+          Value: true
+        group_grp2347:
+          Value: true
+        group_grp2348:
+          Value: true
+        group_grp2349:
+          Value: true
+        group_grp2350:
+          Value: true
+        group_grp2351:
+          Value: true
+        group_grp2352:
+          Value: true
+        group_grp2353:
+          Value: true
+        group_grp2354:
+          Value: true
+        group_grp2355:
+          Value: true
+        group_grp2356:
+          Value: true
+        group_grp2357:
+          Value: true
+        group_grp2358:
+          Value: true
+        group_grp2359:
+          Value: true
+        group_grp2360:
+          Value: true
+        group_grp2361:
+          Value: true
+        group_grp2362:
+          Value: true
+        group_grp2363:
+          Value: true
+        group_grp2364:
+          Value: true
+        group_grp2365:
+          Value: true
+        group_grp2366:
+          Value: true
+        group_grp2367:
+          Value: true
+        group_grp2368:
+          Value: true
+        group_grp2369:
+          Value: true
+        group_grp2370:
+          Value: true
+        group_grp2371:
+          Value: true
+        group_grp2372:
+          Value: true
+        group_grp2373:
+          Value: true
+        group_grp2374:
+          Value: true
+        group_grp2375:
+          Value: true
+        group_grp2376:
+          Value: true
+        group_grp2377:
+          Value: true
+        group_grp2378:
+          Value: true
+        group_grp2379:
+          Value: true
+        group_grp2380:
+          Value: true
+        group_grp2381:
+          Value: true
+        group_grp2382:
+          Value: true
+        group_grp2383:
+          Value: true
+        group_grp2384:
+          Value: true
+        group_grp2385:
+          Value: true
+        group_grp2386:
+          Value: true
+        group_grp2387:
+          Value: true
+        group_grp2388:
+          Value: true
+        group_grp2389:
+          Value: true
+        group_grp2390:
+          Value: true
+        group_grp2391:
+          Value: true
+        group_grp2392:
+          Value: true
+        group_grp2393:
+          Value: true
+        group_grp2394:
+          Value: true
+        group_grp2395:
+          Value: true
+        group_grp2396:
+          Value: true
+        group_grp2397:
+          Value: true
+        group_grp2398:
+          Value: true
+        group_grp2399:
+          Value: true
+        group_grp2400:
+          Value: true
+        group_grp2401:
+          Value: true
+        group_grp2402:
+          Value: true
+        group_grp2403:
+          Value: true
+        group_grp2404:
+          Value: true
+        group_grp2405:
+          Value: true
+        group_grp2406:
+          Value: true
+        group_grp2407:
+          Value: true
+        group_grp2408:
+          Value: true
+        group_grp2409:
+          Value: true
+        group_grp2410:
+          Value: true
+        group_grp2411:
+          Value: true
+        group_grp2412:
+          Value: true
+        group_grp2413:
+          Value: true
+        group_grp2414:
+          Value: true
+        group_grp2415:
+          Value: true
+        group_grp2416:
+          Value: true
+        group_grp2417:
+          Value: true
+        group_grp2418:
+          Value: true
+        group_grp2419:
+          Value: true
+        group_grp2420:
+          Value: true
+        group_grp2421:
+          Value: true
+        group_grp2422:
+          Value: true
+        group_grp2423:
+          Value: true
+        group_grp2424:
+          Value: true
+        group_grp2425:
+          Value: true
+        group_grp2426:
+          Value: true
+        group_grp2427:
+          Value: true
+        group_grp2428:
+          Value: true
+        group_grp2429:
+          Value: true
+        group_grp2430:
+          Value: true
+        group_grp2431:
+          Value: true
+        group_grp2432:
+          Value: true
+        group_grp2433:
+          Value: true
+        group_grp2434:
+          Value: true
+        group_grp2435:
+          Value: true
+        group_grp2436:
+          Value: true
+        group_grp2437:
+          Value: true
+        group_grp2438:
+          Value: true
+        group_grp2439:
+          Value: true
+        group_grp2440:
+          Value: true
+        group_grp2441:
+          Value: true
+        group_grp2442:
+          Value: true
+        group_grp2443:
+          Value: true
+        group_grp2444:
+          Value: true
+        group_grp2445:
+          Value: true
+        group_grp2446:
+          Value: true
+        group_grp2447:
+          Value: true
+        group_grp2448:
+          Value: true
+        group_grp2449:
+          Value: true
+        group_grp2450:
+          Value: true
+        group_grp2451:
+          Value: true
+        group_grp2452:
+          Value: true
+        group_grp2453:
+          Value: true
+        group_grp2454:
+          Value: true
+        group_grp2455:
+          Value: true
+        group_grp2456:
+          Value: true
+        group_grp2457:
+          Value: true
+        group_grp2458:
+          Value: true
+        group_grp2459:
+          Value: true
+        group_grp2460:
+          Value: true
+        group_grp2461:
+          Value: true
+        group_grp2462:
+          Value: true
+        group_grp2463:
+          Value: true
+        group_grp2464:
+          Value: true
+        group_grp2465:
+          Value: true
+        group_grp2466:
+          Value: true
+        group_grp2467:
+          Value: true
+        group_grp2468:
+          Value: true
+        group_grp2469:
+          Value: true
+        group_grp2470:
+          Value: true
+        group_grp2471:
+          Value: true
+        group_grp2472:
+          Value: true
+        group_grp2473:
+          Value: true
+        group_grp2474:
+          Value: true
+        group_grp2475:
+          Value: true
+        group_grp2476:
+          Value: true
+        group_grp2477:
+          Value: true
+        group_grp2478:
+          Value: true
+        group_grp2479:
+          Value: true
+        group_grp2480:
+          Value: true
+        group_grp2481:
+          Value: true
+        group_grp2482:
+          Value: true
+        group_grp2483:
+          Value: true
+        group_grp2484:
+          Value: true
+        group_grp2485:
+          Value: true
+        group_grp2486:
+          Value: true
+        group_grp2487:
+          Value: true
+        group_grp2488:
+          Value: true
+        group_grp2489:
+          Value: true
+        group_grp2490:
+          Value: true
+        group_grp2491:
+          Value: true
+        group_grp2492:
+          Value: true
+        group_grp2493:
+          Value: true
+        group_grp2494:
+          Value: true
+        group_grp2495:
+          Value: true
+        group_grp2496:
+          Value: true
+        group_grp2497:
+          Value: true
+        group_grp2498:
+          Value: true
+        group_grp2499:
+          Value: true
+        group_grp2500:
+          Value: true
+        group_grp2501:
+          Value: true
+        group_grp2502:
+          Value: true
+        group_grp2503:
+          Value: true
+        group_grp2504:
+          Value: true
+        group_grp2505:
+          Value: true
+        group_grp2506:
+          Value: true
+        group_grp2507:
+          Value: true
+        group_grp2508:
+          Value: true
+        group_grp2509:
+          Value: true
+        group_grp2510:
+          Value: true
+        group_grp2511:
+          Value: true
+        group_grp2512:
+          Value: true
+        group_grp2513:
+          Value: true
+        group_grp2514:
+          Value: true
+        group_grp2515:
+          Value: true
+        group_grp2516:
+          Value: true
+        group_grp2517:
+          Value: true
+        group_grp2518:
+          Value: true
+        group_grp2519:
+          Value: true
+        group_grp2520:
+          Value: true
+        group_grp2521:
+          Value: true
+        group_grp2522:
+          Value: true
+        group_grp2523:
+          Value: true
+        group_grp2524:
+          Value: true
+        group_grp2525:
+          Value: true
+        group_grp2526:
+          Value: true
+        group_grp2527:
+          Value: true
+        group_grp2528:
+          Value: true
+        group_grp2529:
+          Value: true
+        group_grp2530:
+          Value: true
+        group_grp2531:
+          Value: true
+        group_grp2532:
+          Value: true
+        group_grp2533:
+          Value: true
+        group_grp2534:
+          Value: true
+        group_grp2535:
+          Value: true
+        group_grp2536:
+          Value: true
+        group_grp2537:
+          Value: true
+        group_grp2538:
+          Value: true
+        group_grp2539:
+          Value: true
+        group_grp2540:
+          Value: true
+        group_grp2541:
+          Value: true
+        group_grp2542:
+          Value: true
+        group_grp2543:
+          Value: true
+        group_grp2544:
+          Value: true
+        group_grp2545:
+          Value: true
+        group_grp2546:
+          Value: true
+        group_grp2547:
+          Value: true
+        group_grp2548:
+          Value: true
+        group_grp2549:
+          Value: true
+        group_grp2550:
+          Value: true
+        group_grp2551:
+          Value: true
+        group_grp2552:
+          Value: true
+        group_grp2553:
+          Value: true
+        group_grp2554:
+          Value: true
+        group_grp2555:
+          Value: true
+        group_grp2556:
+          Value: true
+        group_grp2557:
+          Value: true
+        group_grp2558:
+          Value: true
+        group_grp2559:
+          Value: true
+        group_grp2560:
+          Value: true
+        group_grp2561:
+          Value: true
+        group_grp2562:
+          Value: true
+        group_grp2563:
+          Value: true
+        group_grp2564:
+          Value: true
+        group_grp2565:
+          Value: true
+        group_grp2566:
+          Value: true
+        group_grp2582:
+          Value: true
+        group_grp3429:
+          Value: true
+        group_grp3430:
+          Value: true
+        group_grp3431:
+          Value: true
+        group_grp6078:
+          Value: true
+        group_grp6079:
+          Value: true
+        group_grp6080:
+          Value: true
+        group_grp6081:
+          Value: true
+        group_grp6082:
+          Value: true
+        group_grp6083:
+          Value: true
+        group_grp6084:
+          Value: true
+        group_grp6085:
+          Value: true
+        group_grp6086:
+          Value: true
+        group_grp6087:
+          Value: true
+        group_grp6088:
+          Value: true
+        group_grp6089:
+          Value: true
+        group_grp6090:
+          Value: true
+        group_grp6091:
+          Value: true
+        group_grp6092:
+          Value: true
+        group_grp6093:
+          Value: true
+        group_grp6094:
+          Value: true
+        group_grp6095:
+          Value: true
+        group_grp6096:
+          Value: true
+        group_grp6097:
+          Value: true
+        group_grp6098:
+          Value: true
+        group_grp6099:
+          Value: true
+        hand_left_1_link:
+          Value: false
+        hand_left_2_link:
+          Value: false
+        hand_left_fingers_link:
+          Value: false
+        hand_right_1_link:
+          Value: false
+        hand_right_2_link:
+          Value: false
+        hand_right_fingers_link:
+          Value: false
+        head_1_link:
+          Value: false
+        head_2_link:
+          Value: false
+        head_front_camera_color_optical_frame:
+          Value: true
+        head_front_camera_link:
+          Value: false
+        human_goal:
+          Value: true
+        intermediate waypoint goal:
+          Value: true
+        local_cost_map:
+          Value: true
+        local_map:
+          Value: true
+        map:
+          Value: true
+        odom:
+          Value: true
+        odom_ORB:
+          Value: true
+        rgbd_laser_link:
+          Value: false
+        torso_back_camera_accel_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_rgb_frame:
+          Value: false
+        torso_back_camera_fisheye2_optical_frame:
+          Value: true
+        torso_back_camera_fisheye2_rgb_frame:
+          Value: false
+        torso_back_camera_gyro_optical_frame:
+          Value: true
+        torso_back_camera_link:
+          Value: false
+        torso_back_camera_pose_frame:
+          Value: false
+        torso_front_camera_accel_frame:
+          Value: true
+        torso_front_camera_accel_optical_frame:
+          Value: true
+        torso_front_camera_aligned_depth_to_color_frame:
+          Value: false
+        torso_front_camera_bottom_screw_frame:
+          Value: false
+        torso_front_camera_color_frame:
+          Value: false
+        torso_front_camera_color_optical_frame:
+          Value: true
+        torso_front_camera_gyro_frame:
+          Value: true
+        torso_front_camera_imu_optical_frame:
+          Value: true
+        torso_front_camera_infra1_frame:
+          Value: false
+        torso_front_camera_infra1_optical_frame:
+          Value: true
+        torso_front_camera_infra2_frame:
+          Value: false
+        torso_front_camera_infra2_optical_frame:
+          Value: true
+        torso_front_camera_link:
+          Value: false
+        torso_mic_front_link:
+          Value: false
+        wheel_left_link:
+          Value: false
+        wheel_right_link:
+          Value: false
+      Marker Alpha: 1
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        map:
+          final goal:
+            {}
+          human_goal:
+            {}
+          local_cost_map:
+            {}
+          local_map:
+            {}
+          odom_ORB:
+            odom:
+              base_footprint:
+                base_link:
+                  arm_left_base_link:
+                    arm_left_1_link:
+                      arm_left_2_link:
+                        arm_left_3_link:
+                          arm_left_4_link:
+                            arm_left_grasping_frame:
+                              {}
+                            arm_left_tool_link:
+                              hand_left_fingers_link:
+                                {}
+                            hand_left_1_link:
+                              hand_left_2_link:
+                                {}
+                  arm_right_base_link:
+                    arm_right_1_link:
+                      arm_right_2_link:
+                        arm_right_3_link:
+                          arm_right_4_link:
+                            arm_right_grasping_frame:
+                              {}
+                            arm_right_tool_link:
+                              hand_right_fingers_link:
+                                {}
+                            hand_right_1_link:
+                              hand_right_2_link:
+                                {}
+                  caster_left_link:
+                    {}
+                  caster_right_link:
+                    {}
+                  docking_link:
+                    {}
+                  front_fisheye_camera_link:
+                    front_fisheye_camera_optical_frame:
+                      {}
+                  head_1_link:
+                    back_led_frame:
+                      {}
+                    ear_led_left_frame:
+                      {}
+                    ear_led_right_frame:
+                      {}
+                    head_2_link:
+                      head_front_camera_link:
+                        head_front_camera_color_optical_frame:
+                          {}
+                  rgbd_laser_link:
+                    {}
+                  torso_back_camera_pose_frame:
+                    torso_back_camera_link:
+                      torso_back_camera_fisheye1_rgb_frame:
+                        torso_back_camera_fisheye1_optical_frame:
+                          {}
+                      torso_back_camera_fisheye2_rgb_frame:
+                        torso_back_camera_fisheye2_optical_frame:
+                          {}
+                      torso_back_camera_gyro_optical_frame:
+                        torso_back_camera_accel_optical_frame:
+                          {}
+                  torso_front_camera_bottom_screw_frame:
+                    torso_front_camera_link:
+                      torso_front_camera_accel_frame:
+                        torso_front_camera_accel_optical_frame:
+                          {}
+                      torso_front_camera_aligned_depth_to_color_frame:
+                        torso_front_camera_color_optical_frame:
+                          {}
+                      torso_front_camera_color_frame:
+                        {}
+                      torso_front_camera_gyro_frame:
+                        torso_front_camera_imu_optical_frame:
+                          {}
+                      torso_front_camera_infra1_frame:
+                        torso_front_camera_infra1_optical_frame:
+                          {}
+                      torso_front_camera_infra2_frame:
+                        torso_front_camera_infra2_optical_frame:
+                          {}
+                  torso_mic_front_link:
+                    {}
+                  wheel_left_link:
+                    {}
+                  wheel_right_link:
+                    {}
+                intermediate waypoint goal:
+                  {}
+      Update Interval: 0
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /tracked_pose_2d/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: false
+      Name: Map
+      Topic: /behavior_generator/local_occupancy_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: false
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /behavior_generator/local_cost_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /torso_front_camera/infra1/image_rect_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /humans/summary_image/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 25.061012268066406
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: -2.5232045650482178
+        Y: 1.610069751739502
+        Z: -0.9226140379905701
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.5147963762283325
+      Target Frame: <Fixed Frame>
+      Yaw: 4.635374546051025
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1385
+  Hide Left Dock: false
+  Hide Right Dock: true
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd0000000400000000000001c80000053afc020000000dfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000014000001a6000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001c00000012d0000001600fffffffb0000000a0049006d00610067006501000002f3000001710000001600fffffffb0000000a0049006d006100670065010000046a000000e40000001600fffffffb0000000a0049006d00610067006501000003700000019a0000000000000000fb0000000a0049006d0061006700650100000474000000da0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d00650100000000000004500000000000000000000003110000053a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1247
+  X: 67
+  Y: 27
diff --git a/src/robot_behavior/config/rviz/config_19_07_2022.rviz b/src/robot_behavior/config/rviz/config_19_07_2022.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..205791778326611d54de92732f44db49338a9a1a
--- /dev/null
+++ b/src/robot_behavior/config/rviz/config_19_07_2022.rviz
@@ -0,0 +1,420 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /TF1/Frames1
+        - /TF1/Tree1
+      Splitter Ratio: 0.7894737124443054
+    Tree Height: 340
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/proj_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/TF
+      Enabled: true
+      Frame Timeout: 15
+      Frames:
+        All Enabled: false
+        arm_left_1_link:
+          Value: false
+        arm_left_2_link:
+          Value: false
+        arm_left_3_link:
+          Value: false
+        arm_left_4_link:
+          Value: false
+        arm_left_base_link:
+          Value: false
+        arm_left_grasping_frame:
+          Value: false
+        arm_left_tool_link:
+          Value: false
+        arm_right_1_link:
+          Value: false
+        arm_right_2_link:
+          Value: false
+        arm_right_3_link:
+          Value: false
+        arm_right_4_link:
+          Value: false
+        arm_right_base_link:
+          Value: false
+        arm_right_grasping_frame:
+          Value: false
+        arm_right_tool_link:
+          Value: false
+        back_led_frame:
+          Value: false
+        base_footprint:
+          Value: true
+        base_link:
+          Value: true
+        caster_left_link:
+          Value: false
+        caster_right_link:
+          Value: false
+        docking_link:
+          Value: true
+        ear_led_left_frame:
+          Value: false
+        ear_led_right_frame:
+          Value: false
+        final goal:
+          Value: true
+        front_fisheye_camera_link:
+          Value: true
+        front_fisheye_camera_optical_frame:
+          Value: true
+        hand_left_1_link:
+          Value: false
+        hand_left_2_link:
+          Value: false
+        hand_left_fingers_link:
+          Value: false
+        hand_right_1_link:
+          Value: false
+        hand_right_2_link:
+          Value: false
+        hand_right_fingers_link:
+          Value: false
+        head_1_link:
+          Value: false
+        head_2_link:
+          Value: false
+        head_front_camera_color_optical_frame:
+          Value: true
+        head_front_camera_link:
+          Value: false
+        human_goal:
+          Value: true
+        intermediate waypoint goal:
+          Value: true
+        local_cost_map:
+          Value: true
+        local_map:
+          Value: true
+        map:
+          Value: true
+        odom:
+          Value: true
+        rgbd_laser_link:
+          Value: false
+        torso_back_camera_accel_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_rgb_frame:
+          Value: false
+        torso_back_camera_fisheye2_optical_frame:
+          Value: true
+        torso_back_camera_fisheye2_rgb_frame:
+          Value: false
+        torso_back_camera_gyro_optical_frame:
+          Value: true
+        torso_back_camera_link:
+          Value: false
+        torso_back_camera_pose_frame:
+          Value: false
+        torso_front_camera_accel_frame:
+          Value: true
+        torso_front_camera_accel_optical_frame:
+          Value: true
+        torso_front_camera_aligned_depth_to_color_frame:
+          Value: false
+        torso_front_camera_bottom_screw_frame:
+          Value: false
+        torso_front_camera_color_frame:
+          Value: false
+        torso_front_camera_color_optical_frame:
+          Value: true
+        torso_front_camera_gyro_frame:
+          Value: true
+        torso_front_camera_imu_optical_frame:
+          Value: true
+        torso_front_camera_infra1_frame:
+          Value: false
+        torso_front_camera_infra1_optical_frame:
+          Value: true
+        torso_front_camera_infra2_frame:
+          Value: false
+        torso_front_camera_infra2_optical_frame:
+          Value: true
+        torso_front_camera_link:
+          Value: false
+        torso_mic_front_link:
+          Value: false
+        wheel_left_link:
+          Value: false
+        wheel_right_link:
+          Value: false
+      Marker Alpha: 1
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        map:
+          final goal:
+            {}
+          human_goal:
+            {}
+          local_cost_map:
+            {}
+          local_map:
+            {}
+          odom:
+            base_footprint:
+              base_link:
+                arm_left_base_link:
+                  arm_left_1_link:
+                    arm_left_2_link:
+                      arm_left_3_link:
+                        arm_left_4_link:
+                          arm_left_grasping_frame:
+                            {}
+                          arm_left_tool_link:
+                            hand_left_fingers_link:
+                              {}
+                          hand_left_1_link:
+                            hand_left_2_link:
+                              {}
+                arm_right_base_link:
+                  arm_right_1_link:
+                    arm_right_2_link:
+                      arm_right_3_link:
+                        arm_right_4_link:
+                          arm_right_grasping_frame:
+                            {}
+                          arm_right_tool_link:
+                            hand_right_fingers_link:
+                              {}
+                          hand_right_1_link:
+                            hand_right_2_link:
+                              {}
+                caster_left_link:
+                  {}
+                caster_right_link:
+                  {}
+                docking_link:
+                  {}
+                front_fisheye_camera_link:
+                  front_fisheye_camera_optical_frame:
+                    {}
+                head_1_link:
+                  back_led_frame:
+                    {}
+                  ear_led_left_frame:
+                    {}
+                  ear_led_right_frame:
+                    {}
+                  head_2_link:
+                    head_front_camera_link:
+                      head_front_camera_color_optical_frame:
+                        {}
+                rgbd_laser_link:
+                  {}
+                torso_back_camera_pose_frame:
+                  torso_back_camera_link:
+                    torso_back_camera_fisheye1_rgb_frame:
+                      torso_back_camera_fisheye1_optical_frame:
+                        {}
+                    torso_back_camera_fisheye2_rgb_frame:
+                      torso_back_camera_fisheye2_optical_frame:
+                        {}
+                    torso_back_camera_gyro_optical_frame:
+                      torso_back_camera_accel_optical_frame:
+                        {}
+                torso_front_camera_bottom_screw_frame:
+                  torso_front_camera_link:
+                    torso_front_camera_accel_frame:
+                      torso_front_camera_accel_optical_frame:
+                        {}
+                    torso_front_camera_aligned_depth_to_color_frame:
+                      torso_front_camera_color_optical_frame:
+                        {}
+                    torso_front_camera_color_frame:
+                      {}
+                    torso_front_camera_gyro_frame:
+                      torso_front_camera_imu_optical_frame:
+                        {}
+                    torso_front_camera_infra1_frame:
+                      torso_front_camera_infra1_optical_frame:
+                        {}
+                    torso_front_camera_infra2_frame:
+                      torso_front_camera_infra2_optical_frame:
+                        {}
+                torso_mic_front_link:
+                  {}
+                wheel_left_link:
+                  {}
+                wheel_right_link:
+                  {}
+              intermediate waypoint goal:
+                {}
+      Update Interval: 0
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /tracked_pose_2d/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /behavior_generator/local_cost_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /behavior_generator/local_occupancy_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: false
+      Image Topic: /torso_front_camera/infra1/image_rect_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: false
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 24.4995059967041
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: 0.7471363544464111
+        Y: 0.010536948218941689
+        Z: -2.08261775970459
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.1047967672348022
+      Target Frame: <Fixed Frame>
+      Yaw: 3.770385265350342
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1025
+  Hide Left Dock: false
+  Hide Right Dock: true
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd0000000400000000000003b0000003d2fc020000000cfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000140000018f000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001a90000023d0000001600fffffffb0000000a0049006d00610067006500000001e8000001fe0000001600fffffffb0000000a0049006d0061006700650100000433000000d70000000000000000fb0000000a0049006d00610067006501000003700000019a0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d0065010000000000000450000000000000000000000387000003d200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1853
+  X: 67
+  Y: 27
diff --git a/src/robot_behavior/config/rviz/config_24_02_2022.rviz b/src/robot_behavior/config/rviz/config_24_02_2022.rviz
index 8734ad8a34bc41cef39c7f2de3f43aace2f6c734..6b5cf6596773977f370b1bb16ddd811b0ad5ba87 100644
--- a/src/robot_behavior/config/rviz/config_24_02_2022.rviz
+++ b/src/robot_behavior/config/rviz/config_24_02_2022.rviz
@@ -7,7 +7,7 @@ Panels:
         - /TF1/Frames1
         - /TF1/Tree1
       Splitter Ratio: 0.7894737124443054
-    Tree Height: 396
+    Tree Height: 639
   - Class: rviz/Selection
     Name: Selection
   - Class: rviz/Tool Properties
@@ -58,7 +58,7 @@ Visualization Manager:
       Draw Behind: false
       Enabled: true
       Name: Map
-      Topic: /rtabmap/proj_map
+      Topic: /slam/grid_map
       Unreliable: false
       Use Timestamp: false
       Value: true
@@ -137,16 +137,14 @@ Visualization Manager:
           Value: true
         head_front_camera_link:
           Value: false
-        human_0:
-          Value: true
-        human_1:
-          Value: true
         human_goal:
           Value: true
         intermediate waypoint goal:
           Value: true
         local_cost_map:
           Value: true
+        local_map:
+          Value: true
         map:
           Value: true
         odom:
@@ -169,6 +167,10 @@ Visualization Manager:
           Value: false
         torso_back_camera_pose_frame:
           Value: false
+        torso_front_camera_accel_frame:
+          Value: true
+        torso_front_camera_accel_optical_frame:
+          Value: true
         torso_front_camera_aligned_depth_to_color_frame:
           Value: false
         torso_front_camera_bottom_screw_frame:
@@ -177,6 +179,10 @@ Visualization Manager:
           Value: false
         torso_front_camera_color_optical_frame:
           Value: true
+        torso_front_camera_gyro_frame:
+          Value: true
+        torso_front_camera_imu_optical_frame:
+          Value: true
         torso_front_camera_infra1_frame:
           Value: false
         torso_front_camera_infra1_optical_frame:
@@ -203,12 +209,12 @@ Visualization Manager:
         map:
           final goal:
             {}
-          human_1:
-            {}
           human_goal:
             {}
           local_cost_map:
             {}
+          local_map:
+            {}
           odom:
             base_footprint:
               base_link:
@@ -273,11 +279,17 @@ Visualization Manager:
                         {}
                 torso_front_camera_bottom_screw_frame:
                   torso_front_camera_link:
+                    torso_front_camera_accel_frame:
+                      torso_front_camera_accel_optical_frame:
+                        {}
                     torso_front_camera_aligned_depth_to_color_frame:
                       torso_front_camera_color_optical_frame:
                         {}
                     torso_front_camera_color_frame:
                       {}
+                    torso_front_camera_gyro_frame:
+                      torso_front_camera_imu_optical_frame:
+                        {}
                     torso_front_camera_infra1_frame:
                       torso_front_camera_infra1_optical_frame:
                         {}
@@ -312,7 +324,7 @@ Visualization Manager:
       Draw Behind: false
       Enabled: true
       Name: Map
-      Topic: /rtabmap_local_map_rectified
+      Topic: /behavior_generator/local_occupancy_map
       Unreliable: false
       Use Timestamp: false
       Value: true
@@ -322,7 +334,7 @@ Visualization Manager:
       Draw Behind: false
       Enabled: true
       Name: Map
-      Topic: /local_cost_map
+      Topic: /behavior_generator/local_cost_map
       Unreliable: false
       Use Timestamp: false
       Value: true
@@ -366,7 +378,7 @@ Visualization Manager:
   Views:
     Current:
       Class: rviz/Orbit
-      Distance: 11.74433422088623
+      Distance: 12.148454666137695
       Enable Stereo Rendering:
         Stereo Eye Separation: 0.05999999865889549
         Stereo Focal Distance: 1
@@ -374,27 +386,27 @@ Visualization Manager:
         Value: false
       Field of View: 0.7853981852531433
       Focal Point:
-        X: 0.7471363544464111
-        Y: 0.010536948218941689
-        Z: -2.08261775970459
+        X: 2.3064792156219482
+        Y: -0.3170703053474426
+        Z: -2.041748523712158
       Focal Shape Fixed Size: true
       Focal Shape Size: 0.05000000074505806
       Invert Z Axis: false
       Name: Current View
       Near Clip Distance: 0.009999999776482582
-      Pitch: 1.3697963953018188
+      Pitch: 1.2497965097427368
       Target Frame: <Fixed Frame>
-      Yaw: 4.305388450622559
+      Yaw: 3.170382499694824
     Saved: ~
 Window Geometry:
   Displays:
     collapsed: false
-  Height: 1025
+  Height: 1385
   Hide Left Dock: false
   Hide Right Dock: true
   Image:
     collapsed: false
-  QMainWindow State: 000000ff00000000fd0000000400000000000003b0000003d2fc020000000cfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000014000001c7000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001e1000002050000001600fffffffb0000000a0049006d00610067006500000002c6000001200000001600fffffffb0000000a0049006d0061006700650100000433000000d70000000000000000fb0000000a0049006d00610067006501000003700000019a0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000024400fffffffb0000000800540069006d0065010000000000000450000000000000000000000387000003d200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  QMainWindow State: 000000ff00000000fd0000000400000000000003b00000053afc020000000cfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000014000002ba000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650000000214000001690000001600fffffffb0000000a0049006d00610067006501000002d40000027a0000001600fffffffb0000000a0049006d0061006700650100000433000000d70000000000000000fb0000000a0049006d00610067006501000003700000019a0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d00650100000000000004500000000000000000000006070000053a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
   Selection:
     collapsed: false
   Time:
@@ -403,6 +415,6 @@ Window Geometry:
     collapsed: false
   Views:
     collapsed: true
-  Width: 1853
+  Width: 2493
   X: 67
   Y: 27
diff --git a/src/robot_behavior/config/rviz/config_28_03_2023.rviz b/src/robot_behavior/config/rviz/config_28_03_2023.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..3c0b3f3ac1feb195af38ccea6847f43bd3c4ca58
--- /dev/null
+++ b/src/robot_behavior/config/rviz/config_28_03_2023.rviz
@@ -0,0 +1,464 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /TF1/Frames1
+        - /TF1/Tree1
+      Splitter Ratio: 0.4427312910556793
+    Tree Height: 363
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/grid_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/TF
+      Enabled: true
+      Frame Timeout: 15
+      Frames:
+        All Enabled: false
+        arm_left_1_link:
+          Value: false
+        arm_left_2_link:
+          Value: false
+        arm_left_3_link:
+          Value: false
+        arm_left_4_link:
+          Value: false
+        arm_left_base_link:
+          Value: false
+        arm_left_grasping_frame:
+          Value: false
+        arm_left_tool_link:
+          Value: false
+        arm_right_1_link:
+          Value: false
+        arm_right_2_link:
+          Value: false
+        arm_right_3_link:
+          Value: false
+        arm_right_4_link:
+          Value: false
+        arm_right_base_link:
+          Value: false
+        arm_right_grasping_frame:
+          Value: false
+        arm_right_tool_link:
+          Value: false
+        back_led_frame:
+          Value: false
+        base_footprint:
+          Value: true
+        base_link:
+          Value: true
+        body_b0002:
+          Value: true
+        body_b0006:
+          Value: true
+        caster_left_link:
+          Value: false
+        caster_right_link:
+          Value: false
+        docking_link:
+          Value: true
+        ear_led_left_frame:
+          Value: false
+        ear_led_right_frame:
+          Value: false
+        final goal:
+          Value: true
+        front_fisheye_camera_link:
+          Value: true
+        front_fisheye_camera_optical_frame:
+          Value: true
+        global_map:
+          Value: true
+        hand_left_1_link:
+          Value: false
+        hand_left_2_link:
+          Value: false
+        hand_left_fingers_link:
+          Value: false
+        hand_right_1_link:
+          Value: false
+        hand_right_2_link:
+          Value: false
+        hand_right_fingers_link:
+          Value: false
+        head_1_link:
+          Value: false
+        head_2_link:
+          Value: false
+        head_front_camera_color_optical_frame:
+          Value: true
+        head_front_camera_link:
+          Value: false
+        human_goal:
+          Value: true
+        intermediate waypoint goal:
+          Value: true
+        local_cost_map:
+          Value: true
+        local_map:
+          Value: true
+        map:
+          Value: true
+        odom:
+          Value: true
+        odom_ORB:
+          Value: true
+        rgbd_laser_link:
+          Value: false
+        torso_back_camera_accel_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_optical_frame:
+          Value: true
+        torso_back_camera_fisheye1_rgb_frame:
+          Value: false
+        torso_back_camera_fisheye2_optical_frame:
+          Value: true
+        torso_back_camera_fisheye2_rgb_frame:
+          Value: false
+        torso_back_camera_gyro_optical_frame:
+          Value: true
+        torso_back_camera_link:
+          Value: false
+        torso_back_camera_pose_frame:
+          Value: false
+        torso_front_camera_accel_frame:
+          Value: true
+        torso_front_camera_accel_optical_frame:
+          Value: true
+        torso_front_camera_aligned_depth_to_color_frame:
+          Value: false
+        torso_front_camera_bottom_screw_frame:
+          Value: false
+        torso_front_camera_color_frame:
+          Value: false
+        torso_front_camera_color_optical_frame:
+          Value: true
+        torso_front_camera_depth_frame:
+          Value: true
+        torso_front_camera_depth_optical_frame:
+          Value: true
+        torso_front_camera_gyro_frame:
+          Value: true
+        torso_front_camera_imu_optical_frame:
+          Value: true
+        torso_front_camera_infra1_frame:
+          Value: false
+        torso_front_camera_infra1_optical_frame:
+          Value: true
+        torso_front_camera_infra2_frame:
+          Value: false
+        torso_front_camera_infra2_optical_frame:
+          Value: true
+        torso_front_camera_link:
+          Value: false
+        torso_mic_front_link:
+          Value: false
+        wheel_left_link:
+          Value: false
+        wheel_right_link:
+          Value: false
+      Marker Alpha: 1
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        map:
+          body_b0002:
+            {}
+          body_b0006:
+            {}
+          final goal:
+            {}
+          global_map:
+            {}
+          human_goal:
+            {}
+          local_cost_map:
+            {}
+          local_map:
+            {}
+          odom_ORB:
+            odom:
+              base_footprint:
+                base_link:
+                  arm_left_base_link:
+                    arm_left_1_link:
+                      arm_left_2_link:
+                        arm_left_3_link:
+                          arm_left_4_link:
+                            arm_left_grasping_frame:
+                              {}
+                            arm_left_tool_link:
+                              hand_left_fingers_link:
+                                {}
+                            hand_left_1_link:
+                              hand_left_2_link:
+                                {}
+                  arm_right_base_link:
+                    arm_right_1_link:
+                      arm_right_2_link:
+                        arm_right_3_link:
+                          arm_right_4_link:
+                            arm_right_grasping_frame:
+                              {}
+                            arm_right_tool_link:
+                              hand_right_fingers_link:
+                                {}
+                            hand_right_1_link:
+                              hand_right_2_link:
+                                {}
+                  caster_left_link:
+                    {}
+                  caster_right_link:
+                    {}
+                  docking_link:
+                    {}
+                  front_fisheye_camera_link:
+                    front_fisheye_camera_optical_frame:
+                      {}
+                  head_1_link:
+                    back_led_frame:
+                      {}
+                    ear_led_left_frame:
+                      {}
+                    ear_led_right_frame:
+                      {}
+                    head_2_link:
+                      head_front_camera_link:
+                        head_front_camera_color_optical_frame:
+                          {}
+                  rgbd_laser_link:
+                    {}
+                  torso_back_camera_pose_frame:
+                    torso_back_camera_link:
+                      torso_back_camera_fisheye1_rgb_frame:
+                        torso_back_camera_fisheye1_optical_frame:
+                          {}
+                      torso_back_camera_fisheye2_rgb_frame:
+                        torso_back_camera_fisheye2_optical_frame:
+                          {}
+                      torso_back_camera_gyro_optical_frame:
+                        torso_back_camera_accel_optical_frame:
+                          {}
+                  torso_front_camera_bottom_screw_frame:
+                    torso_front_camera_link:
+                      torso_front_camera_accel_frame:
+                        torso_front_camera_accel_optical_frame:
+                          {}
+                      torso_front_camera_aligned_depth_to_color_frame:
+                        torso_front_camera_color_optical_frame:
+                          {}
+                      torso_front_camera_color_frame:
+                        {}
+                      torso_front_camera_depth_frame:
+                        torso_front_camera_depth_optical_frame:
+                          {}
+                      torso_front_camera_gyro_frame:
+                        torso_front_camera_imu_optical_frame:
+                          {}
+                      torso_front_camera_infra1_frame:
+                        torso_front_camera_infra1_optical_frame:
+                          {}
+                      torso_front_camera_infra2_frame:
+                        torso_front_camera_infra2_optical_frame:
+                          {}
+                  torso_mic_front_link:
+                    {}
+                  wheel_left_link:
+                    {}
+                  wheel_right_link:
+                    {}
+                intermediate waypoint goal:
+                  {}
+      Update Interval: 0
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /tracked_pose_2d/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: costmap
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/local_cost_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /torso_front_camera/infra1/image_rect_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /humans/summary_image/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/local_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+    - Alpha: 0.699999988079071
+      Class: rviz/Map
+      Color Scheme: map
+      Draw Behind: false
+      Enabled: true
+      Name: Map
+      Topic: /slam/global_map
+      Unreliable: false
+      Use Timestamp: false
+      Value: true
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 22.573223114013672
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: -1.0722724199295044
+        Y: -1.218994140625
+        Z: -1.0581470727920532
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.5697963237762451
+      Target Frame: <Fixed Frame>
+      Yaw: 4.665371417999268
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1385
+  Hide Left Dock: false
+  Hide Right Dock: true
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd0000000400000000000001c80000053afc020000000dfb0000001200530065006c0065006300740069006f006e0000000014000000a20000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000014000001a6000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001c00000012d0000001600fffffffb0000000a0049006d00610067006501000002f3000001710000001600fffffffb0000000a0049006d006100670065010000046a000000e40000001600fffffffb0000000a0049006d00610067006501000003700000019a0000000000000000fb0000000a0049006d0061006700650100000474000000da0000000000000000000000010000010f00000367fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b00000367000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000042700fffffffb0000000800540069006d00650100000000000004500000000000000000000007ef0000053a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730000000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 2493
+  X: 67
+  Y: 27
diff --git a/src/robot_behavior/config/rviz/facebodycandidatematches.rviz b/src/robot_behavior/config/rviz/facebodycandidatematches.rviz
new file mode 100644
index 0000000000000000000000000000000000000000..ae45ea0f0328aa1c35f38a6f5d53cff59340e624
--- /dev/null
+++ b/src/robot_behavior/config/rviz/facebodycandidatematches.rviz
@@ -0,0 +1,147 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 0
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /Global Options1
+        - /Status1
+        - /Image2/Status1
+      Splitter Ratio: 0.5
+    Tree Height: 286
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /front_fisheye_bodies_tracked/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /head_front_faces_tracked/image_raw
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: compressed
+      Unreliable: false
+      Value: true
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 10
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: 0
+        Y: 0
+        Z: 0
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 0.785398006439209
+      Target Frame: <Fixed Frame>
+      Yaw: 0.785398006439209
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1025
+  Hide Left Dock: false
+  Hide Right Dock: false
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd000000040000000000000399000003abfc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b00000159000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d006100670065010000019a0000024c0000001600ffffff00000001000002db000003abfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fc0000003b000003ab000000bb0100001afa000000000100000002fb0000000a0049006d0061006700650100000000ffffffff0000005a00fffffffb0000000a00560069006500770073010000062e0000010f0000010000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006500000000000000073d0000024400fffffffb0000000800540069006d00650100000000000004500000000000000000000000bd000003ab00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: false
+  Width: 1853
+  X: 67
+  Y: 27
diff --git a/src/robot_behavior/config/social_mpc/social_mpc.yaml b/src/robot_behavior/config/social_mpc/social_mpc.yaml
index 1cea6cd5f4abbc5462466d7a55b5094bd6260da3..2de0740a9f7ec9cece189585157a71a3084e3c7f 100644
--- a/src/robot_behavior/config/social_mpc/social_mpc.yaml
+++ b/src/robot_behavior/config/social_mpc/social_mpc.yaml
@@ -2,8 +2,8 @@ logging_mode: 'INFO' # INFO, WARNING, ERROR, CRITICAL DEBUG
 # mpc parameters
 h: 0.2
 fw_time: 5.
-u_lb: [-3.0, -0.5, -0.1]
-u_ub: [3.0, 0.5, 0.2]
+u_lb: [-3.0, -0.15, -0.1]
+u_ub: [3.0, 0.15, 0.1]
 reg_parameter: [1., 10., 1.]
 max_acceleration: [0.4, 0.4, 0.3]
 max_iter_optim: 5
@@ -11,7 +11,8 @@ max_iter_optim: 5
 goto_target_dim: 8  # x, y, ang, v, w, loss, angle flag, flag
 human_target_dim: 8  # x, y, ang, v, w, loss, angle flag, flag
 pan_target_dim: 5  # x, y, v, w, flag
-target_pose_dim: 5  # x, y, ang, local/global flag, flag
+target_pose_goto_dim: 5  # x, y, ang, local/global flag, flag
+target_pose_look_dim: 4  # x, y, local/global flag, flag
 goto_target_id: 14  # id
 goto_target_pose: [0., 0., 0., 0., 0.]  # pose (x, y, ang),local 1./global 0. flag, flag
 human_target_id: -1
@@ -21,7 +22,7 @@ relative_h_pose: [0.90, -0.2]  # meter
 check_object_fw_traj_offset: 0.4  # meter
 # mpc losses parameters
 loss: 0  # max_out_y_dist_ang, maxout_x_y_ang by default
-wall_avoidance_points: [[0., -0.3]]  # [[0., 0.2], [0.15, 0.1], [-0.15, 0.1], [0., -0.3]]  # [[0., -0.3]]
+wall_avoidance_points: [[0., -0.3]]  # [[0., -0.6]]  # [[0., 0.2], [0.15, 0.1], [-0.15, 0.1], [0., -0.3]]  # [[0., -0.3]]
 weights_description: ['goto_weight', 'human_features_weight', 'object_cost_map_weight', 'social_cost_map_weight', 'cost_map_weight', 'wall_avoidance_weight']
 goto_weight: 1.
 human_features_weight: 1.
@@ -34,13 +35,13 @@ loss_coef: [[50, 50, 1, 0, 0, 0, 3], [0, 0, 0, 50, 50, 3, 3], [10, 10, 1, 50, 50
 # cost_map parameters
 step_meshes: 5  # sampling every x cm
 # ssn model parameters
-group_detection_stride: 0.6
+group_detection_stride: 0.5
 # mpc preprocessing parameters
-waypoint_distance: 1.0 #1.2
-orientation_distance_threshold: 0.5
+waypoint_distance: 2. #1.2
+orientation_distance_threshold: 0.3
 # controller parameters
-path_loop_time: 1.
-goal_loop_time: 1.
+path_loop_time: 2.
+goal_loop_time: 2.
 path_planner_enabled: true
 goal_finder_enabled: true
 update_goals_enabled: true
diff --git a/src/robot_behavior/launch/action_servers.launch b/src/robot_behavior/launch/action_servers.launch
index bd7020cf5a41f5dd6b7ee31498cacdb50ed95255..e838901f548b61f01373c60006ea8463e2caf6d9 100644
--- a/src/robot_behavior/launch/action_servers.launch
+++ b/src/robot_behavior/launch/action_servers.launch
@@ -4,23 +4,67 @@
   <arg name="pub_hz_control"         default="20"/>
   <arg name="success_dist_thresh"    default="0.25"/>
   <arg name="success_ang_thresh"     default="0.2"/>
-  
+  <arg name="namespace"              default="behavior_generator"/>
 
-  <node pkg="robot_behavior" type="navigate_action_server_main.py" name="navigate_action_server_main" output="screen" >
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-    <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
+
+  <node pkg="robot_behavior" type="behavior_go_to_position_action_server_main.py" name="behavior_go_to_position_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_dist_thresh"     type="double" value="$(arg success_dist_thresh)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/go_to_position"  to="/$(arg namespace)/action/go_to_position"/>
+  </node>
+
+  <node pkg="robot_behavior" type="behavior_go_to_body_action_server_main.py" name="behavior_go_to_body_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_dist_thresh"     type="double" value="$(arg success_dist_thresh)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/go_to_body"  to="/$(arg namespace)/action/go_to_body"/>
+  </node>
+
+  <node pkg="robot_behavior" type="behavior_go_to_person_action_server_main.py" name="behavior_go_to_person_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_dist_thresh"     type="double" value="$(arg success_dist_thresh)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/go_to_person"  to="/$(arg namespace)/action/go_to_person"/>
+  </node>
+
+  <node pkg="robot_behavior" type="behavior_go_to_group_action_server_main.py" name="behavior_go_to_group_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_dist_thresh"     type="double" value="$(arg success_dist_thresh)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/go_to_group"  to="/$(arg namespace)/action/go_to_group"/>
+  </node>
+
+  <node pkg="robot_behavior" type="behavior_look_at_body_action_server_main.py" name="behavior_look_at_body_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/look_at_body"  to="/$(arg namespace)/action/look_at_body"/>
+  </node>
+
+  <node pkg="robot_behavior" type="behavior_look_at_person_action_server_main.py" name="behavior_look_at_person_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/look_at_person"  to="/$(arg namespace)/action/look_at_person"/>
   </node>
 
-  <node pkg="robot_behavior" type="gotowards_action_server_main.py" name="gotowards_action_server_main" output="screen" >
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-    <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
+  <node pkg="robot_behavior" type="behavior_look_at_group_action_server_main.py" name="behavior_look_at_group_action_server_main" output="screen" >
+    <param name="hz"                      type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_ang_thresh"      type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"      to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/look_at_group"  to="/$(arg namespace)/action/look_at_group"/>
   </node>
 
-  <node pkg="robot_behavior" type="lookat_action_server_main.py" name="lookat_action_server_main" output="screen" >
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
+  <node pkg="robot_behavior" type="behavior_look_at_position_action_server_main.py" name="behavior_look_at_position_action_server_main" output="screen" >
+    <param name="hz"                        type="int"    value="$(arg pub_hz_control)"/>
+    <param name="success_ang_thresh"        type="double" value="$(arg success_ang_thresh)"/>
+    <remap from="/controller_status"        to="/$(arg namespace)/controller_status"/>
+    <remap from="/action/look_at_position"  to="/$(arg namespace)/action/look_at_position"/>
   </node>
 
 </launch>
diff --git a/src/robot_behavior/launch/apriltag2tracking.launch b/src/robot_behavior/launch/apriltag2tracking.launch
deleted file mode 100644
index 364f00d6b3c78613e49d758429198a29fd5c4840..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/apriltag2tracking.launch
+++ /dev/null
@@ -1,127 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="apriltag_ros"                     default="false"/>
-  <arg name="teleop_keyboard"                  default="false"/>
-  <arg name="teleop_joystick"                  default="false"/>
-  <arg name="relay"                            default="false"/>
-  <arg name="republish_from_apriltag"          default="false"/>
-  <arg name="republish_from_rtabmap"           default="false"/>
-  <arg name="rqt_joint_trajectory_controller"  default="false"/>
-  <arg name="social_mpc"                       default="false"/>
-  <arg name="action_servers"                   default="false"/>
-
-  <arg name="camera_name"            default="/head_front_camera"/>
-  <arg name="camera_frame"           default="head_front_camera_optical_frame"/>
-  <arg name="image_topic"            default="image_raw_apriltag"/>
-
-  <arg name="mobile_base_controller" default="/mobile_base_controller"/>
-  <arg name="speed"                  default="0.3"/>
-  <arg name="turn"                   default="0.5"/>
-
-  <arg name="in_base_topic"         default="/head_front_camera/image_raw/"/>
-  <arg name="out_base_topic"        default="/head_front_camera/image_raw_apriltag/"/>
-
-  <arg name="input_topic_relay"     default="input_topic_relay"/>
-  <arg name="output_topic_relay"    default="output_topic_relay"/>
-
-  <arg name="rviz"                  default="False"/>
-  <arg name="rviz_config_file"      default="localization_apriltag"/>
-
-  <arg name="reference_frame"       default="base_footprint"/>
-  <arg name="cam_optical_frame"     default="pinhole_front_optical_frame"/>
-
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="reference_map"          default="rtabmap_map"/>
-  <arg name="max_humans_world"       default="20"/>
-
-  <arg name="success_dist_thresh"    default="0.25"/>
-  <arg name="success_ang_thresh"     default="0.2"/>
-
-
-  <group if="$(arg action_servers)">
-    <node pkg="robot_behavior" type="navigate_action_server_main.py" name="navigate_action_server_main" output="screen">
-      <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-      <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-      <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-
-    <node pkg="robot_behavior" type="gotowards_action_server_main.py" name="gotowards_action_server_main" output="screen">
-      <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-      <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-      <param name="success_ang_thresh"   type="double"  value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-
-    <node pkg="robot_behavior" type="lookat_action_server_main.py" name="lookat_action_server_main" output="screen">
-      <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-      <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-  </group>
-
-  <group if="$(arg social_mpc)">
-    <node pkg="robot_behavior" type="controller_main.py" name="social_mpc" output="screen">
-      <param name="mpc_config_name"        type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
-      <param name="robot_config_name"      type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
-      <param name="plot_render"            type="bool"    value="$(arg plot_render)"/>
-      <param name="reference_map"          type="str"     value="$(arg reference_map)"/>
-      <param name="max_humans_world"       type="int"     value="$(arg max_humans_world)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-      <remap from="local_map"   to="rtabmap_local_map_rectified"/>
-      <remap from="global_map"  to="/rtabmap/proj_map"/>
-      <remap from="cmd_vel"     to="/nav_vel"/>
-    </node>
-  </group>
-
-  <group if="$(arg republish_from_apriltag)">
-    <node pkg="robot_behavior" type="republish_from_apriltag_main.py"  name="republish_from_apriltag_main" output="screen">
-      <param name="reference_frame"    type="str"  value="$(arg reference_frame)"/>
-      <param name="cam_optical_frame"  type="str"  value="$(arg cam_optical_frame)"/>
-    </node>
-  </group>
-
-  <group if="$(arg republish_from_rtabmap)">
-    <node pkg="robot_behavior" type="republish_from_rtabmap_main.py"  name="republish_from_rtabmap_main" output="screen"/>
-  </group>
-
-  <group if="$(arg rqt_joint_trajectory_controller)">
-    <node pkg="rqt_joint_trajectory_controller" type="rqt_joint_trajectory_controller"  name="rqt_joint_trajectory_controller" output="screen"/>
-  </group>
-
-  <group if="$(arg relay)">
-    <node pkg="topic_tools" type="relay"  name="relay" output="screen" args="$(arg input_topic_relay) $(arg output_topic_relay)" />
-  </group>
-
-  <group if="$(arg apriltag_ros)">
-    <node pkg="image_transport" type="republish"  name="republish" output="screen" args="compressed in:=$(arg in_base_topic) raw out:=$(arg out_base_topic)" />
-    <include file="$(find robot_behavior)/launch/apriltag_continuous_detection.launch">
-      <arg name="camera_name"   value="$(arg camera_name)"/>
-      <arg name="camera_frame"  value="$(arg camera_frame)"/>
-      <arg name="image_topic"   value="$(arg image_topic)"/>
-    </include>
-  </group>
-
-  <group if="$(arg teleop_keyboard)">
-    <node pkg="teleop_twist_keyboard" type="teleop_twist_keyboard.py" name="teleop_twist_keyboard" output="screen" >
-      <remap from="cmd_vel" to="$(arg mobile_base_controller)/cmd_vel" />
-      <param name="~speed"  type="double"  value="$(arg speed)" />
-      <param name="~turn"   type="double"  value="$(arg turn)" />
-    </node>
-  </group>
-
-  <group if="$(arg teleop_joystick)">
-    <node pkg="joy" type="joy_node" name="joystick">
-      <param name="autorepeat_rate" value="10" />
-      <remap from="diagnostics" to="joystick_diagnostics"/>
-    </node>
-  </group>
-
-  <group if="$(arg rviz)">
-    <node type="rviz" name="rviz" pkg="rviz" args="-d $(find robot_behavior)/config/rviz/$(arg rviz_config_file).rviz" />
-  </group>
-
-</launch>
diff --git a/src/robot_behavior/launch/apriltag2tracking_social_mpc_action_servers.launch b/src/robot_behavior/launch/apriltag2tracking_social_mpc_action_servers.launch
deleted file mode 100644
index 438c1b73be24cc1df3aeb7f79b896dcbc02740eb..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/apriltag2tracking_social_mpc_action_servers.launch
+++ /dev/null
@@ -1,97 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="teleop_keyboard"                  default="False"/>
-  <arg name="teleop_joystick"                  default="False"/>
-  <arg name="republish_from_rtabmap"           default="True"/>
-  <arg name="republish_from_apriltag"          default="True"/>
-  <arg name="social_mpc"                       default="True"/>
-  <arg name="action_servers"                   default="True"/>
-  <arg name="rviz"                             default="False"/>
-
-  <arg name="mobile_base_controller" default="/mobile_base_controller"/>
-  <arg name="speed"                  default="0.3"/>
-  <arg name="turn"                   default="0.5"/>
-
-  <arg name="rviz_config_file"      default="localization_apriltag"/>
-
-  <arg name="reference_frame"       default="base_footprint"/>
-  <arg name="cam_optical_frame"     default="pinhole_front_optical_frame"/>
-
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="reference_map"          default="rtabmap_map"/>
-  <arg name="max_humans_world"       default="20"/>
-
-  <arg name="success_dist_thresh"    default="0.25"/>
-  <arg name="success_ang_thresh"     default="0.2"/>
-
-
-  <group if="$(arg action_servers)">
-    <node pkg="robot_behavior" type="navigate_action_server_main.py" name="navigate_action_server_main" output="screen">
-      <param name="hz"                   type="int"     value="$(arg pub_hz_control)"/>
-      <param name="success_dist_thresh"  type="double"  value="$(arg success_dist_thresh)"/>
-      <param name="success_ang_thresh"   type="double"  value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-
-    <node pkg="robot_behavior" type="gotowards_action_server_main.py" name="gotowards_action_server_main" output="screen">
-      <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-      <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-      <param name="success_ang_thresh"   type="double"  value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-
-    <node pkg="robot_behavior" type="lookat_action_server_main.py" name="lookat_action_server_main" output="screen">
-      <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-      <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-    </node>
-  </group>
-
-  <group if="$(arg social_mpc)">
-    <node pkg="robot_behavior" type="controller_main.py" name="social_mpc" output="screen">
-      <param name="mpc_config_name"        type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
-      <param name="robot_config_name"      type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
-      <param name="plot_render"            type="bool"    value="$(arg plot_render)"/>
-      <param name="reference_map"          type="str"     value="$(arg reference_map)"/>
-      <param name="max_humans_world"       type="int"     value="$(arg max_humans_world)"/>
-      <remap from="odom"        to="rtabmap_odom_rectified"/>
-      <remap from="local_map"   to="rtabmap_local_map_rectified"/>
-      <remap from="global_map"  to="/rtabmap/proj_map"/>
-      <remap from="cmd_vel"     to="/nav_vel"/>
-    </node>
-  </group>
-
-  <group if="$(arg republish_from_apriltag)">
-    <node pkg="robot_behavior" type="republish_from_apriltag_main.py"  name="republish_from_apriltag_main" output="screen">
-      <param name="reference_frame"    type="str"  value="$(arg reference_frame)"/>
-      <param name="cam_optical_frame"  type="str"  value="$(arg cam_optical_frame)"/>
-    </node>
-  </group>
-
-  <group if="$(arg republish_from_rtabmap)">
-    <node pkg="robot_behavior" type="republish_from_rtabmap_main.py"  name="republish_from_rtabmap_main" output="screen"/>
-  </group>
-
-  <group if="$(arg teleop_keyboard)">
-    <node pkg="teleop_twist_keyboard" type="teleop_twist_keyboard.py" name="teleop_twist_keyboard" output="screen" >
-      <remap from="cmd_vel" to="$(arg mobile_base_controller)/cmd_vel" />
-      <param name="~speed"  type="double"  value="$(arg speed)" />
-      <param name="~turn"   type="double"  value="$(arg turn)" />
-    </node>
-  </group>
-
-  <group if="$(arg teleop_joystick)">
-    <node pkg="joy" type="joy_node" name="joystick">
-      <param name="autorepeat_rate" value="10" />
-      <remap from="diagnostics" to="joystick_diagnostics"/>
-    </node>
-  </group>
-
-  <group if="$(arg rviz)">
-    <node type="rviz" name="rviz" pkg="rviz" args="-d $(find robot_behavior)/config/rviz/$(arg rviz_config_file).rviz" />
-  </group>
-
-</launch>
diff --git a/src/robot_behavior/launch/apriltag_continuous_detection.launch b/src/robot_behavior/launch/apriltag_continuous_detection.launch
deleted file mode 100644
index 78a6f7f50cb8fef48e04db55004f58868cc9625f..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/apriltag_continuous_detection.launch
+++ /dev/null
@@ -1,20 +0,0 @@
-<launch>
-  <arg name="launch_prefix" default="" /> <!-- set to value="gdbserver localhost:10000" for remote debugging -->
-  <arg name="node_namespace" default="apriltag_ros_continuous_node" />
-  <arg name="camera_name" default="/camera_rect" />
-  <arg name="camera_frame" default="camera" />
-  <arg name="image_topic" default="image_rect" />
-
-  <!-- Set parameters -->
-  <rosparam command="load" file="$(find robot_behavior)/config/apriltag/settings.yaml" ns="$(arg node_namespace)" />
-  <rosparam command="load" file="$(find robot_behavior)/config/apriltag/tags.yaml" ns="$(arg node_namespace)" />
-
-  <node pkg="apriltag_ros" type="apriltag_ros_continuous_node" name="$(arg node_namespace)" clear_params="true" output="screen" launch-prefix="$(arg launch_prefix)" >
-    <!-- Remap topics from those used in code to those on the ROS network -->
-    <remap from="image_rect" to="$(arg camera_name)/$(arg image_topic)" />
-    <remap from="camera_info" to="$(arg camera_name)/camera_info" />
-
-    <param name="camera_frame" type="str" value="$(arg camera_frame)" />
-    <param name="publish_tag_detections_image" type="bool" value="true" />      <!-- default: false -->
-  </node>
-</launch>
diff --git a/src/robot_behavior/launch/basestation_republisher.launch b/src/robot_behavior/launch/basestation_republisher.launch
deleted file mode 100644
index f72b9c456a8303accb4c33a7e8fb2a80a47272c6..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/basestation_republisher.launch
+++ /dev/null
@@ -1,6 +0,0 @@
-<launch>
-    <node name="front_fisheye_basestation_node" pkg="image_transport" type="republish" output="screen" args="compressed in:=/front_camera/fisheye/image_raw out:=/front_fisheye_basestation/image_raw"/>
-    <!-- <node name="head_front_basestation_node" pkg="image_transport" type="republish" output="screen" args="compressed in:=/head_front_camera/color/image_raw  out:=/head_front_basestation/image_raw"/> -->
-    <!-- <node name="torso_front_color_basestation_node" pkg="image_transport" type="republish" output="screen" args="theora in:=/torso_front_camera/color/image_raw  out:=/torso_front_color_basestation/image_raw"/> -->
-    <!--<node name="torso_front_depth_basestation_node" pkg="image_transport" type="republish" output="screen" args="theora compressedDepth in:=/torso_front_camera/aligned_depth_to_color/image_raw  out:=/torso_front_depth_basestation"/>-->
-</launch>
diff --git a/src/robot_behavior/launch/behavior_go_to_body_action_client.launch b/src/robot_behavior/launch/behavior_go_to_body_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..e20ca5783d7e83cf2b8fc472dd91f1ad6b8d7bf6
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_go_to_body_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="body_id"             default="b-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_go_to_body_action_client_main.py"  name="behavior_go_to_body_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="body_id"     type="str" value="$(arg body_id)"/>
+    <param name="timeout"     type="int" value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_go_to_group_action_client.launch b/src/robot_behavior/launch/behavior_go_to_group_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..ae2d92cef991890d05cea17463ada1d421d2fe78
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_go_to_group_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="group_id"            default="grp-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_go_to_group_action_client_main.py"  name="behavior_go_to_group_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="group_id"    type="str" value="$(arg group_id)"/>
+    <param name="timeout"     type="int" value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_go_to_person_action_client.launch b/src/robot_behavior/launch/behavior_go_to_person_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..a5de107313407d805f1d714967f58f53efae85cd
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_go_to_person_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="person_id"           default="-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_go_to_person_action_client_main.py"  name="behavior_go_to_person_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="person_id"   type="str" value="$(arg person_id)"/>
+    <param name="timeout"     type="int" value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_go_to_position_action_client.launch b/src/robot_behavior/launch/behavior_go_to_position_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..c0a8d4e97285e6b75c0d998f02e84b0b7584706d
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_go_to_position_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="go_to_target_pose"    default="[0., 0., 0., 0., 0.]"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_go_to_position_action_client_main.py"  name="behavior_go_to_position_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="go_to_target_pose"  type="str"  value="$(arg go_to_target_pose)"/>
+    <param name="timeout"            type="int"  value="$(arg timeout)"/>
+    <param name="continuous"         type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_look_at_body_action_client.launch b/src/robot_behavior/launch/behavior_look_at_body_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..279d1c75843d0481f3be711bc79a87669110cfd6
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_look_at_body_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="body_id"             default="b-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_look_at_body_action_client_main.py"  name="behavior_look_at_body_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="body_id"     type="str"  value="$(arg body_id)"/>
+    <param name="timeout"     type="int"  value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_look_at_group_action_client.launch b/src/robot_behavior/launch/behavior_look_at_group_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..4a0d0a43ab6bca31b822d6880f2d157b4e8da8a1
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_look_at_group_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="group_id"            default="grp-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_look_at_group_action_client_main.py"  name="behavior_look_at_group_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="group_id"    type="str"  value="$(arg group_id)"/>
+    <param name="timeout"     type="int"  value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_look_at_person_action_client.launch b/src/robot_behavior/launch/behavior_look_at_person_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..f89f3814e8858d25a4ddf2aba0c1fc9d5885c825
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_look_at_person_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="person_id"           default="-1"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_look_at_person_action_client_main.py"  name="behavior_look_at_person_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="person_id"   type="str"  value="$(arg person_id)"/>
+    <param name="timeout"     type="int"  value="$(arg timeout)"/>
+    <param name="continuous"  type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/behavior_look_at_position_action_client.launch b/src/robot_behavior/launch/behavior_look_at_position_action_client.launch
new file mode 100644
index 0000000000000000000000000000000000000000..853365bc0a7c00c795b4503870e35576af187bc4
--- /dev/null
+++ b/src/robot_behavior/launch/behavior_look_at_position_action_client.launch
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"           default="behavior_generator"/>
+  <arg name="look_at_position"    default="[0., 0., 0., 0.]"/>
+  <arg name="timeout"             default="120"/>
+  <arg name="continuous"          default="False"/>
+
+  <node pkg="robot_behavior" type="behavior_look_at_position_action_client_main.py"  name="behavior_look_at_position_action_client_main" output="screen" ns="$(arg namespace)">
+    <param name="look_at_position"  type="str"  value="$(arg look_at_position)"/>
+    <param name="timeout"           type="int"  value="$(arg timeout)"/>
+    <param name="continuous"        type="bool" value="$(arg continuous)"/>
+  </node>
+
+</launch>
diff --git a/src/robot_behavior/launch/global_path_planner.launch b/src/robot_behavior/launch/global_path_planner.launch
new file mode 100644
index 0000000000000000000000000000000000000000..272072cf6f6476fd73094070ad48676cb3d51c7a
--- /dev/null
+++ b/src/robot_behavior/launch/global_path_planner.launch
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+    <arg name="mpc_config_name"             default="social_mpc"/>
+    <arg name="map_frame"                   default="map"/>
+    <arg name="robot_frame"                 default="base_footprint"/>
+    <arg name="namespace_slam"              default="slam"/>
+    <arg name="global_occupancy_map_topic"  default="/slam/global_map"/>
+
+  <node pkg="robot_behavior" type="behavior_global_path_planner_main.py" name="global_path_planner" output="screen" >    
+    <param name="mpc_config_name"             type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
+    <param name="robot_frame"                 type="str"     value="$(arg robot_frame)"/>
+    <param name="map_frame"                   type="str"     value="$(arg map_frame)"/>
+    <param name="namespace_slam"              type="str"     value="$(arg namespace_slam)"/>
+    <param name="global_occupancy_map_topic"  type="str"     value="$(arg global_occupancy_map_topic)"/>    
+  </node>
+
+
+</launch>
diff --git a/src/robot_behavior/launch/goal_finder.launch b/src/robot_behavior/launch/goal_finder.launch
new file mode 100644
index 0000000000000000000000000000000000000000..dc0463c01e3721e1d9f0d1818f6b316092eb5c2a
--- /dev/null
+++ b/src/robot_behavior/launch/goal_finder.launch
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+    <arg name="mpc_config_name"             default="social_mpc"/>
+    <arg name="robot_config_name"           default="robot"/>
+    <arg name="map_frame"                   default="map"/>
+    <arg name="robot_frame"                 default="base_footprint"/>
+    <arg name="namespace_slam"              default="slam"/>
+    <arg name="global_occupancy_map_topic"  default="/slam/global_map"/>
+    <arg name="max_humans_world"            default="20"/>
+    <arg name="max_groups_world"            default="10"/>
+    <arg name="namespace"                   default="behavior_generator"/>
+
+  <node pkg="robot_behavior" type="behavior_goal_finder_main.py" name="goal_finder" output="screen" >    
+    <param name="mpc_config_name"             type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
+    <param name="robot_config_name"           type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
+    <param name="max_humans_world"            type="int"     value="$(arg max_humans_world)"/>
+    <param name="max_groups_world"            type="int"     value="$(arg max_groups_world)"/>
+    <param name="robot_frame"                 type="str"     value="$(arg robot_frame)"/>
+    <param name="map_frame"                   type="str"     value="$(arg map_frame)"/>
+    <param name="namespace_slam"              type="str"     value="$(arg namespace_slam)"/>
+    <param name="global_occupancy_map_topic"  type="str"     value="$(arg global_occupancy_map_topic)"/>
+    <param name="namespace"                   type="str"     value="$(arg namespace)"/>
+  </node>
+
+
+</launch>
diff --git a/src/robot_behavior/launch/gotowards_action_client.launch b/src/robot_behavior/launch/gotowards_action_client.launch
deleted file mode 100644
index 070d1588a248b70c8b47455bd582cde2990854b4..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/gotowards_action_client.launch
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="goto_target_id"      default="-1"/>
-  <arg name="timeout"             default="120"/>
-
-  <node pkg="robot_behavior" type="gotowards_action_client_main.py"  name="gotowards_action_client_main" output="screen">
-    <param name="goto_target_id" type="int" value="$(arg goto_target_id)"/>
-    <param name="timeout" type="int" value="$(arg timeout)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/image_projection.launch b/src/robot_behavior/launch/image_projection.launch
deleted file mode 100644
index 3084c115baf415cba020ddbf32e5b18713770e25..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/image_projection.launch
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="camera_ns"              default="/front_camera/fisheye"/>
-  <arg name="fisheye_camera_frame"   default="front_fisheye_camera_optical_frame"/>
-
-
-  <node name="camera_info_publisher" pkg="kalibr_extended_camera_info_publisher" type="camera_info_publisher_node" >
-    <param name="camera_ns" value="$(arg camera_ns)"/>
-    <param name="frame_id" value="$(arg fisheye_camera_frame)"/>
-    <rosparam file="$(find robot_behavior)/config/projections/fisheye_calibration.yaml" command="load" subst_value="true" />
-  </node>
-
-  <rosparam command="load" file="$(find robot_behavior)/config/projections/projections_inria.yaml" subst_value="true"/>
-
-  <node name="front_fisheye_camera_rect"  pkg="image_projection" type="periodic_image_projection_node" >
-    <rosparam command="load" file="$(find robot_behavior)/config/projections/front_fisheye_camera.yaml" subst_value="true"/>
-    <remap from="~projection" to="$(arg camera_ns)/rect/image_raw" />
-    <remap from="~camera_info" to="$(arg camera_ns)/rect/camera_info" />
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/image_rectification.launch b/src/robot_behavior/launch/image_rectification.launch
deleted file mode 100644
index 036809f9ac0278c87da79938c739c44ba9301cf5..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/image_rectification.launch
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-<arg name="front_fisheye_camera_ns"             default="/front_fisheye_basestation"/>
-<include file="$(find image_projection_demo)/launch/projections_inria.launch">
-    <arg name="camera_ns"                        value="$(arg front_fisheye_camera_ns)"/>
-</include>
-</launch>
\ No newline at end of file
diff --git a/src/robot_behavior/launch/local_path_planner_mpc.launch b/src/robot_behavior/launch/local_path_planner_mpc.launch
new file mode 100644
index 0000000000000000000000000000000000000000..ef5249681a34f8ebe9b50bb5ebcbec74eafddf98
--- /dev/null
+++ b/src/robot_behavior/launch/local_path_planner_mpc.launch
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+    <arg name="mpc_config_name"              default="social_mpc"/>
+    <arg name="robot_config_name"            default="robot"/>
+    <arg name="map_frame"                    default="map"/>
+    <arg name="robot_frame"                  default="base_footprint"/>
+    <arg name="namespace_slam"               default="slam"/>
+    <arg name="global_occupancy_map_topic"   default="global_map"/>
+    <arg name="local_occupancy_map_topic"    default="local_static_cost_map"/>
+    <arg name="local_social_cost_map_topic"  default="local_social_cost_map"/>
+
+  <node pkg="robot_behavior" type="behavior_local_path_planner_mpc_main.py" name="local_path_planner_mpc" output="screen" >    
+    <param name="mpc_config_name"              type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
+    <param name="robot_config_name"            type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
+    <param name="robot_frame"                  type="str"     value="$(arg robot_frame)"/>
+    <param name="map_frame"                    type="str"     value="$(arg map_frame)"/>
+    <param name="namespace_slam"               type="str"     value="$(arg namespace_slam)"/>
+    <param name="global_occupancy_map_topic"   type="str"     value="/$(arg namespace_slam)/$(arg global_occupancy_map_topic)"/>
+    <param name="local_occupancy_map_topic"    type="str"     value="/$(arg namespace_slam)/$(arg local_occupancy_map_topic)"/>
+    <param name="local_social_cost_map_topic"  type="str"     value="/$(arg namespace_slam)/$(arg local_social_cost_map_topic)"/>
+    <remap from="/cmd_vel"     to="/nav_vel"/>
+  </node>
+
+
+</launch>
diff --git a/src/robot_behavior/launch/lookat_action_client.launch b/src/robot_behavior/launch/lookat_action_client.launch
deleted file mode 100644
index f808f257800b92ccb901ed2aee10e67ac161451c..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/lookat_action_client.launch
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="pan_target_id"       default="-1"/>
-  <arg name="timeout"             default="120"/>
-
-  <node pkg="robot_behavior" type="lookat_action_client_main.py"  name="lookat_action_client_main" output="screen">
-    <param name="pan_target_id" type="int" value="$(arg pan_target_id)"/>
-    <param name="timeout" type="int" value="$(arg timeout)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/main.launch b/src/robot_behavior/launch/main.launch
new file mode 100644
index 0000000000000000000000000000000000000000..7f30f8949e03b315944693e3b469a12b108dc4b9
--- /dev/null
+++ b/src/robot_behavior/launch/main.launch
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<launch>
+  <arg name="namespace"                                default="behavior_generator"/>
+  <arg name="namespace_slam"                           default="slam"/>
+
+  <arg name="social_mpc"                               default="True"/>
+  <arg name="action_servers"                           default="True"/>
+  <arg name="occupancy_map_republisher"                default="True"/> 
+
+  <arg name="mpc_config_name"                          default="social_mpc"/>
+  <arg name="robot_config_name"                        default="robot"/>
+  <arg name="max_humans_world"                         default="20"/>
+  <arg name="map_frame"                                default="map"/>
+  <arg name="robot_frame"                              default="base_footprint"/>
+  <arg name="global_occupancy_map_topic"               default="global_map"/>
+  <arg name="local_occupancy_map_topic"                default="local_static_cost_map"/>
+  <arg name="target_id_absent_treshold"                default="3"/>
+  
+  <arg name="pub_hz_control"                           default="20"/>
+  <arg name="success_dist_thresh"                      default="0.4"/>
+  <arg name="success_ang_thresh"                       default="0.3"/>
+
+  <arg name="local_map_depth"                          default="3"/>
+  <arg name="offset_dilation_local"                    default="10"/>
+  <arg name="offset_dilation_global"                   default="10"/>
+  <arg name="base_footprint_radius"                    default="0.3"/>
+  <arg name="dilation_iter"                            default="1"/>
+  <arg name="global_map_topic"                         default="grid_map"/>
+  <arg name="obstacle_map_topic"                       default="/obstacle_map"/>
+  <arg name="depth_camera_topic"                       default="/torso_front_camera/depth/color/points"/>
+  <arg name="obstacle_mask_threshold"                  default="0.3"/>
+  <arg name="kernel_size_opening"                      default="1"/>
+  
+  <arg name="diag_timer_rate"                          default="5"/>
+
+
+  <group ns="$(arg namespace)" if="$(arg action_servers)">
+    <include file="$(find robot_behavior)/launch/action_servers.launch">
+      <arg name="pub_hz_control"         value="$(arg pub_hz_control)"/>
+      <arg name="success_dist_thresh"    value="$(arg success_dist_thresh)"/>
+      <arg name="namespace"              value="$(arg namespace)"/>
+      <arg name="success_ang_thresh"     value="$(arg success_ang_thresh)"/>
+    </include>
+  </group>
+
+  <group if="$(arg social_mpc)">
+    <include file="$(find robot_behavior)/launch/social_mpc.launch">
+      <arg name="mpc_config_name"             value="$(arg mpc_config_name)"/>
+      <arg name="robot_config_name"           value="$(arg robot_config_name)"/>
+      <arg name="max_humans_world"            value="$(arg max_humans_world)"/>
+      <arg name="namespace"                   value="$(arg namespace)"/>
+      <arg name="namespace_slam"              value="$(arg namespace_slam)"/>
+      <arg name="map_frame"                   value="$(arg map_frame)"/>
+      <arg name="robot_frame"                 value="$(arg robot_frame)"/>
+      <arg name="global_occupancy_map_topic"  value="/$(arg namespace_slam)/$(arg global_occupancy_map_topic)"/>
+      <arg name="local_occupancy_map_topic"   value="/$(arg namespace_slam)/$(arg local_occupancy_map_topic)"/>
+      <arg name="diag_timer_rate"             value="$(arg diag_timer_rate)"/>
+      <arg name="target_id_absent_treshold"   value="$(arg target_id_absent_treshold)"/>
+    </include>
+  </group>
+
+  <group if="$(arg occupancy_map_republisher)">
+    <include file="$(find occupancy_map_republisher)/launch/node.launch">
+      <arg name="map_frame"                value="$(arg map_frame)"/>
+      <arg name="robot_frame"              value="$(arg robot_frame)"/>
+      <arg name="namespace_slam"           value="/$(arg namespace_slam)"/>
+      <arg name="local_map_depth"          value="$(arg local_map_depth)"/>
+      <arg name="offset_dilation_local"    value="$(arg offset_dilation_local)"/>
+      <arg name="offset_dilation_global"   value="$(arg offset_dilation_global)"/>
+      <arg name="base_footprint_radius"    value="$(arg base_footprint_radius)"/>
+      <arg name="dilation_iter"            value="$(arg dilation_iter)"/>
+      <arg name="global_map_topic"         value="/$(arg namespace_slam)/$(arg global_map_topic)"/>
+      <arg name="obstacle_map_topic"       value="$(arg obstacle_map_topic)"/>
+      <arg name="depth_camera_topic"       value="$(arg depth_camera_topic)"/>
+      <arg name="diag_timer_rate"          value="$(arg diag_timer_rate)"/>
+      <arg name="obstacle_mask_threshold"  value="$(arg obstacle_mask_threshold)"/>
+      <arg name="kernel_size_opening"      value="$(arg kernel_size_opening)"/>
+    </include>
+  </group>
+
+</launch>
diff --git a/src/robot_behavior/launch/navigate_action_client.launch b/src/robot_behavior/launch/navigate_action_client.launch
deleted file mode 100644
index 03846267338cb69c442c0f9ab4beb22cfdfa2e1a..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/navigate_action_client.launch
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="goto_target_id"      default="-1"/>
-  <arg name="human_target_id"     default="-1"/>
-  <arg name="goto_target_pose"    default="[0., 0., 0., 0., 0.]"/>
-  <arg name="relative_h_pose"     default="[0.90, -0.2]"/>
-  <arg name="timeout"             default="120"/>
-
-  <node pkg="robot_behavior" type="navigate_action_client_main.py"  name="navigate_action_client_main" output="screen">
-    <param name="goto_target_id" type="int" value="$(arg goto_target_id)"/>
-    <param name="human_target_id" type="int" value="$(arg human_target_id)"/>
-    <param name="goto_target_pose" type="str" value="$(arg goto_target_pose)"/>
-    <param name="relative_h_pose" type="str" value="$(arg relative_h_pose)"/>
-    <param name="timeout" type="int" value="$(arg timeout)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/republish_from_apriltag.launch b/src/robot_behavior/launch/republish_from_apriltag.launch
deleted file mode 100644
index cc7048436ac1910c95e164ef663de32848acaa1f..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/republish_from_apriltag.launch
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="robot_frame"            default="base_footprint"/>
-  <arg name="cam_optical_frame"      default="front_fisheye_camera_optical_frame"/>
-  <arg name="camera_ns"              default="/front_camera/fisheye"/>
-
-
-  <node pkg="robot_behavior" type="republish_from_apriltag_main.py"  name="republish_from_apriltag_main" output="screen">
-    <param name="robot_frame"    type="str"  value="$(arg robot_frame)"/>
-    <param name="cam_optical_frame"  type="str"  value="$(arg cam_optical_frame)"/>
-  </node>
-
-  <node pkg="apriltag_ros" type="apriltag_ros_continuous_node" name="apriltags" clear_params="true" output="screen">
-    <rosparam command="load" file="$(find robot_behavior)/config/apriltag/settings.yaml"/>
-    <rosparam command="load" file="$(find robot_behavior)/config/apriltag/tags.yaml"/>
-    <remap from="image_rect" to="$(arg camera_ns)/rect/image_raw" />
-    <remap from="camera_info" to="$(arg camera_ns)/rect/camera_info" />
-    <param name="publish_tag_detections_image" type="bool" value="true" />
-    <param name="camera_frame" type="str" value="pinhole_front_camera_frame" />
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/republish_from_openpose.launch b/src/robot_behavior/launch/republish_from_openpose.launch
deleted file mode 100644
index ccfb7ea2cb1577f6e64965052591ef39c2fb321a..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/republish_from_openpose.launch
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="frame_openpose_topic"   default="/frame"/>
-  <arg name="tracking_topic"         default="/tracker/tracker_output"/>
-  <arg name="image_pose_topic"   default="/rosOpenpose/camera/image"/>
-
-
-  <node pkg="robot_behavior" type="republish_from_openpose_main.py"  name="republish_from_openpose_main" output="screen">
-    <param name="frame_topic"           type="str"  value="$(arg frame_openpose_topic)"/>
-    <param name="tracking_topic"        type="str"  value="$(arg tracking_topic)"/>
-    <param name="image_pose_topic"  type="str"  value="$(arg image_pose_topic)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/republish_from_rtabmap.launch b/src/robot_behavior/launch/republish_from_rtabmap.launch
deleted file mode 100644
index 4b72582824f9903f2d299c5d86c52b79086df861..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/republish_from_rtabmap.launch
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="robot_frame"            default="base_footprint"/>
-  <arg name="map_frame"              default="map"/>
-  <arg name="frame_topic"            default="/tracked_pose_2d/frame"/>
-
-
-  <node pkg="robot_behavior" type="republish_from_rtabmap_main.py"  name="republish_from_rtabmap_main" output="screen">
-    <param name="robot_frame"  type="str"  value="$(arg robot_frame)"/>
-    <param name="map_frame"   type="str"  value="$(arg map_frame)"/>
-    <param name="frame_topic"        type="str"  value="$(arg frame_topic)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/ros_mediapipe.launch b/src/robot_behavior/launch/ros_mediapipe.launch
deleted file mode 100644
index 7c7826fde8f24a7885aa5a42624d15ab3b935e9f..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/ros_mediapipe.launch
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="image_topic"            default="/front_fisheye_basestation/image_raw"/>
-  <arg name="tracking_topic"         default="/tracker/tracker_output"/>
-
-
-  <node pkg="robot_behavior" type="ros_mediapipe_main.py"  name="ros_mediapipe_main" output="screen">
-    <param name="image_topic"        type="str"  value="$(arg image_topic)"/>
-    <param name="tracking_topic"     type="str"  value="$(arg tracking_topic)"/>
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/rtabmap_apriltag.launch b/src/robot_behavior/launch/rtabmap_apriltag.launch
deleted file mode 100644
index e15dc114873324ccd3a27254f4dff13d3422593b..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/rtabmap_apriltag.launch
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <!-- apriltag args -->
-  <arg name="camera_name"                      default="/front_fisheye_camera/pinhole_front/"/>
-  <arg name="camera_frame"                     default="pinhole_front_optical_frame"/>
-  <arg name="image_topic"                      default="image_rect_color"/>
-
-  <!-- <arg name="in_topic_republish"               default="/head_front_camera/image_raw/"/> -->
-  <!-- <arg name="out_topic_republish"              default="/head_front_camera/image_raw_apriltag/"/> -->
-
-  <!-- rtabmap args -->
-  <arg name="localization"                     default="True"/>
-  <arg name="database_name"                    default="inria_office_stereo"/>
-  <arg name="rviz_rtabmap"                     default="False"/>
-  <arg name="rtabmapviz"                       default="False"/>
-
-  <arg name="front_fisheye_camera_ns"          default="/front_camera/fisheye/"/>
-
-
-  <include file="$(find image_projection_demo)/launch/projections_inria.launch">
-    <arg name="camera_ns"                        value="$(arg front_fisheye_camera_ns)"/>
-  </include>
-
-  <!-- <node pkg="image_transport" type="republish"  name="republish" output="screen" args="compressed in:=$(arg in_topic_republish) raw out:=$(arg out_topic_republish)" /> -->
-  <include file="$(find robot_behavior)/launch/apriltag_continuous_detection.launch">
-    <arg name="camera_name"   value="$(arg camera_name)"/>
-    <arg name="camera_frame"  value="$(arg camera_frame)"/>
-    <arg name="image_topic"   value="$(arg image_topic)"/>
-  </include>
-
-
-  <include file="$(find robot_behavior)/launch/stereo_rtabmap_inria.launch">
-    <arg name="rviz"                             value="$(arg rviz_rtabmap)"/>
-    <arg name="localization"                     value="$(arg localization)"/>
-    <arg name="rtabmapviz"                       value="$(arg rtabmapviz)"/>
-    <arg name="database_path"                    value="$(find robot_behavior)/maps/$(arg database_name).db"/>
-  </include>
-
-</launch>
diff --git a/src/robot_behavior/launch/rtabmap_apriltag_social_mpc.launch b/src/robot_behavior/launch/rtabmap_apriltag_social_mpc.launch
deleted file mode 100644
index 90c589ffc18858157d0132bdf04493748c9afe38..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/rtabmap_apriltag_social_mpc.launch
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <!-- apriltag2tracking args -->
-  <arg name="apriltag_ros"                     default="false"/>
-  <arg name="teleop_keyboard"                  default="false"/>
-  <arg name="teleop_joystick"                  default="false"/>
-  <arg name="relay"                            default="false"/>
-  <arg name="apriltag2tracking_main"           default="false"/>
-  <arg name="rqt_joint_trajectory_controller"  default="false"/>
-  <arg name="social_mpc"                       default="false"/>
-  <arg name="action_servers"                   default="false"/>
-  <arg name="rviz_controller"                  default="False"/>
-  <arg name="plot_render"                      default="False"/>
-
-  <arg name="config_name_control"              default="social_mpc"/>
-  <arg name="rviz_config_file"                 default="localization_apriltag"/>
-
-  <arg name="camera_name"                      default="/head_front_camera"/>
-  <arg name="camera_frame"                     default="head_front_camera_optical_frame"/>
-  <arg name="image_topic"                      default="image_raw_apriltag"/>
-
-  <arg name="in_topic_republish"               default="/head_front_camera/image_raw/"/>
-  <arg name="out_topic_republish"              default="/head_front_camera/image_raw_apriltag/"/>
-
-  <!-- rtabmap args -->
-  <arg name="localization"                     default="True"/>
-  <arg name="database_name"                    default="inria_office_stereo"/>
-  <arg name="rviz_rtabmap"                     default="False"/>
-  <arg name="rtabmapviz"                       default="False"/>
-
-
-  <include file="$(find robot_behavior)/launch/apriltag2tracking.launch">
-    <arg name="apriltag_ros"                      value="$(arg apriltag_ros)"/>
-    <arg name="teleop_keyboard"                   value="$(arg teleop_keyboard)"/>
-    <arg name="teleop_joystick"                   value="$(arg teleop_joystick)"/>
-    <arg name="relay"                             value="$(arg relay)"/>
-    <arg name="apriltag2tracking_main"            value="$(arg apriltag2tracking_main)"/>
-    <arg name="plot_render"                       value="$(arg plot_render)"/>
-    <arg name="rqt_joint_trajectory_controller"   value="$(arg rqt_joint_trajectory_controller)"/>
-    <arg name="social_mpc"                        value="$(arg social_mpc)"/>
-    <arg name="action_servers"                    value="$(arg action_servers)"/>
-    <arg name="rviz"                              value="$(arg rviz_controller)"/>
-    <arg name="config_name_control"               value="$(arg config_name_control)"/>
-    <arg name="rviz_config_file"                  value="$(arg rviz_config_file)"/>
-    <arg name="camera_name"                       value="$(arg camera_name)"/>
-    <arg name="camera_frame"                      value="$(arg camera_frame)"/>
-    <arg name="image_topic"                       value="$(arg image_topic)"/>
-    <arg name="in_base_topic"                     value="$(arg in_topic_republish)"/>
-    <arg name="out_base_topic"                    value="$(arg out_topic_republish)"/>
-  </include>
-
-  <include file="$(find robot_behavior)/launch/stereo_rtabmap_inria.launch">
-    <arg name="rviz"                             value="$(arg rviz_rtabmap)"/>
-    <arg name="localization"                     value="$(arg localization)"/>
-    <arg name="rtabmapviz"                       value="$(arg rtabmapviz)"/>
-    <arg name="database_path"                    value="$(find robot_behavior)/maps/$(arg database_name).db"/>
-  </include>
-
-
-</launch>
diff --git a/src/robot_behavior/launch/social_mpc.launch b/src/robot_behavior/launch/social_mpc.launch
index ea5dd64a070ef7baa208cff06f3e04049661b8e1..f82e78e592d99e9a289a07c49a254dd4d4ba3fdf 100644
--- a/src/robot_behavior/launch/social_mpc.launch
+++ b/src/robot_behavior/launch/social_mpc.launch
@@ -1,25 +1,32 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <launch>
 
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="map_frame"              default="map"/>
-  <arg name="robot_frame"            default="base_footprint"/>
-  <arg name="max_humans_world"       default="20"/>
+  <arg name="mpc_config_name"             default="social_mpc"/>
+  <arg name="robot_config_name"           default="robot"/>
+  <arg name="pub_hz_control"              default="20"/>
+  <arg name="max_humans_world"            default="20"/>
+  <arg name="namespace"                   default="behavior_generator"/>
+  <arg name="namespace_slam"              default="slam"/>
+  <arg name="map_frame"                   default="map"/>
+  <arg name="robot_frame"                 default="base_footprint"/>
+  <arg name="local_map_depth"             default="3"/>
+  <arg name="global_occupancy_map_topic"  default="/slam/global_map"/>
+  <arg name="local_occupancy_map_topic"   default="/slam/local_map"/>
+  <arg name="diag_timer_rate"             default="10"/>
+  <arg name="target_id_absent_treshold"   default="3"/>
 
-
-  <node pkg="robot_behavior" type="controller_main.py" name="social_mpc" output="screen" >
-    <param name="mpc_config_name"        type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
-    <param name="robot_config_name"      type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
-    <param name="plot_render"            type="bool"    value="$(arg plot_render)"/>
-    <param name="map_frame"              type="str"     value="$(arg map_frame)"/>
-    <param name="robot_frame"            type="str"     value="$(arg robot_frame)"/>
-    <param name="max_humans_world"       type="int"     value="$(arg max_humans_world)"/>
-    <remap from="/odom"        to="/rtabmap_odom_rectified"/>
-    <remap from="/local_map"   to="/rtabmap_local_map_rectified"/>
-    <remap from="/global_map"  to="/rtabmap/proj_map"/>
+  <node pkg="robot_behavior" type="behavior_generator_main.py" name="social_mpc" output="screen" >
+    <param name="mpc_config_name"             type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
+    <param name="robot_config_name"           type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
+    <param name="max_humans_world"            type="int"     value="$(arg max_humans_world)"/>
+    <param name="namespace"                   type="str"     value="$(arg namespace)"/>
+    <param name="namespace_slam"              type="str"     value="$(arg namespace_slam)"/>
+    <param name="map_frame"                   type="str"     value="$(arg map_frame)"/>
+    <param name="robot_frame"                 type="str"     value="$(arg robot_frame)"/>
+    <param name="global_occupancy_map_topic"  type="str"     value="$(arg global_occupancy_map_topic)"/>    
+    <param name="local_occupancy_map_topic"   type="str"     value="$(arg local_occupancy_map_topic)"/>   
+    <param name="diag_timer_rate"             type="int"     value="$(arg diag_timer_rate)"/>
+    <param name="target_id_absent_treshold"   type="int"     value="$(arg target_id_absent_treshold)"/>
     <remap from="/cmd_vel"     to="/nav_vel"/>
   </node>
 
diff --git a/src/robot_behavior/launch/social_mpc_sim.launch b/src/robot_behavior/launch/social_mpc_sim.launch
deleted file mode 100644
index 0e724e4ee5ee12fbf8a9e18a6aad3a73085ce4b5..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/social_mpc_sim.launch
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="gui"                    default="True"/>
-  <arg name="config_name_sim"        default="None"/>
-  <arg name="pub_hz_sim"             default="20"/>
-  <arg name="joint_names"            default="['head_1_joint']"/>
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="max_humans_world"       default="20"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="reference_map"          default="map"/>
-  <arg name="node_start_delay"       default="5.0"/>
-  <arg name="rviz"                   default="False"/>
-
-  <node pkg="robot_behavior" type="sim_2d_main.py"  name="simulator_2d" output="screen">
-    <!-- <remap from="cmd_vel" to="$(arg mobile_base_controller)/cmd_vel" /> -->
-    <param name="gui" type="bool" value="$(arg gui)"/>
-    <param name="config_name" type="str" value="$(arg config_name_sim)"/>
-    <param name="pub_hz" type="int" value="$(arg pub_hz_sim)"/>
-    <param name="joint_names" type="str" value="$(arg joint_names)"/>
-  </node>
-
-  <node pkg="robot_behavior" type="controller_main.py" name="social_mpc" output="screen">
-    <param name="mpc_config_name"        type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
-    <param name="robot_config_name"      type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
-    <param name="pub_hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="plot_render" type="bool" value="$(arg plot_render)"/>
-    <param name="reference_map"          type="str"     value="$(arg reference_map)"/>
-    <param name="max_humans_world"       type="int"     value="$(arg max_humans_world)"/>
-  </node>
-
-  <group if="$(arg rviz)">
-    <node type="rviz" name="rviz" pkg="rviz" launch-prefix="bash -c 'sleep $(arg node_start_delay); $0 $@' " args="-d $(find robot_behavior)/config/rviz/social_mpc_navigation.rviz"/>
-    <!-- or timed_roslaunch -->
-  </group>
-
-</launch>
diff --git a/src/robot_behavior/launch/social_mpc_sim_action_servers.launch b/src/robot_behavior/launch/social_mpc_sim_action_servers.launch
deleted file mode 100644
index 9cf6bacdf6d55dc1bd3a49bfbe7ef1516178cdd9..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/social_mpc_sim_action_servers.launch
+++ /dev/null
@@ -1,58 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="gui"                    default="True"/>
-  <arg name="config_name_sim"        default="None"/>
-  <arg name="pub_hz_sim"             default="20"/>
-  <arg name="joint_names"            default="['head_1_joint']"/>
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="reference_map"          default="map"/>
-  <arg name="node_start_delay"       default="5.0"/>
-  <arg name="success_dist_thresh"    default="0.25"/>
-  <arg name="success_ang_thresh"     default="0.2"/>
-  <arg name="rviz"                   default="False"/>
-  <arg name="max_humans_world"       default="20"/>
-
-  <node pkg="robot_behavior" type="sim_2d_main.py"  name="simulator_2d" output="screen">
-    <!-- <remap from="cmd_vel" to="$(arg mobile_base_controller)/cmd_vel" /> -->
-    <param name="gui" type="bool" value="$(arg gui)"/>
-    <param name="config_name" type="str" value="$(arg config_name_sim)"/>
-    <param name="pub_hz" type="int" value="$(arg pub_hz_sim)"/>
-    <param name="joint_names" type="str" value="$(arg joint_names)"/>
-  </node>
-
-  <node pkg="robot_behavior" type="controller_main.py" name="social_mpc" output="screen">
-    <param name="mpc_config_name"        type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg mpc_config_name).yaml"/>
-    <param name="robot_config_name"      type="str"     value="$(find robot_behavior)/config/social_mpc/$(arg robot_config_name).yaml"/>
-    <param name="pub_hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="plot_render" type="bool" value="$(arg plot_render)"/>
-    <param name="reference_map"          type="str"     value="$(arg reference_map)"/>
-    <param name="max_humans_world"       type="int"     value="$(arg max_humans_world)"/>
-    <remap from="/head_controller/command"     to="/pan_vel"/>
-  </node>
-
-  <node pkg="robot_behavior" type="navigate_action_server_main.py" name="navigate_action_server_main" output="screen">
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-    <param name="success_ang_thresh"   type="double"  value="$(arg success_ang_thresh)"/>
-  </node>
-
-  <node pkg="robot_behavior" type="gotowards_action_server_main.py" name="gotowards_action_server_main" output="screen">
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_dist_thresh" type="double" value="$(arg success_dist_thresh)"/>
-    <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
-  </node>
-
-  <node pkg="robot_behavior" type="lookat_action_server_main.py" name="lookat_action_server_main" output="screen">
-    <param name="hz" type="int" value="$(arg pub_hz_control)"/>
-    <param name="success_ang_thresh" type="double" value="$(arg success_ang_thresh)"/>
-  </node>
-
-  <group if="$(arg rviz)">
-    <node type="rviz" name="rviz" pkg="rviz" launch-prefix="bash -c 'sleep $(arg node_start_delay); $0 $@' " args="-d $(find robot_behavior)/config/rviz/social_mpc_navigation.rviz"/>
-    <!-- or timed_roslaunch -->
-  </group>
-
-</launch>
diff --git a/src/robot_behavior/launch/stereo_rtabmap_inria.launch b/src/robot_behavior/launch/stereo_rtabmap_inria.launch
deleted file mode 100644
index 1e75efb4d00436e162c0a3085e1e51668de0b266..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/stereo_rtabmap_inria.launch
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="left_image_topic"         default="/torso_front_camera/infra1/image_rect_raw"/>
-  <arg name="left_camera_info_topic"   default="/torso_front_camera/infra1/camera_info"/>
-  <arg name="right_image_topic"        default="/torso_front_camera/infra2/image_rect_raw"/>
-  <arg name="right_camera_info_topic"  default="/torso_front_camera/infra2/camera_info"/>
-  <arg name="stereo"                   default="True"/>
-  <arg name="wait_imu_to_init"         default="True"/>
-  <arg name="imu_topic"                default="/torso_front_camera/accel/sample"/>
-  <arg name="frame_id"                 default="base_footprint"/>
-  <arg name="odom_frame_id"            default="odom"/>
-  <arg name="odom_frame_id_init"       default="odom"/>
-  <arg name="localization"             default="True"/>
-  <arg name="database_path"            default="inria_office_stereo.db"/>
-  <arg name="rviz"                     default="False"/>
-  <arg name="odom_topic"               default="rtabmap_odom"/>
-  <arg name="visual_odometry"          default="True"/>
-  <arg name="publish_tf_odom"          default="True"/>
-  <arg name="publish_tf_map"           default="True"/>
-  <arg name="map_frame_id"             default="rtabmap_map"/>
-  <arg name="use_mag"                  default="False"/>
-  <arg name="publish_imu_tf"           default="False"/>
-  <arg name="world_frame"              default="enu"/>
-  <arg name="imu_data_raw"             default="/torso_front_camera/accel/sample"/>
-  <arg name="imu_data"                 default="/rtabmap/imu"/>
-  <arg name="rtabmapviz"               default="False"/>
-  <arg name="approx_sync"               default="False"/>
-
-
-  <node pkg="imu_filter_madgwick" type="imu_filter_node" name="imu_filter_madgwick" output="screen" >
-    <param name="~use_mag"      type="bool"  value="$(arg use_mag)"/>
-]   <param name="~publish_tf"   type="bool"  value="$(arg publish_imu_tf)"/>
-    <param name="~world_frame"  type="str"   value="$(arg world_frame)"/>
-    <remap from="/imu/data_raw"  to="$(arg imu_topic)"/>
-    <remap from="/imu/data"      to="/rtabmap/imu"/>
-  </node>
-
-  <include file="$(find rtabmap_ros)/launch/rtabmap.launch">
-    <arg name="left_image_topic"         value="$(arg left_image_topic)"/>
-    <arg name="left_camera_info_topic"   value="$(arg left_camera_info_topic)"/>
-    <arg name="right_image_topic"        value="$(arg right_image_topic)"/>
-    <arg name="right_camera_info_topic"  value="$(arg right_camera_info_topic)"/>
-    <arg name="stereo"                   value="$(arg stereo)"/>
-    <arg name="wait_imu_to_init"         value="$(arg wait_imu_to_init)"/>
-    <arg name="imu_topic"                value="/rtabmap/imu"/>
-    <arg name="frame_id"                 value="$(arg frame_id)"/>
-    <arg name="localization"             value="$(arg localization)"/>
-    <arg name="database_path"            value="$(arg database_path)"/>
-    <arg name="rviz"                     value="$(arg rviz)"/>
-    <arg name="odom_frame_id"            value="$(arg odom_frame_id)"/>
-     <arg name="odom_frame_id_init"       value="$(arg odom_frame_id_init)"/>
-    <arg name="odom_topic"               value="$(arg odom_topic)"/>
-    <arg name="visual_odometry"          value="$(arg visual_odometry)"/>
-    <arg name="publish_tf_odom"          value="$(arg publish_tf_odom)"/>
-    <arg name="publish_tf_map"           value="$(arg publish_tf_map)"/>
-    <arg name="map_frame_id"             value="$(arg map_frame_id)"/>
-    <arg name="approx_sync"              value="$(arg approx_sync)"/>
-    <arg name="rtabmapviz"               value="$(arg rtabmapviz)"/>
-  </include>
-</launch>
diff --git a/src/robot_behavior/launch/track_frame.launch b/src/robot_behavior/launch/track_frame.launch
deleted file mode 100644
index 4978417bd38ce0e4324e1836eff94b79c6308da1..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/track_frame.launch
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-
-  <arg name="robot_frame"               default="base_footprint"/>
-  <arg name="map_frame"                 default="map"/>
-  <arg name="frame_topic"               default="/tracked_pose_2d/frame"/>
-  <arg name="cam_optical_frame"         default="front_fisheye_camera_optical_frame"/>
-   <arg name="republish_from_openpose"  default="$(arg republish_from_openpose)"/>
-  
-
-  <node pkg="robot_behavior" type="track_frame_main.py"  name="track_frame_main" output="screen">
-    <param name="robot_frame"              type="str"   value="$(arg robot_frame)"/>
-    <param name="cam_optical_frame"        type="str"   value="$(arg cam_optical_frame)"/>
-    <param name="frame_topic"              type="str"   value="$(arg frame_topic)"/>
-    <param name="map_frame"                type="str"   value="$(arg map_frame)"/>
-    <param name="republish_from_openpose"  type="bool"  value="$(arg republish_from_openpose)"/>
-    <rosparam file="$(find robot_behavior)/config/projections/fisheye_calibration.yaml" command="load" subst_value="true" />
-  </node>
-
-</launch>
diff --git a/src/robot_behavior/launch/wp6_demo_peirasmos.launch b/src/robot_behavior/launch/wp6_demo_peirasmos.launch
deleted file mode 100644
index e0125c7cf7dfb67d004233e9f77ab3c41ba25494..0000000000000000000000000000000000000000
--- a/src/robot_behavior/launch/wp6_demo_peirasmos.launch
+++ /dev/null
@@ -1,134 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<launch>
-  <arg name="teleop_keyboard"                  default="False"/>
-  <arg name="teleop_joystick"                  default="True"/>
-  <arg name="republish_from_rtabmap"           default="False"/>
-  <arg name="republish_from_apriltag"          default="False"/>
-  <arg name="track_frame"                      default="True"/>
-  <arg name="republish_front_fisheye"          default="False"/>
-  <arg name="image_projection"                 default="False"/>
-  <arg name="social_mpc"                       default="False"/>
-  <arg name="action_servers"                   default="False"/>
-  <arg name="rviz"                             default="False"/>
-  <arg name="republish_from_openpose"          default="False"/>
-  <arg name="republish_to_basestation"         default="False"/>
-
-  <arg name="mobile_base_controller" default="/mobile_base_controller"/>
-  <arg name="speed"                  default="0.3"/>
-  <arg name="turn"                   default="0.5"/>
-
-  <arg name="rviz_config_file"       default="localization_apriltag"/>
-
-  <arg name="robot_frame"            default="base_footprint"/>
-  <arg name="cam_optical_frame"      default="front_fisheye_camera_optical_frame"/>
-
-  <arg name="mpc_config_name"        default="social_mpc"/>
-  <arg name="robot_config_name"      default="robot"/>
-  <arg name="pub_hz_control"         default="20"/>
-  <arg name="plot_render"            default="False"/>
-  <arg name="max_humans_world"       default="20"/>
-
-  <arg name="success_dist_thresh"    default="0.25"/>
-  <arg name="success_ang_thresh"     default="0.2"/>
-
-  <arg name="image_topic"            default="/front_fisheye_basestation/image_raw"/>
-  <arg name="tracking_topic"         default="/tracker/tracker_output"/>
-
-  <arg name="camera_ns"              default="/front_camera/fisheye"/>
-  <arg name="in_topic_republish"     default="$(arg camera_ns)/image_raw"/>
-  <arg name="out_topic_republish"    default="$(arg image_topic)"/>
-  <arg name="fisheye_camera_frame"   default="front_fisheye_camera_optical_frame"/>
-
-  <arg name="rect_camera_ns"         default="$(arg camera_ns)/rect"/>
-
-  <arg name="frame_topic"            default="/tracked_pose_2d/frame"/>
-
-  <arg name="odom_frame"             default="odom"/>
-  <arg name="map_frame"              default="map"/>
-
-  <arg name="frame_openpose_topic"   default="/frame"/>
-
-
-
-  <group if="$(arg republish_front_fisheye)">
-    <node pkg="image_transport" type="republish"  name="republish" output="screen" args="compressed in:=$(arg in_topic_republish) raw out:=$(arg out_topic_republish)"  />
-  </group>
-
-  <group if="$(arg image_projection)">
-    <include file="$(find robot_behavior)/launch/image_projection.launch">
-      <arg name="camera_ns"             value="$(arg camera_ns)"/>
-      <arg name="fisheye_camera_frame"  value="$(arg fisheye_camera_frame)"/>
-    </include>
-  </group>
-
-  <group if="$(arg action_servers)">
-    <include file="$(find robot_behavior)/launch/action_servers.launch">
-      <arg name="pub_hz_control"         value="$(arg pub_hz_control)"/>
-      <arg name="success_dist_thresh"    value="$(arg success_dist_thresh)"/>
-      <arg name="success_ang_thresh"     value="$(arg success_ang_thresh)"/>
-    </include>
-  </group>
-
-  <group if="$(arg social_mpc)">
-    <include file="$(find robot_behavior)/launch/social_mpc.launch">
-      <arg name="mpc_config_name"        value="$(arg mpc_config_name)"/>
-      <arg name="robot_config_name"      value="$(arg robot_config_name)"/>
-      <arg name="plot_render"            value="$(arg plot_render)"/>
-      <arg name="map_frame"              value="$(arg map_frame)"/>
-      <arg name="robot_frame"            value="$(arg robot_frame)"/>
-      <arg name="max_humans_world"       value="$(arg max_humans_world)"/>      
-    </include>
-  </group>
-
-  <group if="$(arg republish_from_apriltag)">
-    <include file="$(find robot_behavior)/launch/republish_from_apriltag.launch">
-      <arg name="robot_frame"        value="$(arg robot_frame)"/>
-      <arg name="cam_optical_frame"  value="$(arg cam_optical_frame)"/>
-      <arg name="camera_ns"          value="$(arg camera_ns)"/>
-    </include>
-  </group>
-
-  <group if="$(arg track_frame)">
-    <include file="$(find robot_behavior)/launch/track_frame.launch">
-      <arg name="robot_frame"              value="$(arg robot_frame)"/>
-      <arg name="cam_optical_frame"        value="$(arg cam_optical_frame)"/>
-      <arg name="frame_topic"              value="$(arg frame_topic)"/>
-      <arg name="map_frame"                value="$(arg map_frame)"/>
-      <arg name="republish_from_openpose"  value="$(arg republish_from_openpose)"/>
-    </include>
-  </group>
-
-  <group if="$(arg republish_from_rtabmap)">
-    <include file="$(find robot_behavior)/launch/republish_from_rtabmap.launch">
-      <arg name="robot_frame"  value="$(arg robot_frame)"/>
-      <arg name="map_frame"    value="$(arg map_frame)"/>
-      <arg name="frame_topic"  value="$(arg frame_topic)"/>
-    </include>
-  </group>  
-
-  <group if="$(arg republish_from_openpose)">
-    <include file="$(find robot_behavior)/launch/republish_from_openpose.launch">
-      <arg name="frame_openpose_topic"   value="$(arg frame_openpose_topic)"/>
-      <arg name="tracking_topic"         value="$(arg tracking_topic)"/>
-    </include>
-
-    <include file="$(find ros_openpose)/launch/run.launch">
-      <arg name="camera"        value="nodepth"/>
-      <arg name="color_topic"   value="$(arg image_topic)"/>
-      <arg name="synchronous"   value="True"/>
-      <arg name="print"         value="False"/>
-    </include>
-  </group>
-
-  <group unless="$(arg republish_from_openpose)">
-    <include file="$(find robot_behavior)/launch/ros_mediapipe.launch">
-      <arg name="image_topic"        value="$(arg image_topic)"/>
-      <arg name="tracking_topic"     value="$(arg tracking_topic)"/>
-    </include>    
-  </group>
-
-  <group if="$(arg republish_to_basestation)">
-    <include file="$(find robot_behavior)/launch/basestation_republisher.launch"/>    
-  </group>
-
-</launch>
diff --git a/src/robot_behavior/scripts/behavior_generator_main.py b/src/robot_behavior/scripts/behavior_generator_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..b8890d9bb66d52bfcd23579d08d079370fd88693
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_generator_main.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+import rospy
+import pkg_resources
+import yaml
+from robot_behavior import BehaviorGenerator
+
+
+if __name__ == '__main__':
+    # Init node
+    rospy.init_node('BehaviorGenerator', log_level=rospy.DEBUG, anonymous=True)
+    controller = BehaviorGenerator()
+    controller.run()
+    controller.shutdown()
+    # rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_global_path_planner_main.py b/src/robot_behavior/scripts/behavior_global_path_planner_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..8e3a3d1a184aeafe36b8084de157582d79369fd7
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_global_path_planner_main.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+import rospy
+import pkg_resources
+import yaml
+from robot_behavior import GlobalPathPlannerNode
+
+
+if __name__ == '__main__':
+    # Init node
+    rospy.init_node('GlobalPathPlannerNode', log_level=rospy.DEBUG, anonymous=True)
+    controller = GlobalPathPlannerNode()
+    controller.run()
+    controller.shutdown()
+    # rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_go_to_body_action_client_main.py b/src/robot_behavior/scripts/behavior_go_to_body_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..b5e3f6622b01ff3b13bc7755bb10f70308c6e42b
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_body_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToBodyActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_body_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = GoToBodyActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_go_to_body_action_server_main.py b/src/robot_behavior/scripts/behavior_go_to_body_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..3639fb5b60c1c6623461b11debfc0b01ef24cc90
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_body_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToBodyActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_body_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = GoToBodyActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_go_to_group_action_client_main.py b/src/robot_behavior/scripts/behavior_go_to_group_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..e9641f2179bb60a16b863b3ca1e9e1b34e53b3e7
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_group_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToGroupActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_group_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = GoToGroupActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_go_to_group_action_server_main.py b/src/robot_behavior/scripts/behavior_go_to_group_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..a6e582e5d6002312b7c2d660b57df79ea6027fc2
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_group_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToGroupActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_group_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = GoToGroupActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_go_to_person_action_client_main.py b/src/robot_behavior/scripts/behavior_go_to_person_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..4711b32c4e1a05e45528337ae91c6fe50e3b76f5
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_person_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToPersonActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_person_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = GoToPersonActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_go_to_person_action_server_main.py b/src/robot_behavior/scripts/behavior_go_to_person_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..9a8769efbeaae8a1c4ec078a9c9190a2775ac617
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_person_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToPersonActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_person_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = GoToPersonActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_go_to_position_action_client_main.py b/src/robot_behavior/scripts/behavior_go_to_position_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..f913ae639e32c1cce47c6003fe9517fa3b9fefda
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_position_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToPositionActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_position_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = GoToPositionActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_go_to_position_action_server_main.py b/src/robot_behavior/scripts/behavior_go_to_position_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..2a35690103f88a537dd26d47b3b1bcf7f8a45e32
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_go_to_position_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import GoToPositionActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'go_to_position_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = GoToPositionActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_goal_finder_main.py b/src/robot_behavior/scripts/behavior_goal_finder_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..9d74b6b325410d4543b245f315a80a816a6dd196
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_goal_finder_main.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+import rospy
+import pkg_resources
+import yaml
+from robot_behavior import GoalFinderNode
+
+
+if __name__ == '__main__':
+    # Init node
+    rospy.init_node('GoalFinderNode', log_level=rospy.DEBUG, anonymous=True)
+    controller = GoalFinderNode()
+    controller.run()
+    controller.shutdown()
+    # rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_local_path_planner_mpc_main.py b/src/robot_behavior/scripts/behavior_local_path_planner_mpc_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..15b6d72013d3bae5544d68aeda9e061853fbaaa5
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_local_path_planner_mpc_main.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+import rospy
+import pkg_resources
+import yaml
+from robot_behavior import LocalPathPlannerMPCNode
+
+
+if __name__ == '__main__':
+    # Init node
+    rospy.init_node('LocalPathPlannerMPCNode', log_level=rospy.DEBUG, anonymous=True)
+    controller = LocalPathPlannerMPCNode()
+    controller.run()
+    controller.shutdown()
+    # rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_look_at_body_action_client_main.py b/src/robot_behavior/scripts/behavior_look_at_body_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..4eec5f1e80a927f7bde87ab46570e083e402d675
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_body_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtBodyActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_body_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = LookAtBodyActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_look_at_body_action_server_main.py b/src/robot_behavior/scripts/behavior_look_at_body_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..5c290c737f75a5caf690c16309ff555db9d120ea
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_body_action_server_main.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtBodyActionServer
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_body_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = LookAtBodyActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_look_at_group_action_client_main.py b/src/robot_behavior/scripts/behavior_look_at_group_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..bf154f4c01e1f67485cd1f73ef7d48c9a09abc0e
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_group_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtGroupActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_group_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = LookAtGroupActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_look_at_group_action_server_main.py b/src/robot_behavior/scripts/behavior_look_at_group_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..b5bb87f56a1db8582c298e15428ae60a5391d0ec
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_group_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtGroupActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_group_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = LookAtGroupActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_look_at_person_action_client_main.py b/src/robot_behavior/scripts/behavior_look_at_person_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..9e36fd48fbb1000341a7b7aed2e5c45345127f3f
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_person_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtPersonActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_person_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = LookAtPersonActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_look_at_person_action_server_main.py b/src/robot_behavior/scripts/behavior_look_at_person_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..66cc25c11ec06a3148232b575b2829f25518fcc7
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_person_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtPersonActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_person_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = LookAtPersonActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/behavior_look_at_position_action_client_main.py b/src/robot_behavior/scripts/behavior_look_at_position_action_client_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..1b7a95340b73e2f2eb8958d47f3e2924eb0d69ae
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_position_action_client_main.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtPositionActionClient
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_position_action'
+    node_name = action_name + '_client'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    client = LookAtPositionActionClient(name=action_name)
+    result = client.call_server()
+    rospy.loginfo('Result is : {}'.format(result))
diff --git a/src/robot_behavior/scripts/behavior_look_at_position_action_server_main.py b/src/robot_behavior/scripts/behavior_look_at_position_action_server_main.py
new file mode 100755
index 0000000000000000000000000000000000000000..62032161dcbfc54243ae75039c985475a5febd88
--- /dev/null
+++ b/src/robot_behavior/scripts/behavior_look_at_position_action_server_main.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import rospy
+from robot_behavior import LookAtPositionActionServer
+
+
+if __name__ == '__main__':
+    # Init node
+    action_name = 'look_at_position_action'
+    node_name = action_name + '_server'
+    rospy.init_node(node_name, log_level=rospy.INFO, anonymous=True)
+    server = LookAtPositionActionServer(name=action_name)
+    rospy.spin()
diff --git a/src/robot_behavior/scripts/controller_main.py b/src/robot_behavior/scripts/controller_main.py
deleted file mode 100755
index 5be7bdaf5db397e97dbff0d08dfef803598d3d4d..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/controller_main.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import pkg_resources
-import yaml
-
-from robot_behavior.controller_node import Controller
-
-
-mpc_config_name = None
-robot_config_name = None
-plot_render = False
-map_frame = "map"
-robot_frame = "base_footprint"
-max_humans_world = 20
-
-def main():
-    global mpc_config_name
-    global robot_config_name
-    global mpc_config_path
-    global robot_config_path
-    global has_pan
-    global has_tilt
-    global base_footprint_radius
-    global min_pan_angle
-    global max_pan_angle
-    global plot_render
-    global map_frame
-    global robot_frame
-    global max_humans_world
-
-    mpc_config_name = rospy.get_param("~mpc_config_name")
-    robot_config_name = rospy.get_param("~robot_config_name")
-    plot_render = rospy.get_param("~plot_render")
-    map_frame = rospy.get_param("~map_frame")
-    robot_frame = rospy.get_param("~robot_frame")
-    max_humans_world = rospy.get_param("~max_humans_world")
-
-    mpc_config_path = None if mpc_config_name == 'None' else mpc_config_name
-    robot_config_path = None if robot_config_name == 'None' else robot_config_name
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('Controller', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        controller = Controller(mpc_config_path=mpc_config_path,
-                                robot_config_path=robot_config_path,
-                                plot_render=plot_render,
-                                map_frame=map_frame,
-                                robot_frame=robot_frame,
-                                max_humans_world=max_humans_world)
-        controller.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Controller node terminated.")
diff --git a/src/robot_behavior/scripts/gotowards_action_client_main.py b/src/robot_behavior/scripts/gotowards_action_client_main.py
deleted file mode 100755
index 8f900785d103002f36cd8c3a8919c6c260021759..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/gotowards_action_client_main.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.gotowards_action_client import GoTowardsActionClient
-
-goto_target_id = -1
-timeout = 120
-
-def main():
-    global goto_target_id
-    global timeout
-
-    goto_target_id = rospy.get_param("~goto_target_id")
-    timeout = rospy.get_param("~timeout")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('gotowards_action_client', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        # Initializes a rospy node so that the SimpleActionClient can
-        # publish and subscribe over ROS.
-        client = GoTowardsActionClient(name='go_towards_action',
-                                       goto_target_id=goto_target_id,
-                                       timeout=timeout)
-        result = client.call_server()
-        rospy.loginfo('Result is : {}'.format(result))
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Go Towards action client node terminated.")
diff --git a/src/robot_behavior/scripts/gotowards_action_server_main.py b/src/robot_behavior/scripts/gotowards_action_server_main.py
deleted file mode 100755
index f2dcc12bc11898f8242d6ab65fd05e81898a34b8..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/gotowards_action_server_main.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.gotowards_action_server import GoTowardsActionServer
-
-hz = 20
-success_dist_thresh = 0.3
-success_ang_thresh = 0.2
-
-def main():
-    global hz
-    global success_dist_thresh
-    global success_ang_thresh
-
-    hz = rospy.get_param("~hz")
-    success_dist_thresh = rospy.get_param("~success_dist_thresh")
-    success_ang_thresh = rospy.get_param("~success_ang_thresh")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('gotowards_action_server', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        server = GoTowardsActionServer(name='go_towards_action',
-                                       success_dist_thresh=success_dist_thresh,
-                                       success_ang_thresh=success_ang_thresh,
-                                       hz=hz)
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Go Towards action server node terminated.")
diff --git a/src/robot_behavior/scripts/lookat_action_client_main.py b/src/robot_behavior/scripts/lookat_action_client_main.py
deleted file mode 100755
index e1933b052bdc59cb3d2c89d0d89c032dcd050487..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/lookat_action_client_main.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.lookat_action_client import LookAtActionClient
-
-pan_target_id = -1
-timeout = 120
-
-def main():
-    global pan_target_id
-    global timeout
-
-    pan_target_id = rospy.get_param("~pan_target_id")
-    timeout = rospy.get_param("~timeout")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('lookat_action_client', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        # Initializes a rospy node so that the SimpleActionClient can
-        # publish and subscribe over ROS.
-        client = LookAtActionClient(name='look_at_action',
-                                    pan_target_id=pan_target_id,
-                                    timeout=timeout)
-        result = client.call_server()
-        rospy.loginfo('Result is : {}'.format(result))
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Look At action client node terminated.")
diff --git a/src/robot_behavior/scripts/lookat_action_server_main.py b/src/robot_behavior/scripts/lookat_action_server_main.py
deleted file mode 100755
index 56c8aadb70ceb493a238a61e925ecf2d85fdb6ff..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/lookat_action_server_main.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.lookat_action_server import LookAtActionServer
-
-hz = 20
-success_ang_thresh = 0.3
-
-def main():
-    global hz
-    global success_ang_thresh
-
-    hz = rospy.get_param("~hz")
-    success_ang_thresh = rospy.get_param("~success_ang_thresh")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('lookat_action_server', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        server = LookAtActionServer(name='look_at_action',
-                                    success_ang_thresh=success_ang_thresh,
-                                    hz=hz)
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Look At action server node terminated.")
diff --git a/src/robot_behavior/scripts/navigate_action_client_main.py b/src/robot_behavior/scripts/navigate_action_client_main.py
deleted file mode 100755
index 95053e5cdfa9b448adcc32d0a609dbd1886b11fd..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/navigate_action_client_main.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.navigate_action_client import NavigateActionClient
-
-goto_target_id = -1
-human_target_id = -1
-goto_target_pose = [0., 0., 0., 0., 0.]
-relative_h_pose = [0., 0.]
-timeout = 120
-
-def main():
-    global goto_target_id
-    global human_target_id
-    global goto_target_pose
-    global relative_h_pose
-    global timeout
-
-    goto_target_id = rospy.get_param("~goto_target_id")
-    human_target_id = rospy.get_param("~human_target_id")
-    goto_target_pose = rospy.get_param("~goto_target_pose")
-    relative_h_pose = rospy.get_param("~relative_h_pose")
-    timeout = rospy.get_param("~timeout")
-
-    goto_target_pose = goto_target_pose.strip("[]").split(", ")
-    goto_target_pose = list(map(float, goto_target_pose))
-    relative_h_pose = relative_h_pose.strip("[]").split(", ")
-    relative_h_pose = list(map(float, relative_h_pose))
-
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('navigate_action_client', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        # Initializes a rospy node so that the SimpleActionClient can
-        # publish and subscribe over ROS.
-        client = NavigateActionClient(name='navigate_action',
-                                      goto_target_id=goto_target_id,
-                                      human_target_id=human_target_id,
-                                      goto_target_pose=goto_target_pose,
-                                      relative_h_pose=relative_h_pose,
-                                      timeout=timeout)
-        result = client.call_server()
-        rospy.loginfo('Result is : {}'.format(result))
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Navigate action client node terminated.")
diff --git a/src/robot_behavior/scripts/navigate_action_server_main.py b/src/robot_behavior/scripts/navigate_action_server_main.py
deleted file mode 100755
index c2171e255be57327f5e73866775f23ffebf1e65c..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/navigate_action_server_main.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.navigate_action_server import NavigateActionServer
-
-hz = 20
-success_dist_thresh = 0.3
-success_ang_thresh = 0.2
-
-def main():
-    global hz
-    global success_dist_thresh
-    global success_ang_thresh
-
-    hz = rospy.get_param("~hz")
-    success_dist_thresh = rospy.get_param("~success_dist_thresh")
-    success_ang_thresh = rospy.get_param("~success_ang_thresh")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('navigate_action_server', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        server = NavigateActionServer(name='navigate_action',
-                                      success_dist_thresh=success_dist_thresh,
-                                      success_ang_thresh=success_ang_thresh,
-                                      hz=hz)
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Navigate action server node terminated.")
diff --git a/src/robot_behavior/scripts/republish_from_apriltag_main.py b/src/robot_behavior/scripts/republish_from_apriltag_main.py
deleted file mode 100755
index c742da1070cb936b45951d89db8f2021c6fd769b..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/republish_from_apriltag_main.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.republish_from_apriltag_node import RepublishFromApriltag
-
-robot_frame = "base_footprint"
-cam_optical_frame = "head_front_camera_optical_frame"
-
-def main():
-    global robot_frame
-    global cam_optical_frame
-
-    robot_frame = rospy.get_param("~robot_frame")
-    cam_optical_frame = rospy.get_param("~cam_optical_frame")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('RepublishFromApriltag', log_level=rospy.INFO, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        republish_tag = RepublishFromApriltag(cam_optical_frame=cam_optical_frame,
-                                              robot_frame=robot_frame)
-        # republish_tag.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("RepublishFromApriltag node terminated.")
diff --git a/src/robot_behavior/scripts/republish_from_openpose_main.py b/src/robot_behavior/scripts/republish_from_openpose_main.py
deleted file mode 100755
index d3ad95719f8a0a7200ff07d70d6306e871523545..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/republish_from_openpose_main.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.republish_from_openpose_node import RepublishFromOpenpose
-
-frame_topic = "/frame"
-image_pose_topic = '/rosOpenpose/camera/image'
-tracking_topic = "/tracker/tracker_output"
-
-def main():    
-    global frame_topic
-    global tracking_topic
-    global image_pose_topic
-
-    
-    frame_topic = rospy.get_param("~frame_topic")
-    tracking_topic = rospy.get_param("~tracking_topic")
-    image_pose_topic = rospy.get_param("~image_pose_topic")
-    
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('RepublishFromOpenpose', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        republish_tag = RepublishFromOpenpose(tracking_topic=tracking_topic,
-                                              frame_topic=frame_topic,
-                                              image_pose_topic=image_pose_topic)
-        # republish_tag.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("RepublishFromOpenpose node terminated.")
diff --git a/src/robot_behavior/scripts/republish_from_rtabmap_main.py b/src/robot_behavior/scripts/republish_from_rtabmap_main.py
deleted file mode 100755
index 1c1903face86e5fd9fe8c6c9e8cd981c2f563ed7..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/republish_from_rtabmap_main.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.republish_from_rtabmap_node import RepublishFromRtabmap
-
-robot_frame = "odom"
-map_frame = "map"
-frame_topic = "/vision_msgs/human_2d_pose/annotations"
-
-def main():
-    global robot_frame
-    global map_frame
-    global frame_topic
-
-    robot_frame = rospy.get_param("~robot_frame")
-    map_frame = rospy.get_param("~map_frame")
-    frame_topic = rospy.get_param("~frame_topic")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('RepublishFromRtabmap', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        republish_rtabmap = RepublishFromRtabmap(robot_frame=robot_frame,
-                                                 frame_topic=frame_topic,
-                                                 map_frame=map_frame)
-        # republish_rtabmap.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("RepublishFromRtabmap node terminated.")
diff --git a/src/robot_behavior/scripts/ros_mediapipe_main.py b/src/robot_behavior/scripts/ros_mediapipe_main.py
deleted file mode 100755
index 97a2b309a098e354db9f27e0fddef9558df969ea..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/ros_mediapipe_main.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.ros_mediapipe_node import RosMediapipe
-
-image_topic = "/front_fisheye_basestation/image_raw"
-tracking_topic = "/tracker/tracker_output"
-
-def main():
-    global image_topic
-    global tracking_topic
-
-    image_topic = rospy.get_param("~image_topic")
-    tracking_topic = rospy.get_param("~tracking_topic")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('RosMediapipe', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        republish_tag = RosMediapipe(tracking_topic=tracking_topic,
-                                     image_topic=image_topic)
-        # republish_tag.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("RosMediapipe node terminated.")
diff --git a/src/robot_behavior/scripts/sim_2d_main.py b/src/robot_behavior/scripts/sim_2d_main.py
deleted file mode 100755
index 6061d1b9bb409ec2fb5c2344900d9c91eb6db75d..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/sim_2d_main.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import pkg_resources
-
-from robot_behavior.sim_2d_node import Simulator
-
-gui = True
-config_name = None
-pub_hz = 20
-joint_names = ['head_1_joint']
-
-def main():
-    global gui
-    global config_name
-    global config_path
-    global pub_hz
-    global joint_names
-
-    gui = rospy.get_param("~gui")
-    config_name = rospy.get_param("~config_name")
-    pub_hz = rospy.get_param("~pub_hz")
-    joint_names = rospy.get_param("~joint_names")
-    joint_names = joint_names.strip("['']").split("', '")
-
-    config_name = None if config_name == 'None' else config_name
-    try:
-        config_path =  pkg_resources.resource_filename('results', '/data/config/sim2d/'+ config_name)
-    except:
-        config_path = None
-
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('Simulator', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        sim = Simulator(gui=gui,
-                        config_path=config_path,
-                        pub_hz=pub_hz,
-                        joint_names=joint_names)
-        sim.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("Simulator node terminated.")
diff --git a/src/robot_behavior/scripts/track_frame_main.py b/src/robot_behavior/scripts/track_frame_main.py
deleted file mode 100755
index 562467cbced786331dafb1943e1344aeab370352..0000000000000000000000000000000000000000
--- a/src/robot_behavior/scripts/track_frame_main.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-
-from robot_behavior.track_frame_node import TrackFrame
-
-robot_frame = "base_footprint"
-cam_optical_frame = "head_front_camera_optical_frame"
-frame_topic = "/vision_msgs/human_2d_pose/annotations"
-map_frame = "map"
-republish_from_openpose = True
-
-
-def main():
-    global robot_frame
-    global cam_optical_frame
-    global frame_topic
-    global map_frame
-    global republish_from_openpose
-
-    robot_frame = rospy.get_param("~robot_frame")
-    cam_optical_frame = rospy.get_param("~cam_optical_frame")
-    frame_topic = rospy.get_param("~frame_topic")
-    map_frame = rospy.get_param("~map_frame")
-    republish_from_openpose = rospy.get_param("~republish_from_openpose")
-
-
-if __name__ == '__main__':
-    # Init node
-    rospy.init_node('RepublishFromOpenpose', log_level=rospy.DEBUG, anonymous=True)
-    main()  # the others are not passed arguements
-    try:
-        republish_tag = TrackFrame(cam_optical_frame=cam_optical_frame,
-                                   frame_topic=frame_topic,
-                                   map_frame=map_frame,
-                                   robot_frame=robot_frame,
-                                   republish_from_openpose=republish_from_openpose)
-        # republish_tag.run()
-        rospy.spin()
-    except rospy.ROSInterruptException:
-        rospy.loginfo("RepublishFromOpenpose node terminated.")
diff --git a/src/robot_behavior/src/robot_behavior/__init__.py b/src/robot_behavior/src/robot_behavior/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..d8601e750eef35d60716c0c9e0c4db4d71ea69cc 100755
--- a/src/robot_behavior/src/robot_behavior/__init__.py
+++ b/src/robot_behavior/src/robot_behavior/__init__.py
@@ -0,0 +1,23 @@
+from .behavior_global_path_planner import GlobalPathPlanner
+from .behavior_goal_finder import GoalFinder
+from .behavior_local_path_planner_mpc import LocalPathPlannerMPC
+from .behavior_generator_node import BehaviorGenerator
+from .behavior_global_path_planner_node import GlobalPathPlannerNode
+from .behavior_local_path_planner_mpc_node import LocalPathPlannerMPCNode
+from .behavior_goal_finder_node import GoalFinderNode
+from .behavior_go_to_body_action_client import GoToBodyActionClient
+from .behavior_go_to_body_action_server import GoToBodyActionServer
+from .behavior_go_to_group_action_client import GoToGroupActionClient
+from .behavior_go_to_group_action_server import GoToGroupActionServer
+from .behavior_go_to_person_action_client import GoToPersonActionClient
+from .behavior_go_to_person_action_server import GoToPersonActionServer
+from .behavior_go_to_position_action_client import GoToPositionActionClient
+from .behavior_go_to_position_action_server import GoToPositionActionServer
+from .behavior_look_at_body_action_client import LookAtBodyActionClient
+from .behavior_look_at_body_action_server import LookAtBodyActionServer
+from .behavior_look_at_group_action_client import LookAtGroupActionClient
+from .behavior_look_at_group_action_server import LookAtGroupActionServer
+from .behavior_look_at_person_action_client import LookAtPersonActionClient
+from .behavior_look_at_person_action_server import LookAtPersonActionServer
+from .behavior_look_at_position_action_client import LookAtPositionActionClient
+from .behavior_look_at_position_action_server import LookAtPositionActionServer
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/behavior_generator_node.py b/src/robot_behavior/src/robot_behavior/behavior_generator_node.py
new file mode 100755
index 0000000000000000000000000000000000000000..41be07101af63598170416d7cc5b5c261409101d
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_generator_node.py
@@ -0,0 +1,1325 @@
+#!/usr/bin/env python3
+import numpy as np
+from collections import namedtuple
+
+import pkg_resources
+import tf
+import os
+import yaml
+import cv2
+from multiprocessing import Lock
+
+from social_mpc.controller import RobotController
+from social_mpc.config.config import RobotConfig
+from social_mpc.utils import local_to_global, global_to_local
+from social_mpc.ssn_model.social_spaces import SocialSpaces, Group
+
+import rospy
+from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3, PoseArray, Transform
+from sensor_msgs.msg import JointState
+from std_msgs.msg import Float64
+from nav_msgs.msg import Odometry, OccupancyGrid, MapMetaData
+from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
+from diagnostic_msgs.msg import DiagnosticArray, DiagnosticStatus, KeyValue
+
+from robot_behavior.utils import constraint_angle, meter_to_px
+from spring_msgs.msg import TrackedPersons2d, ControllerStatus, GoToPosition, GoToEntity, LookAtEntity, LookAtPosition
+from robot_behavior.ros_4_hri_interface import ROS4HRIInterface  
+
+
+class BehaviorGenerator:
+    def __init__(self):
+        """Read ROS parameters, build the MPC RobotController and all bookkeeping state.
+
+        Side effects at construction time: kills `/pal_head_manager` (if running),
+        cancels the interaction profile and un-pauses navigation via `rostopic pub`
+        shell commands, then wires up all subscribers/publishers through `init()`.
+        """
+        # Keep JAX/XLA from pre-allocating the whole GPU (MPC uses jax downstream
+        # — presumably; set before anything imports/initializes XLA).
+        os.environ['XLA_PYTHON_CLIENT_PREALLOCATE'] = 'false'
+        # The string 'None' is the launch-file sentinel for "use the package default".
+        mpc_config_name = rospy.get_param('~mpc_config_name', 'None')
+        robot_config_name = rospy.get_param('~robot_config_name', 'None')
+        mpc_config_path = None if mpc_config_name == 'None' else mpc_config_name
+        robot_config_path = None if robot_config_name == 'None' else robot_config_name
+        self.mpc_config_path = mpc_config_path
+        self.robot_config_path = robot_config_path
+        # Fixed array capacities: state arrays are sized once because they back
+        # SharedMemory buffers (see the logerr in recreate_state).
+        self.max_humans_world = rospy.get_param('max_humans_world', 20)
+        self.max_groups_world = rospy.get_param('max_groups_world', 10)
+        self.namespace = rospy.get_param('namespace', 'behavior_generator')
+        self.namespace_slam = rospy.get_param('~namespace_slam', '/rtabmap')
+        self.map_frame = rospy.get_param('~map_frame', 'map')
+        self.robot_frame = rospy.get_param('~robot_frame', 'base_footprint')
+        self.global_occupancy_map_topic = rospy.get_param('~global_occupancy_map_topic', '/slam/global_occupancy_map')
+        self.local_occupancy_map_topic = rospy.get_param('~local_occupancy_map_topic', '/slam/local_occupancy_map')
+        self.diag_timer_rate = rospy.get_param('~diag_timer_rate', 10)
+        # NOTE(review): "treshold" (sic) is baked into the public param name; fixing
+        # the spelling would break existing launch files — leave as-is or alias.
+        self.target_id_absent_treshold = rospy.get_param('~target_id_absent_treshold', 3)
+
+        self.controller = RobotController(config_filename=self.mpc_config_path)
+
+        # NOTE(review): shell-string os.system/os.popen calls; inputs are constants
+        # here so not injectable, but subprocess.run([...]) would be the safer idiom.
+        cmd = "rostopic pub -1  /interaction_profile/set/cancel actionlib_msgs/GoalID \"{}\""
+        nodes = os.popen("rosnode list").readlines()
+        for i in range(len(nodes)):
+            nodes[i] = nodes[i].replace("\n","")
+        if '/pal_head_manager' in nodes:
+            # The PAL head manager would fight this node for head control.
+            os.system("rosnode kill " + '/pal_head_manager')  # '/pal_webcommander'
+        os.system(cmd)  # disable alive mode
+
+        cmd_pub = "rostopic pub -1 /pause_navigation std_msgs/Bool 'data: false'"
+        os.system(cmd_pub)  # disable pause navigation mode
+
+        # Latest message caches, filled by the subscriber callbacks (None = not yet received).
+        self.joint_states_data = None
+        self.global_map_data = None
+        self.local_map_data = None
+        self.tracked_persons_2d_data = None
+        self.go_to_position_data = None
+        self.go_to_body_data = None
+        self.go_to_person_data = None
+        self.go_to_group_data = None
+        self.look_at_position_data = None
+        self.look_at_body_data = None
+        self.look_at_person_data = None
+        self.look_at_group_data = None
+        self.rtabmap_ready = False
+        self.hri_data = None
+
+        # Active look-at targets (entity ids / positions); None = no target.
+        self.look_at_group_target = None
+        self.look_at_body_target = None
+        self.look_at_person_target = None
+        self.look_at_position_target = None
+        self.look_at_done = False
+
+        # Active go-to targets (entity ids / positions); None = no target.
+        self.go_to_group_target = None
+        self.go_to_body_target = None
+        self.go_to_person_target = None
+        self.go_to_position_target = None
+        self.go_to_done = False
+
+        # To get indexes of the joints
+        self.get_indexes = False
+        self.pan_ind = None
+        self.tilt_ind = None
+        self.current_pan = None
+        self.current_tilt = None
+        self.head_goal = False
+        self.base_goal = False
+        self.pan_angle = 0.
+        self.tilt_angle = 0.
+        self.lin_vel = 0.
+        self.ang_vel = 0.
+        
+        # Sentinel ids marking unused slots in the fixed-size state arrays.
+        self.human_id_default = -1
+        self.group_id_default = -1
+        self.x_pan_target = 0.
+        self.y_pan_target = 0.
+        self.wrong_target = False
+        # Final goal pose, current waypoint, and cached robot pose (map frame).
+        self.x_final = 0.
+        self.y_final = 0.
+        self.th_final = 0.
+        self.x_wp = 0.
+        self.y_wp = 0.
+        self.th_wp = 0.
+        self.head_joint_errors = False
+        self.x_human_features = 0.
+        self.y_human_features = 0.
+        self.th_human_features = 0.
+        self.x_robot_pose = 0.
+        self.y_robot_pose = 0.
+        self.yaw_robot_pose = 0.
+
+        # Consecutive frames the tracked target id has been missing (debounce).
+        self.target_id_absent_count = 0
+
+        # continuous flags
+        self.is_continuous_look_at = False
+        self.continuous_look_at_done = False
+        self.is_continuous_go_to = False
+        self.continuous_go_to_done = False
+
+        self.robot_config = None
+
+        self.action_idx = None
+        self.actions = None
+        self.controller_status_msg = None
+
+        self.timestamp_tracking = None
+        self.timestamp_tracking_latest = None
+        # `lock` guards state rebuilds against callback writes; `lock_controller`
+        # presumably guards controller access from timers — TODO confirm usage.
+        self.lock = Lock()
+        self.lock_controller = Lock()
+        self.ssn = None
+
+        self._subscribers = []
+        self._publishers = []
+        self._timers = []
+
+        self.read_robot_config(self.robot_config_path)
+        self.init_state()
+
+        self.init()
+
+
+    def init(self):
+        """Wire up ROS I/O and warm up the MPC.
+
+        Blocks until all required sensors have published once
+        (`_check_all_sensors_ready`), builds the SocialSpaces model from the
+        local map geometry, then runs one throwaway controller/MPC step so the
+        first real `step()` does not pay JIT/initialization cost — presumably;
+        confirm against social_mpc internals.
+        """
+        self.i = 0
+        self.ros_4_hri_interface = ROS4HRIInterface()
+        self.tf_broadcaster = tf.TransformBroadcaster()
+        self.tf_listener = tf.TransformListener()
+
+        self._check_all_sensors_ready()
+
+        # Start all the ROS related Subscribers and publishers
+        self._subscribers.append(rospy.Subscriber('/joint_states', JointState, callback=self._joint_states_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber('/tracked_persons_2d', TrackedPersons2d, callback=self._tracked_persons_2d_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.global_occupancy_map_topic, OccupancyGrid, callback=self._global_map_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.local_occupancy_map_topic, OccupancyGrid, callback=self._local_map_callback, queue_size=1))
+
+        # One topic per behavior command; latest message wins (queue_size=1).
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_position', GoToPosition, callback=self._go_to_position_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_body', GoToEntity, callback=self._go_to_body_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_person', GoToEntity, callback=self._go_to_person_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_group', GoToEntity, callback=self._go_to_group_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/look_at_position', LookAtPosition, callback=self._look_at_position_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/look_at_body', LookAtEntity, callback=self._look_at_body_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/look_at_person', LookAtEntity, callback=self._look_at_person_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/look_at_group', LookAtEntity, callback=self._look_at_group_callback, queue_size=1))
+
+        # Velocity publication and diagnostics run on timers, decoupled from step().
+        self._timers.append(rospy.Timer(rospy.Duration(self.controller.controller_config.h), callback=self._pub_vel_callback))
+        self._timers.append(rospy.Timer(rospy.Duration(1/self.diag_timer_rate), callback=self._diagnostics_callback))
+        # NOTE(review): int(1/h) truncates — e.g. h=0.3 gives Rate(3) ≈ 0.33 s, not 0.3 s.
+        self.rate = rospy.Rate(int(1/self.controller.controller_config.h))
+        
+        self._cmd_vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
+        self._publishers.append(self._cmd_vel_pub)
+        self._pan_vel_pub = rospy.Publisher('/head_controller/command', JointTrajectory, queue_size=1)
+        self._publishers.append(self._pan_vel_pub)
+        self._controller_status_pub = rospy.Publisher(self.namespace + '/controller_status', ControllerStatus, queue_size=1)
+        self._publishers.append(self._controller_status_pub)
+        self._diagnostics_pub = rospy.Publisher('/diagnostics', DiagnosticArray, queue_size=1)
+        self._publishers.append(self._diagnostics_pub)
+        self._local_social_cost_map_pub = rospy.Publisher(self.namespace_slam + '/local_social_cost_map', OccupancyGrid, queue_size=1, latch=True)
+        self._publishers.append(self._local_social_cost_map_pub)
+        
+        rospy.loginfo("Initializing the BehaviorGenerator")
+
+        # Register handler to be called when rospy process begins shutdown
+        # rospy.on_shutdown(self.shutdown)  # calling shutdown several times because of sub processes
+
+        self._check_publishers_connection()
+
+        if self.ssn is None:
+            # Local map bounding box in metres: [[x_min, x_max], [y_min, y_max]],
+            # derived from the OccupancyGrid origin/resolution/size.
+            local_map_size = [
+                [self.local_map_data.info.origin.position.x, self.local_map_data.info.origin.position.x + self.local_map_data.info.width*self.local_map_data.info.resolution],
+                [self.local_map_data.info.origin.position.y, self.local_map_data.info.origin.position.y + self.local_map_data.info.height*self.local_map_data.info.resolution]]
+            self.ssn = SocialSpaces(
+                    bbox=local_map_size,
+                    map_shape=(self.local_map_data.info.height, self.local_map_data.info.width))
+
+        # Warm-up pass: clear all targets (-1 = none), run one controller step and
+        # one raw MPC step. reg_parameter[-1] = 100000 presumably pins the warm-up
+        # solution — TODO confirm against RobotController.
+        self.get_robot_pose()
+        self.recreate_state()
+        self.controller.controller_config.goto_target_id = -1
+        self.controller.controller_config.human_target_id = -1
+        self.controller.controller_config.pan_target_id = -1
+        self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
+        self.controller.step(self.state)
+        self.controller.reg_parameter[-1] = 100000
+        self.controller.mpc.step(state=self.state.joint_angles,
+                                         actions=self.controller.actions + self.controller.initial_action,
+                                         weights=self.controller.weights,
+                                         reg_parameter=self.controller.reg_parameter,
+                                         loss_coef=self.controller.loss_coef,
+                                         loss_rad=self.controller.loss_rad,
+                                         cost_map=self.controller.local_map[:, :, :-1],
+                                         goto_goal=self.controller.goto_goal,
+                                         pan_goal=self.controller.pan_goal,
+                                         human_features=None) #self.controller.human_features)
+
+        self.action_idx = 0
+        self.actions = np.copy(self.controller.initial_action)
+
+        if not self.controller.is_init:
+            rospy.signal_shutdown("BehaviorGenerator shutting down.")
+
+        # NOTE(review): "Arguements" typo lives in a runtime log string below.
+        rospy.loginfo("Arguements passed : ")
+        rospy.loginfo("Config path : {}".format(self.mpc_config_path))
+        rospy.loginfo("BehaviorGenerator Initialization Ended")
+
+
+    def read_robot_config(self, filename=None):
+        """Load the robot YAML config into `self.robot_config` (a RobotConfig).
+
+        Falls back to the `sim2d` package's bundled `config/robot.yaml` when
+        `filename` is None or does not point to an existing file.
+        """
+        if filename is None:
+            filename = pkg_resources.resource_filename(
+                'sim2d', 'config/robot.yaml')
+            rospy.logdebug("No filename provided for the robot configuration, basic robot config loaded")
+        elif os.path.isfile(filename):
+            rospy.logdebug("Desired robot config loaded")
+        else:
+            filename = pkg_resources.resource_filename(
+                'sim2d', 'config/robot.yaml')
+            rospy.logdebug("Desired filename for the robot configuration does not exist, basic robot config loaded")
+        # NOTE(review): open() handle is never closed — prefer `with open(...)`.
+        # FullLoader is fine for this trusted local file; never use it on untrusted input.
+        config = yaml.load(open(filename), Loader=yaml.FullLoader)
+        self.robot_config = RobotConfig(config)
+
+
+    def run(self):
+        """Main loop: call `step()` until rospy shutdown (rate-limited inside step)."""
+        init_t = rospy.Time.now()
+        while not rospy.is_shutdown():
+            self.step(init_t)
+
+    def step(self, init_time):
+        """One control iteration: refresh pose/state, run the MPC, publish status/TF.
+
+        `init_time` is the loop start time, used only for cumulative-time logging.
+        Sleeps on `self.rate` at the end to hold the configured control period.
+        """
+        loop_t = rospy.Time.now()
+        self.get_robot_pose()
+        # Rebuild `self.state` under the lock so subscriber callbacks cannot
+        # mutate the cached HRI/joint data mid-rebuild.
+        with self.lock:
+            self.recreate_state()
+        rospy.logdebug('recreate_state time : {}'.format((rospy.Time.now() - loop_t).to_sec()))
+        self.controller.step(self.state)
+        rospy.logdebug('MPC time : {}'.format((rospy.Time.now() - loop_t).to_sec()))
+        self.publish_controller_status()
+        self.publish_tf_go_to_goals()
+        self.publish_tf_look_at_goals()
+        self.publish_tf_human_goal()
+
+        time_now = rospy.Time.now()
+        rospy.logdebug('BehaviorGenerator time : {}, BehaviorGenerator loop time : {}'.format((time_now - init_time).to_sec(), (time_now - loop_t).to_sec()))
+        rospy.logdebug('Step time : {}'.format((time_now - loop_t).to_sec()))
+
+        self.rate.sleep()
+        self.i += 1
+
+
+    def init_state(self):
+        """Create the mutable world-state object consumed by the controller.
+
+        Declares every attribute the controller reads (initially None), defines
+        the `Config`/`RobotConfig` namedtuple shapes, copies the static robot
+        geometry into the state, then zero-fills the dynamic arrays via
+        `reset_state()`.
+        """
+        # RobotState is presumably defined elsewhere in this module/package —
+        # it is not among the visible imports; TODO confirm.
+        self.state = RobotState()
+        self.list_attr = ["robot_pose",
+                          "robot_velocity",
+                          "joint_angles",
+                          "joint_velocities",
+                          "local_map",
+                          "global_map",
+                          "groups_pose",
+                          "humans_pose",
+                          "humans_velocity",
+                          "humans_visible",
+                          "groups_visible",
+                          "groups_id",
+                          "humans_group_id",
+                          "humans_id",
+                          "config",
+                          "robot_config"]
+        for attr in self.list_attr:
+            self.state.__setattr__(attr, None)
+
+        self.Config = namedtuple('Config', ['global_map_size',
+                                            'global_map_scale',
+                                            'global_map_width',
+                                            'global_map_height',
+                                            'local_map_size',
+                                            'local_map_scale',
+                                            'local_map_width',
+                                            'local_map_height'])
+
+        # NOTE(review): this instance attribute shadows the imported
+        # social_mpc RobotConfig class name in reads of `self.RobotConfig` —
+        # confusing but intentional here (it's the state's lightweight tuple).
+        self.RobotConfig = namedtuple('RobotConfig', ['has_pan',
+                                                      'has_tilt',
+                                                      'min_pan_angle',
+                                                      'max_pan_angle',
+                                                      'base_footprint_radius'])
+        self.state.robot_config = self.RobotConfig(self.robot_config.has_pan,
+                                                   self.robot_config.has_tilt,
+                                                   self.robot_config.min_pan_angle,
+                                                   self.robot_config.max_pan_angle,
+                                                   self.robot_config.base_footprint_radius)
+
+        self.reset_state()
+
+    def reset_state(self):
+        """Zero/reset all dynamic state arrays to their 'empty world' values.
+
+        Ids are filled with the -1 sentinels; poses/velocities are zeroed.
+        Joint arrays are sized by the pan/tilt capabilities of the robot
+        (2 joints, 1 joint, or None when the robot has no head joints).
+        """
+        nb_humans = self.max_humans_world
+        self.state.humans_id = np.ones(nb_humans) * self.human_id_default
+        self.state.humans_group_id = np.ones(nb_humans) * self.group_id_default
+        self.state.groups_id = np.ones(self.max_groups_world) * self.group_id_default
+        # visible flags start at 1; recreate_state sets occupied slots to 0 —
+        # so the convention is presumably 0 = visible/occupied; TODO confirm.
+        self.state.humans_visible = np.ones(nb_humans)
+        self.state.groups_visible = np.ones(self.max_groups_world)
+        self.state.humans_pose = np.zeros((nb_humans, 3))  # x, y, angle
+        self.state.groups_pose = np.zeros((self.max_groups_world, 2))  # x, y
+        self.state.humans_velocity = np.zeros((nb_humans, 2))
+        self.state.robot_pose = np.zeros(3)
+        self.state.robot_velocity = np.zeros(2)
+        if self.state.robot_config.has_pan and self.state.robot_config.has_tilt:
+            self.state.joint_angles = np.zeros(2)
+            self.state.joint_velocities = np.zeros(2)
+        elif not self.state.robot_config.has_pan and not self.state.robot_config.has_tilt:
+            self.state.joint_angles = None
+            self.state.joint_velocities = None
+        else:
+            self.state.joint_angles = np.zeros(1)
+            self.state.joint_velocities = np.zeros(1)
+
+
+    def publish_controller_status(self):
+        """Assemble and publish a ControllerStatus message for the current step.
+
+        Reports active target ids, distance-to-goal error vectors, head joint
+        errors, and an overall IDLE / IS_RUNNING / IS_FAILURE status. Also
+        caches goal coordinates on `self` for the TF-publishing helpers.
+        """
+        controller_status_msg = ControllerStatus()
+        controller_status_msg.goto_target_id = self.controller.controller_config.goto_target_id
+        controller_status_msg.human_target_id = self.controller.controller_config.human_target_id
+        controller_status_msg.pan_target_id = self.controller.controller_config.pan_target_id
+        # NOTE(review): truthiness tests below skip a legitimate target id of 0;
+        # `is not None` would be the safe check — confirm id 0 cannot occur.
+        if self.look_at_body_target:
+            controller_status_msg.pan_target_id = self.look_at_body_target
+        if self.look_at_group_target:
+            controller_status_msg.pan_target_id = self.look_at_group_target
+        if self.look_at_person_target:
+            controller_status_msg.pan_target_id = self.look_at_person_target
+
+        controller_status_msg.is_continuous_look_at = self.is_continuous_look_at
+        controller_status_msg.continuous_look_at_done = self.continuous_look_at_done
+        controller_status_msg.is_continuous_go_to = self.is_continuous_go_to        
+        controller_status_msg.continuous_go_to_done = self.continuous_go_to_done
+
+        # Cache controller outputs: human feature pose, final target pose
+        # (shared_target[2:5]), and the current intermediate waypoint.
+        self.x_human_features, self.y_human_features, self.th_human_features = self.controller.human_features[:3]
+        self.x_final = self.controller.shared_target[2]
+        self.y_final = self.controller.shared_target[3]
+        self.th_final = self.controller.shared_target[4]
+        self.x_wp = self.controller.goto_goal[0]
+        self.y_wp = self.controller.goto_goal[1]
+        self.th_wp = self.controller.goto_goal[2]
+
+        # goto_target_pose[-1] == 1 presumably flags "position goal active" — TODO confirm.
+        if controller_status_msg.goto_target_id != -1 or self.controller.controller_config.goto_target_pose[-1] == 1:
+            dist_to_goto_target = np.linalg.norm(np.array([self.x_robot_pose - self.x_final, self.y_robot_pose - self.y_final]))
+            # [|dx|, |dy|, |dtheta|, euclidean distance]
+            controller_status_msg.dist_to_goto_target = [np.abs(self.x_robot_pose - self.x_final),
+                                              np.abs(self.y_robot_pose - self.y_final),
+                                              np.abs(constraint_angle(self.yaw_robot_pose - self.th_final)),
+                                              dist_to_goto_target]
+
+        if controller_status_msg.human_target_id != -1:
+            # human_features are robot-relative, so the error is the feature itself.
+            dist_to_human_target = np.linalg.norm(np.array([self.x_human_features, self.y_human_features]))
+            controller_status_msg.dist_to_human_target = [np.abs(self.x_human_features),
+                                               np.abs(self.y_human_features),
+                                               np.abs(constraint_angle(self.th_human_features)),
+                                               dist_to_human_target]
+
+        if controller_status_msg.pan_target_id != -1 or self.controller.pan_target_pose is not None or self.head_goal:
+            head_joint_errors = []
+            if self.head_joint_errors:
+                if self.pan_ind is not None:
+                    # Error between the bearing to the pan target and the current
+                    # absolute head heading (robot yaw + pan angle).
+                    head_joint_errors.append(np.abs(constraint_angle(np.arctan2(self.y_pan_target - self.y_robot_pose, self.x_pan_target - self.x_robot_pose) - constraint_angle(self.yaw_robot_pose + self.current_pan))))
+                if self.tilt_ind is not None:
+                    head_joint_errors.append(0.)
+                controller_status_msg.head_joint_errors = head_joint_errors
+
+        # Status resolution order: IDLE, then IS_RUNNING, then IS_FAILURE —
+        # a later assignment overrides an earlier one.
+        if controller_status_msg.pan_target_id == -1 and self.controller.pan_target_pose is None and controller_status_msg.human_target_id == -1 and controller_status_msg.goto_target_id == -1 and self.controller.controller_config.goto_target_pose[-1] == 0:
+            controller_status_msg.status = ControllerStatus.IDLE
+        if controller_status_msg.pan_target_id != -1 or self.controller.pan_target_pose is not None or controller_status_msg.human_target_id != -1 or controller_status_msg.goto_target_id != -1 or self.controller.controller_config.goto_target_pose[-1] == 1:
+            controller_status_msg.status = ControllerStatus.IS_RUNNING
+        if (self.controller.is_failure or self.wrong_target) and not(self.go_to_done or self.look_at_done):
+            rospy.logwarn('FAILURE from controller {}, from target {}'.format(self.controller.is_failure, self.wrong_target))
+            controller_status_msg.status = ControllerStatus.IS_FAILURE
+        self.controller_status_msg = controller_status_msg
+        self._controller_status_pub.publish(self.controller_status_msg)
+
+
+    def publish_tf_human_goal(self):
+        """Broadcast the tracked human target pose as TF frame "human_goal".
+
+        Published as a child of the map frame, only while a human target is
+        active (id != -1). Pose comes from the values cached by
+        `publish_controller_status`.
+        """
+        if self.controller.controller_config.human_target_id != -1:
+            current_time = rospy.Time.now()
+            quat = tf.transformations.quaternion_from_euler(0, 0, self.th_human_features)
+            self.tf_broadcaster.sendTransform(
+             (self.x_human_features, self.y_human_features, 0),
+             quat,
+             current_time,
+             "human_goal",
+             self.map_frame
+            )
+
+
+    def publish_tf_go_to_goals(self):
+        """Broadcast the final navigation goal and the current MPC waypoint as TF.
+
+        "final goal" is a child of the map frame; "intermediate waypoint goal"
+        is a child of the robot frame (the MPC waypoint is robot-relative).
+        NOTE(review): TF frame names with spaces are non-standard and may be
+        rejected by some tools — confirm these render in rviz.
+        """
+        if self.base_goal:
+            current_time = rospy.Time.now()
+            quat_final = tf.transformations.quaternion_from_euler(0, 0, self.th_final)
+            quat_wp = tf.transformations.quaternion_from_euler(0, 0, self.th_wp)
+
+            self.tf_broadcaster.sendTransform(
+             (self.x_final, self.y_final, 0),
+             quat_final,
+             current_time,
+             "final goal",
+             self.map_frame
+            )
+
+            self.tf_broadcaster.sendTransform(
+             (self.x_wp, self.y_wp, 0),
+             quat_wp,
+             current_time,
+             "intermediate waypoint goal",
+             self.robot_frame
+            )
+
+
+    def publish_tf_look_at_goals(self):
+        """Broadcast the active look-at target as TF frame "pan goal" (map frame).
+
+        Orientation is set to the robot's yaw rather than a bearing to the
+        target — presumably only the translation matters for visualization.
+        NOTE(review): frame name contains a space (see publish_tf_go_to_goals).
+        """
+        if self.head_goal:
+            current_time = rospy.Time.now()
+            quat_pan_goal = tf.transformations.quaternion_from_euler(0, 0, self.yaw_robot_pose)
+            self.tf_broadcaster.sendTransform(
+             (self.x_pan_target, self.y_pan_target, 0),
+             quat_pan_goal,
+             current_time,
+             "pan goal",
+             self.map_frame
+            )
+
+
+    def publish_head_cmd(self, pan, tilt):
+        """Send a single-point JointTrajectory moving the head to (pan, tilt).
+
+        Angles are clamped by `_check_joint_pos_limits` first; the point's
+        time_from_start equals one controller period h, so the head controller
+        paces the motion to the control loop.
+        """
+        pan_angle, tilt_angle = self._check_joint_pos_limits(pan, tilt)
+        
+        head_msg = JointTrajectory()
+        p = JointTrajectoryPoint()
+        p.positions = [None] * 2
+        p.positions[0] = pan_angle
+        p.positions[1] = tilt_angle
+        # head_msg.header.frame_id = "head"
+        # head_msg.header.stamp = rospy.Time.now()
+        head_msg.joint_names = [None]*2
+        head_msg.joint_names[0] = "head_1_joint"  # pan
+        head_msg.joint_names[1] = "head_2_joint"  # tilt
+        head_msg.points = [None]
+        head_msg.points[0] = p
+        head_msg.points[0].time_from_start = rospy.Duration.from_sec(self.controller.controller_config.h)
+        self._pan_vel_pub.publish(head_msg)
+        rospy.loginfo('pan_angle goal : {}  tilt angle goal : {}'.format(pan_angle, tilt_angle))
+
+
+    def publish_vel(self, action):
+        """Publish a clamped base Twist from an MPC action triple.
+
+        `action` is [pan_vel, angular, linear]; pan_vel is currently unused
+        (the head-command path below is disabled — see the dead string block).
+        Clamped velocities are also cached on self.lin_vel / self.ang_vel.
+        """
+        pan_vel, ang, lin = action[0], action[1], action[2]
+        self.lin_vel, self.ang_vel = self._check_joint_vel_limits(lin, ang)
+        twist_msg = Twist(Vector3(self.lin_vel, 0, 0), Vector3(0, 0, self.ang_vel))
+        self._cmd_vel_pub.publish(twist_msg)
+
+        # The triple-quoted block below is intentionally dead code (disabled
+        # head-velocity integration path), kept verbatim.
+        """
+        pan_angle = self.current_pan
+        pan_angle += self.controller.controller_config.h*pan_vel
+        tilt_angle = 0.  #self.current_tilt
+        pan_angle, tilt_angle = self._check_joint_pos_limits(pan_angle, tilt_angle)
+
+        head_msg = JointTrajectory()
+        p = JointTrajectoryPoint()
+        p.positions = [None] * 2
+        p.positions[0] = pan_angle
+        p.positions[1] = tilt_angle
+        # head_msg.header.frame_id = "head"
+        # head_msg.header.stamp = rospy.Time.now()
+        head_msg.joint_names = [None]*2
+        head_msg.joint_names[0] = "head_1_joint"  # pan
+        head_msg.joint_names[1] = "head_2_joint"  # tilt
+        head_msg.points = [None]
+        head_msg.points[0] = p
+        head_msg.points[0].time_from_start = rospy.Duration.from_sec(self.controller.controller_config.h)
+        self._pan_vel_pub.publish(head_msg)
+        """
+        rospy.loginfo("vel to robot : {}, {}, {}".format(lin, ang, pan_vel))
+
+
+    def get_robot_pose(self):
+        """Update (x, y, yaw) of the robot in the map frame from TF.
+
+        Uses the latest available transform (rospy.Time(0)). On failure the
+        cached pose is reset to (0, 0, 0) and None is returned.
+        NOTE(review): the logwarn below fires on every *successful* lookup —
+        it is a leftover from the disabled exact-time attempt above and should
+        live in a fallback branch. The bare `except:` also swallows everything,
+        including KeyboardInterrupt; catching tf's lookup exceptions would be safer.
+        """
+        # try:
+        #     map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, self.timestamp_tracking_latest)
+        # except:
+        try:
+            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
+            rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
+        except:
+            rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
+            self.x_robot_pose, self.y_robot_pose, self.yaw_robot_pose = 0., 0., 0.
+            return None
+        self.x_robot_pose, self.y_robot_pose, _ = map_2_robot[0]
+        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_robot[1][0],
+                                                                map_2_robot[1][1],
+                                                                map_2_robot[1][2],
+                                                                map_2_robot[1][3]])
+        # Normalize yaw into the controller's expected angle range.
+        self.yaw_robot_pose = float(constraint_angle(yaw))
+
+
+    def recreate_state(self):
+        self.reset_state()
+        self.timestamp_tracking_latest = self.timestamp_tracking
+
+        self.current_pan = self.joint_states_data.position[self.pan_ind]
+        self.current_tilt = self.joint_states_data.position[self.tilt_ind]
+        self.state.robot_pose = np.array([self.x_robot_pose, self.y_robot_pose, self.yaw_robot_pose])
+        # self.state.robot_velocity = np.array([lin, ang])
+
+        self.state.joint_angles = np.array([self.joint_states_data.position[self.pan_ind]])
+        self.state.joint_velocities = np.array([self.joint_states_data.velocity[self.pan_ind]])
+        
+        for human_id, human_data in self.hri_data[0].items():
+            human_id = int(human_id[1:])
+            humans_id_list = self.state.humans_id.tolist()
+            if human_id in humans_id_list:
+                index = humans_id_list.index(human_id)
+            elif self.human_id_default in humans_id_list:
+                index = humans_id_list.index(self.human_id_default)
+            else:
+                rospy.logerr("No more space in state.humans_ arrays. Number max is {}. It is fixed because of the use of SharedMemory multiprocessing".format(self.max_humans_world))
+            self.state.humans_id[index] = human_id
+            self.state.humans_visible[index] = 0.  # how to get it ?
+            if len(human_data) >= 3:
+                self.state.humans_group_id[index] = int(human_data[2][3:])  # 0: transform, 1:person_id, 2:group_id if group
+
+            human_pose = human_data[0].transform.translation
+            human_quat = human_data[0].transform.rotation
+            x_pos, y_pos = human_pose.x, human_pose.y
+            (_, _, yaw) = tf.transformations.euler_from_quaternion([
+                human_quat.x,
+                human_quat.y,
+                human_quat.z,
+                human_quat.w])
+
+            lin = 0.  # how to get it ?
+            ang = 0.  # how to get it ?
+
+            self.state.humans_pose[index, :] = [x_pos, y_pos, constraint_angle(yaw)]
+            self.state.humans_velocity[index, :] = [lin, ang]
+
+            if self.look_at_body_target is not None:
+                if (self.controller.pan_goal[-1] == 1 or self.head_goal) and self.look_at_body_target == human_id:
+                    self.head_joint_errors = True
+                    self.x_pan_target = x_pos
+                    self.y_pan_target = y_pos
+            
+            if self.look_at_person_target is not None:
+                if (self.controller.pan_goal[-1] == 1 or self.head_goal) and self.look_at_person_target == human_id:
+                    self.head_joint_errors = True
+                    self.x_pan_target = x_pos
+                    self.y_pan_target = y_pos
+
+        rospy.logdebug('humans_id : {}'.format(self.state.humans_id.tolist()))
+
+        for group_id, group_data in self.hri_data[1].items():
+            group_id  = int(group_id[3:])
+            groups_id_list = self.state.groups_id.tolist()
+            if group_id in groups_id_list:
+                index = groups_id_list.index(group_id)
+            elif self.group_id_default in groups_id_list:
+                index = groups_id_list.index(self.group_id_default)
+            else:
+                rospy.logerr("No more space in state.groups_ arrays. Number max is {}. It is fixed because of the use of SharedMemory multiprocessing".format(self.max_groups_world))
+            self.state.groups_id[index] = group_id
+            self.state.groups_visible[index] = 0.  # how to get it ?
+
+            group_pose = group_data[0].transform.translation
+            x_pos, y_pos = group_pose.x, group_pose.y
+            self.state.groups_pose[index, :] = [x_pos, y_pos]
+
+            if self.look_at_group_target is not None:
+                if (self.controller.pan_goal[-1] == 1 or self.head_goal) and self.look_at_group_target == group_id:
+                    self.head_joint_errors = True
+                    self.x_pan_target = x_pos
+                    self.y_pan_target = y_pos
+
+        rospy.logdebug('groups_id ; {}'.format(self.state.groups_id.tolist()))
+
+        # for person_id, person_data in self.hri_data[2].items():
+        #     print('person_id', person_id, 'person_data', person_data)
+        # for body_id, body_data in self.hri_data[3].items():
+        #    print('body_id', body_id, 'body_data', body_data)
+
+        if self.look_at_position_target is not None:
+            if self.controller.pan_goal[-1] == 1 or self.head_goal:
+                self.head_joint_errors = True
+                if self.look_at_position_target[-2]:
+                    (self.x_pan_target, self.y_pan_target) = local_to_global(np.asarray([self.x_robot_pose, self.y_robot_pose, self.yaw_robot_pose]), self.look_at_position_target[:2])  # local to global map target
+                else:
+                    self.x_pan_target = self.look_at_position_target[0]
+                    self.y_pan_target = self.look_at_position_target[1]
+        
+        if self.head_goal:
+            if self.look_at_body_target is not None:
+                if float(self.look_at_body_target) not in self.state.humans_id.tolist():
+                    self.reset_look_at_target()
+                    self.wrong_target = True
+                    if self.is_continuous_look_at:
+                        self.continuous_look_at_done = True
+                    rospy.logwarn('reset head_goal look_at_body_target')
+            if self.look_at_person_target is not None:
+                if float(self.look_at_person_target) not in self.state.humans_id.tolist():
+                    self.reset_look_at_target()
+                    self.wrong_target = True
+                    if self.is_continuous_look_at:
+                        self.continuous_look_at_done = True
+                    rospy.logwarn('reset head_goal look_at_person_target')
+            if self.look_at_group_target is not None:
+                if float(self.look_at_group_target) not in self.state.groups_id.tolist():
+                    self.reset_look_at_target()
+                    self.wrong_target = True
+                    if self.is_continuous_look_at:
+                        self.continuous_look_at_done = True
+                    rospy.logwarn('reset head_goal look_at_group_target')
+        
+        if self.base_goal:
+            if self.go_to_body_target is not None:
+                if float(self.go_to_body_target) not in self.state.humans_id.tolist():
+                    if self.target_id_absent_count > self.target_id_absent_treshold:
+                        self.reset_go_to_target()
+                        self.wrong_target = True
+                        if self.is_continuous_go_to:
+                            self.continuous_go_to_done = True
+                        rospy.logwarn('reset base_goal go_to_body_target')
+                    else:
+                        self.target_id_absent_count += 1
+                else:
+                    self.target_id_absent_count = 0
+            if self.go_to_person_target is not None:
+                if float(self.go_to_person_target) not in self.state.humans_id.tolist():
+                    if self.target_id_absent_count > self.target_id_absent_treshold:
+                        self.reset_go_to_target()
+                        self.wrong_target = True
+                        if self.is_continuous_go_to:
+                            self.continuous_go_to_done = True
+                        rospy.logwarn('reset base_goal go_to_person_target')
+                    else:
+                        self.target_id_absent_count += 1
+                else:
+                    self.target_id_absent_count = 0
+            if self.go_to_group_target is not None:
+                if float(self.go_to_group_target) not in self.state.groups_id.tolist():
+                    if self.target_id_absent_count > self.target_id_absent_treshold:
+                        self.reset_go_to_target()
+                        self.wrong_target = True
+                        if self.is_continuous_go_to:
+                            self.continuous_go_to_done = True
+                        rospy.logwarn('reset base_goal go_to_group_target')
+                    else:
+                        self.target_id_absent_count += 1
+                else:
+                    self.target_id_absent_count = 0
+
+        self.state.config = self.Config(self.global_map_size,
+                                        round(1/self.global_map_resolution),
+                                        self.global_map_width,
+                                        self.global_map_height,
+                                        self.local_map_size,
+                                        round(1/self.local_map_resolution),
+                                        self.local_map_width,
+                                        self.local_map_height)
+
+        self.state.global_map = self.global_map_static
+        local_map = np.zeros((*self.local_static_map.shape, 3))
+        local_map[:, :, 0] = self.local_static_map
+        local_map[:, :, 1] = self.get_social_cost_map(data=self.local_map_data)
+        self.state.local_map = local_map
+
+
+    def _check_joint_vel_limits(self, lin_vel, ang_vel):
+        if lin_vel > self.controller.controller_config.u_ub[-1]:
+            rospy.logwarn("The linear velocity is greater than the maximum allowed. The maximum velocity is set.")
+            lin_vel = self.controller.controller_config.u_ub[-1]
+        if lin_vel < self.controller.controller_config.u_lb[-1]:
+            rospy.logwarn("The linear velocity is lower than the minimum allowed. The minimum velocity is set.")
+            lin_vel = self.controller.controller_config.u_lb[-1]
+        if ang_vel > self.controller.controller_config.u_ub[-2]:
+            rospy.logwarn("The angular velocity is greater than the maximum allowed. The maximum velocity is set.")
+            ang_vel = self.controller.controller_config.u_ub[-2]
+        if ang_vel < self.controller.controller_config.u_lb[-2]:
+            rospy.logwarn("The angular velocity is lower than the minimum allowed. The minimum velocity is set.")
+            ang_vel = self.controller.controller_config.u_lb[-2]
+        return lin_vel, ang_vel
+
+
+    def _check_joint_pos_limits(self, p, t):
+        if p < self.robot_config.min_pan_angle:
+            p = self.robot_config.min_pan_angle
+            rospy.logwarn("The pan angle desired is lower than the miminum allowed. The minimum oan angle is set.")
+        if p > self.robot_config.max_pan_angle:
+            p = self.robot_config.max_pan_angle
+            rospy.logwarn("The pan angle desired is greater than the maximum allowed. The maximum oan angle is set.")
+        if t < self.robot_config.min_tilt_angle:
+            t = self.robot_config.min_tilt_angle
+            rospy.logwarn("The tilt angle desired is lower than the miminum allowed. The minimum oan angle is set.")
+        if t > self.robot_config.max_tilt_angle:
+            t = self.robot_config.max_tilt_angle
+            rospy.logwarn("The tilt angle desired is higher than the maximum allowed. The maximum oan angle is set.")
+        return p, t
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        # self._check_rtabmap_ready()
+        self._check_joint_states_ready()
+        self._check_tracked_persons_2d_ready()
+        self._check_global_map_ready()
+        self._check_local_map_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_global_map_ready(self):
+        self.global_map_data = None
+        rospy.logdebug("Waiting for {} to be READY...".format(self.global_occupancy_map_topic))
+        while self.global_map_data is None and not rospy.is_shutdown():
+            try:
+                self.global_map_data = rospy.wait_for_message(self.global_occupancy_map_topic, OccupancyGrid, timeout=5.0)
+                rospy.logdebug("Current {} READY=>".format(self.global_occupancy_map_topic))
+
+            except:
+                rospy.logerr("Current {} not ready yet, retrying for getting global map".format(self.global_occupancy_map_topic))
+        return self.global_map_data
+
+
+    def _check_local_map_ready(self):
+        self.local_map_data = None
+        rospy.logdebug("Waiting for {} to be READY...".format(self.local_occupancy_map_topic))
+        while self.local_map_data is None and not rospy.is_shutdown():
+            try:
+                self.local_map_data = rospy.wait_for_message(self.local_occupancy_map_topic, OccupancyGrid, timeout=5.0)
+                rospy.logdebug("Current {} READY=>".format(self.local_occupancy_map_topic))
+
+            except:
+                rospy.logerr("Current {} not ready yet, retrying for getting local map".format(self.local_occupancy_map_topic))
+        return self.local_map_data
+
+
+    def _check_tracked_persons_2d_ready(self):
+        self.tracked_persons_2d_data = None
+        rospy.logdebug("Waiting for /tracked_persons_2d to be READY...")
+        while self.tracked_persons_2d_data is None and not rospy.is_shutdown():
+            try:
+                self.tracked_persons_2d_data = rospy.wait_for_message("/tracked_persons_2d", TrackedPersons2d, timeout=5.0)
+                rospy.logdebug("Current /tracked_persons_2d READY=>")
+
+            except:
+                rospy.logerr("Current /tracked_persons_2d not ready yet, retrying for getting tracked persons 2d")
+        return self.tracked_persons_2d_data
+
+
+    def _check_joint_states_ready(self):
+        self.joint_states_data = None
+        rospy.logdebug("Waiting for /joint_states to be READY...")
+        while self.joint_states_data is None and not rospy.is_shutdown():
+            try:
+                self.joint_states_data = rospy.wait_for_message("/joint_states", JointState, timeout=5.0)
+                rospy.logdebug("Current /joint_states READY=>")
+
+            except:
+                rospy.logerr("Current /joint_states not ready yet, retrying for getting joint states")
+        if not self.get_indexes:
+            # joint_states : head_1_joint, head_2_joint, wheel_left_joint, wheel_right_joint, caster_left_joint, caster_right_joint
+            for index, name in enumerate(self.joint_states_data.name):
+                if name == "head_1_joint":
+                    self.pan_ind = index
+                if name == "head_2_joint":
+                    self.tilt_ind = index
+            self.get_indexes = True
+
+        return self.joint_states_data
+    
+
+    def _check_rtabmap_ready(self):
+        rospy.logdebug("Waiting for rtabmap pose to be READY...")
+        while self.rtabmap_ready is None and not rospy.is_shutdown():
+            try:
+                self.tf_listener.waitForTransform(self.map_frame, self.robot_frame, rospy.Time(0), rospy.Duration(5.0))
+                self.rtabmap_ready = True
+                rospy.logdebug("Current rtabmap pose READY=>")
+
+            except:
+                rospy.logerr("Current rtabmap pose not ready yet, retrying for getting rtabmap pose")
+        return self.rtabmap_ready
+    
+
+    def _check_publishers_connection(self):
+        """
+        Checks that all the publishers are working
+        :return:
+        """
+        rate = rospy.Rate(10)  # 10hz
+        while self._cmd_vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():
+            rospy.logdebug("No susbribers to _cmd_vel_pub yet so we wait and try again")
+            try:
+                rate.sleep()
+            except rospy.ROSInterruptException:
+                # This is to avoid error when world is rested, time when backwards.
+                pass
+        rospy.logdebug("_cmd_vel_pub Publisher Connected")
+
+        while self._pan_vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():
+            rospy.logdebug("No susbribers to _pan_vel_pub yet so we wait and try again")
+            try:
+                rate.sleep()
+            except rospy.ROSInterruptException:
+                # This is to avoid error when world is rested, time when backwards.
+                pass
+        rospy.logdebug("_pan_vel_pub Publisher Connected")
+        rospy.logdebug("All Publishers READY")
+
+
+    def reset_look_at_data(self):
+        self.look_at_person_data = None
+        self.look_at_group_data = None
+        self.look_at_position_data = None
+        self.look_at_body_data = None
+        self.wrong_target = False
+        self.head_joint_errors = False
+        self.look_at_done = False
+
+
+    def reset_look_at_target(self):
+        self.look_at_body_target = None
+        self.look_at_group_target = None
+        self.look_at_person_target = None
+        self.look_at_position_target = None
+        self.head_goal = False
+        self.head_joint_errors = False
+        self.pan_angle = 0.
+        self.tilt_angle = 0.
+        self.x_pan_target = 0.
+        self.y_pan_target = 0.
+
+
+    def reset_go_to_target(self):
+        self.go_to_group_target = None
+        self.go_to_body_target = None
+        self.go_to_person_target = None
+        self.go_to_position_target = None
+        self.base_goal = False
+        self.reset_controller_go_to_values()
+        self.lin_vel = 0.
+        self.ang_vel = 0.
+
+
+    def reset_continuous_look_at_flag(self):
+        self.is_continuous_look_at = False
+        self.continuous_look_at_done = False
+
+
+    def reset_continuous_go_to_flag(self):
+        self.is_continuous_go_to = False
+        self.continuous_go_to_done = False
+
+
+    def reset_go_to_data(self):
+        self.go_to_position_data = None
+        self.go_to_body_data = None
+        self.go_to_person_data = None
+        self.go_to_group_data = None
+        self.wrong_target = False
+        self.go_to_done = False
+
+
    def reset_controller_go_to_values(self):
        """Clear the MPC controller's go-to target under the controller lock.

        Resets both target ids to -1 (no target), zeroes the target pose and
        re-applies mode/weights so the controller returns to its default
        behaviour, then resets its shared values.
        """
        with self.lock_controller:
            # -1 is the "no target" sentinel for both ids.
            self.controller.controller_config.goto_target_id = -1
            self.controller.controller_config.human_target_id = -1
            self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
            self.controller.set_goto_target(self.state, target=-1)
            # Re-derive controller mode and cost weights for the cleared target.
            self.controller.set_mode()
            self.controller.set_weights()
            self.controller.reset_shared_values()
+
+
    def _look_at_body_callback(self, data):
        """Handle a look_at request targeting a tracked body.

        ``data.id`` carries a one-character prefix before the numeric body id
        (stripped with ``id[1:]`` — TODO confirm prefix letter); an id of -1
        acts as a stop/cancel request.
        """
        self.reset_look_at_data()
        self.look_at_body_data = data
        # Strip the single-character prefix to recover the numeric id.
        look_at_body_target = int(self.look_at_body_data.id[1:])
        if float(look_at_body_target) in self.state.humans_id.tolist() and look_at_body_target != -1:
            # Known body: arm the head goal on this target.
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
            self.head_goal = True
            self.look_at_body_target = look_at_body_target
            if self.look_at_body_data.continuous:
                self.is_continuous_look_at = True
        elif look_at_body_target == -1:
            # reset but not the look_at targets
            self.look_at_body_data = None
            self.look_at_done = True
            if not self.is_continuous_look_at:
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
        else:
            # Unknown id: report failure and clear any active head goal.
            self.wrong_target = True
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
+
+
    def _look_at_person_callback(self, data):
        """Handle a look_at request targeting a tracked person.

        The person id is mapped to a body id via ``self.hri_data[2]``
        (person->body mapping — presumably body ids like ``b<N>``; verify
        against the HRI interface). The literal id ``'p-1'`` acts as a
        stop/cancel request.
        """
        self.reset_look_at_data()
        self.look_at_person_data = data
        look_at_person_target = self.look_at_person_data.id
        if look_at_person_target in self.hri_data[2].keys():
            # Map person id -> body id, then strip the one-character prefix.
            look_at_person_target = int(self.hri_data[2][look_at_person_target][1:])
            if float(look_at_person_target) in self.state.humans_id.tolist():
                # Known, tracked person: arm the head goal on this target.
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
                self.head_goal = True
                self.look_at_person_target = look_at_person_target
                if self.look_at_person_data.continuous:
                    self.is_continuous_look_at = True
            else:
                # Mapped body is not in the current state: report failure.
                self.wrong_target = True
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
        elif look_at_person_target == 'p-1':
            # reset but not the look_at targets
            self.look_at_person_data = None
            self.look_at_done = True
            if not self.is_continuous_look_at:
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
        else:
            # Unknown person id: report failure and clear any active head goal.
            self.wrong_target = True
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
+
+
    def _look_at_group_callback(self, data):
        """Handle a look_at request targeting a tracked group.

        ``data.id`` carries a three-character prefix before the numeric group
        id (stripped with ``id[3:]`` — TODO confirm prefix); an id of -1 acts
        as a stop/cancel request.
        """
        self.reset_look_at_data()
        self.look_at_group_data = data
        # Strip the three-character prefix to recover the numeric group id.
        look_at_group_target = int(self.look_at_group_data.id[3:])
        if float(look_at_group_target) in self.state.groups_id.tolist() and look_at_group_target != -1:
            # Known group: arm the head goal on this target.
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
            self.head_goal = True
            self.look_at_group_target = look_at_group_target
            if self.look_at_group_data.continuous:
                self.is_continuous_look_at = True
        elif look_at_group_target == -1:
            # reset but not the look_at targets
            self.look_at_group_data = None
            self.look_at_done = True
            if not self.is_continuous_look_at:
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
        else:
            # Unknown group id: report failure and clear any active head goal.
            self.wrong_target = True
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
+
+
    def _look_at_position_callback(self, data):
        """Handle a look_at request targeting a fixed position.

        ``data.look_at_position`` is an array whose last element encodes the
        request kind: falsy -> invalid target, 1 -> new target, any other
        truthy value -> stop/cancel. The second-to-last element is read
        elsewhere as a local-frame flag — TODO confirm layout.
        """
        self.reset_look_at_data()
        self.look_at_position_data = data
        look_at_position_target = np.asarray(self.look_at_position_data.look_at_position)
        if not look_at_position_target[-1]:
            # Invalid request: report failure and clear any active head goal.
            self.wrong_target = True
            self.reset_look_at_target()
            self.reset_continuous_look_at_flag()
        else:
            if look_at_position_target[-1] == 1:
                # New position target: arm the head goal.
                self.reset_look_at_target()
                self.reset_continuous_look_at_flag()
                self.head_goal = True
                self.look_at_position_target = look_at_position_target
                if self.look_at_position_data.continuous:
                    self.is_continuous_look_at = True
            else:
                # reset but not the look_at targets
                self.look_at_position_data = None
                self.look_at_done = True
                if not self.is_continuous_look_at:
                    self.reset_look_at_target()
                    self.reset_continuous_look_at_flag()
+
+
    def _go_to_position_callback(self, data):
        """Handle a go_to request targeting a fixed pose.

        ``data.go_to_target_pose`` is an array whose last element encodes the
        request kind: falsy -> invalid target, 1 -> new target, any other
        truthy value -> stop/cancel.
        """
        self.reset_go_to_data()
        self.go_to_position_data = data
        go_to_position_target = np.asarray(self.go_to_position_data.go_to_target_pose)
        if not go_to_position_target[-1]:
            # Invalid request: report failure and clear any active base goal.
            self.wrong_target = True
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
        else:
            if go_to_position_target[-1] == 1:
                # New pose target: arm the base goal and hand it to the MPC.
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
                self.base_goal = True
                self.go_to_position_target = go_to_position_target
                with self.lock_controller:
                    # Pose-based goal: no id targets, pose given explicitly.
                    self.controller.controller_config.goto_target_id = -1
                    self.controller.controller_config.human_target_id = -1
                    self.controller.controller_config.goto_target_pose = go_to_position_target
                    self.controller.set_goto_target(self.state, target=go_to_position_target)
                    self.controller.set_mode()
                    self.controller.set_weights()
                if self.go_to_position_data.continuous:
                    self.is_continuous_go_to = True
            else:
                # reset controller
                self.go_to_position_data = None
                self.go_to_done = True
                if not self.is_continuous_go_to:
                    self.reset_go_to_target()
                    self.reset_continuous_go_to_flag()
+
+
    def _go_to_body_callback(self, data):
        """Handle a go_to request targeting a tracked body.

        ``data.id`` carries a one-character prefix before the numeric body id
        (stripped with ``id[1:]``); an id of -1 acts as a stop/cancel request.
        """
        self.reset_go_to_data()
        self.go_to_body_data = data
        # Strip the single-character prefix to recover the numeric id.
        go_to_body_target = int(self.go_to_body_data.id[1:])
        if float(go_to_body_target) in self.state.humans_id.tolist() and go_to_body_target != -1:
            # Known body: arm the base goal and hand the id to the MPC.
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
            self.base_goal = True
            self.go_to_body_target = go_to_body_target
            with self.lock_controller:
                self.controller.controller_config.goto_target_id = go_to_body_target
                self.controller.controller_config.human_target_id = -1
                self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
                self.controller.set_goto_target(self.state, target=go_to_body_target)
                self.controller.set_mode()
                self.controller.set_weights()
            if self.go_to_body_data.continuous:
                self.is_continuous_go_to = True
        elif go_to_body_target == -1:
            # reset controller
            self.go_to_body_data = None
            self.go_to_done = True
            if not self.is_continuous_go_to:
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
        else:
            # Unknown id: report failure and clear any active base goal.
            self.wrong_target = True
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
+
+
    def _go_to_group_callback(self, data):
        """Handle a go_to request targeting a tracked group.

        ``data.id`` carries a three-character prefix before the numeric group
        id (stripped with ``id[3:]``); an id of -1 acts as a stop/cancel
        request.
        """
        self.reset_go_to_data()
        self.go_to_group_data = data
        # Strip the three-character prefix to recover the numeric group id.
        go_to_group_target = int(self.go_to_group_data.id[3:])
        if float(go_to_group_target) in self.state.groups_id.tolist() and go_to_group_target != -1:
            # Known group: arm the base goal and hand the id to the MPC.
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
            self.base_goal = True
            self.go_to_group_target = go_to_group_target
            with self.lock_controller:
                self.controller.controller_config.goto_target_id = go_to_group_target
                self.controller.controller_config.human_target_id = -1
                self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
                self.controller.set_goto_target(self.state, target=go_to_group_target, group=True)
                self.controller.set_mode()
                self.controller.set_weights()
                # NOTE(review): unlike _go_to_body_callback, the continuous
                # flag is set while still holding the lock — presumably
                # harmless, but inconsistent; confirm intent.
                if self.go_to_group_data.continuous:
                    self.is_continuous_go_to = True
        elif go_to_group_target == -1:
            # reset controller
            self.go_to_group_data = None
            self.go_to_done = True
            if not self.is_continuous_go_to:
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
        else:
            # Unknown group id: report failure and clear any active base goal.
            self.wrong_target = True
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
+
+
    def _go_to_person_callback(self, data):
        """Handle a go_to request targeting a tracked person.

        The person id is mapped to a body id via ``self.hri_data[2]``
        (person->body mapping). The literal id ``'p-1'`` acts as a
        stop/cancel request.
        """
        self.reset_go_to_data()
        self.go_to_person_data = data
        go_to_person_target = self.go_to_person_data.id
        if go_to_person_target in self.hri_data[2].keys():
            # Map person id -> body id, then strip the one-character prefix.
            go_to_person_target = int(self.hri_data[2][self.go_to_person_data.id][1:])
            if float(go_to_person_target) in self.state.humans_id.tolist():
                # Known, tracked person: arm the base goal and hand it to the MPC.
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
                self.base_goal = True
                self.go_to_person_target = go_to_person_target
                with self.lock_controller:
                    self.controller.controller_config.goto_target_id = go_to_person_target
                    self.controller.controller_config.human_target_id = -1
                    self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
                    self.controller.set_goto_target(self.state, target=go_to_person_target)
                    self.controller.set_mode()
                    self.controller.set_weights()
                    # NOTE(review): the continuous flag is set while still
                    # holding the lock — inconsistent with _go_to_body_callback;
                    # confirm intent.
                    if self.go_to_person_data.continuous:
                        self.is_continuous_go_to = True
            else:
                # Mapped body is not in the current state: report failure.
                self.wrong_target = True
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
        elif go_to_person_target == 'p-1':
            # reset controller
            self.go_to_person_data = None
            self.go_to_done = True
            if not self.is_continuous_go_to:
                self.reset_go_to_target()
                self.reset_continuous_go_to_flag()
        else:
            # Unknown person id: report failure and clear any active base goal.
            self.wrong_target = True
            self.reset_go_to_target()
            self.reset_continuous_go_to_flag()
+
+
+    def _joint_states_callback(self, data):
+        with self.lock:
+            self.joint_states_data = data
+
+
+    def _tracked_persons_2d_callback(self, data):
+        with self.lock:
+            self.tracked_persons_2d_data = data
+            self.timestamp_tracking = data.header.stamp
+            if self.controller.actions is not None:
+                self.action_idx = int((self.timestamp_tracking - self.timestamp_tracking_latest).to_sec()/self.controller.controller_config.h)
+                self.action_idx = min(self.action_idx, len(self.controller.actions) - 1)
+                self.actions = self.controller.actions
+                # self.publish_vel(self.controller.actions[self.action_idx])   
+
+
+    def _global_map_callback(self, data):
+        with self.lock:
+            self.global_map_data = data
+            self.timestamp_global_map_cb = self.global_map_data.header.stamp
+            self.x_global_map = self.global_map_data.info.origin.position.x
+            self.y_global_map = self.global_map_data.info.origin.position.y
+            self.global_map_width = self.global_map_data.info.width
+            self.global_map_height = self.global_map_data.info.height
+            self.global_map_resolution = self.global_map_data.info.resolution
+            self.global_map_size = [[self.x_global_map, self.x_global_map + self.global_map_width*self.global_map_resolution],[self.y_global_map, self.y_global_map + self.global_map_height*self.global_map_resolution]]
+            self.last_shape_global_map = (self.global_map_height, self.global_map_width)
+            self.global_map_static = (np.asarray(self.global_map_data.data) / 100).reshape(self.last_shape_global_map)
+
+
+    def _local_map_callback(self, data):
+        with self.lock:
+            self.local_map_data = data
+            self.x_local_map = self.local_map_data.info.origin.position.x
+            self.y_local_map = self.local_map_data.info.origin.position.y
+            self.local_map_width = self.local_map_data.info.width
+            self.local_map_height = self.local_map_data.info.height
+            self.local_map_resolution = self.local_map_data.info.resolution
+            self.local_map_size = [[self.x_local_map, self.x_local_map + self.local_map_width*self.local_map_resolution],[self.y_local_map, self.y_local_map + self.local_map_height*self.local_map_resolution]]
+            self.last_shape_local_map = (self.local_map_height, self.local_map_width)
+            self.local_static_map = (np.asarray(self.local_map_data.data) / 100).reshape(self.last_shape_local_map)
+
+
    def get_social_cost_map(self, data):
        """Compute and publish the local social cost map around the robot.

        Builds the list of visible humans (robot-local pose, heading offset,
        speed), optionally their groups, and evaluates ``self.ssn.calc_dsz``
        on them. The result is scaled to 0-100, written back into ``data``
        (the incoming OccupancyGrid is mutated in place) and published on
        ``self._local_social_cost_map_pub``.

        Args:
            data: occupancy grid message reused as the output container.

        Returns:
            The raw cost field ``f_dsz`` returned by ``calc_dsz`` —
            presumably a 2-D float array matching the local map; confirm
            against the SSN implementation.
        """
        groups = None
        # Indices of currently visible humans (visibility flag == 0 means
        # visible here — TODO confirm encoding).
        ids = np.where(self.state.humans_visible == 0)[0]
        # ids = ids[ids != self.state.humans_id.tolist().index(self.human_target_id)]
        n_human = len(ids)
        # (x, y, direction, velocity, is_sitting)
        humans = np.zeros((n_human, 5))
        for j in range(n_human):
            i = ids[j]
            global_position = self.state.humans_pose[i, :2]
            local_position = global_to_local(self.state.robot_pose,
                                                   global_position)
            # Heading relative to the robot's yaw.
            local_angle = self.state.humans_pose[i, -
                                                1] - self.state.robot_pose[-1]
            # is_sitting is hard-coded to 0 (standing).
            humans[j, :] = * \
                local_position, local_angle, self.state.humans_velocity[i, 0], 0
        if n_human > 1:
            groups = []
            # groups_id is assumed to list valid ids first, padded with -1 —
            # iteration stops at the first -1. TODO confirm ordering.
            for idx, group_id in enumerate(self.state.groups_id):
                if group_id != -1:
                    center = global_to_local(self.state.robot_pose, self.state.groups_pose[idx])
                    groups.append(Group(center=center, person_ids=np.where(self.state.humans_group_id == group_id)[0]))
                else:
                    break

        f_dsz = self.ssn.calc_dsz(humans, groups=groups)

        # Reuse the incoming grid as the output message (mutates the caller's
        # message) and publish the cost scaled to the 0-100 occupancy range.
        occupancy_grid = data
        occupancy_grid.info.map_load_time = rospy.Time.now()
        occupancy_grid.data = (f_dsz * 100).flatten().astype(np.int8)
        self._local_social_cost_map_pub.publish(occupancy_grid)
        return f_dsz
+
+
+    def _pub_vel_callback(self, event):
+        self.hri_data = self.ros_4_hri_interface.get_data()
+        if  self.head_goal or self.i == 0:
+            if self.look_at_body_target or self.look_at_group_target or self.look_at_person_target or self.look_at_position_target is not None:
+                self.pan_angle = constraint_angle(np.arctan2(self.y_pan_target - self.y_robot_pose, self.x_pan_target - self.x_robot_pose) - self.yaw_robot_pose) # to check
+            self.publish_head_cmd(self.pan_angle, self.tilt_angle)
+        if self.action_idx is not None:
+            actions = self.actions[self.action_idx]
+            self.lin_vel = 0.
+            self.ang_vel = 0.
+            if np.any(actions) or self.i == 0:
+                self.publish_vel(actions)
+            self.action_idx += 1
+            if self.action_idx > (len(self.controller.actions) - 1):
+                self.action_idx = 0
+                self.actions = np.copy(self.controller.initial_action)
+
+
+    def _diagnostics_callback(self, event):
+        self.publish_diagnostics()
+
+
+    def publish_diagnostics(self):
+        diagnostic_msg = DiagnosticArray()
+        diagnostic_msg.status = []
+        diagnostic_msg.header.stamp = rospy.Time.now()
+        status = DiagnosticStatus()
+        status.name = 'Functionality: Robot Behavior: MPC Controller'
+        status.values.append(KeyValue(key='Linear velocity required', value='{} m/s'.format(self.lin_vel)))
+        status.values.append(KeyValue(key='Angular velocity required', value='{} rad/s'.format(self.ang_vel)))
+        status.values.append(KeyValue(key='Pan angle required', value='{} rad'.format(self.pan_angle)))
+        status.values.append(KeyValue(key='Tilt angle required', value='{} rad'.format(self.tilt_angle)))
+        status.values.append(KeyValue(key='Top view pan target [x, y]', value='[{} {}]'.format(self.x_pan_target, self.y_pan_target)))
+        status.values.append(KeyValue(key='Top view human target [x, y, theta]', value='[{} {} {}]'.format(self.x_human_features, self.y_human_features, self.th_human_features)))
+        status.values.append(KeyValue(key='Top view intermediate waypoint [x, y, theta]', value='[{} {} {}]'.format(self.x_wp, self.y_wp, self.th_wp)))
+        status.values.append(KeyValue(key='Top view final goal [x, y, theta]', value='[{} {} {}]'.format(self.x_final, self.y_final, self.th_final)))
+        if self.controller_status_msg:
+            status.values.append(KeyValue(key='Goto target id', value='{}'.format(self.controller_status_msg.goto_target_id)))
+            status.values.append(KeyValue(key='Human target id', value='{}'.format(self.controller_status_msg.human_target_id)))
+            status.values.append(KeyValue(key='Pan target id', value='{}'.format(self.controller_status_msg.pan_target_id)))
+            status.values.append(KeyValue(key='Continuous look_at      Done', value='[{} {}]'.format(self.controller_status_msg.is_continuous_look_at, self.controller_status_msg.continuous_look_at_done)))
+            status.values.append(KeyValue(key='Continuous go_to     Done', value='[{} {}]'.format(self.controller_status_msg.is_continuous_go_to, self.controller_status_msg.continuous_go_to_done)))
+            status.values.append(KeyValue(key='Distance to Goto target [x (m), y (m), theta (rad), distance (m)]', value='{}'.format(self.controller_status_msg.dist_to_goto_target)))
+            status.values.append(KeyValue(key='Distance to Human target [x (m), y (m), theta (rad), distance (m)]', value='{}'.format(self.controller_status_msg.dist_to_human_target)))
+            status.values.append(KeyValue(key='Head joint errors [pan angle, tilt angle] in rad', value='{}'.format(self.controller_status_msg.head_joint_errors)))
+        status.values.append(KeyValue(key='Number of MPC loops executed', value='{}'.format(self.i)))
+        if self.hri_data:
+            status.values.append(KeyValue(key='Number of tracked bodies', value='{}'.format(len(self.hri_data[0]))))
+            status.values.append(KeyValue(key='Number of tracked groups', value='{}'.format(len(self.hri_data[1]))))
+            status.values.append(KeyValue(key='Number of person body mapped', value='{}'.format(len(self.hri_data[2]))))
+            status.values.append(KeyValue(key='Number of body person mapped', value='{}'.format(len(self.hri_data[3]))))
+
+        # by default
+        status.level = DiagnosticStatus.OK
+        status.message = ''
+        if self.i == 0 or self.controller_status_msg == None:
+            status.level = DiagnosticStatus.WARN
+            status.message = 'Initialization of the MPC Controller not finished'
+        if self.hri_data:
+            if len(self.hri_data[2]) != len(self.hri_data[3]):
+                status.level = DiagnosticStatus.OK
+                status.message = 'The number of person body mapped ({}) is different from the number of body person mapped ({})'.format(len(self.hri_data[2]), len(self.hri_data[3]))
+            if not self.hri_data[0]:
+                status.level = DiagnosticStatus.OK
+                status.message = 'No tracked bodies as input'
+            if len(self.hri_data[0]) != len(self.hri_data[2]) or len(self.hri_data[0]) != len(self.hri_data[3]):
+                status.level = DiagnosticStatus.OK
+                status.message += ' The number of tracked bodies ({}) is different from the number of body person mapped ({}) or from the number of person body mapped ({})'.format(len(self.hri_data[0]), len(self.hri_data[2]), len(self.hri_data[3]))
+        if self.controller_status_msg:
+            if self.controller_status_msg.status == ControllerStatus.IS_FAILURE:
+                if self.wrong_target:
+                    status.level = DiagnosticStatus.WARN
+                    status.message += ' Target neither in the field of view of the robot, nor in memory'
+                if self.controller.is_failure:
+                    status.level += DiagnosticStatus.ERROR
+                    status.message = ' MPC Controller is in a failure mode'
+            if self.controller_status_msg.goto_target_id != -1 and self.lin_vel == 0. and self.ang_vel == 0.:
+                status.level = DiagnosticStatus.ERROR
+                status.message += ' Go_to target mode error, no velocities (lin_vel, ang_vel) computed'
+            if self.controller_status_msg.pan_target_id != -1 and self.pan_angle == 0. and self.tilt_angle == 0.:
+                status.level += DiagnosticStatus.ERROR
+                status.message = ' Look_at target mode error, no angles (pan_angle, tilt_angle) computed'
+                if self.x_pan_target == 0. and self.y_pan_target == 0.:
+                    status.message += ', pan target not set because pan_target_id was not found'
+                if self.x_wp == 0. and self.y_wp == 0. and self.th_wp == 0.:
+                    status.message += ', shared_waypoint of the MPC controller not ready or MPC controller in a wrong mode'
+                if self.x_final == 0. and self.y_final == 0. and self.th_final == 0.:
+                    status.message += ', shared_target of the MPC controller not ready or can not be calculated'
+            # TODO: add ERROR/WARN status for escort mode (x_human_features, y_human_features, th_human_features) 
+            
+        diagnostic_msg.status.append(status)
+        self._diagnostics_pub.publish(diagnostic_msg)
+
+
+    def shutdown(self):
+        r''' rospy shutdown hook: logs, releases all ROS resources via close(), logs again. '''
+        rospy.loginfo("Stopping the BehaviorGenerator")
+        # unregister subscribers/publishers and stop timers before the node dies
+        self.close()
+        rospy.loginfo("Killing the BehaviorGenerator node")
+
+
+    def close(self):
+        r''' Release every ROS resource owned by this node.
+
+        Unregisters all subscribers and publishers (an entry of
+        self._publishers may be either a publisher or a dict of
+        publishers), shuts down all timers, then closes the HRI
+        interface and the controller.
+        '''
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                # some entries are dicts mapping a key to a publisher
+                if isinstance(publisher, dict):
+                    for pub in publisher.values():
+                        pub.unregister()
+                else:
+                    publisher.unregister()
+        if self._timers:
+            for timer in self._timers:
+                timer.shutdown()
+        self.ros_4_hri_interface.close()
+        self.controller.close()
+
+
+class RobotState:
+    r''' Empty placeholder for robot state; no attributes are defined here yet. '''
+    pass
diff --git a/src/robot_behavior/src/robot_behavior/behavior_global_path_planner.py b/src/robot_behavior/src/robot_behavior/behavior_global_path_planner.py
new file mode 100644
index 0000000000000000000000000000000000000000..2fb10565498899d8a270bb86aab6f4e15512b102
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_global_path_planner.py
@@ -0,0 +1,190 @@
+# This file, part of Social MPC in the WP6 of the Spring project,
+# is part of a project that has received funding from the 
+# European Union’s Horizon 2020 research and innovation programme
+#  under grant agreement No 871245.
+# 
+# Copyright (C) 2020-2023 by Inria
+# Authors : Victor Sanchez
+# victor.sanchez@inria.fr
+#
+# Code inspired by: https://gitlab.inria.fr/spring/wp6_robot_behavior/Motor_Controller/-/blob/master/social_mpc/path.py?ref_type=heads
+#   From : Alex Auternaud, Timothée Wintz
+#   alex.auternaud@inria.fr
+#   timothee.wintz@inria.fr
+
+import cv2
+import skfmm
+import numpy.ma as ma
+import numpy as np
+from robot_behavior.utils import constraint_angle, optimal_path_2d
+import rospy
+import exputils as eu
+
+
+class GlobalPathPlanner():
+
+    def __init__(self, waypoint_resolution_in_meters=0.5, threshold_distance_arrival=1.0, threshold_distance_orientation=0.5, max_target_angle=np.pi/4):
+
+        self.state_config = None
+        self.threshold_distance_arrival = threshold_distance_arrival
+        self.max_target_angle = max_target_angle
+        self.threshold_distance_orientation = threshold_distance_orientation
+        self.waypoint_resolution_in_meters = waypoint_resolution_in_meters
+        self.distance = None
+        self.ssn = None
+        self.global_map_with_waypoints = None
+        self.map_with_subgoals=None
+
+
+    def set_state_config(self,
+                        global_map_size=[[-11.26118716597557, 12.588813189417124], [-10.207198709249496, 14.492801658809185]],
+                        global_map_height=494,
+                        global_map_width=477,
+                        global_map_scale=1/0.05
+        ):
+        self.state_config=eu.AttrDict(global_map_size=global_map_size,
+                                    global_map_height=global_map_height,
+                                    global_map_width=global_map_width,
+                                    global_map_scale=global_map_scale
+        )
+        self.y_coordinates = np.linspace(global_map_size[1][0], global_map_size[1][1], self.state_config.global_map_height)
+        self.x_coordinates = np.linspace(global_map_size[0][0], global_map_size[0][1], self.state_config.global_map_width)
+        self.map_with_subgoals = np.zeros((global_map_width, global_map_height))
+
+
+    def set_global_map_for_waypoints(self, map):
+        self.global_map_with_waypoints = map
+
+
+    def position_to_px_coord(self, pos):
+        r''' Tranform real position to coordinate in pixel in the global map coordinate system '''
+        if self.state_config is None:
+            return None
+        scale = self.state_config.global_map_scale
+        x_min = self.state_config.global_map_size[0][0]
+        y_max = self.state_config.global_map_size[1][1]
+        
+        i = (y_max - pos[1]) * scale
+        j = (pos[0] - x_min) * scale
+
+        return int(i), int(j)
+
+
+    def px_coord_grid(self):
+        r''' Tranform coordinate in pixel in the global map to real position coordinate '''
+        if self.state_config is None:
+            return None
+        scale = self.state_config.global_map_scale
+        x_min = self.state_config.global_map_size[0][0]
+        y_min = self.state_config.global_map_size[1][0]
+
+        x = [x_min + (j + 0.5) /
+             scale for j in range(self.state_config.global_map_width)]
+        y = [y_min + (i + 0.5) /
+             scale for i in range(self.state_config.global_map_height)]
+        return x, y
+
+
+    def get_distance_map(self, global_map, big_number=100):
+        r''' Set the distance map from sffmm 
+        [see https://pythonhosted.org/scikit-fmm/ for more info about distance map]
+        '''
+        if self.state_config is None:
+            return None
+        t_i, t_j = self.position_to_px_coord(self.target)
+        phi = ma.MaskedArray(np.ones(global_map.shape), mask=(global_map > 0))
+        phi.data[t_i, t_j] = 0.0
+        dx = 1/self.state_config.global_map_scale
+        self.distance = skfmm.distance(phi, dx=dx)
+
+
+    def get_waypoints(self, pos):
+        r''' Return the next waypoint position from a given position 
+            pos : [x,y,yaw] '''
+        #x, y = self.px_coord_grid()
+        if self.state_config is None:
+            return None, None, None, None, None
+        # x, y = self.ssn.x_coordinates, self.ssn.y_coordinates
+        x, y = self.x_coordinates, self.y_coordinates
+        
+        optimal_path_2d_output = optimal_path_2d(
+            travel_time = np.flip(self.distance, axis=0), 
+            starting_point = pos[:2],
+            dx=self.waypoint_resolution_in_meters,
+            coords=(x, y),
+            max_d_orientation=0.3)
+        
+        if optimal_path_2d_output :
+            xl, yl, vxl, vyl, dl = optimal_path_2d_output
+        else :
+            return None
+        if len(xl) == 0 or len(yl) == 0 or len(vxl) == 0 or len(vyl) == 0:
+            return None
+        else :
+            return xl, yl, vxl, vyl, dl
+
+
+    def run(self, global_map, target_pose, robot_pose):
+        
+        
+        if robot_pose is None or (target_pose is None or global_map is None):  # not ready
+            if robot_pose is None :
+                rospy.logwarn('Warning robot_pose is None')
+            if target_pose is None :
+                rospy.logwarn('Warning target_pose is None')
+            if global_map is None :
+                rospy.logwarn('Warning global_map is None')
+            self.distance = None
+            return None
+        
+
+        self.target = [target_pose[0], target_pose[1], target_pose[2]]
+        robot_pose = [robot_pose[0], robot_pose[1], robot_pose[2]]
+        
+        # if np.linalg.norm(np.array(robot_pose[0:2]) - np.array(self.target[0:2]))<self.waypoint_resolution_in_meters:
+        #     return self.target[0:2]
+        
+        self.get_distance_map(global_map)
+
+        get_waypoint_output = self.get_waypoints(robot_pose)
+
+        if get_waypoint_output :
+            xl, yl, vxl, vyl, dl = get_waypoint_output
+            
+            # Victor's method
+            # wpt_pos_closest = [xl[0], yl[0]]
+            
+            # Alex's method
+            target_dist = np.linalg.norm(np.array(robot_pose[:2]) - np.array(self.target[:2]))
+            waypoint_index = min(int(self.threshold_distance_arrival / self.waypoint_resolution_in_meters), len(dl)-1)  # get the index of the next waypoint at threshold_distance_arrival with self.waypoint_resolution_in_meters step
+            if target_dist < self.threshold_distance_orientation:  # if we are near the target, we only turn on ourself
+                wpt_pos_closest = [robot_pose[0], robot_pose[1], self.target[2]]
+            else :
+                angle = np.abs(constraint_angle(np.arctan2(yl[waypoint_index] - robot_pose[1], xl[waypoint_index] - robot_pose[0]) - robot_pose[2])) 
+                #  if the angle to the next point is too big, also first turn on ourself to its direction
+                if angle > self.max_target_angle:
+                        wpt_pos_closest = [robot_pose[0], robot_pose[1], np.arctan2(yl[waypoint_index] - robot_pose[1], xl[waypoint_index] - robot_pose[0])]
+                else : 
+                    if target_dist < self.threshold_distance_arrival: # we are near the target
+                        wpt_pos_closest = [self.target[0],
+                                           self.target[1],
+                                           np.arctan2(yl[-1] - robot_pose[1], xl[-1] - robot_pose[0])]
+                    else:  # nominal case else, point on shortest path with orientation towards target
+                        wpt_pos_closest = [xl[waypoint_index], yl[waypoint_index], np.arctan2(yl[waypoint_index] - robot_pose[1], xl[waypoint_index] - robot_pose[0])]
+
+            self.get_map_with_subgoals(waypoints=(xl, yl))
+            return wpt_pos_closest
+        else :
+            return None
+
+
+    def get_map_with_subgoals(self, waypoints):
+        #self.global_map_with_waypoints = np.where(self.global_map_with_waypoints>0, 0 ,1)
+        self.global_map_with_waypoints = np.zeros((self.state_config.global_map_height, self.state_config.global_map_width))
+        for id in range(len(waypoints[0])):
+            if str(waypoints[0][id]) != 'nan':
+                x_px, y_px = self.position_to_px_coord(pos=(waypoints[0][id],waypoints[1][id]))
+                x_px = np.clip(x_px, 0, np.shape(self.global_map_with_waypoints)[0]-1)
+                y_px = np.clip(y_px, 0, np.shape(self.global_map_with_waypoints)[1]-1)
+                self.global_map_with_waypoints[x_px][y_px] = 1
+        self.global_map_with_waypoints = cv2.dilate(self.global_map_with_waypoints, kernel = np.ones((2,2)))
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/behavior_global_path_planner_node.py b/src/robot_behavior/src/robot_behavior/behavior_global_path_planner_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..aaa4dc33a19195c71153bf2c3aaed25099c4c9b4
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_global_path_planner_node.py
@@ -0,0 +1,311 @@
+#!/usr/bin/env python3
+import numpy as np
+import pkg_resources
+import tf
+import os
+import yaml
+import cv2
+import sys
+from multiprocessing import Lock
+import numpy as np
+import rospy
+from nav_msgs.msg import OccupancyGrid, MapMetaData
+from robot_behavior.behavior_global_path_planner import GlobalPathPlanner
+from robot_behavior.utils import constraint_angle
+from social_mpc.config.config import ControllerConfig
+import time
+
+
+class GlobalPathPlannerNode:
+    def __init__(self):
+        # os.environ['XLA_PYTHON_CLIENT_PREALLOCATE'] = 'false'
+        mpc_config_name = rospy.get_param('~mpc_config_name', 'None')
+        mpc_config_path = None if mpc_config_name == 'None' else mpc_config_name
+        self.mpc_config_path = mpc_config_path
+        self.namespace_slam = rospy.get_param('~namespace_slam', '/slam')
+        self.map_frame = rospy.get_param('~map_frame', 'map')
+        self.robot_frame = rospy.get_param('~robot_frame', 'base_footprint')
+        self.global_occupancy_map_topic = rospy.get_param('~global_occupancy_map_topic', '/slam/global_map_rl_navigation')
+        
+        self.read_config(filename=self.mpc_config_path)
+        self.global_path_planner = GlobalPathPlanner(
+            waypoint_resolution_in_meters=self.controller_config.orientation_distance_threshold,
+            threshold_distance_orientation=self.controller_config.orientation_distance_threshold,
+            threshold_distance_arrival=self.controller_config.waypoint_distance)
+
+        self.global_map_data = None
+        self.rtabmap_ready = False
+        self.global_map_static = None
+
+        self.goal_position = [5, 1]
+
+        self.x_wp = 0.
+        self.y_wp = 0.
+        self.x_final = 0.
+        self.y_final = 0.
+        self.yaw_final = 0.
+        self.yaw_wp = 0.
+        self.flag_waypoint_okay = False
+
+
+        self.robot_x_position = 0.
+        self.robot_y_position = 0.
+        self.robot_yaw = 0.
+        self.robot_position = [0, 0]
+        self.robot_position_former = [0, 0]
+        self.lock_data_entrance = Lock()        
+    
+        self.init_ros_subscriber_and_publicher()
+
+        self.tf_broadcaster = tf.TransformBroadcaster() # Publish Transform to look for human
+        self.tf_listener = tf.TransformListener() # Listen Transform to look for human
+
+        self._check_all_sensors_ready()
+
+        rospy.loginfo("GlobalPathPlannerNode Initialization Ended")
+
+
+    def read_config(self, filename=None):
+        if filename is None:
+            filename = pkg_resources.resource_filename(
+                'social_mpc', 'config/social_mpc.yaml')
+        elif os.path.isfile(filename):
+            self.passed_config_loaded = True
+        else:
+            filename = pkg_resources.resource_filename(
+                'social_mpc', 'config/social_mpc.yaml')
+        config = yaml.load(open(filename), Loader=yaml.FullLoader)
+        self.controller_config = ControllerConfig(config)
+
+        self.goal_finder_enabled = self.controller_config.goal_finder_enabled
+        self.path_planner_enabled = self.controller_config.path_planner_enabled
+        self.update_goals_enabled = self.controller_config.update_goals_enabled
+
+
+    def init_ros_subscriber_and_publicher(self):
+        r''' Initialize the subscribers and publishers '''
+        self._subscribers = []
+        self._publishers = []
+        self._action_server = []
+        self._timers = []
+
+        ### ROS Subscribers
+        self._subscribers.append(rospy.Subscriber(self.global_occupancy_map_topic, OccupancyGrid, callback=self._global_map_callback, queue_size=1))
+        self.time_per_ros_step = 2 # in sec
+
+        ### ROS Publishers
+        # self._cmd_vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
+        # self._publishers.append(self._cmd_vel_pub)
+
+        ### Publishing map in Rviz
+        self._global_waypoint_map_pub = rospy.Publisher(self.namespace_slam + '/global_waypoint_map', OccupancyGrid, queue_size=10, latch=True)
+        self._publishers.append(self._global_waypoint_map_pub)
+
+        rospy.loginfo("Initializing the BehaviorGenerator")
+
+        # self._check_publishers_connection()
+ 
+
+    def run(self):
+        r''' Runs the ros wrapper '''
+        
+        # At time step 0, final goal = robot position
+        self.get_robot_pose()
+        self.x_final = self.robot_x_position
+        self.y_final = self.robot_y_position
+        self.yaw_final = self.robot_yaw
+        
+        init_t = rospy.Time.now()
+        while not rospy.is_shutdown():
+            self.step(init_t)
+            
+
+    def step(self, init_time):
+        r''' Step the ros wrapper'''
+
+        # Localizing the robot in the environment
+        self.get_robot_pose()
+        # Get the final goal tf
+        self.get_final_goal_pose()
+        if self.global_map_static is not None :
+            # if np.linalg.norm(np.array(self.robot_position_former) - np.array(self.robot_position)) > 0.05 :
+            # Case where the waypoints were not computed yet or when the goal position was changed 
+            waypoint_target = self.global_path_planner.run(
+                                                            global_map = cv2.dilate(self.global_map_static,kernel=cv2.getStructuringElement(cv2.MORPH_RECT,(8,8)),iterations = 1),
+                                                            target_pose = [self.x_final, self.y_final, self.yaw_final],
+                                                            robot_pose=[self.robot_x_position, self.robot_y_position, self.robot_yaw]
+                            )
+            if waypoint_target:
+                self.flag_waypoint_okay = True
+                self.x_wp, self.y_wp, self.yaw_wp = waypoint_target
+                rospy.logdebug('Robot position : x {:.2f} \t y {:.2f} \t yaw {:.2f}'.format(self.robot_x_position, self.robot_y_position, self.robot_yaw))
+                rospy.logdebug('Goal position : x {:.2f} \t y {:.2f} \t yaw {:.2f}'.format(self.x_final, self.y_final, self.yaw_final))
+            else:
+                self.flag_waypoint_okay = False
+                rospy.logwarn("The waypoint was not computed properly")
+
+            if self.global_path_planner.global_map_with_waypoints is not None :
+                self.publish_global_waypoint_map(self.global_path_planner.global_map_with_waypoints)
+            else :
+                rospy.logwarn('waypoint map not ready !') 
+
+            self.robot_position_former  = self.robot_position
+            
+            self.publish_tf_go_to_goals()
+            rospy.sleep(self.controller_config.path_loop_time)
+
+
+    def publish_tf_go_to_goals(self):
+        
+        current_time = rospy.Time.now()
+        # to debug 
+        # quat_final = tf.transformations.quaternion_from_euler(0, 0, self.yaw_final)
+        # self.x_final, self.y_final = self.goal_position
+        # self.tf_broadcaster.sendTransform(
+        #     (self.x_final, self.y_final, 0),
+        #     quat_final,
+        #     current_time,
+        #     "final goal",
+        #     self.map_frame
+        # )
+        if self.flag_waypoint_okay:
+            quat_wp = tf.transformations.quaternion_from_euler(0, 0, self.yaw_wp)
+            self.tf_broadcaster.sendTransform(
+                (self.x_wp, self.y_wp, 0),
+                quat_wp,
+                current_time,
+                "intermediate waypoint goal",
+                self.map_frame
+            )
+
+    
+    def get_final_goal_pose(self):
+        try:
+            map_2_final_goal = self.tf_listener.lookupTransform(self.map_frame, 'final goal', rospy.Time(0))
+            #rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
+        except:
+            #rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
+            return None
+        self.x_final, self.y_final, _ = map_2_final_goal[0]
+        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_final_goal[1][0],
+                                                                map_2_final_goal[1][1],
+                                                                map_2_final_goal[1][2],
+                                                                map_2_final_goal[1][3]])
+        self.yaw_final = float(constraint_angle(yaw))       
+
+
+    def get_robot_pose(self):
+        r""" Function that compute the position of the robot on the map """
+        # try:
+        #     map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, self.timestamp_tracking_latest)
+        # except:
+        try:
+            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
+            #rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
+        except:
+            #rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
+            return None
+        self.robot_x_position, self.robot_y_position, _ = map_2_robot[0]
+        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_robot[1][0],
+                                                                map_2_robot[1][1],
+                                                                map_2_robot[1][2],
+                                                                map_2_robot[1][3]])
+        self.robot_yaw = float(constraint_angle(yaw))
+        self.robot_position = np.array([self.robot_x_position, self.robot_y_position])
+
+    def publish_global_waypoint_map(self, map):
+        self._global_waypoint_map_pub.publish(self.set_global_map_object(map))
+    
+    def set_global_map_object(self, map):
+        occupancy_grid = OccupancyGrid()
+        occupancy_grid.info = MapMetaData()
+        occupancy_grid.info.map_load_time = rospy.Time.now()
+        occupancy_grid.info.resolution =  self.global_map_resolution
+        occupancy_grid.info.width = self.global_map_width
+        occupancy_grid.info.height = self.global_map_height
+        occupancy_grid.info.origin = self.origin_global_map
+        
+        occupancy_grid.data = (np.flip(map, axis=0)*100).flatten().astype(np.int8)
+
+        occupancy_grid.header.stamp = self.timestamp_global_map_cb
+        occupancy_grid.header.frame_id = self.map_frame
+
+        return occupancy_grid
+    
+    def _global_map_callback(self, data):
+        r''' Function that computes the global map from the data and store it into the memory under self.global_map_static '''
+        if data:
+            with self.lock_data_entrance:
+                self.global_map_data = data
+                self.timestamp_global_map_cb = self.global_map_data.header.stamp
+                self.x_global_map = self.global_map_data.info.origin.position.x
+                self.y_global_map = self.global_map_data.info.origin.position.y
+                self.origin_global_map = self.global_map_data.info.origin
+                self.global_map_width = self.global_map_data.info.width
+                self.global_map_height = self.global_map_data.info.height
+                self.global_map_resolution = self.global_map_data.info.resolution
+                self.global_map_size = [[self.x_global_map, self.x_global_map + self.global_map_width*self.global_map_resolution],[self.y_global_map, self.y_global_map + self.global_map_height*self.global_map_resolution]]
+                self.last_shape_global_map = (self.global_map_height, self.global_map_width)
+                self.global_map_static = np.flip((np.asarray(self.global_map_data.data) / 100).reshape(self.last_shape_global_map), axis=0)
+                if self.global_path_planner.state_config is None :
+                    self.global_path_planner.set_state_config(global_map_size=self.global_map_size,
+                                            global_map_height=self.global_map_height,
+                                            global_map_width=self.global_map_width,
+                                            global_map_scale=1/0.05
+                    )
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the GlobalPathPlannerNode")
+        self.close()
+        rospy.loginfo("Killing the GlobalPathPlannerNode node")
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+
+        if self._publishers:
+            for publisher in self._publishers:
+                if isinstance(publisher, dict):
+                    for pub in publisher.values():
+                        pub.unregister()
+                else:
+                    publisher.unregister()
+        if self._timers:
+            for timer in self._timers:
+                timer.shutdown()
+
+        if self._action_server :
+            self._action_server.shutdown()
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_rtabmap_ready()
+        self._check_global_map_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+    def _check_global_map_ready(self):
+        self.global_map_data = None
+        rospy.logdebug("Waiting for {} to be READY...".format(self.global_occupancy_map_topic))
+        while self.global_map_data is None and not rospy.is_shutdown():
+            try:
+                self.global_map_data = rospy.wait_for_message(self.global_occupancy_map_topic, OccupancyGrid, timeout=5.0)
+                rospy.logdebug("Current {} READY=>".format(self.global_occupancy_map_topic))
+
+            except:
+                rospy.logerr("Current {} not ready yet, retrying for getting global map".format(self.global_occupancy_map_topic))
+        return self.global_map_data
+    
+    def _check_rtabmap_ready(self):
+        rospy.logdebug("Waiting for rtabmap pose to be READY...")
+        while self.rtabmap_ready is None and not rospy.is_shutdown():
+            try:
+                self.tf_listener.waitForTransform(self.map_frame, self.robot_frame, rospy.Time(0), rospy.Duration(5.0))
+                self.rtabmap_ready = True
+                rospy.logdebug("Current rtabmap pose READY=>")
+
+            except:
+                rospy.logerr("Current rtabmap pose not ready yet, retrying for getting rtabmap pose")
+        return self.rtabmap_ready
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/lookat_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_client.py
similarity index 71%
rename from src/robot_behavior/src/robot_behavior/lookat_action_client.py
rename to src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_client.py
index c2e92b0a7ca5fc9c843cd43b836b83b566e7ae92..d7d904717ee4d114f6fbf939427338e047cad2c4 100755
--- a/src/robot_behavior/src/robot_behavior/lookat_action_client.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_client.py
@@ -1,26 +1,23 @@
 #!/usr/bin/env python3
 import rospy
 import actionlib
+from spring_msgs.msg import GoToEntityAction, GoToEntityGoal
 
-from spring_msgs.msg import LookAtAction, LookAtGoal
 
-
-class LookAtActionClient:
-    def __init__(self, name,
-                       pan_target_id,
-                       timeout):
+class GoToBodyActionClient:
+    def __init__(self, name):
         self._action_name = name
-        self._pan_target_id = pan_target_id
-        self._timeout = timeout
+        self.body_id = rospy.get_param('~body_id', 'b-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
 
         rospy.on_shutdown(self.shutdown)
 
         # Creates the SimpleActionClient
-        self._a_client = actionlib.SimpleActionClient(self._action_name, LookAtAction)
+        self._a_client = actionlib.SimpleActionClient(self._action_name, GoToEntityAction)
 
         # Waits until the action server has started up and started listening for goals
         self._a_client.wait_for_server()
-        
         self._a_client.cancel_all_goals()
         rospy.sleep(0.1)
 
@@ -28,9 +25,10 @@ class LookAtActionClient:
 
 
     def call_server(self):
-        goal = LookAtGoal()
-        goal.pan_target_id = self._pan_target_id
-        goal.timeout = self._timeout
+        goal = GoToEntityGoal()
+        goal.id = self.body_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
 
         # Sends the goal to the action server
         self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
diff --git a/src/robot_behavior/src/robot_behavior/gotowards_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_server.py
similarity index 68%
rename from src/robot_behavior/src/robot_behavior/gotowards_action_server.py
rename to src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_server.py
index ac6a76fe722abb3c43027ff0e609354d9ec8d265..6ec2665a2aff0d71fe224d3678dd889ccb487816 100755
--- a/src/robot_behavior/src/robot_behavior/gotowards_action_server.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_body_action_server.py
@@ -2,19 +2,16 @@
 import rospy
 import numpy as np
 import actionlib
-from spring_msgs.msg import GoTowardsAction, GoTowardsFeedback, GoTowardsResult, GoTowards
-import tf
-
-from robot_behavior.utils import constraint_angle
+from spring_msgs.msg import GoToEntityAction, GoToEntityFeedback, GoToEntityResult, GoToEntity
 from spring_msgs.msg import ControllerStatus
 
 
-class GoTowardsActionServer:
-    def __init__(self, name, hz=20, success_dist_thresh=0.3, success_ang_thresh=0.2):
+class GoToBodyActionServer:
+    def __init__(self, name):
         self._action_name = name
-        self._hz = hz
-        self._success_dist_thresh = success_dist_thresh
-        self._success_ang_thresh = success_ang_thresh
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_dist_thresh = rospy.get_param('~success_dist_thresh', 0.3)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
 
         self.controller_status_data = None
 
@@ -23,19 +20,19 @@ class GoTowardsActionServer:
         self.dist_to_goto_target = np.inf
         self.ang_to_goto_target = np.pi
 
-        # create messages that are used to publish feedback/result
-        self._feedback = GoTowardsFeedback()
-        self._result = GoTowardsResult()
-
         self._check_all_sensors_ready()
 
-        rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1)
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
 
-        self._go_towards_pub  = rospy.Publisher('/go_towards', GoTowards, queue_size=10)
+        self._go_to_body_pub  = rospy.Publisher('/action/go_to_body', GoToEntity, queue_size=10)
+        self._publishers.append(self._go_to_body_pub)
 
         rospy.on_shutdown(self.shutdown)
 
-        self._a_server = actionlib.SimpleActionServer(self._action_name, GoTowardsAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server = actionlib.SimpleActionServer(self._action_name, GoToEntityAction, execute_cb=self.execute_cb, auto_start=False)
         self._a_server.start()
 
         rospy.loginfo("{} Initialization Ended".format(self._action_name))
@@ -43,17 +40,17 @@ class GoTowardsActionServer:
 
     def execute_cb(self, goal):
         # helper variables
-        rate = rospy.Rate(self._hz)
+        rate = rospy.Rate(self.hz)
         self.dist_to_goto_target = np.inf
         self.ang_to_goto_target = np.pi
         self.init_time = rospy.Time.now()
         self.current_time = (rospy.Time.now() - self.init_time).to_sec()
 
-        self.publish_gotowards(goal)
-
+        self.publish_go_to_body(goal)
+        rospy.sleep(0.5)
 
         # publish info to the console for the user
-        rospy.logdebug('%s: Begins the navigation planning to human %i' % (self._action_name, goal.goto_target_id))
+        rospy.logdebug('{}: Begins the navigation planning to human body {}'.format(self._action_name, goal.id))
         rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
         rospy.logdebug('Goal message : {}'.format(goal))
 
@@ -64,12 +61,15 @@ class GoTowardsActionServer:
                 break
 
             self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+            # create messages that are used to publish feedback/result
+            self._feedback = GoToEntityFeedback()
+            self._result = GoToEntityResult()
 
             # publish the feedback
             self._publish_feedback(goal)
             rate.sleep()
 
-            if self.dist_to_goto_target < self._success_dist_thresh and self.ang_to_goto_target < self._success_ang_thresh:
+            if self.dist_to_goto_target < self.success_dist_thresh and self.ang_to_goto_target < self.success_ang_thresh:
                 rospy.loginfo('%s: Succeeded' % self._action_name)
                 self._a_server.set_succeeded(self._result)
                 break
@@ -89,14 +89,16 @@ class GoTowardsActionServer:
         self._result_msg(goal)
 
         # go to idle state
-        goal.goto_target_id = -1
-        self.publish_gotowards(goal)
+        goal.id = 'b-1'
+        goal.continuous = False
+        self.publish_go_to_body(goal)
 
 
-    def publish_gotowards(self, goal):
-        gotowards_msg = GoTowards()
-        gotowards_msg.human_id = goal.goto_target_id
-        self._go_towards_pub.publish(gotowards_msg)
+    def publish_go_to_body(self, goal):
+        go_to_body_msg = GoToEntity()
+        go_to_body_msg.id = goal.id
+        go_to_body_msg.continuous = goal.continuous
+        self._go_to_body_pub.publish(go_to_body_msg)
 
 
     def _result_msg(self, goal):
@@ -145,5 +147,14 @@ class GoTowardsActionServer:
 
     def shutdown(self):
         rospy.loginfo("Stopping the %s " % self._action_name)
-        # send empty goal message
+        self.close()
         rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/gotowards_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_client.py
similarity index 69%
rename from src/robot_behavior/src/robot_behavior/gotowards_action_client.py
rename to src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_client.py
index 61ec56d621ea203def94f20f47ba67b948fc2e41..170d3730c68da70ed3f76480a6562405fb69d64c 100755
--- a/src/robot_behavior/src/robot_behavior/gotowards_action_client.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_client.py
@@ -1,35 +1,34 @@
 #!/usr/bin/env python3
 import rospy
 import actionlib
+from spring_msgs.msg import GoToEntityAction, GoToEntityGoal
 
-from spring_msgs.msg import GoTowardsAction, GoTowardsGoal
 
-
-class GoTowardsActionClient:
-    def __init__(self, name,
-                       goto_target_id,
-                       timeout):
+class GoToGroupActionClient:
+    def __init__(self, name):
         self._action_name = name
-        self._goto_target_id = goto_target_id
-        self._timeout = timeout
+        self.group_id = rospy.get_param('~group_id', 'grp-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
 
         rospy.on_shutdown(self.shutdown)
 
         # Creates the SimpleActionClient
-        self._a_client = actionlib.SimpleActionClient(self._action_name, GoTowardsAction)
+        self._a_client = actionlib.SimpleActionClient(self._action_name, GoToEntityAction)
 
         # Waits until the action server has started up and started listening for goals
         self._a_client.wait_for_server()
-
         self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
 
         rospy.loginfo("{} Initialization Ended".format(self._action_name))
 
 
     def call_server(self):
-        goal = GoTowardsGoal()
-        goal.goto_target_id = self._goto_target_id
-        goal.timeout = self._timeout
+        goal = GoToEntityGoal()
+        goal.id = self.group_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
 
         # Sends the goal to the action server
         self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..50860793bcd27c02141fa242fca2f1db7f018dc9
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_group_action_server.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python3
+import rospy
+import numpy as np
+import actionlib
+from spring_msgs.msg import GoToEntityAction, GoToEntityFeedback, GoToEntityResult, GoToEntity
+from spring_msgs.msg import ControllerStatus
+
+
+class GoToGroupActionServer:
+    def __init__(self, name):
+        self._action_name = name
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_dist_thresh = rospy.get_param('~success_dist_thresh', 0.3)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
+
+        self.controller_status_data = None
+
+        self.current_time = None
+        self.init_time = None
+        self.dist_to_goto_target = np.inf
+        self.ang_to_goto_target = np.pi
+
+        self._check_all_sensors_ready()
+
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._go_to_group_pub  = rospy.Publisher('/action/go_to_group', GoToEntity, queue_size=10)
+        self._publishers.append(self._go_to_group_pub)
+
+        rospy.on_shutdown(self.shutdown)
+
+        self._a_server = actionlib.SimpleActionServer(self._action_name, GoToEntityAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server.start()
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def execute_cb(self, goal):
+        # helper variables
+        rate = rospy.Rate(self.hz)
+        self.dist_to_goto_target = np.inf
+        self.ang_to_goto_target = np.pi
+        self.init_time = rospy.Time.now()
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+        self.publish_go_to_group(goal)
+        rospy.sleep(0.5)
+
+        # publish info to the console for the user
+        rospy.logdebug('{}: Begins the navigation planning to group {}'.format(self._action_name, goal.id))
+        rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
+        rospy.logdebug('Goal message : {}'.format(goal))
+
+        # start executing the action
+        while not rospy.is_shutdown():
+            if self._a_server.is_preempt_requested():
+                self._a_server.set_preempted()
+                break
+
+            self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+            # create messages that are used to publish feedback/result
+            self._feedback = GoToEntityFeedback()
+            self._result = GoToEntityResult()
+
+            # publish the feedback
+            self._publish_feedback(goal)
+            rate.sleep()
+
+            if self.dist_to_goto_target < self.success_dist_thresh and self.ang_to_goto_target < self.success_ang_thresh:
+                rospy.loginfo('%s: Succeeded' % self._action_name)
+                self._a_server.set_succeeded(self._result)
+                break
+
+            if self.controller_status_data.status == ControllerStatus.IS_FAILURE:
+                text = "Controller failure"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result, text=text)
+                break
+
+            if self.current_time > goal.timeout :
+                text = "Timeout"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result)
+                break
+
+        self._result_msg(goal)
+
+        # go to idle state
+        goal.id = 'grp-1'
+        goal.continuous = False
+        self.publish_go_to_group(goal)
+
+
+    def publish_go_to_group(self, goal):
+        go_to_group_msg = GoToEntity()
+        go_to_group_msg.id = goal.id
+        go_to_group_msg.continuous = goal.continuous
+        self._go_to_group_pub.publish(go_to_group_msg)
+
+
+    def _result_msg(self, goal):
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+        self._create_action_msg(goal, self._result)
+        self._result.final_time = self.current_time
+
+
+    def _publish_feedback(self, goal):
+        self._create_action_msg(goal, self._feedback)
+        self._feedback.current_time = self.current_time
+        self._a_server.publish_feedback(self._feedback)
+
+
+    def _create_action_msg(self, goal, action_msg):
+        if self.controller_status_data.dist_to_goto_target:
+            self.dist_to_goto_target = self.controller_status_data.dist_to_goto_target[-1]
+            self.ang_to_goto_target = self.controller_status_data.dist_to_goto_target[-2]
+            action_msg.dist_to_goto_target = self.controller_status_data.dist_to_goto_target
+        else:
+            action_msg.dist_to_goto_target = []
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_controller_status_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_controller_status_ready(self):
+        self.controller_status_data = None
+        rospy.logdebug("Waiting for /controller_status to be READY...")
+        while self.controller_status_data is None and not rospy.is_shutdown():
+            try:
+                self.controller_status_data = rospy.wait_for_message("/controller_status", ControllerStatus, timeout=5.0)
+                rospy.logdebug("Current /controller_status READY=>")
+
+            except:
+                rospy.logerr("Current /controller_status not ready yet, retrying for getting controller status")
+        return self.controller_status_data
+
+
+    def _controller_status_callback(self, data):
+        self.controller_status_data = data
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self.close()
+        rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/navigate_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_client.py
similarity index 58%
rename from src/robot_behavior/src/robot_behavior/navigate_action_client.py
rename to src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_client.py
index 939b08ee3947529bf6920e80e4d279b6f59dbff6..30b4e05576f7369840b69128a3703c72cbf59252 100755
--- a/src/robot_behavior/src/robot_behavior/navigate_action_client.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_client.py
@@ -1,26 +1,20 @@
 #!/usr/bin/env python3
 import rospy
 import actionlib
-from spring_msgs.msg import NavigateAction, NavigateGoal
-
-class NavigateActionClient:
-    def __init__(self, name,
-                       goto_target_id,
-                       human_target_id,
-                       goto_target_pose,
-                       relative_h_pose,
-                       timeout):
+from spring_msgs.msg import GoToEntityAction, GoToEntityGoal
+
+
+class GoToPersonActionClient:
+    def __init__(self, name):
         self._action_name = name
-        self._goto_target_id = goto_target_id
-        self._human_target_id = human_target_id
-        self._goto_target_pose = goto_target_pose
-        self._relative_h_pose = relative_h_pose
-        self._timeout = timeout
+        self.person_id = rospy.get_param('~person_id', '-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
 
         rospy.on_shutdown(self.shutdown)
 
         # Creates the SimpleActionClient
-        self._a_client = actionlib.SimpleActionClient(self._action_name, NavigateAction)
+        self._a_client = actionlib.SimpleActionClient(self._action_name, GoToEntityAction)
 
         # Waits until the action server has started up and started listening for goals
         self._a_client.wait_for_server()
@@ -31,12 +25,10 @@ class NavigateActionClient:
 
 
     def call_server(self):
-        goal = NavigateGoal()
-        goal.goto_target_id = self._goto_target_id
-        goal.human_target_id = self._human_target_id
-        goal.goto_target_pose = self._goto_target_pose
-        goal.relative_h_pose = self._relative_h_pose
-        goal.timeout = self._timeout
+        goal = GoToEntityGoal()
+        goal.id = self.person_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
 
         # Sends the goal to the action server
         self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..2adcbe69c367bb3772bd07aef7c0ed768b757630
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_person_action_server.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+import rospy
+import numpy as np
+import actionlib
+from spring_msgs.msg import GoToEntityAction, GoToEntityFeedback, GoToEntityResult, GoToEntity
+from spring_msgs.msg import ControllerStatus
+
+
+class GoToPersonActionServer:
+    def __init__(self, name):
+        self._action_name = name
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_dist_thresh = rospy.get_param('~success_dist_thresh', 0.3)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
+
+        self.controller_status_data = None
+
+        self.current_time = None
+        self.init_time = None
+        self.dist_to_goto_target = np.inf
+        self.ang_to_goto_target = np.pi
+
+        self._check_all_sensors_ready()
+
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._go_to_person_pub  = rospy.Publisher('/action/go_to_person', GoToEntity, queue_size=10)
+        self._publishers.append(self._go_to_person_pub)
+
+        rospy.on_shutdown(self.shutdown)
+
+        self._a_server = actionlib.SimpleActionServer(self._action_name, GoToEntityAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server.start()
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def execute_cb(self, goal):
+        # helper variables
+        rate = rospy.Rate(self.hz)
+        self.dist_to_goto_target = np.inf
+        self.ang_to_goto_target = np.pi
+        self.init_time = rospy.Time.now()
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+        self.publish_go_to_person(goal)
+        rospy.sleep(0.5)
+
+        # publish info to the console for the user
+        rospy.logdebug('{}: Begins the navigation planning to human person {}'.format(self._action_name, goal.id))
+        rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
+        rospy.logdebug('Goal message : {}'.format(goal))
+
+        # start executing the action
+        while not rospy.is_shutdown():
+            if self._a_server.is_preempt_requested():
+                self._a_server.set_preempted()
+                break
+
+            self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+            # create messages that are used to publish feedback/result
+            self._feedback = GoToEntityFeedback()
+            self._result = GoToEntityResult()
+            # publish the feedback
+            self._publish_feedback(goal)
+            rate.sleep()
+
+            if self.dist_to_goto_target < self.success_dist_thresh and self.ang_to_goto_target < self.success_ang_thresh:
+                rospy.loginfo('%s: Succeeded' % self._action_name)
+                self._a_server.set_succeeded(self._result)
+                break
+
+            if self.controller_status_data.status == ControllerStatus.IS_FAILURE:
+                text = "Controller failure"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result, text=text)
+                break
+
+            if self.current_time > goal.timeout :
+                text = "Timeout"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result)
+                break
+
+        self._result_msg(goal)
+
+        # go to idle state
+        goal.id = 'p-1'
+        goal.continuous = False
+        self.publish_go_to_person(goal)
+
+
+    def publish_go_to_person(self, goal):
+        go_to_person_msg = GoToEntity()
+        go_to_person_msg.id = goal.id
+        go_to_person_msg.continuous = goal.continuous
+        self._go_to_person_pub.publish(go_to_person_msg)
+
+
+    def _result_msg(self, goal):
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+        self._create_action_msg(goal, self._result)
+        self._result.final_time = self.current_time
+
+
+    def _publish_feedback(self, goal):
+        self._create_action_msg(goal, self._feedback)
+        self._feedback.current_time = self.current_time
+        self._a_server.publish_feedback(self._feedback)
+
+
+    def _create_action_msg(self, goal, action_msg):
+        if self.controller_status_data.dist_to_goto_target:
+            self.dist_to_goto_target = self.controller_status_data.dist_to_goto_target[-1]
+            self.ang_to_goto_target = self.controller_status_data.dist_to_goto_target[-2]
+            action_msg.dist_to_goto_target = self.controller_status_data.dist_to_goto_target
+        else:
+            action_msg.dist_to_goto_target = []
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_controller_status_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_controller_status_ready(self):
+        self.controller_status_data = None
+        rospy.logdebug("Waiting for /controller_status to be READY...")
+        while self.controller_status_data is None and not rospy.is_shutdown():
+            try:
+                self.controller_status_data = rospy.wait_for_message("/controller_status", ControllerStatus, timeout=5.0)
+                rospy.logdebug("Current /controller_status READY=>")
+
+            except:
+                rospy.logerr("Current /controller_status not ready yet, retrying for getting controller status")
+        return self.controller_status_data
+
+
+    def _controller_status_callback(self, data):
+        self.controller_status_data = data
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self.close()
+        rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..a4780c2c3da14fd80465e28a986346408bee0021
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_client.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+import rospy
+import actionlib
+from spring_msgs.msg import GoToPositionAction, GoToPositionGoal
+
+
+class GoToPositionActionClient:
+    def __init__(self, name):
+        self._action_name = name
+        self.go_to_target_pose = rospy.get_param('~go_to_target_pose', '[0., 0., 0., 0., 0.]')
+        self.go_to_target_pose = self.go_to_target_pose.strip("[]").split(", ")
+        self.go_to_target_pose = list(map(float, self.go_to_target_pose))
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
+
+        rospy.on_shutdown(self.shutdown)
+
+        # Creates the SimpleActionClient
+        self._a_client = actionlib.SimpleActionClient(self._action_name, GoToPositionAction)
+
+        # Waits until the action server has started up and started listening for goals
+        self._a_client.wait_for_server()
+        self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def call_server(self):
+        goal = GoToPositionGoal()
+        goal.go_to_target_pose = self.go_to_target_pose
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
+
+        # Sends the goal to the action server
+        self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
+
+        # Waits for the server to finish performing the action
+        self._a_client.wait_for_result()
+
+        # Prints out the result of executing the action
+        result = self._a_client.get_result()
+        return result
+
+    def feedback_cb(self, msg):
+        rospy.loginfo('Feedback received: {}'.format(msg))
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self._a_client.cancel_all_goals()
+        rospy.loginfo("Killing the %s node" % self._action_name)
diff --git a/src/robot_behavior/src/robot_behavior/navigate_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_server.py
similarity index 63%
rename from src/robot_behavior/src/robot_behavior/navigate_action_server.py
rename to src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_server.py
index e90749841b8537e232dadfb97d40adedd1132733..630b5984e6e2f61cfb8a5e88db28223c018f5c09 100755
--- a/src/robot_behavior/src/robot_behavior/navigate_action_server.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_go_to_position_action_server.py
@@ -2,42 +2,37 @@
 import rospy
 import numpy as np
 import actionlib
-from spring_msgs.msg import NavigateAction, NavigateFeedback, NavigateResult, Navigate
-import tf
-
-from robot_behavior.utils import constraint_angle
+from spring_msgs.msg import GoToPositionAction, GoToPositionFeedback, GoToPositionResult, GoToPosition
 from spring_msgs.msg import ControllerStatus
 
 
-class NavigateActionServer:
-    def __init__(self, name, hz=20, success_dist_thresh=0.3, success_ang_thresh=0.2):
+class GoToPositionActionServer:
+    def __init__(self, name):
         self._action_name = name
-        self._hz = hz
-        self._success_dist_thresh = success_dist_thresh
-        self._success_ang_thresh = success_ang_thresh
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_dist_thresh = rospy.get_param('~success_dist_thresh', 0.3)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
 
         self.controller_status_data = None
 
         self.current_time = None
         self.init_time = None
         self.dist_to_goto_target = np.inf
-        self.dist_to_human_target = np.inf
         self.ang_to_goto_target = np.pi
-        self.ang_to_human_target = np.pi
-
-        # create messages that are used to publish feedback/result
-        self._feedback = NavigateFeedback()
-        self._result = NavigateResult()
 
         self._check_all_sensors_ready()
 
-        rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1)
+        self._publishers = []
+        self._subscribers = []
 
-        self._navigate_pub  = rospy.Publisher('/navigate', Navigate, queue_size=10)
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._go_to_position_pub  = rospy.Publisher('/action/go_to_position', GoToPosition, queue_size=10)
+        self._publishers.append(self._go_to_position_pub)
 
         rospy.on_shutdown(self.shutdown)        
 
-        self._a_server = actionlib.SimpleActionServer(self._action_name, NavigateAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server = actionlib.SimpleActionServer(self._action_name, GoToPositionAction, execute_cb=self.execute_cb, auto_start=False)
         self._a_server.start()
 
         rospy.loginfo("{} Initialization Ended".format(self._action_name))
@@ -45,19 +40,17 @@ class NavigateActionServer:
 
     def execute_cb(self, goal):
         # helper variables
-        rate = rospy.Rate(self._hz)
+        rate = rospy.Rate(self.hz)
         self.dist_to_goto_target = np.inf
-        self.dist_to_human_target = np.inf
         self.ang_to_goto_target = np.pi
-        self.ang_to_human_target = np.pi
         self.init_time = rospy.Time.now()
         self.current_time = (rospy.Time.now() - self.init_time).to_sec()
 
-        self.publish_navigate(goal)
-
+        self.publish_go_to_position(goal)
+        rospy.sleep(0.5)
 
         # publish info to the console for the user
-        rospy.logdebug('%s: Begins the navigation planning to human %i with human %i' % (self._action_name, goal.goto_target_id, goal.human_target_id))
+        rospy.logdebug('{}: Begins the navigation planning to position {} with orientation {}'.format(self._action_name, goal.go_to_target_pose[:2], goal.go_to_target_pose[2]))
         rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
         rospy.logdebug('Goal message : {}'.format(goal))
 
@@ -69,11 +62,15 @@ class NavigateActionServer:
 
             self.current_time = (rospy.Time.now() - self.init_time).to_sec()
 
+            # create messages that are used to publish feedback/result
+            self._feedback = GoToPositionFeedback()
+            self._result = GoToPositionResult()
+
             # publish the feedback
             self._publish_feedback(goal)
             rate.sleep()
 
-            if max(self.dist_to_goto_target, self.dist_to_human_target) < self._success_dist_thresh and max(self.ang_to_goto_target, self.ang_to_human_target) < self._success_ang_thresh:
+            if self.dist_to_goto_target < self.success_dist_thresh and self.ang_to_goto_target < self.success_ang_thresh:
                 rospy.loginfo('%s: Succeeded' % self._action_name)
                 self._a_server.set_succeeded(self._result)
                 break
@@ -93,19 +90,17 @@ class NavigateActionServer:
         self._result_msg(goal)
 
         # go to idle state
-        goal.goto_target_id = -1
-        goal.human_target_id = -1
-        goal.goto_target_pose = np.zeros_like(goal.goto_target_pose)
-        self.publish_navigate(goal)
+        goal.go_to_target_pose = np.zeros_like(goal.go_to_target_pose)
+        goal.go_to_target_pose[-1] = -1
+        goal.continuous = False
+        self.publish_go_to_position(goal)
 
 
-    def publish_navigate(self, goal):
-        navigate_msg = Navigate()
-        navigate_msg.goto_target_id = goal.goto_target_id
-        navigate_msg.human_target_id = goal.human_target_id
-        navigate_msg.relative_h_pose = goal.relative_h_pose
-        navigate_msg.goto_target_pose = goal.goto_target_pose
-        self._navigate_pub.publish(navigate_msg)
+    def publish_go_to_position(self, goal):
+        go_to_position_msg = GoToPosition()
+        go_to_position_msg.go_to_target_pose = goal.go_to_target_pose
+        go_to_position_msg.continuous = goal.continuous
+        self._go_to_position_pub.publish(go_to_position_msg)
 
 
     def _result_msg(self, goal):
@@ -127,12 +122,6 @@ class NavigateActionServer:
             action_msg.dist_to_goto_target = self.controller_status_data.dist_to_goto_target
         else:
             action_msg.dist_to_goto_target = []
-        if self.controller_status_data.dist_to_human_target:
-            self.dist_to_human_target = self.controller_status_data.dist_to_human_target[-1]
-            self.ang_to_human_target = self.controller_status_data.dist_to_human_target[-2]
-            action_msg.dist_to_human_target = self.controller_status_data.dist_to_human_target
-        else:
-            action_msg.dist_to_human_target = []
 
 
     def _check_all_sensors_ready(self):
@@ -160,5 +149,14 @@ class NavigateActionServer:
 
     def shutdown(self):
         rospy.loginfo("Stopping the %s " % self._action_name)
-        # send empty goal message
+        self.close()
         rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/behavior_goal_finder.py b/src/robot_behavior/src/robot_behavior/behavior_goal_finder.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd0c5fbb25abbd061f6566f4a7eab5872ed643db
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_goal_finder.py
@@ -0,0 +1,159 @@
+# This file, part of Social MPC in the WP6 of the Spring project,
+# is part of a project that has received funding from the 
+# European Union’s Horizon 2020 research and innovation programme
+#  under grant agreement No 871245.
+# 
+# Copyright (C) 2020-2023 by Inria
+# Authors : Victor Sanchez
+# victor.sanchez@inria.fr
+#
+# Code inspired from : https://gitlab.inria.fr/spring/wp6_robot_behavior/Motor_Controller/-/blob/master/social_mpc/path.py?ref_type=heads
+#   From : Alex Auternaud, Timothée Wintz
+#   alex.auternaud@inria.fr
+#   timothee.wintz@inria.fr
+
+from social_mpc.ssn_model.social_spaces import SocialSpaces, Group
+from social_mpc import utils
+import numpy as np
+import time
+import rospy
+import exputils as eu
+
+
class GoalFinder():
    """Find a socially-acceptable goal pose (x, y, yaw) near a target human or group.

    Wraps the SocialSpaces dynamic-social-zone (DSZ) model: given the tracked
    humans/groups, the robot pose and the static global map, it computes where
    the robot should stand — and which way it should face — to join the
    currently requested goto target.
    """

    def __init__(self, controller_config):
        # ssn / state_config stay None until set_state_config() is called,
        # i.e. until the first global-map message fixes the map geometry.
        self.state_config = None
        self.ssn = None
        self.controller_config = controller_config

    def set_state_config(self,
                        global_map_size=[[-11.26118716597557, 12.588813189417124], [-10.207198709249496, 14.492801658809185]],
                        global_map_height=494,
                        global_map_width=477,
                        global_map_scale=1/0.05
                        ):
        """Record the global-map geometry and build the SocialSpaces model.

        Args:
            global_map_size: [[x_min, x_max], [y_min, y_max]] in meters.
            global_map_height: map height in cells.
            global_map_width: map width in cells.
            global_map_scale: cells per meter (1 / resolution).
        """
        self.state_config = eu.AttrDict(global_map_size=global_map_size,
                                        global_map_height=global_map_height,
                                        global_map_width=global_map_width,
                                        global_map_scale=global_map_scale
        )
        self.ssn = SocialSpaces(
            bbox=self.state_config.global_map_size,
            map_shape=(self.state_config.global_map_height,
                       self.state_config.global_map_width)
            )

    def _face_target(self, goal, person):
        """Return *goal* as a list with its yaw rotated to face *person* (x, y)."""
        goal = list(goal)
        goal[2] = np.arctan2(person[1] - goal[1], person[0] - goal[0])
        return goal

    def run(self,
            shared_humans,
            shared_groups,
            shared_robot_pose,
            shared_goto_target_human_id,
            shared_goto_target_group_id,
            shared_global_map,
            ):
        """Compute a goal pose for the current goto target.

        Args:
            shared_humans: rows of (x, y, yaw, lin vel, ang vel, group id, id);
                inactive rows carry id == -1.
            shared_groups: rows of (x, y, id); inactive rows carry id == -1.
            shared_robot_pose: robot (x, y, ...) in the map frame.
            shared_goto_target_human_id: (flag, human id) pair.
            shared_goto_target_group_id: (flag, group id) pair.
            shared_global_map: static occupancy map buffer.

        Returns:
            [x, y, yaw] goal in the map frame, or None when no goal can be
            computed (no model, no target, target out of sight, ...).
        """
        last_time = time.time()

        if not self.ssn:
            return None  # map geometry not received yet

        global_map = (np.array([*shared_global_map])).squeeze()
        # NOTE(review): last_time is assigned just above, so this branch
        # effectively sleeps ~goal_loop_time on every call (rate limiting
        # leftover from the loop-process version); kept as-is.
        if self.controller_config.goal_loop_time:
            new_time = time.time()
            if (new_time - last_time) < self.controller_config.goal_loop_time:
                time.sleep(self.controller_config.goal_loop_time - (new_time - last_time))
            last_time = time.time()

        robot_pose = (shared_robot_pose[0], shared_robot_pose[1])
        sh_goto_target_human_id = np.array([*shared_goto_target_human_id])
        sh_goto_target_group_id = np.array([*shared_goto_target_group_id])
        # Fix: bail out when EITHER buffer is missing. The original guard only
        # returned when both were None, so a single missing buffer raised
        # NameError on the unbound sh_humans/sh_groups below.
        if shared_humans is None or shared_groups is None:
            return None
        sh_humans = np.array([*shared_humans])
        sh_groups = np.array([*shared_groups])
        if not sh_goto_target_human_id[0] and not sh_goto_target_group_id[0]:
            return None  # no target requested
        n_human = np.sum(sh_humans[:, -1] >= 0)  # active rows come first
        if n_human < 1:
            return None
        humans = np.array(sh_humans[:n_human, :])
        human_ids = np.array(sh_humans[sh_humans[:, -1] >= 0, -1], dtype=int).tolist()

        n_group = np.sum(sh_groups[:, -1] >= 0)
        sh_groups = sh_groups[:n_group, :]
        group_ids = np.array(sh_groups[sh_groups[:, -1] >= 0, -1], dtype=int).tolist()
        target = None

        # Resolve the requested target against what is currently in sight.
        if sh_goto_target_human_id[0]:
            if sh_goto_target_human_id[1] in human_ids:
                idx = human_ids.index(sh_goto_target_human_id[1])
                if humans[idx, -2] != -1:  # the human belongs to a group
                    gr_idx = group_ids.index(humans[idx, -2])
                    target = ('human_in_group', idx, gr_idx, humans[idx, -2])  # type, human id, group index, group id
                else:
                    target = ('isolated_human', idx)  # type, human id
            else:
                rospy.logwarn("Target human not in sight")
                return None
        if sh_goto_target_group_id[0]:
            if sh_goto_target_group_id[1] in group_ids:
                gr_idx = group_ids.index(sh_goto_target_group_id[1])
                target = ('group', gr_idx, sh_goto_target_group_id[1])  # type, group index, group id
            else:
                rospy.logwarn("Target group not in sight")
                return None

        groups = []
        for idx, group_id in enumerate(group_ids):
            groups.append(Group(center=sh_groups[idx, :2], person_ids=np.where(humans[:, -2] == group_id)[0]))

        # Dynamic social zone cost over (x, y, yaw) plus two zero channels.
        humans_fdz = np.zeros((n_human, 5))
        humans_fdz[:, :3] = humans[:, :3]
        f_dsz = self.ssn.calc_dsz(humans_fdz, groups=groups)

        if not target:
            return None
        if target[0] == 'human_in_group':
            center = [sh_groups[target[2], 0], sh_groups[target[2], 1]]
            persons = humans[humans[:, -2] == target[3], :]
            goal = self.ssn.calc_goal_pos(
                f_dsz=f_dsz,
                map=global_map,
                persons=persons,
                group_center=center,
                robot=robot_pose)
            goal = self._face_target(goal, humans[target[1]])  # face the human
        elif target[0] == 'isolated_human':
            person = humans[target[1], :]
            # Project a virtual group center one stride ahead of the person,
            # along their facing direction.
            ts_x = person[0] + np.cos(person[2]) * self.controller_config.group_detection_stride
            ts_y = person[1] + np.sin(person[2]) * self.controller_config.group_detection_stride
            goal = self.ssn.calc_goal_pos(
                f_dsz=f_dsz,
                map=global_map,
                persons=person[np.newaxis, :],
                group_center=[ts_x, ts_y],
                robot=robot_pose)
            goal = self._face_target(goal, humans[target[1]])  # face the human
        elif target[0] == 'group':
            center = [sh_groups[target[1], 0], sh_groups[target[1], 1]]
            persons = humans[humans[:, -2] == target[2], :]
            goal = self.ssn.calc_goal_pos(
                f_dsz=f_dsz,
                map=global_map,
                persons=persons,
                group_center=center,
                robot=robot_pose)
            goal = list(goal)  # group goal keeps the orientation from the model
        else:
            return None
        rospy.loginfo("goal : {}".format(goal))
        return goal
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/behavior_goal_finder_node.py b/src/robot_behavior/src/robot_behavior/behavior_goal_finder_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..2eb0dd834b59a0ceebd2c7ee77d73877b0625b10
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_goal_finder_node.py
@@ -0,0 +1,509 @@
+#!/usr/bin/env python3
+import numpy as np
+# from collections import namedtuple
+import pkg_resources
+import tf
+import sys
+import os
+import yaml
+from multiprocessing import Lock
+import numpy as np
+import rospy
+from nav_msgs.msg import OccupancyGrid
+from robot_behavior.behavior_goal_finder import GoalFinder
+from robot_behavior.utils import constraint_angle, local_to_global
+from robot_behavior.behavior_generator_node import RobotState
+from social_mpc.config.config import ControllerConfig, RobotConfig
+import time
+from robot_behavior.ros_4_hri_interface import ROS4HRIInterface 
+from spring_msgs.msg import GoToPosition, GoToEntity, LookAtEntity, LookAtPosition
+
+
class GoalFinderNode:
    """ROS node that turns go_to_* requests into a concrete goal pose.

    Subscribes to the static global occupancy map and the behavior action
    topics (go_to_position / body / person / group), mirrors the ROS4HRI
    human/group tracking into a RobotState, runs the social GoalFinder, and
    broadcasts the resulting goal pose as a TF frame.
    """

    def __init__(self):
        # --- parameters -----------------------------------------------------
        mpc_config_name = rospy.get_param('~mpc_config_name', 'None')
        mpc_config_path = None if mpc_config_name == 'None' else mpc_config_name
        self.mpc_config_path = mpc_config_path
        self.map_frame = rospy.get_param('~map_frame', 'map')
        self.robot_frame = rospy.get_param('~robot_frame', 'base_footprint')
        self.global_occupancy_map_topic = rospy.get_param('~global_occupancy_map_topic', '/slam/global_map_rl_navigation')
        # Fixed upper bounds because the state arrays mirror SharedMemory buffers.
        self.max_humans_world = rospy.get_param('max_humans_world', 20)
        self.max_groups_world = rospy.get_param('max_groups_world', 10)
        self.namespace = rospy.get_param('namespace', 'behavior_generator')

        self.read_config(filename=self.mpc_config_path)
        self.goal_finder = GoalFinder(
            controller_config=self.controller_config)

        self.global_map_data = None
        self.rtabmap_ready = False  # becomes True once the map->robot TF is up
        self.global_map_static = None
        self.hri_data = None

        # Sentinel id marking an empty slot in the fixed-size state arrays.
        self.human_id_default = -1
        self.group_id_default = -1

        # Last computed goal pose, in the map frame.
        self.x_final = 0.
        self.y_final = 0.
        self.yaw_final = 0.
        self.flag_final_goal_okay = False

        self.robot_x_position = 0.
        self.robot_y_position = 0.
        self.robot_yaw = 0.
        self.robot_position = [0, 0]
        self.lock_data_entrance = Lock()

        self.init_ros_subscriber_and_publicher()

        self.tf_broadcaster = tf.TransformBroadcaster()  # publishes the goal frame
        self.tf_listener = tf.TransformListener()  # robot localization lookups

        self._check_all_sensors_ready()

        self.ros_4_hri_interface = ROS4HRIInterface()
        self.init_state()
        self.reset_go_to_data()

        rospy.loginfo("GoalFinderNode Initialization Ended")

    def read_config(self, filename=None):
        """Load the controller (MPC) configuration, falling back to the
        packaged default when *filename* is None or does not exist."""
        if filename is None:
            filename = pkg_resources.resource_filename(
                'social_mpc', 'config/social_mpc.yaml')
        elif os.path.isfile(filename):
            self.passed_config_loaded = True
        else:
            filename = pkg_resources.resource_filename(
                'social_mpc', 'config/social_mpc.yaml')
        config = yaml.load(open(filename), Loader=yaml.FullLoader)
        self.controller_config = ControllerConfig(config)

        self.goal_finder_enabled = self.controller_config.goal_finder_enabled
        self.path_planner_enabled = self.controller_config.path_planner_enabled
        self.update_goals_enabled = self.controller_config.update_goals_enabled

    def read_robot_config(self, filename=None):
        """Load the robot configuration, falling back to the packaged default
        when *filename* is None or does not exist."""
        if filename is None:
            filename = pkg_resources.resource_filename(
                'sim2d', 'config/robot.yaml')
            rospy.logdebug("No filename provided for the robot configuration, basic robot config loaded")
        elif os.path.isfile(filename):
            rospy.logdebug("Desired robot config loaded")
        else:
            filename = pkg_resources.resource_filename(
                'sim2d', 'config/robot.yaml')
            rospy.logdebug("Desired filename for the robot configuration does not exist, basic robot config loaded")
        config = yaml.load(open(filename), Loader=yaml.FullLoader)
        self.robot_config = RobotConfig(config)

    # NOTE: method name keeps the historical spelling ("publicher") so any
    # external caller relying on it keeps working.
    def init_ros_subscriber_and_publicher(self):
        r''' Initialize the subscribers and publishers '''
        self._subscribers = []
        self._publishers = []
        self._action_server = []
        self._timers = []

        ### ROS Subscribers
        self._subscribers.append(rospy.Subscriber(self.global_occupancy_map_topic, OccupancyGrid, callback=self._global_map_callback, queue_size=1))
        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_position', GoToPosition, callback=self._go_to_position_callback, queue_size=1))
        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_body', GoToEntity, callback=self._go_to_body_callback, queue_size=1))
        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_person', GoToEntity, callback=self._go_to_person_callback, queue_size=1))
        self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_group', GoToEntity, callback=self._go_to_group_callback, queue_size=1))

        rospy.loginfo("Initializing the BehaviorGenerator")

    def run(self):
        r''' Run the node loop until ROS shutdown. '''
        init_t = rospy.Time.now()
        while not rospy.is_shutdown():
            self.step(init_t)

    def step(self, init_time):
        r''' One iteration: refresh robot/HRI state, resolve the pending
        go_to request into a goal pose, broadcast it, then sleep. '''

        # Localizing the robot in the environment
        self.get_robot_pose()
        self.hri_data = self.ros_4_hri_interface.get_data()
        self.recreate_state()

        if self.go_to_position_data:
            # Direct position request: no goal-finder needed.
            # go_to_target_pose layout appears to be
            # [x, y, yaw, relative-frame flag, valid flag] — TODO confirm.
            if self.go_to_position_data.go_to_target_pose[-1]:
                if not self.go_to_position_data.go_to_target_pose[-2]:
                    self.x_final = self.go_to_position_data.go_to_target_pose[0]
                    self.y_final = self.go_to_position_data.go_to_target_pose[1]
                    self.yaw_final = self.go_to_position_data.go_to_target_pose[2]
                else:
                    # Pose given in the robot frame: convert to map frame.
                    self.x_final, self.y_final = local_to_global(self.robot_position, np.asarray(self.go_to_position_data.go_to_target_pose[:2]))
                    self.yaw_final = constraint_angle(self.robot_yaw + self.go_to_position_data.go_to_target_pose[2])

        else:
            # Entity requests: translate ids ("b<num>", "gr<num>", person id)
            # into the shared goto-target buffers read by the goal finder.
            if self.go_to_body_data:
                go_to_body_target = int(self.go_to_body_data.id[1:])
                if float(go_to_body_target) in self.state.humans_id.tolist() and go_to_body_target >= 0:
                    self.shared_goto_target_human_id = [1, go_to_body_target]

            if self.go_to_group_data:
                go_to_group_target = int(self.go_to_group_data.id[3:])
                if float(go_to_group_target) in self.state.groups_id.tolist() and go_to_group_target >= 0:
                    self.shared_goto_target_group_id = [1, go_to_group_target]

            if self.go_to_person_data:
                go_to_person_target = self.go_to_person_data.id
                if go_to_person_target in self.hri_data[2].keys():
                    # Map the person id to its body id, then to the tracked human.
                    go_to_person_target = int(self.hri_data[2][self.go_to_person_data.id][1:])
                    if float(go_to_person_target) in self.state.humans_id.tolist():
                        self.shared_goto_target_human_id = [1, go_to_person_target]

            if self.go_to_body_data or self.go_to_group_data or self.go_to_person_data:
                final_goal = self.goal_finder.run(
                    self.shared_humans,
                    self.shared_groups,
                    self.state.robot_pose,
                    self.shared_goto_target_human_id,
                    self.shared_goto_target_group_id,
                    self.global_map_static,
                )
                if final_goal:
                    self.flag_final_goal_okay = True
                    self.x_final, self.y_final, self.yaw_final = final_goal
                    rospy.logdebug('Robot position : x {:.2f} \t y {:.2f} \t yaw {:.2f}'.format(self.robot_x_position, self.robot_y_position, self.robot_yaw))
                    rospy.logdebug('Goal position : x {:.2f} \t y {:.2f} \t yaw {:.2f}'.format(self.x_final, self.y_final, self.yaw_final))
                else:
                    # Fix: mark the goal as NOT okay on failure (was True).
                    self.flag_final_goal_okay = False
                    rospy.logwarn("The final goal was not computed properly")

        self.publish_tf_go_to_goals()
        self.reset_go_to_data()
        rospy.sleep(self.controller_config.goal_loop_time)

    def publish_tf_go_to_goals(self):
        """Broadcast the last computed goal pose as a TF child of the map frame."""
        current_time = rospy.Time.now()
        quat_final = tf.transformations.quaternion_from_euler(0, 0, self.yaw_final)
        # NOTE(review): the child frame id "final goal" contains a space,
        # which some TF tooling rejects — confirm downstream consumers.
        self.tf_broadcaster.sendTransform(
            (self.x_final, self.y_final, 0),
            quat_final,
            current_time,
            "final goal",
            self.map_frame
        )

    def get_robot_pose(self):
        r""" Update the robot pose (x, y, yaw) from the latest map->robot TF. """
        try:
            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
        except Exception:  # TF not available yet (lookup/connectivity/extrapolation)
            return None
        self.robot_x_position, self.robot_y_position, _ = map_2_robot[0]
        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_robot[1][0],
                                                                map_2_robot[1][1],
                                                                map_2_robot[1][2],
                                                                map_2_robot[1][3]])
        self.robot_yaw = float(constraint_angle(yaw))
        self.robot_position = np.array([self.robot_x_position, self.robot_y_position])

    def reset_go_to_data(self):
        """Clear all pending go_to requests (only one is served per step)."""
        self.go_to_position_data = None
        self.go_to_body_data = None
        self.go_to_person_data = None
        self.go_to_group_data = None

    def _go_to_position_callback(self, data):
        # The newest request wins: drop any other pending request first.
        self.reset_go_to_data()
        self.go_to_position_data = data

    def _go_to_body_callback(self, data):
        self.reset_go_to_data()
        self.go_to_body_data = data

    def _go_to_group_callback(self, data):
        self.reset_go_to_data()
        self.go_to_group_data = data

    def _go_to_person_callback(self, data):
        self.reset_go_to_data()
        self.go_to_person_data = data

    def _global_map_callback(self, data):
        r''' Cache the static global map (values scaled to [0, 1], row order
        flipped so index 0 is the top of the map) and, on first reception,
        configure the goal finder with the map geometry. '''
        if data:
            with self.lock_data_entrance:
                self.global_map_data = data
                self.x_global_map = self.global_map_data.info.origin.position.x
                self.y_global_map = self.global_map_data.info.origin.position.y
                self.global_map_width = self.global_map_data.info.width
                self.global_map_height = self.global_map_data.info.height
                self.global_map_resolution = self.global_map_data.info.resolution
                self.global_map_size = [[self.x_global_map, self.x_global_map + self.global_map_width*self.global_map_resolution],[self.y_global_map, self.y_global_map + self.global_map_height*self.global_map_resolution]]
                self.last_shape_global_map = (self.global_map_height, self.global_map_width)
                self.global_map_static = np.flip((np.asarray(self.global_map_data.data) / 100).reshape(self.last_shape_global_map), axis=0)
                if self.goal_finder.state_config is None:
                    self.goal_finder.set_state_config(global_map_size=self.global_map_size,
                                                      global_map_height=self.global_map_height,
                                                      global_map_width=self.global_map_width,
                                                      global_map_scale=1/0.05
                                                     )

    def _check_all_sensors_ready(self):
        """Block until localization and the global map are both available."""
        rospy.logdebug("START ALL SENSORS READY")
        self._check_rtabmap_ready()
        self._check_global_map_ready()
        rospy.logdebug("ALL SENSORS READY")

    def _check_global_map_ready(self):
        """Block until one message arrives on the global map topic."""
        self.global_map_data = None
        rospy.logdebug("Waiting for {} to be READY...".format(self.global_occupancy_map_topic))
        while self.global_map_data is None and not rospy.is_shutdown():
            try:
                self.global_map_data = rospy.wait_for_message(self.global_occupancy_map_topic, OccupancyGrid, timeout=5.0)
                rospy.logdebug("Current {} READY=>".format(self.global_occupancy_map_topic))
            except rospy.ROSException:  # timeout: retry
                rospy.logerr("Current {} not ready yet, retrying for getting global map".format(self.global_occupancy_map_topic))
        return self.global_map_data

    def _check_rtabmap_ready(self):
        """Block until the map->robot transform becomes available."""
        rospy.logdebug("Waiting for rtabmap pose to be READY...")
        # Fix: rtabmap_ready is initialized to False (not None), so the
        # original `while self.rtabmap_ready is None` never waited at all.
        while not self.rtabmap_ready and not rospy.is_shutdown():
            try:
                self.tf_listener.waitForTransform(self.map_frame, self.robot_frame, rospy.Time(0), rospy.Duration(5.0))
                self.rtabmap_ready = True
                rospy.logdebug("Current rtabmap pose READY=>")
            except Exception:  # TF timeout: retry
                rospy.logerr("Current rtabmap pose not ready yet, retrying for getting rtabmap pose")
        return self.rtabmap_ready

    def reset_state(self):
        """Reset every state/shared array to its 'empty slot' value."""
        nb_humans = self.max_humans_world
        self.state.humans_id = np.ones(nb_humans) * self.human_id_default
        self.state.humans_group_id = np.ones(nb_humans) * self.group_id_default
        self.state.groups_id = np.ones(self.max_groups_world) * self.group_id_default
        self.state.humans_visible = np.ones(nb_humans)  # 0 = visible, 1 = not
        self.state.groups_visible = np.ones(self.max_groups_world)
        self.state.humans_pose = np.zeros((nb_humans, 3))  # x, y, angle
        self.state.groups_pose = np.zeros((self.max_groups_world, 2))  # x, y
        self.state.humans_velocity = np.zeros((nb_humans, 2))
        self.state.robot_pose = np.zeros(3)
        self.state.robot_velocity = np.zeros(2)

        self.shared_humans = np.zeros((nb_humans, 7))  # x, y, orientation, lin vel, ang vel, group id, id
        self.shared_groups = np.zeros((self.max_groups_world, 3))  # x, y, id
        self.shared_goto_target_group_id = np.zeros(2)  # flag, id
        self.shared_goto_target_human_id = np.zeros(2)  # flag, id

    def recreate_state(self):
        """Rebuild the RobotState and the shared_* buffers from the latest
        robot pose and ROS4HRI tracking data."""
        self.reset_state()

        self.state.robot_pose = np.array([self.robot_x_position, self.robot_y_position, self.robot_yaw])

        for human_id, human_data in self.hri_data[0].items():
            human_id = int(human_id[1:])  # strip the "b" prefix of the body id
            humans_id_list = self.state.humans_id.tolist()
            if human_id in humans_id_list:
                index = humans_id_list.index(human_id)
            elif self.human_id_default in humans_id_list:
                index = humans_id_list.index(self.human_id_default)
            else:
                rospy.logerr("No more space in state.humans_ arrays. Number max is {}. It is fixed because of the use of SharedMemory multiprocessing".format(self.max_humans_world))
                # Fix: skip this human instead of writing through a stale
                # (or unbound) index and clobbering another slot.
                continue
            self.state.humans_id[index] = human_id
            self.state.humans_visible[index] = 0.  # how to get it ?
            if len(human_data) >= 3:
                self.state.humans_group_id[index] = int(human_data[2][3:])  # 0: transform, 1:person_id, 2:group_id if group

            human_pose = human_data[0].transform.translation
            human_quat = human_data[0].transform.rotation
            x_pos, y_pos = human_pose.x, human_pose.y
            (_, _, yaw) = tf.transformations.euler_from_quaternion([
                human_quat.x,
                human_quat.y,
                human_quat.z,
                human_quat.w])

            lin = 0.  # how to get it ?
            ang = 0.  # how to get it ?

            self.state.humans_pose[index, :] = [x_pos, y_pos, constraint_angle(yaw)]
            self.state.humans_velocity[index, :] = [lin, ang]

        rospy.logdebug('humans_id : {}'.format(self.state.humans_id.tolist()))

        for group_id, group_data in self.hri_data[1].items():
            group_id = int(group_id[3:])  # strip the "gr_" prefix
            groups_id_list = self.state.groups_id.tolist()
            if group_id in groups_id_list:
                index = groups_id_list.index(group_id)
            elif self.group_id_default in groups_id_list:
                index = groups_id_list.index(self.group_id_default)
            else:
                rospy.logerr("No more space in state.groups_ arrays. Number max is {}. It is fixed because of the use of SharedMemory multiprocessing".format(self.max_groups_world))
                continue  # fix: skip instead of reusing a stale index
            self.state.groups_id[index] = group_id
            self.state.groups_visible[index] = 0.  # how to get it ?

            group_pose = group_data[0].transform.translation
            x_pos, y_pos = group_pose.x, group_pose.y
            self.state.groups_pose[index, :] = [x_pos, y_pos]

        rospy.logdebug('groups_id ; {}'.format(self.state.groups_id.tolist()))

        # Compact visible humans/groups into the shared buffers; inactive
        # rows are marked with id == -1 in the last column.
        self.shared_humans[:, -1] = -1
        idx = np.where(self.state.humans_visible == 0)[0]
        n = len(idx)
        self.shared_humans[:n, -1] = self.state.humans_id[idx]
        self.shared_humans[:n, -2] = self.state.humans_group_id[idx]
        self.shared_humans[:n, :3] = self.state.humans_pose[idx, :]
        self.shared_humans[:n, 3:5] = self.state.humans_velocity[idx, :]

        self.shared_groups[:, -1] = -1
        idx = np.where(self.state.groups_visible == 0)[0]
        n = len(idx)
        self.shared_groups[:n, :2] = self.state.groups_pose[idx, :]
        self.shared_groups[:n, -1] = self.state.groups_id[idx]

    def init_state(self):
        """Create the RobotState container and the shared_* buffer slots."""
        self.state = RobotState()
        self.list_attr = ["robot_pose",
                          "robot_velocity",
                          "groups_pose",
                          "humans_pose",
                          "humans_velocity",
                          "humans_visible",
                          "groups_visible",
                          "groups_id",
                          "humans_group_id",
                          "humans_id",
                          ]
        for attr in self.list_attr:
            self.state.__setattr__(attr, None)

        self.shared_humans = None
        self.shared_groups = None
        self.shared_goto_target_group_id = None
        self.shared_goto_target_human_id = None

        self.reset_state()

    def shutdown(self):
        """Tear down the node (called on ROS shutdown)."""
        rospy.loginfo("Stopping the GoalFinderNode")
        self.close()
        rospy.loginfo("Killing the GoalFinderNode node")

    def close(self):
        """Unregister subscribers/publishers, stop timers and action servers,
        and close the HRI interface."""
        if self._subscribers:
            for subscriber in self._subscribers:
                subscriber.unregister()

        if self._publishers:
            for publisher in self._publishers:
                if isinstance(publisher, dict):
                    for pub in publisher.values():
                        pub.unregister()
                else:
                    publisher.unregister()
        if self._timers:
            for timer in self._timers:
                timer.shutdown()

        # Fix: _action_server is a list — iterate it instead of calling
        # .shutdown() on the list object itself.
        for action_server in self._action_server:
            action_server.shutdown()
        self.ros_4_hri_interface.close()
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc.py b/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc.py
new file mode 100755
index 0000000000000000000000000000000000000000..bfcb648aaca8a2de125ad7da16783a33d0f2c0f2
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc.py
@@ -0,0 +1,392 @@
+# This file, part of Social MPC in the WP6 of the Spring project,
+# is part of a project that has received funding from the 
+# European Union’s Horizon 2020 research and innovation programme
+#  under grant agreement No 871245.
+# 
+# Copyright (C) 2020-2022 by Inria
+# Authors : Alex Auternaud, Timothée Wintz
+# alex.auternaud@inria.fr
+# timothee.wintz@inria.fr
+
+import time
+import scipy
+from scipy.optimize import NonlinearConstraint, Bounds, minimize
+from jax.config import config
+from jax.ops import index, index_update
+from jax.lax import cond, reshape, bitwise_and
+import jax.numpy as np
+import jax
+from jax import grad, jit, vmap, jacfwd, custom_jvp, partial
+import rospy
+from robot_behavior.utils import local_to_global_jax
+
+config.update("jax_enable_x64", True)
+# config.update('jax_disable_jit', True)
+
+# @partial(custom_jvp, nondiff_argnums=[0])
+def interp1d(cost_map, angles):
+    return jax.scipy.ndimage.map_coordinates(cost_map, angles, order=1, mode='wrap')
+
+def rotmat(theta):
+    return np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])
+
+def rotmat_inv(theta):
+    return np.array([[np.cos(theta), np.sin(theta)], [-np.sin(theta), np.cos(theta)]])
+
+# @interp1d.defjvp
+# def interp1d_jvp(cost_map, primals, tangents):
+#     ddepth = np.gradient(cost_map)
+#     primals_out = interp1d(cost_map, primals)
+#     tangents_out = interp1d(ddepth, primals) * tangents[0]
+#     return primals_out, tangents_out
+
+
+class LocalPathPlannerMPC:
+    def __init__(self, h, robot_config, horizon, dim_config, u_lb, u_ub,
+                 joints_lb=None, joints_ub=None,
+                 max_acceleration=None,
+                 wall_avoidance_points=None,
+                 max_iter=100,
+                 cost_map_region=None):
+        """
+            MPC Constructor.
+
+            Args:
+                h (float): time step
+                robot (mpc.Robot): robot model
+                u_lb (array): 1D array of length robot.n_joints + 2 lower bound for control
+                u_ub (array): 1D array of length robot.n_joints + 2 upper bound for control
+                joints_lb (array, optional): 1D array of length robot.n_angles lower bound for joint values
+                joints_ub (array, optional): 1D array of length robot.n_angles upper bound for joint values
+                reg_parameter (array or float): 1D array of size robot.n_joints + 2 or float regularization parameter
+        """
+        self.h = h
+        self.robot = robot_config
+        self.dim_config = dim_config
+
+        self.pan_target_dim = self.dim_config['pan_target_dim']
+        self.goto_target_dim = self.dim_config['goto_target_dim']
+        self.human_target_dim = self.dim_config['human_target_dim']
+        self.cost_map_dim = self.dim_config['cost_map_dim']
+        self.weights_dim = self.dim_config['weights_dim']
+        self.loss_coef_dim = self.dim_config['loss_coef_dim']
+        self.loss_rad_dim = self.dim_config['loss_rad_dim']
+
+        n_joints = 0
+        if self.robot.has_pan:
+            n_joints += 1
+        if self.robot.has_tilt:
+            n_joints += 1
+
+        self.action_dim = n_joints + 2
+        self.horizon = horizon
+        self.actions_shape = (self.horizon, self.action_dim)
+        self.n_angles = n_joints
+
+        self.fw_angles = jit(self._fw_angles)
+        self.fw_angles_flat = jit(self._fw_angles_flat)
+        self.g_fw_angles_flat = jit(jacfwd(self._fw_angles_flat, argnums=1))
+
+        self.fw_base_loss_flat = jit(self._fw_base_loss_flat)
+        self.g_fw_base_loss_flat = jit(grad(self._fw_base_loss_flat, 0))
+
+        self.fw_base_loss = jit(self._fw_base_loss)
+        self.fw_base_positions = jit(self._fw_base_positions)
+
+        self.l2_loss = jit(self._l2_loss)
+        self.exp_loss = jit(self._exp_loss)
+        self.maxout_loss = jit(self._maxout_loss)
+        self.minmaxout_loss = jit(self._minmaxout_loss)
+
+        self.regularization_term = jit(self._regularization_term)
+
+        self.wall_avoidance_loss = jit(self._wall_avoidance_loss)
+
+        self.u_lb = u_lb
+        self.u_ub = u_ub
+
+        self.joints_lb = joints_lb
+        self.joints_ub = joints_ub
+
+        self.max_iter = max_iter
+        self.max_acceleration = max_acceleration
+        self.wall_avoidance_points = wall_avoidance_points
+
+        if cost_map_region is not None:
+            self.cost_map_region = cost_map_region
+        else:
+            self.cost_map_region = [[-3, 3], [-3,3]]
+
+        self.fw_base_positions_ang_c = None
+        self.fw_base_positions_pos_c = None
+        self.fw_b_l_f = None
+        self.count = 0
+        self.fw_loss = 0.
+
+
+    def step(self, state, weights, actions, loss_coef, loss_rad, cost_map=None, reg_parameter=None, goto_goal=None, pan_goal=None, human_features=None):
+        """
+            MPC Step.
+
+            Args:
+                state (array): 1D array of the form [alpha_1, alpha_2, x_1, y_1, x_2, y_2] of size n_angles + n_features x n_dim
+                # horizon (int): number of future steps to consider
+                actions (array): (optional) intial value for actions. 2D array of shape n_horizon x (n_angles + 2).
+                    Action order is joint angle velocity, base angle velocity, base linear velocity
+            Returns:
+                array: 2D array of shape n_horizon x (n_angles + 2), optimal actions
+        """
+        assert(len(state) == self.n_angles)
+        assert(actions.shape == self.actions_shape)
+        assert(len(weights) == self.weights_dim)
+        assert(len(loss_coef) == self.loss_coef_dim)
+        assert(len(loss_rad) == self.loss_rad_dim)
+
+        if cost_map is not None:
+            assert(cost_map.shape == self.cost_map_dim)
+        if reg_parameter is not None:
+            assert(len(reg_parameter) == self.action_dim or np.isscalar(reg_parameter))
+        if goto_goal is not None:
+            assert(len(goto_goal) == self.goto_target_dim)
+        if pan_goal is not None:
+            assert(len(pan_goal) == self.pan_target_dim)
+        if human_features is not None:
+            assert(len(human_features) == self.human_target_dim)
+
+        if reg_parameter is None:
+            reg_parameter = 0.
+        if pan_goal is None:
+            pan_goal = np.zeros(self.pan_target_dim)
+        if goto_goal is None:
+            goto_goal = np.zeros(self.goto_target_dim)
+        if human_features is None:
+            human_features = np.zeros(self.human_target_dim)
+        if cost_map is None:
+            cost_map = np.zeros(self.cost_map_dim)
+
+
+        rospy.loginfo('goals : {0}, {2}, {1}'.format(goto_goal, pan_goal, human_features))
+
+        fw_loss = lambda x : self.fw_base_loss_flat(x, state=state, cost_map=cost_map, weights=weights, reg_parameter=reg_parameter, loss_coef=loss_coef, loss_rad=loss_rad, goto_goal=goto_goal, pan_goal=pan_goal, human_features=human_features)
+
+        g_fw_loss = lambda x : self.g_fw_base_loss_flat(x, state=state, cost_map=cost_map, weights=weights, reg_parameter=reg_parameter, loss_coef=loss_coef, loss_rad=loss_rad, goto_goal=goto_goal, pan_goal=pan_goal, human_features=human_features)
+
+        lb = np.zeros(self.actions_shape)
+        lb = index_update(lb, index[:, :], self.u_lb[np.newaxis, :])
+        if self.count == 1:
+            lb = index_update(lb, index[0, :], actions[0, :])
+        if self.count > 1:
+            lb = index_update(lb, index[0, :], actions[1, :])
+        lb = lb.flatten()
+        ub = np.zeros(self.actions_shape)
+        ub = index_update(ub, index[:, :], self.u_ub[np.newaxis, :])
+        if self.count == 1:
+            ub = index_update(ub, index[0, :], actions[0, :])
+        if self.count > 1:
+            ub = index_update(ub, index[0, :], actions[1, :])
+        ub = ub.flatten()
+        bounds = Bounds(lb, ub)
+        if self.joints_lb is not None or self.joints_ub is not None:
+            # rospy.logdebug("using constraints")
+            lb_angles = np.zeros((self.horizon, self.n_angles))
+            lb_angles = index_update(lb_angles, index[:, :], self.joints_lb[np.newaxis, :])
+            ub_angles = np.zeros((self.horizon, self.n_angles))
+            ub_angles = index_update(ub_angles, index[:, :], self.joints_ub[np.newaxis, :])
+            constraints = NonlinearConstraint(lambda x: self.fw_angles_flat(state, x),
+                                              jac=lambda x: np.array(self.g_fw_angles_flat(state, x)),
+                                              lb=lb_angles.flatten(), ub=ub_angles.flatten())
+        else:
+            constraints = []
+        options = {'maxiter': self.max_iter}
+        r = minimize(fw_loss, jac=g_fw_loss, x0=actions.flatten(), bounds=bounds, constraints=constraints, options=options)
+        # rospy.logdebug(r)
+
+        if not r.success and options['maxiter'] > 20:
+            rospy.logwarn("Warning: failed optimization")
+            rospy.logwarn(r)
+
+
+        # self.fw_base_positions_ang_c, self.fw_base_positions_pos_c = self.fw_base_positions(reshape(r.x, (self.horizon, self.n_angles + 2)), 0.)
+        # self.fw_b_l_f = self._fw_base_loss_flat(r.x, state=state, cost_map=cost_map, weights=weights, reg_parameter=reg_parameter, loss_coef=loss_coef, loss_rad=loss_rad, goto_goal=goto_goal, pan_goal=pan_goal, human_features=human_features)
+        self.fw_loss = r.fun
+        # rospy.logdebug("fw loss : {}".format(self.fw_loss))
+        # rospy.logdebug(r.nit, r.nfev, r.njev)
+        # rospy.logdebug(r)
+        self.count += 1
+
+        return reshape(r.x, (self.horizon, self.n_angles + 2))
+
+
+    def _l2_loss(self, state, target):
+        return np.sum((state - target)**2)
+
+    def _minmaxout_loss(self, state, target, acceptance_r, coef):
+        return np.minimum(np.maximum(coef*(np.sum((state - target)**2) - acceptance_r**2), 0.), 1.)
+
+    def _maxout_loss(self, state, target, acceptance_r, coef):
+        return np.maximum(coef*(np.sum((state - target)**2) - acceptance_r**2), 0.)
+
+    def _exp_loss(self, dist):
+        return np.minimum(np.exp(11*(dist - 0.5)), 1.)
+
+    def _fw_angles(self, state, actions):
+        return state[np.newaxis, :self.n_angles] + self.h * np.cumsum(actions[:, :self.n_angles], axis=0)
+
+    def _fw_angles_flat(self, state, x):
+        actions = reshape(x, self.actions_shape)
+        return self.fw_angles(state, actions).flatten()
+
+    def _fw_base_positions(self, actions, start_ang):
+        base_angles = self.h * np.cumsum(actions[:, self.n_angles]) + start_ang
+        velocities = np.stack([actions[:, -1] * np.cos(base_angles), actions[:, -1] * np.sin(base_angles)], axis=1)
+        base_positions = self.h * np.cumsum(velocities, axis=0)
+        return base_angles, base_positions
+
+    def false_fun_costmap(self, positions, cost_map):
+        cost_map_xmin = self.cost_map_region[0][0]
+        cost_map_xmax = self.cost_map_region[0][1]
+        cost_map_ymin = self.cost_map_region[1][0]
+        cost_map_ymax = self.cost_map_region[1][1]
+        cost_map_xres, cost_map_yres = cost_map.shape
+        im_x = (positions[:,0] - cost_map_xmin)/(cost_map_xmax - cost_map_xmin) * cost_map_xres
+        im_y = (cost_map_ymax - positions[:,1])/(cost_map_ymax - cost_map_ymin) * cost_map_yres
+        int_position = np.stack([im_x, im_y], axis=1)
+        costs = jax.scipy.ndimage.map_coordinates(cost_map, np.transpose(int_position), order=1, mode='constant')
+        return np.sum(costs)
+
+    def true_fun_l2_loss(self, x, goal):
+        loss = vmap(self.l2_loss, in_axes=(0, None))(x, goal)
+        return np.sum(loss)
+
+    def true_fun_maxout_loss(self, x, goal, r, coef):
+        loss = vmap(self.maxout_loss, in_axes=(0, 0, None, None))(x, goal, r, coef)
+        return np.sum(loss)
+
+    def true_fun_maxout_loss_static(self, x, goal, r, coef):
+        loss = vmap(self.maxout_loss, in_axes=(0, None, None, None))(x, goal, r, coef)
+        return np.sum(loss)
+
+    def true_fun_minmaxout_loss(self, x, goal, r, coef):
+        loss = vmap(self.minmaxout_loss, in_axes=(0, None, None, None))(x, goal, r, coef)
+        return np.sum(loss)
+
+    def fun_loss(self, goal_features, base_positions, base_angles, active_angle_loss, weight, loss_features):
+        x, y, ang, lin_v, ang_v = goal_features
+        velocities = np.array([0., ang_v, lin_v])
+        actions = np.tile(velocities, (self.horizon, 1))
+        goal_angles, goal_positions = self.fw_base_positions(actions, ang)
+        goal_positions += np.array([x, y])
+        loss = self.true_fun_maxout_loss(base_positions[:, 1], goal_positions[:, 1], weight[0, 1], weight[1, 1])
+        loss += cond(loss_features == 1,
+                    lambda _: self.true_fun_maxout_loss(np.linalg.norm(base_positions - goal_positions, axis=1), np.zeros(self.horizon), weight[0, 0], weight[1, 0]),
+                    lambda _: self.true_fun_maxout_loss(base_positions[:, 0], goal_positions[:, 0], weight[0, 0], weight[1, 0]),
+                    operand=None)  # constraint on x or just on dist
+
+        loss += cond(active_angle_loss,  # bitwise_and(lin_v == 0., ang_v == 0.)
+                     lambda _: self.true_fun_maxout_loss(base_angles, goal_angles, weight[0, 2], weight[1, 2]),
+                     lambda _: 0.,
+                     operand=None)
+        # loss += self.true_fun_maxout_loss(base_angles, goal_angles, weight[0, 2], weight[1, 2])
+        # loss = self.true_fun_l2_loss(base_positions[:, 0], goal_positions[:, 0])*weight[1, 0]
+        # loss += self.true_fun_l2_loss(base_positions[:, 1], goal_positions[:, 1])*weight[1, 1]
+        # loss += cond(bitwise_and(active_angle_loss, bitwise_and(lin_v == 0., ang_v == 0.)),
+        #              lambda _: self.true_fun_l2_loss(base_angles, goal_angles)*weight[1, 2],
+        #              lambda _: 0.,
+        #              operand=None)
+        return loss
+
+    def fun_pan_loss(self, state, actions, goal_features, weight):
+        x, y, lin_v, ang_v = goal_features
+        velocities = np.array([0., ang_v, lin_v])
+        actions_features = np.tile(velocities, (self.horizon, 1))
+        _, goal_positions = self.fw_base_positions(actions_features, 0.)
+        goal_positions += np.array([x, y])
+        pan_goal_fw = np.arctan2(goal_positions[:, 1], goal_positions[:, 0])
+        loss = self.true_fun_maxout_loss(self.fw_angles(state, actions), pan_goal_fw, weight[0], weight[1])
+        # loss = self.true_fun_l2_loss(self.fw_angles(state, actions), pan_goal_fw)*weight[1]
+        return loss
+
+    def _regularization_term(self, actions, reg_parameter):
+        if np.isscalar(reg_parameter):
+            reg_term = np.sum(reg_parameter * actions ** 2)
+        else:
+            reg_term = np.sum(reg_parameter[np.newaxis, :] * actions ** 2)
+        return reg_term
+
+    def _wall_avoidance_loss(self, positions, object_map):
+        wall_avoidance_loss = cond(np.any(object_map),
+                             lambda _: np.sum(vmap(self.false_fun_costmap, in_axes=(0, None))(positions, object_map)),
+                             lambda _: 0.,
+                             operand=None)
+        return wall_avoidance_loss
+
+
+    def _fw_base_loss(self, actions, state, cost_map, weights, reg_parameter, loss_coef, loss_rad, goto_goal, pan_goal, human_features):
+        object_map = cost_map[:, :, 0]
+        social_map = cost_map[:, :, 1]
+        base_angles, base_positions = self.fw_base_positions(actions, 0.)
+
+        # escorted human loss (x, y, ang)
+        weight = np.array([loss_rad[3:-1], loss_coef[3:-1]])
+        # rospy.logdebug('human_features weight : {}'.format(weight))
+        human_features_loss = cond(human_features[-1] > 0.,
+                                   lambda _: self.fun_loss(goal_features=human_features[:5],
+                                                           base_positions=base_positions,
+                                                           base_angles=base_angles,
+                                                           active_angle_loss=human_features[-2],
+                                                           weight=weight,
+                                                           loss_features=human_features[-3]),
+                                   lambda _: 0.,
+                                   operand=None)
+
+        # pan loss (ang)
+        weight = np.array([loss_rad[-1], loss_coef[-1]])
+        # rospy.logdebug('pan weight : {}'.format(weight))
+        pan_angle_loss = cond(pan_goal[-1] > 0.,
+                               lambda _: self.fun_pan_loss(state=state,
+                                                           actions=actions,
+                                                           goal_features=pan_goal[:-1],
+                                                           weight=weight),
+                               lambda _: 0.,
+                               operand=None)
+
+        # base goal loss (x, y, ang)
+        weight = np.array([loss_rad[:3], loss_coef[:3]])
+        # rospy.logdebug('goto weight : {}'.format(weight))
+        goto_goal_loss = cond(goto_goal[-1] > 0.,
+                                   lambda _: self.fun_loss(goal_features=goto_goal[:5],
+                                                           base_positions=base_positions,
+                                                           base_angles=base_angles,
+                                                           active_angle_loss=goto_goal[-2],
+                                                           weight=weight,
+                                                           loss_features=goto_goal[-3]),
+                                   lambda _: 0.,
+                                   operand=None)
+
+        # cost map loss
+        # cost_map = np.maximum(object_map, social_map)
+        cost_map = weights[2] * object_map + weights[3] * social_map
+        cost_map_loss = cond(np.any(cost_map),
+                             lambda _: self.false_fun_costmap(base_positions, cost_map),
+                             lambda _: 0.,
+                             operand=None)
+
+        fw_base_pose = np.stack((base_positions[:, 0], base_positions[:, 1], base_angles), axis=-1)
+        new_positions = vmap(local_to_global_jax, in_axes=(None, 0))(fw_base_pose, self.wall_avoidance_points)
+        wall_avoidance_loss = self.wall_avoidance_loss(new_positions, object_map)
+
+        reg_term = self.regularization_term(actions, reg_parameter)
+
+        # rospy.logdebug('wall_avoidance_loss : {}'.format(wall_avoidance_loss))
+        # rospy.logdebug('cost_map_loss : {}'.format(cost_map_loss))
+        # rospy.logdebug('goto_goal_loss : {}'.format(goto_goal_loss))
+        # rospy.logdebug('human_features_loss : {}'.format(human_features_loss))
+        # rospy.logdebug('regularization_term : {}'.format(reg_term))
+        # rospy.logdebug('pan_angle_loss : {}'.format(pan_angle_loss))
+
+        loss  = weights[0]*goto_goal_loss + weights[4]*cost_map_loss + wall_avoidance_loss*weights[5] + human_features_loss*weights[1] + reg_term + pan_angle_loss
+        return loss
+
+    def _fw_base_loss_flat(self, x, state, cost_map, weights, reg_parameter, loss_coef, loss_rad, goto_goal, pan_goal, human_features):
+        actions = reshape(x, self.actions_shape)
+        return self.fw_base_loss(actions, state, cost_map, weights, reg_parameter, loss_coef, loss_rad, goto_goal, pan_goal, human_features)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc_node.py b/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc_node.py
new file mode 100755
index 0000000000000000000000000000000000000000..2d8c441490521835771838d8d4dd2ea5c4f1b796
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_local_path_planner_mpc_node.py
@@ -0,0 +1,435 @@
+#!/usr/bin/env python3
+import numpy as np
+# from collections import namedtuple
+import pkg_resources
+import tf
+import sys
+import os
+import yaml
+from multiprocessing import Lock
+import numpy as np
+import rospy
+from nav_msgs.msg import OccupancyGrid
+from robot_behavior.behavior_local_path_planner_mpc import LocalPathPlannerMPC
+from robot_behavior.utils import constraint_angle, local_to_global, global_to_local
+from robot_behavior.behavior_generator_node import RobotState
+from social_mpc.config.config import ControllerConfig, RobotConfig
+import time
+from spring_msgs.msg import GoToPosition, GoToEntity, LookAtEntity, LookAtPosition
+from geometry_msgs.msg import Twist, Vector3
+
+
+class LocalPathPlannerMPCNode:
+    def __init__(self):
+        """Configure the node: read ROS params, load configs, set up ROS I/O
+        and TF, wait for sensors, then initialize state and the MPC solver."""
+        # Keep JAX from pre-allocating the bulk of accelerator memory up front.
+        os.environ['XLA_PYTHON_CLIENT_PREALLOCATE'] = 'false'
+        # The literal string 'None' is this node's convention for "no explicit
+        # config file": fall back to the packaged defaults in read_*config.
+        mpc_config_name = rospy.get_param('~mpc_config_name', 'None')
+        robot_config_name = rospy.get_param('~robot_config_name', 'None')
+        mpc_config_path = None if mpc_config_name == 'None' else mpc_config_name
+        robot_config_path = None if robot_config_name == 'None' else robot_config_name
+        self.mpc_config_path = mpc_config_path
+        self.robot_config_path = robot_config_path
+        # self.namespace_slam = rospy.get_param('~namespace_slam', '/slam')
+        # Frames and topics used for localization and the local cost maps.
+        self.map_frame = rospy.get_param('~map_frame', 'map')
+        self.robot_frame = rospy.get_param('~robot_frame', 'base_footprint')
+        self.global_occupancy_map_topic = rospy.get_param('~global_occupancy_map_topic', '/slam/global_occupancy_map')
+        self.local_occupancy_map_topic = rospy.get_param('~local_occupancy_map_topic', '/slam/local_occupancy_map')
+        self.local_social_cost_map_topic = rospy.get_param('~local_social_cost_map_topic', '/slam/local_social_cost_map')
+        self.max_humans_world = rospy.get_param('max_humans_world', 20)
+        self.max_groups_world = rospy.get_param('max_groups_world', 10)
+        self.namespace = rospy.get_param('namespace', 'behavior_generator')
+
+        self.read_robot_config(filename=self.robot_config_path)
+        self.read_config(filename=self.mpc_config_path)
+
+        # Runtime state filled in by callbacks / later init steps.
+        self.local_map_data = None
+        self.local_social_cost_map_data = None
+        self.rtabmap_ready = False
+        # NOTE(review): the rest of this class reads `self.local_static_map`,
+        # not `self.local_map_static` — confirm which name is intended.
+        self.local_map_static = None
+        self.mode = None
+        self.action_idx = None
+
+        self.init_ros_subscriber_and_publicher()
+
+        self.tf_broadcaster = tf.TransformBroadcaster()  # Publish Transform to look for human
+        self.tf_listener = tf.TransformListener()  # Listen Transform to look for human
+
+        self._check_all_sensors_ready()
+
+        self.init_var()
+        self.init_mpc()
+
+
+    def init_ros_subscriber_and_publicher(self):
+        r''' Initialize the subscribers and publishers '''
+        self._subscribers = []
+        self._publishers = []
+        self._action_server = []
+        self._timers = []
+
+        ### ROS Subscribers
+        self._subscribers.append(rospy.Subscriber(self.local_occupancy_map_topic, OccupancyGrid, callback=self._local_map_callback, queue_size=1))
+        self._subscribers.append(rospy.Subscriber(self.local_social_cost_map_topic, OccupancyGrid, callback=self._local_social_cost_map_callback, queue_size=1))
+        # self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_position', GoToPosition, callback=self._go_to_position_callback, queue_size=1))
+        # self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_body', GoToEntity, callback=self._go_to_body_callback, queue_size=1))
+        # self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_person', GoToEntity, callback=self._go_to_person_callback, queue_size=1))
+        # self._subscribers.append(rospy.Subscriber(self.namespace + '/action/go_to_group', GoToEntity, callback=self._go_to_group_callback, queue_size=1))
+
+        ### ROS Publishers
+        self._cmd_vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
+        self._publishers.append(self._cmd_vel_pub)
+
+        self._timers.append(rospy.Timer(rospy.Duration(self.controller.controller_config.h), callback=self._pub_vel_callback))
+
+        rospy.loginfo("Initializing the LocalPathPlannerMPCNode")
+
+        self._check_publishers_connection()
+
+
+    def run(self):
+        r''' Runs the ros wrapper '''
+        init_t = rospy.Time.now()
+        while not rospy.is_shutdown():
+            self.step(init_t)
+
+
+    def step(self, init_time):
+        r''' Step the ros wrapper'''
+        print('MPC')
+        self.set_mode()
+        self.set_weights()
+        self.local_map[:, :, 0] = self.local_static_map
+        self.local_map[:, :, 1] = self.local_social_cost_map
+
+        if self.update_goals_enabled:
+            self.update_goals()
+
+        if self.do_step(): 
+            self.actions = self.mpc.step(
+                state=np.array([0.]),  # not used anymore
+                actions=self.actions + self.initial_action,
+                weights=self.weights,
+                reg_parameter=self.reg_parameter,
+                loss_coef=self.loss_coef,
+                loss_rad=self.loss_rad,
+                cost_map=self.local_map[:, :, :-1],
+                goto_goal=self.goto_goal,
+                pan_goal=None,
+                human_features=None
+            )
+        else:
+            self.actions = np.copy(self.initial_action)
+        rospy.sleep(2)
+
+
+
+
+    def _pub_vel_callback(self, event):
+        if self.action_idx is not None:
+            actions = self.actions[self.action_idx]
+            self.lin_vel = 0.
+            self.ang_vel = 0.
+            if np.any(actions) or self.i == 0:
+                self.publish_vel(actions)
+            self.action_idx += 1
+            if self.action_idx > (len(self.actions) - 1):
+                self.action_idx = 0
+                self.actions = np.copy(self.initial_action)
+
+
+    def publish_vel(self, action):
+        ang, lin = action[0], action[1], action[2]
+        self.lin_vel, self.ang_vel = self._check_joint_vel_limits(lin, ang)
+        twist_msg = Twist(Vector3(self.lin_vel, 0, 0), Vector3(0, 0, self.ang_vel))
+        self._cmd_vel_pub.publish(twist_msg)
+        rospy.loginfo("vel to robot : {}, {}".format(lin, ang))
+
+    
+    def _check_joint_vel_limits(self, lin_vel, ang_vel):
+        if lin_vel > self.controller_config.u_ub[-1]:
+            rospy.logwarn("The linear velocity is greater than the maximum allowed. The maximum velocity is set.")
+            lin_vel = self.controller_config.u_ub[-1]
+        if lin_vel < self.controller_config.u_lb[-1]:
+            rospy.logwarn("The linear velocity is lower than the minimum allowed. The minimum velocity is set.")
+            lin_vel = self.controller_config.u_lb[-1]
+        if ang_vel > self.controller_config.u_ub[-2]:
+            rospy.logwarn("The angular velocity is greater than the maximum allowed. The maximum velocity is set.")
+            ang_vel = self.controller_config.u_ub[-2]
+        if ang_vel < self.controller_config.u_lb[-2]:
+            rospy.logwarn("The angular velocity is lower than the minimum allowed. The minimum velocity is set.")
+            ang_vel = self.controller_config.u_lb[-2]
+        return lin_vel, ang_vel
+
+
+    def get_robot_pose(self):
+        r""" Function that compute the position of the robot on the map """
+        # try:
+        #     map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, self.timestamp_tracking_latest)
+        # except:
+        try:
+            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
+            #rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
+        except:
+            #rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
+            return None
+        self.robot_x_position, self.robot_y_position, _ = map_2_robot[0]
+        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_robot[1][0],
+                                                                map_2_robot[1][1],
+                                                                map_2_robot[1][2],
+                                                                map_2_robot[1][3]])
+        self.robot_yaw = float(constraint_angle(yaw))
+        self.robot_position = np.array([self.robot_x_position, self.robot_y_position])
+
+
+    def get_waypoint_pose(self):
+        try:
+            map_2_waypoint = self.tf_listener.lookupTransform(self.map_frame, 'intermediate waypoint goal', rospy.Time(0))
+            #rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
+        except:
+            #rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
+            return None
+        self.x_wp, self.y_wp, _ = map_2_waypoint[0]
+        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_waypoint[1][0],
+                                                                map_2_waypoint[1][1],
+                                                                map_2_waypoint[1][2],
+                                                                map_2_waypoint[1][3]])
+        self.yaw_wp = float(constraint_angle(yaw))
+
+
+    def update_goals(self):
+        if self.mode == 0:
+            self.get_robot_pose()
+            self.get_waypoint_pose()
+            local_wpt = global_to_local(self.robot_position, np.asarray(self.x_wp, self.y_wp))
+            wpt_angle = constraint_angle(self.yaw_wp - self.robot_yaw)
+            self.goto_goal[:] = [local_wpt[0], local_wpt[1], wpt_angle, 0., 0., 0, 0, 1]
+            self.goto_goal[-2] = 1
+        else:
+            # TODO: To implement the other mode
+            pass
+    
+    
+    def set_weights(self):
+        if self.mode is not None:
+            for i, weight_name in enumerate(self.weights_description):
+                self.weights[i] = self.controller_config.__getattribute__(
+                    weight_name)
+
+            self.loss_coef = self.controller_config.loss_coef[self.mode]
+            self.loss_rad = self.controller_config.loss_rad[self.mode]
+            self.reg_parameter = self.controller_config.reg_parameter.copy()
+
+
+    def set_mode(self):
+        if self.go_to_target_flag:
+            if not self.human_target_flag:
+                self.mode = 0  # join
+            else:
+                self.mode = 1  # escort
+        elif self.human_target_flag:
+                self.mode = 2  # follow
+        else:
+            rospy.logwarn('No mode was chosen, nothing to do')
+
+
+    def do_step(self):
+        # check if a MPC step is necessary of not
+        return True
+    
+
+    def init_var(self):
+        self.x_wp = 0.
+        self.y_wp = 0.
+        self.yaw_wp = 0.
+        self.robot_x_position = 0.
+        self.robot_y_position = 0.
+        self.robot_yaw = 0.
+        self.robot_position = [0, 0]
+        self.go_to_target_flag = False
+        self.human_target_flag = False
+        self.action_idx = 0
+        self.lin_vel = 0.
+        self.ang_vel = 0.
+        self.goto_goal = np.zeros(self.controller_config.goto_target_dim)
+        self.weights_description = self.controller_config.weights_description
+        self.weights = np.zeros(len(self.weights_description))
+        self.loss_coef = np.zeros_like(self.controller_config.loss_coef[0])
+        self.loss_rad = np.zeros_like(self.controller_config.loss_rad[0])
+        self.reg_parameter = np.zeros_like(self.controller_config.reg_parameter)
+        self.local_map = np.zeros((*self.local_static_map.shape, 3))
+
+    
+    def init_mpc(self):
+        """Construct the LocalPathPlannerMPC from the loaded configs.
+
+        Builds the dimension dictionary, actuator bounds, and forward
+        horizon, then allocates the initial (zero) action sequence.
+        """
+        dim_config = {'goto_target_dim': self.controller_config.goto_target_dim,
+                      'human_target_dim': self.controller_config.human_target_dim,
+                      'pan_target_dim': self.controller_config.pan_target_dim,
+                      'cost_map_dim': self.local_static_map.shape[:2] + (2,),
+                      'weights_dim': len(self.controller_config.weights_description),
+                      'loss_coef_dim': self.controller_config.loss_coef.shape[1],
+                      'loss_rad_dim': self.controller_config.loss_rad.shape[1]}
+        # Number of discrete steps in the forward horizon (time / step size).
+        fw_horizon = int(self.controller_config.fw_time/self.controller_config.h)
+        # Head pan joint limits from the robot config.
+        joints_lb = np.array([self.robot_config.min_pan_angle])
+        joints_ub = np.array([self.robot_config.max_pan_angle])
+        world_size = self.local_map_size
+        self.mpc = LocalPathPlannerMPC(
+            h=self.controller_config.h,
+            robot_config=self.robot_config,
+            dim_config=dim_config,
+            horizon=fw_horizon,
+            u_lb=np.array(self.controller_config.u_lb),
+            u_ub=np.array(self.controller_config.u_ub),
+            joints_lb=joints_lb,
+            joints_ub=joints_ub,
+            max_acceleration=np.array(
+                self.controller_config.max_acceleration),
+            wall_avoidance_points=self.controller_config.wall_avoidance_points,
+            max_iter=self.controller_config.max_iter_optim,
+            cost_map_region=world_size
+        )
+        # Warm-start buffer: actions default to zero.
+        self.initial_action = np.zeros(self.mpc.actions_shape)
+        # self.initial_action[:, -1] = 0.1 * config.u_ub[-1]
+        self.actions = np.copy(self.initial_action)
+
+
+    def read_config(self, filename=None):
+        if filename is None:
+            filename = pkg_resources.resource_filename(
+                'social_mpc', 'config/social_mpc.yaml')
+        elif os.path.isfile(filename):
+            self.passed_config_loaded = True
+        else:
+            filename = pkg_resources.resource_filename(
+                'social_mpc', 'config/social_mpc.yaml')
+        config = yaml.load(open(filename), Loader=yaml.FullLoader)
+        self.controller_config = ControllerConfig(config)
+
+        self.goal_finder_enabled = self.controller_config.goal_finder_enabled
+        self.path_planner_enabled = self.controller_config.path_planner_enabled
+        self.update_goals_enabled = self.controller_config.update_goals_enabled
+    
+
+    def read_robot_config(self, filename=None):
+        if filename is None:
+            filename = pkg_resources.resource_filename(
+                'sim2d', 'config/robot.yaml')
+            rospy.logdebug("No filename provided for the robot configuration, basic robot config loaded")
+        elif os.path.isfile(filename):
+            rospy.logdebug("Desired robot config loaded")
+        else:
+            filename = pkg_resources.resource_filename(
+                'sim2d', 'config/robot.yaml')
+            rospy.logdebug("Desired filename for the robot configuration does not exist, basic robot config loaded")
+        config = yaml.load(open(filename), Loader=yaml.FullLoader)
+        self.robot_config = RobotConfig(config)
+
+
+    def _local_map_callback(self, data):
+        """Cache the local occupancy grid and derived geometry.
+
+        Stores the grid origin, size, resolution and the world-frame
+        extent, then converts the int8 occupancy data (0..100) into a
+        float array in [0, 1] shaped (height, width).
+        """
+        self.local_map_data = data
+        self.x_local_map = self.local_map_data.info.origin.position.x
+        self.y_local_map = self.local_map_data.info.origin.position.y
+        self.local_map_width = self.local_map_data.info.width
+        self.local_map_height = self.local_map_data.info.height
+        self.local_map_resolution = self.local_map_data.info.resolution
+        # [[x_min, x_max], [y_min, y_max]] in world coordinates.
+        self.local_map_size = [[self.x_local_map, self.x_local_map + self.local_map_width*self.local_map_resolution],[self.y_local_map, self.y_local_map + self.local_map_height*self.local_map_resolution]]
+        self.last_shape_local_map = (self.local_map_height, self.local_map_width)
+        self.local_static_map= (np.asarray(self.local_map_data.data) / 100).reshape(self.last_shape_local_map)
+
+
+    def _local_social_cost_map_callback(self, data):
+        """Cache the local social cost map and derived geometry.
+
+        Mirrors _local_map_callback for the social-cost OccupancyGrid:
+        stores origin/size/resolution/extent and normalizes the data
+        from 0..100 into a float array in [0, 1] shaped (height, width).
+        """
+        self.local_social_cost_map_data = data
+        self.x_local_social_cost_map = self.local_social_cost_map_data.info.origin.position.x
+        self.y_local_social_cost_map = self.local_social_cost_map_data.info.origin.position.y
+        self.local_social_cost_map_width = self.local_social_cost_map_data.info.width
+        self.local_social_cost_map_height = self.local_social_cost_map_data.info.height
+        self.local_social_cost_map_resolution = self.local_social_cost_map_data.info.resolution
+        # [[x_min, x_max], [y_min, y_max]] in world coordinates.
+        self.local_social_cost_map_size = [[self.x_local_social_cost_map, self.x_local_social_cost_map + self.local_social_cost_map_width*self.local_social_cost_map_resolution],[self.y_local_social_cost_map, self.y_local_social_cost_map + self.local_social_cost_map_height*self.local_social_cost_map_resolution]]
+        self.last_shape_local_social_cost_map = (self.local_social_cost_map_height, self.local_social_cost_map_width)
+        self.local_social_cost_map= (np.asarray(self.local_social_cost_map_data.data) / 100).reshape(self.last_shape_local_social_cost_map)
+
+
+    def _check_publishers_connection(self):
+        """
+        Checks that all the publishers are working
+        :return:
+        """
+        rate = rospy.Rate(10)  # 10hz
+        while self._cmd_vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():
+            rospy.logdebug("No susbribers to _cmd_vel_pub yet so we wait and try again")
+            try:
+                rate.sleep()
+            except rospy.ROSInterruptException:
+                # This is to avoid error when world is rested, time when backwards.
+                pass
+        rospy.logdebug("_cmd_vel_pub Publisher Connected")
+    
+
+    def _check_all_sensors_ready(self):
+        """Block until the pose transform and both local maps are available."""
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_rtabmap_ready()
+        self._check_local_map_ready()
+        self._check_local_social_cost_map_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_rtabmap_ready(self):
+        rospy.logdebug("Waiting for rtabmap pose to be READY...")
+        while self.rtabmap_ready is None and not rospy.is_shutdown():
+            try:
+                self.tf_listener.waitForTransform(self.map_frame, self.robot_frame, rospy.Time(0), rospy.Duration(5.0))
+                self.rtabmap_ready = True
+                rospy.logdebug("Current rtabmap pose READY=>")
+
+            except:
+                rospy.logerr("Current rtabmap pose not ready yet, retrying for getting rtabmap pose")
+        return self.rtabmap_ready
+
+
+    def _check_local_map_ready(self):
+        self.local_map_data = None
+        rospy.logdebug("Waiting for {} to be READY...".format(self.local_occupancy_map_topic))
+        while self.local_map_data is None and not rospy.is_shutdown():
+            try:
+                self.local_map_data = rospy.wait_for_message(self.local_occupancy_map_topic, OccupancyGrid, timeout=5.0)
+                rospy.logdebug("Current {} READY=>".format(self.local_occupancy_map_topic))
+
+            except:
+                rospy.logerr("Current {} not ready yet, retrying for getting local map".format(self.local_occupancy_map_topic))
+        return self.local_map_data
+    
+
+    def _check_local_social_cost_map_ready(self):
+        self.local_social_cost_map_data = None
+        rospy.logdebug("Waiting for {} to be READY...".format(self.local_social_cost_map_topic))
+        while self.local_social_cost_map_data is None and not rospy.is_shutdown():
+            try:
+                self.local_social_cost_map_data = rospy.wait_for_message(self.local_social_cost_map_topic, OccupancyGrid, timeout=5.0)
+                rospy.logdebug("Current {} READY=>".format(self.local_social_cost_map_topic))
+
+            except:
+                rospy.logerr("Current {} not ready yet, retrying for getting local map".format(self.local_social_cost_map_topic))
+        return self.local_social_cost_map_data
+
+
+    def shutdown(self):
+        """rospy shutdown hook: release all ROS resources via close()."""
+        rospy.loginfo("Stopping the LocalPathPlannerMPCNode")
+        self.close()
+        rospy.loginfo("Killing the LocalPathPlannerMPCNode node")
+
+
+    def close(self):
+        """Unregister subscribers/publishers, stop timers, and shut down
+        the action server.
+        """
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+
+        if self._publishers:
+            for publisher in self._publishers:
+                # Some entries are dicts mapping names to publishers.
+                if isinstance(publisher, dict):
+                    for pub in publisher.values():
+                        pub.unregister()
+                else:
+                    publisher.unregister()
+        if self._timers:
+            for timer in self._timers:
+                timer.shutdown()
+
+        if self._action_server:
+            self._action_server.shutdown()
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..601bdec9c7dd24eab4e3d1ef64f6687fd2f41dd8
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_client.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+import rospy
+import actionlib
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityGoal
+
+
+class LookAtBodyActionClient:
+    def __init__(self, name):
+        self._action_name = name
+        self.body_id = rospy.get_param('~body_id', 'b-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
+
+        rospy.on_shutdown(self.shutdown)
+
+        # Creates the SimpleActionClient
+        self._a_client = actionlib.SimpleActionClient(self._action_name, LookAtEntityAction)
+
+        # Waits until the action server has started up and started listening for goals
+        self._a_client.wait_for_server()
+        self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def call_server(self):
+        goal = LookAtEntityGoal()
+        goal.id = self.body_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
+
+        # Sends the goal to the action server
+        self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
+
+        # Waits for the server to finish performing the action
+        self._a_client.wait_for_result()
+
+        # Prints out the result of executing the action
+        result = self._a_client.get_result()
+        return result
+
+    def feedback_cb(self, msg):
+        rospy.loginfo('Feedback received: {}'.format(msg))
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self._a_client.cancel_all_goals()
+        rospy.loginfo("Killing the %s node" % self._action_name)
diff --git a/src/robot_behavior/src/robot_behavior/lookat_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_server.py
similarity index 68%
rename from src/robot_behavior/src/robot_behavior/lookat_action_server.py
rename to src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_server.py
index ab801b208c64fa64de6302c35799bd4b9708c082..9ebfe484341554383ec740d01c07e001b4bbe574 100755
--- a/src/robot_behavior/src/robot_behavior/lookat_action_server.py
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_body_action_server.py
@@ -2,18 +2,15 @@
 import rospy
 import numpy as np
 import actionlib
-from spring_msgs.msg import LookAtAction, LookAtFeedback, LookAtResult, LookAt
-import tf
-
-from robot_behavior.utils import constraint_angle
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityFeedback, LookAtEntityResult, LookAtEntity
 from spring_msgs.msg import ControllerStatus
 
 
-class LookAtActionServer:
-    def __init__(self, name, hz=20, success_ang_thresh=0.3):
+class LookAtBodyActionServer:
+    def __init__(self, name):
         self._action_name = name
-        self._hz = hz
-        self._success_ang_thresh = success_ang_thresh
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
 
         self.controller_status_data = None
 
@@ -21,20 +18,19 @@ class LookAtActionServer:
         self.init_time = None
         self.head_joint_errors = [np.pi, np.pi]
 
-        # create messages that are used to publish feedback/result
-        self._feedback = LookAtFeedback()
-        self._result = LookAtResult()
-
         self._check_all_sensors_ready()
 
-        rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1)
+        self._publishers = []
+        self._subscribers = []
 
-        self._look_at_pub  = rospy.Publisher('/look_at', LookAt, queue_size=10)
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
 
-        rospy.on_shutdown(self.shutdown)
+        self._look_at_body_pub  = rospy.Publisher('/action/look_at_body', LookAtEntity, queue_size=10)
+        self._publishers.append(self._look_at_body_pub)
 
+        rospy.on_shutdown(self.shutdown)
 
-        self._a_server = actionlib.SimpleActionServer(self._action_name, LookAtAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server = actionlib.SimpleActionServer(self._action_name, LookAtEntityAction, execute_cb=self.execute_cb, auto_start=False)
         self._a_server.start()
 
         rospy.loginfo("{} Initialization Ended".format(self._action_name))
@@ -42,16 +38,16 @@ class LookAtActionServer:
 
     def execute_cb(self, goal):
         # helper variables
-        rate = rospy.Rate(self._hz)
+        rate = rospy.Rate(self.hz)
         self.init_time = rospy.Time.now()
         self.current_time = (rospy.Time.now() - self.init_time).to_sec()
 
-        self.publish_lookat(goal)
-        self.head_joint_errors = []
-
+        self.publish_look_at_body(goal)
+        self.head_joint_errors = [np.pi, np.pi]
+        rospy.sleep(0.5)
 
         # publish info to the console for the user
-        rospy.logdebug('%s: Look at human %i' % (self._action_name, goal.pan_target_id))
+        rospy.logdebug('{}: Look at body_id {}'.format(self._action_name, goal.id))
         rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
         rospy.logdebug('Goal message : {}'.format(goal))
 
@@ -63,11 +59,15 @@ class LookAtActionServer:
 
             self.current_time = (rospy.Time.now() - self.init_time).to_sec()
 
+            # create messages that are used to publish feedback/result
+            self._feedback = LookAtEntityFeedback()
+            self._result = LookAtEntityResult()
+
             # publish the feedback
             self._publish_feedback(goal)
             rate.sleep()
 
-            if len(self.head_joint_errors) > 0 and max(self.head_joint_errors) < self._success_ang_thresh:
+            if len(self.head_joint_errors) > 0 and max(self.head_joint_errors) < self.success_ang_thresh:
                 rospy.loginfo('%s: Succeeded' % self._action_name)
                 self._a_server.set_succeeded(self._result)
                 break
@@ -87,14 +87,16 @@ class LookAtActionServer:
         self._result_msg(goal)
 
         # go to idle state
-        goal.pan_target_id = -1
-        self.publish_lookat(goal)
+        goal.id = 'b-1'
+        goal.continuous = False
+        self.publish_look_at_body(goal)
 
 
-    def publish_lookat(self, goal):
-        lookat_msg = LookAt()
-        lookat_msg.human_id = goal.pan_target_id
-        self._look_at_pub.publish(lookat_msg)
+    def publish_look_at_body(self, goal):
+        look_at_body_msg = LookAtEntity()
+        look_at_body_msg.id = goal.id
+        look_at_body_msg.continuous = goal.continuous
+        self._look_at_body_pub.publish(look_at_body_msg)
 
 
     def _result_msg(self, goal):
@@ -142,5 +144,14 @@ class LookAtActionServer:
 
     def shutdown(self):
         rospy.loginfo("Stopping the %s " % self._action_name)
-        # send empty goal message
+        self.close()
         rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..1a710c28b7d796b4ab8f0994e56cfefc8d96cf61
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_client.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+import rospy
+import actionlib
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityGoal
+
+
+class LookAtGroupActionClient:
+    def __init__(self, name):
+        self._action_name = name
+        self.group_id = rospy.get_param('~group_id', 'grp-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
+
+        rospy.on_shutdown(self.shutdown)
+
+        # Creates the SimpleActionClient
+        self._a_client = actionlib.SimpleActionClient(self._action_name, LookAtEntityAction)
+
+        # Waits until the action server has started up and started listening for goals
+        self._a_client.wait_for_server()
+        self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def call_server(self):
+        goal = LookAtEntityGoal()
+        goal.id = self.group_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
+
+        # Sends the goal to the action server
+        self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
+
+        # Waits for the server to finish performing the action
+        self._a_client.wait_for_result()
+
+        # Prints out the result of executing the action
+        result = self._a_client.get_result()
+        return result
+
+    def feedback_cb(self, msg):
+        rospy.loginfo('Feedback received: {}'.format(msg))
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self._a_client.cancel_all_goals()
+        rospy.loginfo("Killing the %s node" % self._action_name)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..351442fc5f7b2bd2ff51a4ce5be73138721d9690
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_group_action_server.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python3
+import rospy
+import numpy as np
+import actionlib
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityFeedback, LookAtEntityResult, LookAtEntity
+from spring_msgs.msg import ControllerStatus
+
+
+class LookAtGroupActionServer:
+    def __init__(self, name):
+        self._action_name = name
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
+
+        self.controller_status_data = None
+
+        self.current_time = None
+        self.init_time = None
+        self.head_joint_errors = [np.pi, np.pi]
+
+        self._check_all_sensors_ready()
+
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._look_at_group_pub  = rospy.Publisher('/action/look_at_group', LookAtEntity, queue_size=10)
+        self._publishers.append(self._look_at_group_pub)
+
+        rospy.on_shutdown(self.shutdown)
+
+        self._a_server = actionlib.SimpleActionServer(self._action_name, LookAtEntityAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server.start()
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def execute_cb(self, goal):
+        # helper variables
+        rate = rospy.Rate(self.hz)
+        self.init_time = rospy.Time.now()
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+        self.publish_look_at_group(goal)
+        self.head_joint_errors = [np.pi, np.pi]
+        rospy.sleep(0.5)
+
+        # publish info to the console for the user
+        rospy.logdebug('{}: Look at group_id {}'.format(self._action_name, goal.id))
+        rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
+        rospy.logdebug('Goal message : {}'.format(goal))
+
+        # start executing the action
+        while not rospy.is_shutdown():
+            if self._a_server.is_preempt_requested():
+                self._a_server.set_preempted()
+                break
+
+            self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+            # create messages that are used to publish feedback/result
+            self._feedback = LookAtEntityFeedback()
+            self._result = LookAtEntityResult()
+
+            # publish the feedback
+            self._publish_feedback(goal)
+            rate.sleep()
+
+            if len(self.head_joint_errors) > 0 and max(self.head_joint_errors) < self.success_ang_thresh:
+                rospy.loginfo('%s: Succeeded' % self._action_name)
+                self._a_server.set_succeeded(self._result)
+                break
+
+            if self.controller_status_data.status == ControllerStatus.IS_FAILURE:
+                text = "Controller failure"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result, text=text)
+                break
+
+            if self.current_time > goal.timeout :
+                text = "Timeout"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result)
+                break
+
+        self._result_msg(goal)
+
+        # go to idle state
+        goal.id = 'grp-1'
+        goal.continuous = False
+        self.publish_look_at_group(goal)
+
+
+    def publish_look_at_group(self, goal):
+        look_at_group_msg = LookAtEntity()
+        look_at_group_msg.id = goal.id
+        look_at_group_msg.continuous = goal.continuous
+        self._look_at_group_pub.publish(look_at_group_msg)
+
+
+    def _result_msg(self, goal):
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+        self._create_action_msg(goal, self._result)
+        self._result.final_time = self.current_time
+
+
+    def _publish_feedback(self, goal):
+        self._create_action_msg(goal, self._feedback)
+        self._feedback.current_time = self.current_time
+        self._a_server.publish_feedback(self._feedback)
+
+
+    def _create_action_msg(self, goal, action_msg):
+        if self.controller_status_data.head_joint_errors:
+            self.head_joint_errors = self.controller_status_data.head_joint_errors
+            action_msg.head_joint_errors = self.head_joint_errors
+        else:
+            action_msg.head_joint_errors = []
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_controller_status_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_controller_status_ready(self):
+        self.controller_status_data = None
+        rospy.logdebug("Waiting for /controller_status to be READY...")
+        while self.controller_status_data is None and not rospy.is_shutdown():
+            try:
+                self.controller_status_data = rospy.wait_for_message("/controller_status", ControllerStatus, timeout=5.0)
+                rospy.logdebug("Current /controller_status READY=>")
+
+            except:
+                rospy.logerr("Current /controller_status not ready yet, retrying for getting controller status")
+        return self.controller_status_data
+
+
+    def _controller_status_callback(self, data):
+        self.controller_status_data = data
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self.close()
+        rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..705dec659aa0dabf22c72c5f54309410f08bb534
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_client.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+import rospy
+import actionlib
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityGoal
+
+
+class LookAtPersonActionClient:
+    def __init__(self, name):
+        self._action_name = name
+        self.person_id = rospy.get_param('~person_id', 'p-1')
+        self.timeout = rospy.get_param('~timeout', 120)
+        self.continuous = rospy.get_param('~continuous', False)
+
+        rospy.on_shutdown(self.shutdown)
+
+        # Creates the SimpleActionClient
+        self._a_client = actionlib.SimpleActionClient(self._action_name, LookAtEntityAction)
+
+        # Waits until the action server has started up and started listening for goals
+        self._a_client.wait_for_server()
+        self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def call_server(self):
+        goal = LookAtEntityGoal()
+        goal.id = self.person_id
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
+
+        # Sends the goal to the action server
+        self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
+
+        # Waits for the server to finish performing the action
+        self._a_client.wait_for_result()
+
+        # Prints out the result of executing the action
+        result = self._a_client.get_result()
+        return result
+
+    def feedback_cb(self, msg):
+        rospy.loginfo('Feedback received: {}'.format(msg))
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self._a_client.cancel_all_goals()
+        rospy.loginfo("Killing the %s node" % self._action_name)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..d7745cdc982c8e1e61663edcaf9ee335f37ccf14
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_person_action_server.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+import rospy
+import numpy as np
+import actionlib
+from spring_msgs.msg import LookAtEntityAction, LookAtEntityFeedback, LookAtEntityResult, LookAtEntity
+from spring_msgs.msg import ControllerStatus
+
+
+class LookAtPersonActionServer:
+    def __init__(self, name,):
+        self._action_name = name
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
+
+        self.controller_status_data = None
+
+        self.current_time = None
+        self.init_time = None
+        self.head_joint_errors = [np.pi, np.pi]
+
+        self._check_all_sensors_ready()
+
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._look_at_person_pub  = rospy.Publisher('/action/look_at_person', LookAtEntity, queue_size=10)
+        self._publishers.append(self._look_at_person_pub)
+        
+
+        rospy.on_shutdown(self.shutdown)
+
+        self._a_server = actionlib.SimpleActionServer(self._action_name, LookAtEntityAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server.start()
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def execute_cb(self, goal):
+        # helper variables
+        rate = rospy.Rate(self.hz)
+        self.init_time = rospy.Time.now()
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+        self.publish_look_at_person(goal)
+        self.head_joint_errors = [np.pi, np.pi]
+        rospy.sleep(0.5)
+
+        # publish info to the console for the user
+        rospy.logdebug('{}: Look at person_id {}'.format(self._action_name, goal.id))
+        rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
+        rospy.logdebug('Goal message : {}'.format(goal))
+
+        # start executing the action
+        while not rospy.is_shutdown():
+            if self._a_server.is_preempt_requested():
+                self._a_server.set_preempted()
+                break
+
+            self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+            # create messages that are used to publish feedback/result
+            self._feedback = LookAtEntityFeedback()
+            self._result = LookAtEntityResult()
+
+            # publish the feedback
+            self._publish_feedback(goal)
+            rate.sleep()
+
+            if len(self.head_joint_errors) > 0 and max(self.head_joint_errors) < self.success_ang_thresh:
+                rospy.loginfo('%s: Succeeded' % self._action_name)
+                self._a_server.set_succeeded(self._result)
+                break
+
+            if self.controller_status_data.status == ControllerStatus.IS_FAILURE:
+                text = "Controller failure"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result, text=text)
+                break
+
+            if self.current_time > goal.timeout :
+                text = "Timeout"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result)
+                break
+
+        self._result_msg(goal)
+
+        # go to idle state
+        goal.id = 'p-1'
+        goal.continuous = False
+        self.publish_look_at_person(goal)
+
+
+    def publish_look_at_person(self, goal):
+        look_at_person_msg = LookAtEntity()
+        look_at_person_msg.id = goal.id
+        look_at_person_msg.continuous = goal.continuous
+        self._look_at_person_pub.publish(look_at_person_msg)
+
+
+    def _result_msg(self, goal):
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+        self._create_action_msg(goal, self._result)
+        self._result.final_time = self.current_time
+
+
+    def _publish_feedback(self, goal):
+        self._create_action_msg(goal, self._feedback)
+        self._feedback.current_time = self.current_time
+        self._a_server.publish_feedback(self._feedback)
+
+
+    def _create_action_msg(self, goal, action_msg):
+        if self.controller_status_data.head_joint_errors:
+            self.head_joint_errors = self.controller_status_data.head_joint_errors
+            action_msg.head_joint_errors = self.head_joint_errors
+        else:
+            action_msg.head_joint_errors = []
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_controller_status_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_controller_status_ready(self):
+        self.controller_status_data = None
+        rospy.logdebug("Waiting for /controller_status to be READY...")
+        while self.controller_status_data is None and not rospy.is_shutdown():
+            try:
+                self.controller_status_data = rospy.wait_for_message("/controller_status", ControllerStatus, timeout=5.0)
+                rospy.logdebug("Current /controller_status READY=>")
+
+            except:
+                rospy.logerr("Current /controller_status not ready yet, retrying for getting controller status")
+        return self.controller_status_data
+
+
+    def _controller_status_callback(self, data):
+        self.controller_status_data = data
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self.close()
+        rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_client.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..d2a233e06404a5bad5314a9a9503f0e0cd14a3c6
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_client.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+import rospy
+import actionlib
+from spring_msgs.msg import LookAtPositionAction, LookAtPositionGoal
+
+class LookAtPositionActionClient:
+    def __init__(self, name):
+        self._action_name = name
+        self.look_at_position = rospy.get_param('~look_at_position', '[0., 0., 0., 0.]')
+        self.look_at_position = self.look_at_position.strip("[]").split(", ")
+        self.look_at_position = list(map(float, self.look_at_position))
+
+        self.timeout = rospy.get_param("~timeout")
+        self.continuous = rospy.get_param('~continuous', False)
+
+        rospy.on_shutdown(self.shutdown)
+
+        # Creates the SimpleActionClient
+        self._a_client = actionlib.SimpleActionClient(self._action_name, LookAtPositionAction)
+
+        # Waits until the action server has started up and started listening for goals
+        self._a_client.wait_for_server()
+        self._a_client.cancel_all_goals()
+        rospy.sleep(0.1)
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def call_server(self):
+        goal = LookAtPositionGoal()
+        goal.look_at_position = self.look_at_position
+        goal.timeout = self.timeout
+        goal.continuous = self.continuous
+
+        # Sends the goal to the action server
+        self._a_client.send_goal(goal, feedback_cb=self.feedback_cb)
+
+        # Waits for the server to finish performing the action
+        self._a_client.wait_for_result()
+
+        # Prints out the result of executing the action
+        result = self._a_client.get_result()
+        return result
+
+    def feedback_cb(self, msg):
+        rospy.loginfo('Feedback received: {}'.format(msg))
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self._a_client.cancel_all_goals()
+        rospy.loginfo("Killing the %s node" % self._action_name)
diff --git a/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_server.py b/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_server.py
new file mode 100755
index 0000000000000000000000000000000000000000..4f8a2f572e0054bfa9423724e956e5f07fec26bc
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/behavior_look_at_position_action_server.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+import rospy
+import numpy as np
+import actionlib
+from spring_msgs.msg import LookAtPositionAction, LookAtPositionFeedback, LookAtPositionResult, LookAtPosition
+from spring_msgs.msg import ControllerStatus
+
+
+class LookAtPositionActionServer:
+    def __init__(self, name):
+        self._action_name = name
+        self.hz = rospy.get_param('~hz', 20)
+        self.success_ang_thresh = rospy.get_param('~success_ang_thresh', 0.2)
+
+        self.controller_status_data = None
+
+        self.current_time = None
+        self.init_time = None
+        self.head_joint_errors = [np.pi, np.pi]
+
+        self._check_all_sensors_ready()
+
+        self._publishers = []
+        self._subscribers = []
+
+        self._subscribers.append(rospy.Subscriber('/controller_status', ControllerStatus, callback=self._controller_status_callback, queue_size=1))
+
+        self._look_at_position_pub  = rospy.Publisher('/action/look_at_position', LookAtPosition, queue_size=10)
+        self._publishers.append(self._look_at_position_pub)
+
+        rospy.on_shutdown(self.shutdown)
+
+        self._a_server = actionlib.SimpleActionServer(self._action_name, LookAtPositionAction, execute_cb=self.execute_cb, auto_start=False)
+        self._a_server.start()
+
+        rospy.loginfo("{} Initialization Ended".format(self._action_name))
+
+
+    def execute_cb(self, goal):
+        # helper variables
+        rate = rospy.Rate(self.hz)
+        self.init_time = rospy.Time.now()
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+        self.publish_look_at_position(goal)
+        self.head_joint_errors = [np.pi, np.pi]
+        rospy.sleep(0.5)
+
+        # publish info to the console for the user
+        rospy.logdebug('{}: Look at position {}'.format(self._action_name, goal.look_at_position))
+        rospy.logdebug('The target must be reached in %i seconds' % (goal.timeout))
+        rospy.logdebug('Goal message : {}'.format(goal))
+
+        # start executing the action
+        while not rospy.is_shutdown():
+            if self._a_server.is_preempt_requested():
+                self._a_server.set_preempted()
+                break
+
+            self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+
+            # create messages that are used to publish feedback/result
+            self._feedback = LookAtPositionFeedback()
+            self._result = LookAtPositionResult()
+
+            # publish the feedback
+            self._publish_feedback(goal)
+            rate.sleep()
+
+            if len(self.head_joint_errors) > 0 and max(self.head_joint_errors) < self.success_ang_thresh:
+                rospy.loginfo('%s: Succeeded' % self._action_name)
+                self._a_server.set_succeeded(self._result)
+                break
+
+            if self.controller_status_data.status == ControllerStatus.IS_FAILURE:
+                text = "Controller failure"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result, text=text)
+                break
+
+            if self.current_time > goal.timeout :
+                text = "Timeout"
+                rospy.loginfo('{}: Aborted. {}'.format(self._action_name, text))
+                self._a_server.set_aborted(self._result)
+                break
+
+        self._result_msg(goal)
+
+        # go to idle state
+        goal.look_at_position = np.zeros_like(goal.look_at_position)
+        goal.look_at_position[-1] = -1
+        goal.continuous = False
+        self.publish_look_at_position(goal)
+
+
+    def publish_look_at_position(self, goal):
+        look_at_position_msg = LookAtPosition()
+        look_at_position_msg.look_at_position = goal.look_at_position
+        look_at_position_msg.continuous = goal.continuous
+        self._look_at_position_pub.publish(look_at_position_msg)
+
+
+    def _result_msg(self, goal):
+        self.current_time = (rospy.Time.now() - self.init_time).to_sec()
+        self._create_action_msg(goal, self._result)
+        self._result.final_time = self.current_time
+
+
+    def _publish_feedback(self, goal):
+        self._create_action_msg(goal, self._feedback)
+        self._feedback.current_time = self.current_time
+        self._a_server.publish_feedback(self._feedback)
+
+
+    def _create_action_msg(self, goal, action_msg):
+        if self.controller_status_data.head_joint_errors:
+            self.head_joint_errors = self.controller_status_data.head_joint_errors
+            action_msg.head_joint_errors = self.head_joint_errors
+        else:
+            action_msg.head_joint_errors = []
+
+
+    def _check_all_sensors_ready(self):
+        rospy.logdebug("START ALL SENSORS READY")
+        self._check_controller_status_ready()
+        rospy.logdebug("ALL SENSORS READY")
+
+
+    def _check_controller_status_ready(self):
+        self.controller_status_data = None
+        rospy.logdebug("Waiting for /controller_status to be READY...")
+        while self.controller_status_data is None and not rospy.is_shutdown():
+            try:
+                self.controller_status_data = rospy.wait_for_message("/controller_status", ControllerStatus, timeout=5.0)
+                rospy.logdebug("Current /controller_status READY=>")
+
+            except:
+                rospy.logerr("Current /controller_status not ready yet, retrying for getting controller status")
+        return self.controller_status_data
+
+
+    def _controller_status_callback(self, data):
+        self.controller_status_data = data
+
+
+    def shutdown(self):
+        rospy.loginfo("Stopping the %s " % self._action_name)
+        self.close()
+        rospy.loginfo("Killing the %s node" % self._action_name)
+
+
+    def close(self):
+        if self._subscribers:
+            for subscriber in self._subscribers:
+                subscriber.unregister()
+        if self._publishers:
+            for publisher in self._publishers:
+                publisher.unregister()
diff --git a/src/robot_behavior/src/robot_behavior/controller_node.py b/src/robot_behavior/src/robot_behavior/controller_node.py
deleted file mode 100755
index f4840c0b70b9b1a97116076f034fb4472f832d3c..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/controller_node.py
+++ /dev/null
@@ -1,817 +0,0 @@
-#!/usr/bin/env python3
-import numpy as np
-from collections import namedtuple
-import tf
-import os
-import yaml
-import cv2
-from multiprocessing import Lock
-import message_filters
-
-from social_mpc.controller import RobotController
-from social_mpc.plots import MPCPlotter
-from social_mpc.config.config import RobotConfig
-
-import rospy, rostopic
-from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3, PoseArray, Transform
-from sensor_msgs.msg import JointState
-from std_msgs.msg import Float64
-from nav_msgs.msg import Odometry, OccupancyGrid, MapMetaData
-from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
-
-from robot_behavior.utils import constraint_angle, pose_stamped_2_mat
-from spring_msgs.msg import TrackedPersons2d, GoTowards, LookAt, Navigate, ControllerStatus
-
-
-class Controller:
-    def __init__(self, mpc_config_path=None,
-                       robot_config_path=None,
-                       plot_render=False,
-                       map_frame="map",
-                       robot_frame="base_footprint",
-                       max_humans_world=20):
-        self.mpc_config_path = mpc_config_path
-        self.robot_config_path = robot_config_path
-        self.plot_render = plot_render
-        self.max_humans_world = max_humans_world
-
-        self.map_frame = map_frame
-        self.robot_frame = robot_frame
-
-        self.controller = RobotController(config_filename=self.mpc_config_path)
-
-        self.odom_data = None
-        self.joint_states_data = None
-        self.local_map_data = None
-        self.global_map_data = None
-        self.tracked_persons_2d_data = None
-        self.look_at_data = None
-        self.go_towards_data = None
-        self.navigate_data = None
-
-        # To get indexes of the joints
-        self.get_indexes = False
-        self.pan_ind = None
-        self.tilt_ind = None
-        self.current_pan = None
-        self.current_tilt = None
-        self.humans_id_default = -1
-        self.x_pan_human_target = 0.
-        self.y_pan_human_target = 0.
-
-        self.robot_config = None
-
-        self.action_idx = None
-        self.actions = None
-
-        self.last_shape_global_map = None
-        self.x_margin = 10
-        self.y_margin = 10
-        self.last_x_global_map = None
-        self.last_y_global_map = None
-        self.global_map_padded = None
-
-        self.timestamp_tracking= None
-        self.timestamp_tracking_latest = None
-        self.current_time_local_map = None
-        self.get_first_callback = False
-        self.lock = Lock()
-        self.lock_controller = Lock()
-
-        if self.plot_render:
-            self.mpc_plotter = None
-
-        self.read_robot_config(self.robot_config_path)
-        self.init_state()
-
-        self.init()
-
-
-    def init(self):
-        self._check_all_sensors_ready()
-
-        # Start all the ROS related Subscribers and publishers
-        # odom_sub = message_filters.Subscriber('/odom', Odometry)
-        rospy.Subscriber('/joint_states', JointState, callback=self._joint_states_callback, queue_size=1)
-        # tracked_persons_sub = message_filters.Subscriber('/tracked_persons_2d', TrackedPersons2d)
-        rospy.Subscriber('/tracked_persons_2d', TrackedPersons2d, callback=self._tracked_persons_2d_callback, queue_size=1)
-        rospy.Subscriber('/local_map', OccupancyGrid, callback=self._local_map_callback, queue_size=1)
-        rospy.Subscriber('/global_map', OccupancyGrid, callback=self._global_map_callback, queue_size=1)
-
-        # ts = message_filters.ApproximateTimeSynchronizer(fs=[odom_sub, joint_states_sub, local_map_sub], queue_size=10, slop=0.50, allow_headerless=False)
-        # ts.registerCallback(self._ts_callback)
-
-        rospy.Subscriber('/look_at', LookAt, callback=self._look_at_callback, queue_size=1)
-        rospy.Subscriber('/go_towards', GoTowards, callback=self._go_towards_callback, queue_size=1)
-        rospy.Subscriber('/navigate', Navigate, callback=self._navigate_callback, queue_size=1)
-
-        # rospy.Timer(rospy.Duration(1), callback=self._timer_callback)
-        rospy.Timer(rospy.Duration(self.controller.controller_config.h), callback=self._pub_vel_callback)
-
-        self._cmd_vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
-        self._pan_vel_pub = rospy.Publisher('/head_controller/command', JointTrajectory, queue_size=10)
-        self._local_cost_map_pub = rospy.Publisher('/local_cost_map', OccupancyGrid, queue_size=10)
-        self._controller_status_pub = rospy.Publisher('/controller_status', ControllerStatus, queue_size=10)
-
-        self.tf_broadcaster = tf.TransformBroadcaster()
-        self.tf_listener = tf.TransformListener()
-
-        rospy.loginfo("Initializing the Controller")
-
-        # Register handler to be called when rospy process begins shutdown
-        rospy.on_shutdown(self.shutdown)
-
-        self._check_publishers_connection()
-
-        self.i = 0
-        # while self.get_first_callback == False:
-            # rospy.sleep(0.2)
-
-        self.recreate_state()
-        self.controller.controller_config.goto_target_id = -1
-        self.controller.controller_config.human_target_id = -1
-        self.controller.controller_config.pan_target_id = -1
-        self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
-        self.x_human_features, self.y_human_features, self.th_human_features = [0., 0., 0.]
-        self.x_final = 0.
-        self.y_final = 0.
-        self.th_final = 0.
-        self.x_wp = 0.
-        self.y_wp = 0.
-        self.th_wp = 0.
-        self.controller.step(self.state)
-
-        self.action_idx = 0
-        self.actions = np.copy(self.controller.initial_action)
-
-        if not self.controller.is_init:
-            rospy.signal_shutdown("Controller shutting down.")
-
-        if self.plot_render:
-            self.mpc_plotter = MPCPlotter(direction_norm=0.5,
-                                          local_map_size=self.controller.mpc.cost_map_region,
-                                          global_map_size=self.state.config.global_map_size,
-                                          local_map_scale=self.state.config.local_map_scale,
-                                          global_map_scale=self.state.config.global_map_scale,
-                                          wall_avoidance_points=self.controller.controller_config.wall_avoidance_points,
-                                          max_step=1000)
-
-        rospy.loginfo("Arguements passed : ")
-        rospy.loginfo("Config path : {}".format(self.mpc_config_path))
-
-        rospy.loginfo("Controller Initialization Ended")
-
-
-    def read_robot_config(self, filename=None):
-        if filename is None:
-            filename = pkg_resources.resource_filename(
-                'sim2d', 'config/robot.yaml')
-            rospy.logdebug("No filename provided for the robot configuration, basic robot config loaded")
-        elif os.path.isfile(filename):
-            rospy.logdebug("Desired robot config loaded")
-        else:
-            filename = pkg_resources.resource_filename(
-                'sim2d', 'config/robot.yaml')
-            rospy.logdebug("Desired filename for the robot configuration does not exist, basic robot config loaded")
-        config = yaml.load(open(filename), Loader=yaml.FullLoader)
-        self.robot_config = RobotConfig(config)
-
-
-    def run(self):
-        init_t = rospy.Time.now()
-        while not rospy.is_shutdown():
-            self.step(init_t)
-
-    def step(self, init_time):
-        loop_t = rospy.Time.now()
-        self.publish_local_cost_map()
-        with self.lock:
-            self.recreate_state()
-        self.controller.step(self.state)
-        self.publish_controller_status()
-
-
-        time_now = rospy.Time.now()
-        rospy.logdebug('Controller time : {}, Controller loop time : {}'.format((time_now - init_time).to_sec(), (time_now - loop_t).to_sec()))
-
-        plot_time = rospy.Time.now()
-        if self.plot_render:
-            human_features = self.controller.human_features
-            with self.controller.lock:
-                target = None
-                if self.controller.shared_target[0]:
-                    target = np.zeros(3)
-                    target[0] = self.controller.shared_target[2]
-                    target[1] = self.controller.shared_target[3]
-                    target[2] = self.controller.shared_target[4]
-                waypoint = None
-                if self.controller.shared_waypoint[0]:
-                    waypoint = np.zeros(3)
-                    waypoint[0] = self.controller.shared_waypoint[1]
-                    waypoint[1] = self.controller.shared_waypoint[2]
-                    waypoint[2] = self.controller.shared_waypoint[3]
-                robot_pose = np.zeros(3)
-                robot_pose[0] = self.controller.shared_robot_pose[0]
-                robot_pose[1] = self.controller.shared_robot_pose[1]
-                robot_pose[2] = self.controller.shared_robot_pose[2]
-
-            self.mpc_plotter.update(mpc_fw_loss=self.controller.mpc.fw_loss,
-                               joint_angles=self.state.joint_angles,
-                               local_map=self.controller.local_map,
-                               global_map=self.state.global_map,
-                               base_goal=self.controller.goto_goal,
-                               human_features=human_features,
-                               pan_goal=self.controller.pan_goal,
-                               tangent_traj_vec=self.controller.tangent_traj_vec,
-                               target=target,
-                               waypoint=waypoint,
-                               robot_pose=robot_pose)
-        time_now = rospy.Time.now()
-        rospy.logdebug('Plot time : {}'.format((time_now - plot_time).to_sec()))
-        rospy.logdebug('Step time : {}'.format((time_now - loop_t).to_sec()))
-
-        time_since_callback = (time_now - self.timestamp_tracking).to_sec()
-        rospy.logdebug('time_since_callback time : {}'.format(time_since_callback))
-        sleep_time = self.controller.controller_config.h - time_since_callback
-        rospy.logdebug('sleep_time time : {}'.format(sleep_time))
-        if sleep_time > 0:
-            rospy.sleep(sleep_time)
-        self.i += 1
-
-
-    def init_state(self):
-        self.state = RobotState()
-        self.list_attr = ["robot_pose",
-                          "robot_velocity",
-                          "joint_angles",
-                          "joint_velocities",
-                          "local_map",
-                          "global_map",
-                          "humans_pose",
-                          "humans_velocity",
-                          "humans_visible",
-                          "humans_id",
-                          "config",
-                          "robot_config"]
-        for attr in self.list_attr:
-            self.state.__setattr__(attr, None)
-
-        self.Config = namedtuple('Config', ['global_map_size',
-                                            'global_map_scale',
-                                            'global_map_width',
-                                            'global_map_height',
-                                            'local_map_size',
-                                            'local_map_scale',
-                                            'local_map_width',
-                                            'local_map_height'])
-
-        self.RobotConfig = namedtuple('RobotConfig', ['has_pan',
-                                                      'has_tilt',
-                                                      'min_pan_angle',
-                                                      'max_pan_angle',
-                                                      'base_footprint_radius'])
-        self.state.robot_config = self.RobotConfig(self.robot_config.has_pan,
-                                                   self.robot_config.has_tilt,
-                                                   self.robot_config.min_pan_angle,
-                                                   self.robot_config.max_pan_angle,
-                                                   self.robot_config.base_footprint_radius)
-
-        self.reset_state()
-
-    def reset_state(self):
-        nb_humans = self.max_humans_world
-        self.state.humans_id = np.ones(nb_humans) * self.humans_id_default
-        self.state.humans_visible = np.ones(nb_humans)
-        self.state.humans_pose = np.zeros((nb_humans, 3))
-        self.state.humans_velocity = np.zeros((nb_humans, 2))
-        self.state.robot_pose = np.zeros(3)
-        if self.state.robot_config.has_pan and self.state.robot_config.has_tilt:
-            self.state.joint_angles = np.zeros(2)
-            self.state.joint_velocities = np.zeros(2)
-        elif not self.state.robot_config.has_pan and not self.state.robot_config.has_tilt:
-            self.state.joint_angles = None
-            self.state.joint_velocities = None
-        else:
-            self.state.joint_angles = np.zeros(1)
-            self.state.joint_velocities = np.zeros(1)
-
-
-    def publish_controller_status(self):
-        controller_status_msg = ControllerStatus()
-        controller_status_msg.goto_target_id = self.controller.controller_config.goto_target_id
-        controller_status_msg.human_target_id = self.controller.controller_config.human_target_id
-        controller_status_msg.pan_target_id = self.controller.controller_config.pan_target_id
-
-        # x_robot = self.odom_data.pose.pose.position.x
-        # y_robot = self.odom_data.pose.pose.position.y
-        self.x_human_features, self.y_human_features, self.th_human_features = self.controller.human_features[:3]
-        self.x_final = self.controller.shared_target[2]
-        self.y_final = self.controller.shared_target[3]
-        self.th_final = self.controller.shared_target[4]
-        self.x_wp = self.controller.goto_goal[0]
-        self.y_wp = self.controller.goto_goal[1]
-        self.th_wp = self.controller.goto_goal[2]
-
-        # (_, _, yaw_robot) = tf.transformations.euler_from_quaternion([self.odom_data.pose.pose.orientation.x,
-        #                                                               self.odom_data.pose.pose.orientation.y,
-        #                                                               self.odom_data.pose.pose.orientation.z,
-        #                                                               self.odom_data.pose.pose.orientation.w])
-        x_robot, y_robot, yaw_robot = self.state.robot_pose
-
-        if controller_status_msg.goto_target_id != -1 or self.controller.controller_config.goto_target_pose[-1] == 1:
-            dist_to_goto_target = np.linalg.norm(np.array([x_robot - self.x_final, y_robot - self.y_final]))
-            controller_status_msg.dist_to_goto_target = [np.abs(x_robot - self.x_final),
-                                              np.abs(y_robot - self.y_final),
-                                              np.abs(constraint_angle(yaw_robot - self.th_final)),
-                                              dist_to_goto_target]
-
-        if controller_status_msg.human_target_id != -1:
-            dist_to_human_target = np.linalg.norm(np.array([self.x_human_features, self.y_human_features]))
-            controller_status_msg.dist_to_human_target = [np.abs(self.x_human_features),
-                                               np.abs(self.y_human_features),
-                                               np.abs(constraint_angle(self.th_human_features)),
-                                               dist_to_human_target]
-
-        if controller_status_msg.pan_target_id != -1:
-            self.head_joint_errors = []
-
-            if self.pan_ind is not None:
-                self.head_joint_errors.append(np.abs(constraint_angle(-np.arctan2(self.x_pan_human_target - x_robot, self.y_pan_human_target - y_robot) - constraint_angle(yaw_robot + self.current_pan))))
-            if self.tilt_ind is not None:
-                self.head_joint_errors.append(0.)
-            controller_status_msg.head_joint_errors = self.head_joint_errors
-
-        if controller_status_msg.pan_target_id == -1 and controller_status_msg.human_target_id == -1 and controller_status_msg.goto_target_id == -1 and self.controller.controller_config.goto_target_pose[-1] == 0:
-            controller_status_msg.status = ControllerStatus.IDLE
-        if controller_status_msg.pan_target_id != -1 or controller_status_msg.human_target_id != -1 or controller_status_msg.goto_target_id != -1 or self.controller.controller_config.goto_target_pose[-1] == 1:
-            controller_status_msg.status = ControllerStatus.IS_RUNNING
-        if self.controller.is_failure:
-            controller_status_msg.status = ControllerStatus.IS_FAILURE
-        self._controller_status_pub.publish(controller_status_msg)
-
-        self.publish_goto_goal()
-        self.publish_human_goal()
-
-
-    def publish_human_goal(self):
-        current_time = rospy.Time.now()
-        quat = tf.transformations.quaternion_from_euler(0, 0, self.th_human_features)
-        self.tf_broadcaster.sendTransform(
-         (self.x_human_features, self.y_human_features, 0),
-         quat,
-         current_time,
-         "human_goal",
-         self.map_frame
-        )
-
-
-    def publish_goto_goal(self):
-        current_time = rospy.Time.now()
-        quat_final = tf.transformations.quaternion_from_euler(0, 0, self.th_final + np.pi/2)
-
-        quat_wp = tf.transformations.quaternion_from_euler(0, 0, self.th_wp)
-
-        self.tf_broadcaster.sendTransform(
-         (self.x_final, self.y_final, 0),
-         quat_final,
-         current_time,
-         "final goal",
-         self.map_frame
-        )
-        
-        self.tf_broadcaster.sendTransform(
-         (self.y_wp, -self.x_wp, 0),
-         quat_wp,
-         current_time,
-         "intermediate waypoint goal",
-         self.robot_frame
-        )
-
-
-    def publish_local_cost_map(self):
-        quat = tf.transformations.quaternion_from_euler(0, 0, 0)
-        x, y = self.state.config.local_map_size[0][0], self.state.config.local_map_size[1][0]  #robot_pose[0], robot_pose[1]
-        map = np.maximum(self.controller.local_map[:, :, 0], self.controller.local_map[:, :, 1]) * 100
-
-        occupancy_grid = OccupancyGrid()
-        occupancy_grid.info = MapMetaData()
-        occupancy_grid.info.map_load_time = self.current_time_local_map
-        occupancy_grid.info.resolution = 1/self.state.config.local_map_scale
-        occupancy_grid.info.width = int(self.state.config.local_map_width)
-        occupancy_grid.info.height = int(self.state.config.local_map_height)
-        occupancy_grid.info.origin = Pose(Point(x, y, 0.), Quaternion(*quat))
-
-        occupancy_grid.data = np.flip(map, axis=0).flatten().astype(np.int8)
-
-        occupancy_grid.header.stamp = self.current_time_local_map
-        occupancy_grid.header.frame_id = "local_cost_map"
-
-        # quat = tf.transformations.quaternion_from_euler(0, 0, self.state.robot_pose[2])
-        # self.tf_broadcaster.sendTransform(
-         # (self.state.robot_pose[0], self.state.robot_pose[1],  0),
-         # quat,
-         # self.current_time_local_map,
-         # "local_cost_map",
-         # self.map_frame
-        # )
-
-        self._local_cost_map_pub.publish(occupancy_grid)
-
-
-    def publish_vel(self, action):
-        pan_vel, ang, lin = action[0], action[1], action[2]
-        lin, ang = self._check_joint_vel_limits(lin, ang)
-        twist_msg = Twist(Vector3(lin, 0, 0), Vector3(0, 0, ang))
-        self._cmd_vel_pub.publish(twist_msg)
-
-        pan_angle = self.current_pan
-        pan_angle += self.controller.controller_config.h*pan_vel
-        tilt_angle = 0.  #self.current_tilt
-        pan_angle, tilt_angle = self._check_joint_pos_limits(pan_angle, tilt_angle)
-
-        head_msg = JointTrajectory()
-        p = JointTrajectoryPoint()
-        p.positions = [None] * 2
-        p.positions[0] = pan_angle
-        p.positions[1] = tilt_angle
-        # head_msg.header.frame_id = "head"
-        # head_msg.header.stamp = rospy.Time.now()
-        head_msg.joint_names = [None]*2
-        head_msg.joint_names[0] = "head_1_joint"  # pan
-        head_msg.joint_names[1] = "head_2_joint"  # tilt
-        head_msg.points = [None]
-        head_msg.points[0] = p
-        head_msg.points[0].time_from_start = rospy.Duration.from_sec(self.controller.controller_config.h)
-        self._pan_vel_pub.publish(head_msg)
-
-        print("vel to robot : {}, {}, {}".format(lin, ang, pan_vel))
-
-
-    def recreate_state(self):
-        self.reset_state()
-        self.current_pan = self.joint_states_data.position[self.pan_ind]
-        self.current_tilt = self.joint_states_data.position[self.tilt_ind]
-
-        self.timestamp_tracking_latest = self.timestamp_tracking
-
-        try:
-            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, self.timestamp_tracking_latest)
-        except:
-            map_2_robot = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
-            rospy.logwarn("lookupTransform between {} and {} not found at time {}, tf taken at time {}".format(self.map_frame, self.robot_frame, self.timestamp_tracking_latest.to_sec(), rospy.Time(0).to_sec()))
-        x_pos, y_pos, _ = map_2_robot[0]
-        # x_pos = self.odom_data.pose.pose.position.x
-        # y_pos = self.odom_data.pose.pose.position.y
-        # lin = self.odom_data.twist.twist.linear.x
-        # ang = self.odom_data.twist.twist.angular.z
-        (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_robot[1][0],
-                                                                map_2_robot[1][1],
-                                                                map_2_robot[1][2],
-                                                                map_2_robot[1][3]])
-        # (_, _, yaw) = tf.transformations.euler_from_quaternion([self.odom_data.pose.pose.orientation.x,
-        #                                                                self.odom_data.pose.pose.orientation.y,
-        #                                                                self.odom_data.pose.pose.orientation.z,
-        #                                                                self.odom_data.pose.pose.orientation.w])
-        self.state.robot_pose = np.array([x_pos, y_pos, constraint_angle(yaw - np.pi/2)])
-        # self.state.robot_velocity = np.array([lin, ang])
-
-        self.state.joint_angles = np.array([self.joint_states_data.position[self.pan_ind]])
-        self.state.joint_velocities = np.array([self.joint_states_data.velocity[self.pan_ind]])
-
-        for human in self.tracked_persons_2d_data.detections:
-            humans_id_list = self.state.humans_id.tolist()
-            if human.track_id in humans_id_list:
-                index = humans_id_list.index(human.track_id)
-            elif self.humans_id_default in humans_id_list:
-                index = humans_id_list.index(self.humans_id_default)
-            else:
-                rospy.logerr("No more space in state.humans_ arrays. Number max is {}. It is fixed because of the use of SharedMemory multiprocessing".format(self.max_humans_world))
-
-            self.state.humans_id[index] = human.track_id
-            self.state.humans_visible[index] = human.confidence
-            try:
-                map_2_human = self.tf_listener.lookupTransform(self.map_frame, "body_" + str(human.track_id), self.timestamp_tracking_latest)
-            except:
-                map_2_human = self.tf_listener.lookupTransform(self.map_frame, "body_" + str(human.track_id), rospy.Time(0))
-                rospy.logwarn("lookupTransform between {} and {} not found at time {}, tf taken at time {}".format(self.map_frame, "body_" + str(human.track_id), self.timestamp_tracking_latest.to_sec(), rospy.Time(0).to_sec()))
-
-            x_pos, y_pos, _ = map_2_human[0]
-            # x_pos = human.pose3D.pose.position.x
-            # y_pos = human.pose3D.pose.position.y
-
-            if self.look_at_data is not None:
-                if human.track_id == self.look_at_data.human_id:
-                    self.x_pan_human_target = x_pos
-                    self.y_pan_human_target = y_pos
-
-            lin = human.velocity3D.linear.x
-            ang = human.velocity3D.angular.z
-            (_, _, yaw) = tf.transformations.euler_from_quaternion([map_2_human[1][0],
-                                                                    map_2_human[1][1],
-                                                                    map_2_human[1][2],
-                                                                    map_2_human[1][3]])
-            # (_, _, yaw) = tf.transformations.euler_from_quaternion([human.pose3D.pose.orientation.x,
-            #                                                         human.pose3D.pose.orientation.y,
-            #                                                         human.pose3D.pose.orientation.z,
-            #                                                         human.pose3D.pose.orientation.w])
-
-            self.state.humans_pose[index, :] = [x_pos, y_pos, constraint_angle(yaw - np.pi/2)]
-            self.state.humans_velocity[index, :] = [lin, ang]
-
-        x = self.global_map_data.info.origin.position.x
-        y = self.global_map_data.info.origin.position.y
-        g_width = self.global_map_data.info.width
-        g_height = self.global_map_data.info.height
-        g_resolution = self.global_map_data.info.resolution       
-        global_map = np.flip((np.asarray(self.global_map_data.data) / 100).reshape((g_height, g_width)), axis=0)
-
-        self.last_shape_global_map = (g_height, g_width)
-        if self.global_map_padded is None:
-            self.global_map_padded = np.zeros((self.last_shape_global_map[0] + 2*self.y_margin, self.last_shape_global_map[1] + 2*self.x_margin))
-            self.global_map_padded[self.y_margin:global_map.shape[0] + self.y_margin, self.x_margin:global_map.shape[1] + self.x_margin] = global_map
-            self.last_x_global_map = x
-            self.last_y_global_map = y
-        else:
-            init_x = round((x -  self.last_x_global_map)/g_resolution) + self.x_margin
-            init_y = -round((y -  self.last_y_global_map)/g_resolution) + self.y_margin
-            if init_x < 0 or (init_x + self.last_shape_global_map[1]) > self.global_map_padded.shape[1]:
-                rospy.logerr('right or left side overflow in the x axis')
-            if init_y < 0 or (init_y + self.last_shape_global_map[0]) > self.global_map_padded.shape[0]:
-                rospy.logerr('right or left side overflow in the y axis')
-            self.global_map_padded[init_y:init_y + global_map.shape[0], init_x:init_x + global_map.shape[1]] = global_map
-            self.last_x_global_map = x
-            self.last_y_global_map = y
-        x = x - self.x_margin * g_resolution
-        y = y - self.y_margin * g_resolution
-        global_map_size = [[x, x + self.global_map_padded.shape[1]*g_resolution],[y, y + self.global_map_padded.shape[0]*g_resolution]]
-
-        x = self.local_map_data.info.origin.position.x
-        y = self.local_map_data.info.origin.position.y
-        l_width = self.local_map_data.info.width
-        l_height = self.local_map_data.info.height
-        l_resolution = self.local_map_data.info.resolution
-        local_map_size = [[x, x + l_width*l_resolution],[y, y + l_height*l_resolution]]
-
-        self.state.config = self.Config(global_map_size,
-                                        round(1/g_resolution),
-                                        self.global_map_padded.shape[1],
-                                        self.global_map_padded.shape[0],
-                                        local_map_size,
-                                        round(1/l_resolution),
-                                        l_width,
-                                        l_height)
-
-        scale = self.state.config.global_map_scale
-        radius = int(
-        self.state.robot_config.base_footprint_radius * scale)
-        kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,
-                                   (radius, radius))
-        global_map = cv2.dilate(self.global_map_padded, kernel, iterations=self.controller.controller_config.dilation_iter)
-        self.state.global_map = global_map
-        self.state.local_map = np.flip((np.asarray(self.local_map_data.data) / 100).reshape((l_height, l_width)), axis=0)
-
-
-    def _check_joint_vel_limits(self, lin_vel, ang_vel):
-        if lin_vel > self.controller.controller_config.u_ub[-1]:
-            rospy.logwarn("The linear velocity is greater than the maximum allowed. The maximum velocity is set.")
-            lin_vel = self.controller.controller_config.u_ub[-1]
-        if lin_vel < self.controller.controller_config.u_lb[-1]:
-            rospy.logwarn("The linear velocity is lower than the minimum allowed. The minimum velocity is set.")
-            lin_vel = self.controller.controller_config.u_lb[-1]
-        if ang_vel > self.controller.controller_config.u_ub[-2]:
-            rospy.logwarn("The angular velocity is greater than the maximum allowed. The maximum velocity is set.")
-            ang_vel = self.controller.controller_config.u_ub[-2]
-        if ang_vel < self.controller.controller_config.u_lb[-2]:
-            rospy.logwarn("The angular velocity is lower than the minimum allowed. The minimum velocity is set.")
-            ang_vel = self.controller.controller_config.u_lb[-2]
-        return lin_vel, ang_vel
-
-
-    def _check_joint_pos_limits(self, p, t):
-        if p < self.robot_config.min_pan_angle:
-            p = self.robot_config.min_pan_angle
-            rospy.logwarn("The pan angle desired is lower than the miminum allowed. The minimum oan angle is set.")
-        if p > self.robot_config.max_pan_angle:
-            p = self.robot_config.max_pan_angle
-            rospy.logwarn("The pan angle desired is greater than the maximum allowed. The maximum oan angle is set.")
-        if t < self.robot_config.min_tilt_angle:
-            t = self.robot_config.min_tilt_angle
-            rospy.logwarn("The tilt angle desired is lower than the miminum allowed. The minimum oan angle is set.")
-        if t > self.robot_config.max_tilt_angle:
-            t = self.robot_config.max_tilt_angle
-            rospy.logwarn("The tilt angle desired is higher than the maximum allowed. The maximum oan angle is set.")
-        return p, t
-
-
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        # self._check_odom_ready()
-        self._check_joint_states_ready()
-        self._check_tracked_persons_2d_ready()
-        self._check_local_map_ready()
-        self._check_global_map_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-
-    def _check_global_map_ready(self):
-        self.global_map_data = None
-        rospy.logdebug("Waiting for /global_map to be READY...")
-        while self.global_map_data is None and not rospy.is_shutdown():
-            try:
-                self.global_map_data = rospy.wait_for_message("/global_map", OccupancyGrid, timeout=5.0)
-                rospy.logdebug("Current /global_map READY=>")
-
-            except:
-                rospy.logerr("Current /global_map not ready yet, retrying for getting global map")
-        return self.global_map_data
-
-
-    def _check_local_map_ready(self):
-        self.local_map_data = None
-        rospy.logdebug("Waiting for /local_map to be READY...")
-        while self.local_map_data is None and not rospy.is_shutdown():
-            try:
-                self.local_map_data = rospy.wait_for_message("/local_map", OccupancyGrid, timeout=5.0)
-                rospy.logdebug("Current /local_map READY=>")
-
-            except:
-                rospy.logerr("Current /local_map not ready yet, retrying for getting local map")
-        return self.local_map_data
-
-
-    def _check_tracked_persons_2d_ready(self):
-        self.tracked_persons_2d_data = None
-        rospy.logdebug("Waiting for /tracked_persons_2d to be READY...")
-        while self.tracked_persons_2d_data is None and not rospy.is_shutdown():
-            try:
-                self.tracked_persons_2d_data = rospy.wait_for_message("/tracked_persons_2d", TrackedPersons2d, timeout=5.0)
-                rospy.logdebug("Current /tracked_persons_2d READY=>")
-
-            except:
-                rospy.logerr("Current /tracked_persons_2d not ready yet, retrying for getting tracked persons 2d")
-        return self.tracked_persons_2d_data
-
-
-    def _check_odom_ready(self):
-        self.odom_data = None
-        rospy.logdebug("Waiting for /odom to be READY...")
-        while self.odom_data is None and not rospy.is_shutdown():
-            try:
-                self.odom_data = rospy.wait_for_message("/odom", Odometry, timeout=5.0)
-                rospy.logdebug("Current /odom READY=>")
-
-            except:
-                rospy.logerr("Current /odom not ready yet, retrying for getting odom")
-        return self.odom_data
-
-
-    def _check_joint_states_ready(self):
-        self.joint_states_data = None
-        rospy.logdebug("Waiting for /joint_states to be READY...")
-        while self.joint_states_data is None and not rospy.is_shutdown():
-            try:
-                self.joint_states_data = rospy.wait_for_message("/joint_states", JointState, timeout=5.0)
-                rospy.logdebug("Current /joint_states READY=>")
-
-            except:
-                rospy.logerr("Current /joint_states not ready yet, retrying for getting joint states")
-        if not self.get_indexes:
-            # joint_states : head_1_joint, head_2_joint, wheel_left_joint, wheel_right_joint, caster_left_joint, caster_right_joint
-            for index, name in enumerate(self.joint_states_data.name):
-                if name == "head_1_joint":
-                    self.pan_ind = index
-                if name == "head_2_joint":
-                    self.tilt_ind = index
-            self.get_indexes = True
-
-        return self.joint_states_data
-
-
-    def _check_publishers_connection(self):
-        """
-        Checks that all the publishers are working
-        :return:
-        """
-        rate = rospy.Rate(10)  # 10hz
-        while self._cmd_vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _cmd_vel_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_cmd_vel_pub Publisher Connected")
-
-        while self._pan_vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _pan_vel_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_pan_vel_pub Publisher Connected")
-
-        rospy.logdebug("All Publishers READY")
-
-
-    def _look_at_callback(self, data):
-        with self.lock_controller:
-            self.look_at_data = data
-            self.controller.controller_config.pan_target_id = self.look_at_data.human_id
-            self.controller.set_pan_target(self.state, target=self.look_at_data.human_id)
-            self.controller.set_mode()
-            self.controller.set_weights()
-
-    def _go_towards_callback(self, data):
-        with self.lock_controller:
-            self.go_towards_data = data
-            self.controller.controller_config.goto_target_id = self.go_towards_data.human_id
-            self.controller.controller_config.human_target_id = -1
-            self.controller.controller_config.goto_target_pose = np.zeros_like(self.controller.controller_config.goto_target_pose)
-            self.controller.set_goto_target(self.state, target=self.go_towards_data.human_id)
-            self.controller.set_mode()
-            self.controller.set_weights()
-
-
-    def _navigate_callback(self, data):
-        with self.lock_controller:
-            self.navigate_data = data
-            self.controller.controller_config.goto_target_id = self.navigate_data.goto_target_id
-            self.controller.controller_config.human_target_id = self.navigate_data.human_target_id
-            self.controller.controller_config.relative_h_pose = np.asarray(self.navigate_data.relative_h_pose)
-            self.controller.controller_config.goto_target_pose = np.asarray(self.navigate_data.goto_target_pose)
-            self.controller.set_human_target(self.state, target=self.navigate_data.human_target_id)
-            if not self.controller.is_failure:
-                if self.navigate_data.goto_target_id != -1:
-                    self.controller.set_goto_target(self.state, target=self.navigate_data.goto_target_id)
-                elif np.any(np.asarray(self.navigate_data.goto_target_pose)):
-                    self.controller.set_goto_target(self.state, target=np.asarray(self.navigate_data.goto_target_pose))
-                else:
-                    rospy.logwarn('no goto_target_id nor goto_target_pose to set')
-            self.controller.set_mode()
-            self.controller.set_weights()
-
-    def _ts_callback(self, odom_data, joint_states_data, local_map_data):
-        if not self.get_first_callback:
-            self.get_first_callback = True
-        self._odom_callback(odom_data)
-        self._joint_states_callback(joint_states_data)
-        # self._tracked_persons_2d_callback(tracked_persons_2d_data)
-        self._local_map_callback(local_map_data)
-
-    def _joint_states_callback(self, data):
-        with self.lock:
-            self.joint_states_data = data
-
-
-    def _tracked_persons_2d_callback(self, data):
-        with self.lock:
-            self.tracked_persons_2d_data = data
-            self.timestamp_tracking = data.header.stamp
-            if self.controller.actions is not None:
-                self.action_idx = int((self.timestamp_tracking - self.timestamp_tracking_latest).to_sec()/self.controller.controller_config.h)
-                self.action_idx = min(self.action_idx, len(self.controller.actions) - 1)
-                self.actions = self.controller.actions
-                # self.publish_vel(self.controller.actions[self.action_idx])
-
-
-
-    def _local_map_callback(self, data):
-        with self.lock:
-            self.local_map_data = data
-        self.current_time_local_map = data.header.stamp
-
-
-    def _global_map_callback(self, data):
-        with self.lock:
-            self.global_map_data = data
-
-
-    def _odom_callback(self, data):
-        with self.lock:
-            self.odom_data = data
-
-
-    def _timer_callback(self, event):
-        pass
-    
-
-    def _pub_vel_callback(self, event):
-        if self.action_idx is not None:
-            self.publish_vel(self.actions[self.action_idx])
-            self.action_idx += 1
-            if self.action_idx > (len(self.controller.actions) - 1):
-                self.action_idx = 0
-                self.actions = np.copy(self.controller.initial_action)
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the Controller")
-        if self.plot_render:
-            self.mpc_plotter.close()
-        self.controller.close()
-        rospy.loginfo("Killing the Controller node")
-
-
-class RobotState:
-    pass
diff --git a/src/robot_behavior/src/robot_behavior/position_tracker.py b/src/robot_behavior/src/robot_behavior/position_tracker.py
deleted file mode 100644
index c2ced9f6d4f4bb84b5ba14d00778fb095cfa4413..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/position_tracker.py
+++ /dev/null
@@ -1,180 +0,0 @@
-from itertools import chain, count
-from scipy.sparse.extract import find
-import sklearn
-from sklearn.neighbors import NearestNeighbors
-import numpy as np
-import time
-from collections import namedtuple
-from copy import deepcopy
-
-TrackedPosition = namedtuple('TrackedPosition', ['vec', 'visual_id'])
-
-DIST_ANGLE = 0.3
-DIST_THRESHOLD = 1.5
-VISUAL_ID_COEF = 1.0
-MEMORY_TIME = 10
-MAX_IDX = 20
-
-def check_vec(v):
-    if v is None:
-        return False
-    try:
-        sklearn.utils.assert_all_finite(v)
-    except:
-        return False
-    return True
-
-class PositionTracker:
-    def __init__(self):
-        self.last_positions = {}
-        self.last_seen = {}
-        self.approximate_position = {}
-        self.last_idx = 0
-        self.last_time = time.time()
-
-    def vec(self, pt_list):
-        f = lambda x,y,angle,visual_id : [x, y, x + DIST_ANGLE * np.cos(angle), y + DIST_ANGLE * np.sin(angle), VISUAL_ID_COEF * visual_id]
-        return np.array([f(*x.vec, x.visual_id) for x in pt_list])
-
-    def push(self, new_pts):
-        dt = time.time() - self.last_time
-        self.last_time = time.time()
-        self.last_idx = MAX_IDX
-        updated_ids = []
-
-        for pt in new_pts:
-            try:
-                j = next(j for j in self.last_positions.keys() if pt.visual_id == self.last_positions[j].visual_id)
-                if check_vec(pt.vec):                    
-                    print('if')
-                    self.last_positions[j] = pt
-                    self.last_seen[j] = 0.0
-                    self.approximate_position[j] = False
-                    updated_ids.append(j)
-                else:
-                    print('else')
-                    self.last_seen[j] = 0.0
-                    self.approximate_position[j] = True
-                    updated_ids.append(j)
-            except StopIteration:
-                print('execpt')
-
-                if check_vec(pt.vec):
-                    print('if execpt')
-                    # self.last_positions[self.last_idx] = pt
-                    self.last_positions[pt.visual_id] = pt
-                    self.last_seen[pt.visual_id] = 0.
-                    updated_ids.append(pt.visual_id)
-                    # self.last_idx += 1
-
-        # self.fix_idx()
-
-        for k in list(self.last_positions):
-            if k not in updated_ids:
-                self.last_seen[k] += dt
-                if (self.last_seen[k] > MEMORY_TIME):
-                    del self.last_positions[k]
-                    del self.last_seen[k]
-
-
-
-
-
-    def push_old(self, new_pts):
-        dt = time.time() - self.last_time
-        self.last_time = time.time()
-        """Add the keypoints from a new frame into the buffer."""
-        if not self.last_positions:  # First pass
-            if len(new_pts) > MAX_IDX:
-                raise ValueError("Too many objects to track")
-            i = 0
-            for pt in new_pts:
-                if pt.vec is not None:
-                    self.last_positions[i] = pt
-                    self.last_seen[i] = 0
-                    self.approximate_position[i] = False
-                    i += 1
-                return
-
-       
-
-        # Previously observed skeletons which aren't close to a new one:
-        unseen = set(self.last_positions.keys())
-
-        # We append new points starting after max_idx, they are then placed
-        # in [0, MAX_IDX) in function self.fix_idx()
-        self.last_idx = MAX_IDX
-
-       
-
-        # indices which have location data are matched to nearest neighbors
-        """
-        
-        notnone_idx = [i for i in range(len(new_pts)) if check_vec(new_pts[i].vec)]
-        if len(notnone_idx) > 0:
-            match_keys = [k for k in self.approximate_position.keys() if self.approximate_position[k] == False]
-            
-            prev = self.vec([self.last_positions[k] for k in match_keys])
-            curr = self.vec([new_pts[i] for i in notnone_idx])
-
-            if len(match_keys) > 0:
-                nn_model = NearestNeighbors(n_neighbors=1, algorithm='brute')
-                nn_model.fit(prev)
-                dists, neighbors = nn_model.kneighbors(curr, return_distance=True)
-                neighbors = neighbors.flatten()
-            else:
-                dists = 2*DIST_THRESHOLD*np.ones(len(notnone_idx))
-
-            for i, id in enumerate(notnone_idx):
-                if 0. < dists[i] <= DIST_THRESHOLD:
-                    neighbor = match_keys[neighbors[i]]
-                    self.last_positions[neighbor] = new_pts[id]
-                    self.last_seen[neighbor] = 0
-                    self.approximate_position[neighbor] = False
-                    unseen.discard(neighbor)
-                else:
-                    self.last_positions[self.last_idx] = new_pts[id]
-                    self.last_idx += 1
-        """
-        """
-
-        # indices which have no location data are matched to same visual id (and keep same location)
-        none_idx = list(set(range(len(new_pts))))# - set(notnone_idx))
-        to_discard = []
-        if len(none_idx) > 0:
-            unused_vidx = [new_pts[i].visual_id for i in none_idx]
-            for i in unseen:
-                try:
-                    j = next(j for j in unused_vidx if self.last_positions[i].visual_id == j)
-                    to_discard.append(i)
-                    if new_pts[j].vec is not None:
-                    self.approximate_position[i] = True
-                except StopIteration:
-                    continue
-        for i in to_discard:
-            unseen.discard(i)
-
-        self.fix_idx()
-
-        # Remove points which have not been matched using either location OR visual id
-        # and have not been seen for MEMORY_TIME s
-        for i in unseen:
-            self.last_seen[i] += dt
-            if (self.last_seen[i] > MEMORY_TIME):
-                del self.last_positions[i]
-        """
-
-    def tracked_positions(self):
-        return deepcopy(self.last_positions)
-
-    def fix_idx(self):
-        if len(self.last_positions) >= MAX_IDX:
-            raise ValueError("Length of tracks greater than the maximum allowed")
-        else:
-            j = 0
-            for i in range(MAX_IDX, self.last_idx):
-                while j in self.last_positions:
-                    j += 1
-                self.last_positions[j] = self.last_positions.pop(i)
-                self.last_seen[j] = 0.
-                self.approximate_position[j] = False
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/republish_from_apriltag_node.py b/src/robot_behavior/src/robot_behavior/republish_from_apriltag_node.py
deleted file mode 100755
index af337fef4333f4683764505b88dfe2fee3f002bd..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/republish_from_apriltag_node.py
+++ /dev/null
@@ -1,235 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import numpy as np
-import re
-import cv2
-import os
-
-from spring_msgs.msg import TrackedPerson2d, TrackedPersons2d
-from tf2_msgs.msg import TFMessage
-from sensor_msgs.msg import RegionOfInterest
-from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3, PoseStamped
-from apriltag_ros.msg import AprilTagDetectionArray
-import tf
-
-from robot_behavior.utils import constraint_angle
-
-
-class RepublishFromApriltag:
-    def __init__(self, cam_optical_frame, robot_frame="base_footprint"):
-        self.robot_frame = robot_frame
-
-        self.tf_data = None
-        self.tag_detections_data = None
-
-        self.robot_2_cam_mat = None
-        self.cam_optical_frame = cam_optical_frame
-
-        self.id_list = []
-        self.id_features = []
-
-        self.current_time = None
-
-        cmd = "rostopic pub -1  /interaction_profile/set/cancel actionlib_msgs/GoalID \"{}\""
-        nodes = os.popen("rosnode list").readlines()
-        for i in range(len(nodes)):
-            nodes[i] = nodes[i].replace("\n","")
-        if '/pal_head_manager' in nodes:
-            os.system("rosnode kill " + '/pal_head_manager')  # '/pal_webcommander'
-        os.system(cmd)  # disable alive mode
-
-        cmd_pub = "rostopic pub -1 /pause_navigation std_msgs/Bool 'data: false'"
-        os.system(cmd_pub)  # disable pause navigation mode
-
-        self._check_all_sensors_ready()
-
-        # rospy.Subscriber('/tf', TFMessage, callback=self._tf_callback, queue_size=1)
-        rospy.Subscriber('/tag_detections', AprilTagDetectionArray, callback=self._tag_detections_callback, queue_size=1)
-
-        self._tracked_persons_2d_pub  = rospy.Publisher('/tracked_persons_2d', TrackedPersons2d, queue_size=10)
-
-        self.tf_listener = tf.TransformListener()
-        self.tf_broadcaster = tf.TransformBroadcaster()
-
-        rospy.on_shutdown(self.shutdown)        
-
-        rospy.loginfo("RepublishFromApriltag Initialization Ended")
-
-
-    def get_id_list(self):
-        if self.robot_2_cam_mat is None:
-            try :
-                self.tf_listener.waitForTransform(self.robot_frame, self.cam_optical_frame, rospy.Time(0), rospy.Duration(3.0))
-                reference_frame_2_cam = self.tf_listener.lookupTransform(self.robot_frame, self.cam_optical_frame, rospy.Time(0))
-            except:
-                reference_frame_2_cam = [[-0.1418, 0.0, 1.3206],
-                                         [-0.5168012549258786, 0.5168012549258786, -0.4826141967524755, 0.48261419675247563]]
-                rospy.logwarn("Can not get the lookupTransform between {} and {}, the default value will be taken".format(self.robot_frame, self.cam_optical_frame))
-            self.robot_2_cam_mat = self.tf_listener.fromTranslationRotation(reference_frame_2_cam [0], reference_frame_2_cam [1])
-        # try:
-        #     self.reference_frame_2_cam = self.tf_listener.lookupTransform(self.robot_frame, self.cam_optical_frame, rospy.Time(0))
-        # except:
-        #     rospy.logwarn("Can not get the lookupTransform between {} and {}".format(self.robot_frame, self.reference_frame_2_cam))
-        self.current_time = self.tag_detections_data.header.stamp
-        for detection in self.tag_detections_data.detections:
-            tag_id = detection.id[0]
-            # tf_map_2_robot = [[0., 0., 0], [0., 0., 0., 0.]]
-            try:
-                tf_map_2_robot = self.tf_listener.lookupTransform("rtabmap_map", "robot_pose", self.current_time)
-
-            except:
-                rospy.logwarn("Can not get the lookupTransform between {} and {}".format("rtabmap_map", "robot_pose"))
-                continue
-
-            if tag_id not in self.id_list:
-                self.id_list.append(tag_id)
-                self.id_features.append([[0., self.current_time, 0], np.zeros(3), np.zeros(2)])  # [[ last time seen, last_time, init], [x, y, theta], [lin_vel, ang_vel]]
-            else:
-                idx = self.id_list.index(tag_id)
-                self.id_features[idx][0][1] = self.current_time
-            x_pos = detection.pose.pose.pose.position.x
-            y_pos = detection.pose.pose.pose.position.y
-            z_pos = detection.pose.pose.pose.position.z
-            quat = [detection.pose.pose.pose.orientation.x,
-                    detection.pose.pose.pose.orientation.y,
-                    detection.pose.pose.pose.orientation.z,
-                    detection.pose.pose.pose.orientation.w]
-
-            cam_2_tag_mat = self.tf_listener.fromTranslationRotation([x_pos, y_pos, z_pos], quat)
-            map_2_robot_mat = self.tf_listener.fromTranslationRotation(tf_map_2_robot[0], tf_map_2_robot[1])
-            rot_pi_2 = self.tf_listener.fromTranslationRotation([0., 0., 0.], tf.transformations.quaternion_from_euler(0, 0, np.pi/2))
-            map_2_tag_mat = np.dot(map_2_robot_mat, np.dot(rot_pi_2, np.dot(self.robot_2_cam_mat, cam_2_tag_mat)))
-            trans_map_2_tag = tf.transformations.translation_from_matrix(map_2_tag_mat)
-            quat_map_2_tag = tf.transformations.quaternion_from_matrix(map_2_tag_mat)
-            (_, _, yaw) = tf.transformations.euler_from_quaternion([quat_map_2_tag[0],
-                                                                    quat_map_2_tag[1],
-                                                                    quat_map_2_tag[2],
-                                                                    quat_map_2_tag[3]])
-            yaw = constraint_angle(yaw - np.pi)
-            idx = self.id_list.index(tag_id)
-            self.id_features[idx][1][2] = yaw
-            self.id_features[idx][1][0] = trans_map_2_tag[0]
-            self.id_features[idx][1][1] = trans_map_2_tag[1]
-
-
-    def _publish_tracked_persons_2d(self):
-        tracked_persons_2d = TrackedPersons2d()
-
-        for tag_id in self.id_list:
-            from_mem_pose = False
-            from_mem_twist = False
-            yaw = 0.
-            map_2_tag_new_twist = [[0., 0., 0.], [0., 0., 0.]]
-
-            index_id = self.id_list.index(tag_id)
-            self.id_features[index_id][0][0] = (self.current_time - self.id_features[index_id][0][1]).to_sec()
-
-            if self.id_features[index_id][0][0] > 0.5:
-                from_mem_pose = True
-                rospy.logwarn("can not see human {}, last time seen {}".format(tag_id, self.id_features[index_id][0][0]))
-
-            angle = self.id_features[index_id][1][2]
-            quat = tf.transformations.quaternion_from_euler(0, 0, angle)
-            x, y, _ = self.id_features[index_id][1]
-            visible = 0 # self.id_features[index_id][0][0]
-            v = np.array([-np.sin(angle), np.cos(angle)])
-
-            self.tf_broadcaster.sendTransform(
-             [x, y, 0.],
-             quat,
-             self.current_time,
-             "tag_" + str(tag_id) + "_new",
-             "rtabmap_map"
-            )
-
-            # try:
-            #     map_2_tag_new_twist = self.tf_listener.lookupTwist("tag_" + str(tag_id) + "_new", "rtabmap_map", rospy.Time(0), rospy.Duration(0.001))
-            # except:
-            #     from_mem_twist = True
-            #     rospy.logwarn("lookupTwist not getting values for human {}".format(tag_id))
-
-            if not from_mem_twist:
-                self.id_features[index_id][2][0] = np.dot(np.asarray(map_2_tag_new_twist[1][:2]), v)
-                self.id_features[index_id][2][1] = map_2_tag_new_twist[1][2]
-            else:
-                # if can not be computed, set velocities to zeros
-                self.id_features[index_id][2][0] = 0.
-                self.id_features[index_id][2][1] = 0.
-
-            lin_vel = self.id_features[index_id][2][0]
-            ang_vel = self.id_features[index_id][2][1]
-
-            rospy.logdebug("id : {}, pos : {}, vel : {}".format(tag_id, self.id_features[index_id][1], self.id_features[index_id][2]))
-
-
-            if not from_mem_twist and not from_mem_pose:
-                self.id_features[index_id][0][2] = 1
-            # else:
-            #     self.id_features[index_id][0][2] = 0
-
-            if self.id_features[index_id][0][2]:
-                tracked_person_2d = TrackedPerson2d()
-                tracked_person_2d.track_id = int(tag_id)
-                tracked_person_2d.velocity3D = Twist(Vector3(lin_vel, 0, 0), Vector3(0, 0, ang_vel))
-                tracked_person_2d.bounding_box = RegionOfInterest(0, 0, 0, 0, False)
-                tracked_person_2d.confidence = float(visible)
-                tracked_person_2d.pose3D = PoseStamped()
-                tracked_person_2d.pose3D.pose = Pose(Point(x, y, 0.), Quaternion(*quat))
-                tracked_person_2d.pose3D.header.stamp = self.current_time
-
-                tracked_persons_2d.detections.append(tracked_person_2d)
-
-        tracked_persons_2d.header.stamp = self.current_time
-        self._tracked_persons_2d_pub.publish(tracked_persons_2d)
-
-
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        # self._check_tf_ready()
-        self._check_tag_detections_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-
-    def _check_tag_detections_ready(self):
-        self.tag_detections_data = None
-        rospy.logdebug("Waiting for /tag_detections to be READY...")
-        while self.tag_detections_data is None and not rospy.is_shutdown():
-            try:
-                self.tag_detections_data = rospy.wait_for_message("/tag_detections", AprilTagDetectionArray, timeout=5.0)
-                rospy.logdebug("Current /tag_detections READY=>")
-
-            except:
-                rospy.logerr("Current /tag_detections not ready yet, retrying for getting tag_detections")
-        return self.tag_detections_data
-
-
-    def _check_tf_ready(self):
-        self.tf_data = None
-        rospy.logdebug("Waiting for /tf to be READY...")
-        while self.tf_data is None and not rospy.is_shutdown():
-            try:
-                self.tf_data = rospy.wait_for_message("/tf", TFMessage, timeout=5.0)
-                rospy.logdebug("Current /tf READY=>")
-
-            except:
-                rospy.logerr("Current /tf not ready yet, retrying for getting tf")
-        return self.tf_data
-
-
-
-    def _tag_detections_callback(self, data):
-        self.tag_detections_data = data
-        self.get_id_list()
-        if self.id_list:
-            self._publish_tracked_persons_2d()
-
-
-    def _tf_callback(self, data):
-        self.tf_data = data
-        # self.get_id_list()
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the RepublishFromApriltag node")
-        # send empty goal message
-        rospy.loginfo("Killing the RepublishFromApriltag node")
diff --git a/src/robot_behavior/src/robot_behavior/republish_from_openpose_node.py b/src/robot_behavior/src/robot_behavior/republish_from_openpose_node.py
deleted file mode 100755
index e5274a072e89297db0789dcbba5bfb494ecc1dac..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/republish_from_openpose_node.py
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import numpy as np
-import cv2
-from scipy.optimize import linear_sum_assignment
-
-from spring_msgs.msg import Frame
-from std_msgs.msg import String
-from sensor_msgs.msg import Image, CompressedImage
-from robot_behavior.utils import get_color
-from cv_bridge import CvBridge, CvBridgeError
-
-
-class RepublishFromOpenpose:
-    def __init__(self, tracking_topic, frame_topic, image_pose_topic):
-        self.tracking_topic = tracking_topic
-        self.frame_topic = frame_topic
-        self.image_pose_topic = image_pose_topic
-
-        self.track_data = None
-        self.frame_data = None
-        self.image_pose_data = None
-
-        self.current_time = None
-        self.nb_joints = 25
-
-        self.bridge = CvBridge()
-        self.line_thickness = 4
-
-        self._check_all_sensors_ready()
-
-        rospy.Subscriber(self.frame_topic, Frame, callback=self._frame_callback, queue_size=1)
-        rospy.Subscriber(self.image_pose_topic, Image, callback=self._image_pose_callback, queue_size=1)
-        rospy.Subscriber(self.tracking_topic, String, callback=self._tracking_callback, queue_size=1)
-
-        self._tracked_pose_2d_pub  = rospy.Publisher('/tracked_pose_2d/frame', Frame, queue_size=10)
-        self._tracked_pose_2d_img_pub  = rospy.Publisher('/tracked_pose_2d/image_raw/compressed', CompressedImage, queue_size=10)
-
-        rospy.on_shutdown(self.shutdown)        
-
-        rospy.loginfo("RepublishFromOpenpose Initialization Ended")
-  
-  
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        self._check_frame_ready()
-        self._check_tracking_ready()
-        self._check_image_pose_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-    
-    def _check_tracking_ready(self):
-        self.track_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.tracking_topic))
-        while self.track_data is None and not rospy.is_shutdown():
-            try:
-                self.track_data = rospy.wait_for_message(self.tracking_topic, String, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.tracking_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for tracking results".format(self.tracking_topic))
-        return self.track_data
-
-
-    def _check_frame_ready(self):
-        self.frame_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.frame_topic))
-        while self.frame_data is None and not rospy.is_shutdown():
-            try:
-                self.frame_data = rospy.wait_for_message(self.frame_topic, Frame, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.frame_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for getting 2D openpose frames".format(self.frame_topic))
-        return self.frame_data
-    
-
-    def _check_image_pose_ready(self):
-        self.image_pose_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.image_pose_topic))
-        while self.image_pose_data is None and not rospy.is_shutdown():
-            try:
-                self.image_pose_data = rospy.wait_for_message(self.image_pose_topic, Image, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.image_pose_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for getting 2D openpose image result".format(self.image_pose_topic))
-        return self.image_pose_data
-
-
-    def _frame_callback(self, data):
-        self.frame_data = data
-
-        if self.track_data is not None:
-            splited_msg=self.track_data.data.split(',')
-            delta_time = (self.frame_data.header.stamp).to_sec() - float(splited_msg[0])
-            rospy.logdebug('Time latency between _frame_callback and (-) _tracking_callback : {}'.format(delta_time))
-
-            targets = []
-            ids_ordered_list = []
-            fr = Frame()
-            fr.header = self.frame_data.header
-
-            for i in range(2, len(splited_msg)):
-                targets.append(splited_msg[i].split())
-            num_persons_tracked = len(targets)
-            num_persons_pose_estimated = len(self.frame_data.persons)
-
-            if num_persons_pose_estimated > 0 and num_persons_tracked > 0:
-                joints_data = np.zeros((num_persons_pose_estimated, self.nb_joints, 2))
-                for person_idx, person in enumerate(self.frame_data.persons):
-                    for body_part_idx, body_part in enumerate(person.bodyParts):
-                        joints_data[person_idx, body_part_idx, :] = [body_part.pixel.x, body_part.pixel.y]
-                
-                ids_ordered_list, idx_list = self.pose_tracking_matching(joints_data, targets, num_persons_tracked)
-
-                if len(ids_ordered_list) == num_persons_tracked:                    
-                    fr.persons = np.array(self.frame_data.persons)[idx_list]
-                    # if delta_time < 0 : 
-                        # fr.header.stamp = rospy.Time.from_sec(float(splited_msg[0]))
-                    fr.ids = ids_ordered_list
-            self._tracked_pose_2d_pub.publish(fr) 
-
-    
-    def _tracking_callback(self, data):
-        self.track_data = data
-        # assuming that the 2D trcker with be faster than the pose estimation through Openpose
-
-
-    def _image_pose_callback(self, data):
-        self.image_pose_data = data
-        targets = []
-        
-        if self.track_data is not None:
-            splited_msg=self.track_data.data.split(',')
-            for i in range(2, len(splited_msg)):
-                targets.append(splited_msg[i].split())
-
-            image = self.bridge.imgmsg_to_cv2(self.image_pose_data, "rgb8")
-
-            for target in targets:
-                id = int(target[0])
-                x, y, width, height = int(float(target[1])), int(float(target[2])), int(float(target[3])), int(float(target[4]))
-                color = get_color(id)
-                cv2.rectangle(image, (x, y), (x + width, y + height), color=color, thickness=self.line_thickness)
-                cv2.putText(image, 'ID :' + str(id), org=(x, y - 20), fontFace=cv2.FONT_HERSHEY_SIMPLEX,  fontScale=0.8, color=color, thickness=self.line_thickness, lineType=cv2.LINE_AA)
-            
-            #### Create CompressedIamge ####
-            image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
-            img_msg = CompressedImage()
-            img_msg.header = self.image_pose_data.header
-            img_msg.format = "jpeg"
-            img_msg.data = np.array(cv2.imencode('.jpg', image)[1]).tostring()
-            # img_msg = self.bridge.cv2_to_imgmsg(image, "bgr8")
-            # img_msg.header = self.image_pose_data.header
-            self._tracked_pose_2d_img_pub.publish(img_msg)
-
-
-    def pose_tracking_matching(self, joints_data, targets, num_persons_tracked):
-        pose_target_idx = np.zeros((joints_data.shape[0], num_persons_tracked)) + self.nb_joints
-        ids = np.zeros(num_persons_tracked) - 1
-        
-        for person_idx, person_data in enumerate(joints_data):
-            for joint_data in person_data:
-                for target_idx, target in enumerate(targets):
-                    ids[target_idx] = int(target[0])
-                    x, y, width, height = int(float(target[1])), int(float(target[2])), int(float(target[3])), int(float(target[4]))
-                    if x <= joint_data[0] <= x + width and y <= joint_data[1] <= y + height:
-                        pose_target_idx[person_idx, target_idx] -= 1
-       
-        row_ind, col_ind = linear_sum_assignment(pose_target_idx)
-        return ids.astype(np.int16), col_ind
-        
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the RepublishFromOpenpose node")
-        # send empty goal message
-        rospy.loginfo("Killing the RepublishFromOpenpose node")
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/republish_from_rtabmap_node.py b/src/robot_behavior/src/robot_behavior/republish_from_rtabmap_node.py
deleted file mode 100755
index f13dae9036a11d1da92799729b3ffaf64fc5feb7..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/republish_from_rtabmap_node.py
+++ /dev/null
@@ -1,289 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import numpy as np
-import re
-import cv2
-import os
-
-from nav_msgs.msg import Odometry, OccupancyGrid, MapMetaData
-from geometry_msgs.msg import Point, Pose, Quaternion, Twist
-import tf
-from spring_msgs.msg import Frame
-
-from robot_behavior.utils import constraint_angle
-
-
-class RepublishFromRtabmap:
-    def __init__(self, frame_topic, robot_frame, map_frame):
-        self.map_frame = map_frame
-        self.robot_frame = robot_frame
-        self.frame_topic = frame_topic
-        self.robot_pose = np.zeros(3)  # x, y, theta
-        self.local_map_depth = 3  # m
-        self.local_map_width = 2*self.local_map_depth # m
-        self.local_map_height = 2*self.local_map_depth # m
-
-        self.rtabmap_proj_map_data = None
-        self.frame_data = None
-        self.rtabmap_ready = False
-
-        self.map_2_odom_mat = None
-
-        self.current_time = None
-
-        cmd = "rostopic pub -1  /interaction_profile/set/cancel actionlib_msgs/GoalID \"{}\""
-        nodes = os.popen("rosnode list").readlines()
-        for i in range(len(nodes)):
-            nodes[i] = nodes[i].replace("\n","")
-        if '/pal_head_manager' in nodes:
-            os.system("rosnode kill " + '/pal_head_manager')  # '/pal_webcommander'
-        os.system(cmd)  # disable alive mode
-
-        cmd_pub = "rostopic pub -1 /pause_navigation std_msgs/Bool 'data: false'"
-        os.system(cmd_pub)  # disable pause navigation mode
-        
-        self.tf_listener = tf.TransformListener()
-
-        self._check_all_sensors_ready()
-
-        rospy.Subscriber(self.frame_topic, Frame, callback=self._frame_callback, queue_size=1)
-        rospy.Subscriber('/rtabmap/proj_map', OccupancyGrid, callback=self._proj_map_callback, queue_size=1)
-
-        self._rtabmap_local_map_pub = rospy.Publisher('/rtabmap_local_map_rectified', OccupancyGrid, queue_size=10)
-
-        self.tf_broadcaster = tf.TransformBroadcaster()
-
-        rospy.on_shutdown(self.shutdown)        
-
-        rospy.loginfo("RepublishFromRtabmap Initialization Ended")
-
-
-    def meter_to_px(self, point, map_size, resolution):
-        if point[0] < map_size[0][0]:
-            point[0] = map_size[0][0]
-        if point[0] > map_size[0][1]:
-            point[0] = map_size[0][1]
-        if point[1] < map_size[1][0]:
-            point[1] = map_size[1][0]
-        if point[1] > map_size[1][1]:
-            point[1] = map_size[1][1]
-
-        point_px = [0., 0.]
-        point_px[0] = abs(map_size[0][0] - point[0])/resolution
-        point_px[1] = abs(map_size[1][1] - point[1])/resolution
-
-        return np.rint(point_px)
-
-
-    def _publish_local_map(self):
-        self.current_time = self.frame_data.header.stamp
-        x = self.rtabmap_proj_map_data.info.origin.position.x
-        y = self.rtabmap_proj_map_data.info.origin.position.y
-        width = self.rtabmap_proj_map_data.info.width
-        height = self.rtabmap_proj_map_data.info.height
-        resolution = self.rtabmap_proj_map_data.info.resolution
-        map_size = [[x, x + width*resolution],[y, y + height*resolution]]
-        global_map_static = np.flip((np.asarray(self.rtabmap_proj_map_data.data) / 100).reshape((height, width)), axis=0)
-
-        tf_map_2_odom = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
-        map_2_robot_mat = self.tf_listener.fromTranslationRotation(tf_map_2_odom[0], tf_map_2_odom[1])
-        trans_map_2_robot = tf.transformations.translation_from_matrix(map_2_robot_mat)
-        quat_map_2_robot = tf.transformations.quaternion_from_matrix(map_2_robot_mat)
-        (_, _, yaw) = tf.transformations.euler_from_quaternion([quat_map_2_robot[0],
-                                                                quat_map_2_robot[1],
-                                                                quat_map_2_robot[2],
-                                                                quat_map_2_robot[3]])
-        self.robot_pose[0] = trans_map_2_robot[0]
-        self.robot_pose[1] = trans_map_2_robot[1]
-        self.robot_pose[2] = constraint_angle(yaw - np.pi/2)
-
-        pos = self.robot_pose[:2]
-        angle = self.robot_pose[2]
-
-        hvect = np.array([np.cos(angle), np.sin(angle)])
-        vvect = np.array([-np.sin(angle), np.cos(angle)])
-
-        rect_ul = (pos -
-                   0.5 * hvect * self.local_map_width +
-                   0.5 * vvect * self.local_map_height)
-        rect_ul = self.meter_to_px(rect_ul, map_size, resolution)
-
-        rect_ur = (pos +
-                   0.5 * hvect * self.local_map_width +
-                   0.5 * vvect * self.local_map_height)
-        rect_ur = self.meter_to_px(rect_ur, map_size, resolution)
-
-        rect_ll = (pos -
-                   0.5 * hvect * self.local_map_width -
-                   0.5 * vvect * self.local_map_height)
-        rect_ll = self.meter_to_px(rect_ll, map_size, resolution)
-
-        rect_lr = (pos +
-                   0.5 * hvect * self.local_map_width -
-                   0.5 * vvect * self.local_map_height)
-        rect_lr = self.meter_to_px(rect_lr, map_size, resolution)
-
-        src_pts = np.vstack([rect_ll,
-                             rect_ul,
-                             rect_ur,
-                             rect_lr]).astype(np.float32)
-
-        width = round(self.local_map_width/resolution)
-        height = round(self.local_map_height/resolution)
-        dst_pts = np.array([[0, height-1],
-                            [0, 0],
-                            [width-1, 0],
-                            [width-1,height-1]], dtype="float32")
-
-        M = cv2.getPerspectiveTransform(src_pts, dst_pts)
-        local_cost_map = cv2.warpPerspective(global_map_static, M,
-                                        (height, width))
-
-        quat = tf.transformations.quaternion_from_euler(0, 0, 0)
-        x, y = -self.local_map_width/2, -self.local_map_height/2  #robot_pose[0], robot_pose[1]
-
-        occupancy_grid = OccupancyGrid()
-        occupancy_grid.info = MapMetaData()
-        occupancy_grid.info.map_load_time = self.current_time
-        occupancy_grid.info.resolution = resolution
-        occupancy_grid.info.width = width
-        occupancy_grid.info.height = height
-        occupancy_grid.info.origin = Pose(Point(x, y, 0.), Quaternion(*quat))
-
-        occupancy_grid.data = np.flip(local_cost_map * 100, axis=0).flatten().astype(np.int8)
-
-        occupancy_grid.header.stamp = self.current_time
-        occupancy_grid.header.frame_id = "local_cost_map"
-
-        quat = tf.transformations.quaternion_from_euler(0, 0, self.robot_pose[2])
-        self.tf_broadcaster.sendTransform(
-         (self.robot_pose[0], self.robot_pose[1],  0),
-         quat,
-         self.current_time,
-         "local_cost_map",
-         self.map_frame
-        )
-        self._rtabmap_local_map_pub.publish(occupancy_grid)
-
-
-    def _republish_rtabmap_odom(self):
-        x_pos = self.rtabmap_odom_data.pose.pose.position.x
-        y_pos = self.rtabmap_odom_data.pose.pose.position.y
-        self.current_time = self.rtabmap_odom_data.header.stamp
-
-        if self.map_2_odom_mat is None:
-            tf_map_2_odom = self.tf_listener.lookupTransform(self.map_frame, self.odom_frame, rospy.Time(0))
-            self.map_2_odom_mat = self.tf_listener.fromTranslationRotation(tf_map_2_odom[0], tf_map_2_odom[1])
-        odom_robot_mat = self.tf_listener.fromTranslationRotation([x_pos, y_pos, 0.], [self.rtabmap_odom_data.pose.pose.orientation.x,
-                                                                       self.rtabmap_odom_data.pose.pose.orientation.y,
-                                                                       self.rtabmap_odom_data.pose.pose.orientation.z,
-                                                                       self.rtabmap_odom_data.pose.pose.orientation.w])
-        map_2_robot_mat = np.dot(self.map_2_odom_mat, odom_robot_mat)
-        trans_map_2_robot = tf.transformations.translation_from_matrix(map_2_robot_mat)
-        quat_map_2_robot = tf.transformations.quaternion_from_matrix(map_2_robot_mat)
-        (_, _, yaw) = tf.transformations.euler_from_quaternion([quat_map_2_robot[0],
-                                                                quat_map_2_robot[1],
-                                                                quat_map_2_robot[2],
-                                                                quat_map_2_robot[3]])
-        self.robot_pose[0] = trans_map_2_robot[0]
-        self.robot_pose[1] = trans_map_2_robot[1]
-        self.robot_pose[2] = constraint_angle(yaw - np.pi/2)
-        odom_quat = tf.transformations.quaternion_from_euler(0, 0, self.robot_pose[2])
-        # first, we'll publish the transform over tf
-        # self.tf_broadcaster.sendTransform(
-        #  (self.robot_pose[0], self.robot_pose[1], 0.),
-        #  tf.transformations.quaternion_from_euler(0, 0, self.robot_pose[2]),
-        #  self.current_time,
-        #  "robot_pose",
-        #  self.map_frame
-        # )
-
-        # next, we'll publish the odometry message over ROS
-        odom = Odometry()
-        odom.header.stamp = self.current_time
-        odom.header.frame_id = "robot_pose"
-
-        # set the position
-        odom.pose.pose = Pose(Point(self.robot_pose[0], self.robot_pose[1], 0.), Quaternion(*odom_quat))
-
-        # set the velocity
-        odom.child_frame_id = self.map_frame
-        odom.twist = self.rtabmap_odom_data.twist
-
-        # publish the message
-        self._rtabmap_odom_pub.publish(odom)
-
-
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        self._check_rtabmap_ready()
-        self._check_rtabmap_proj_map_ready()
-        self._check_frame_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-
-    def _check_rtabmap_ready(self):
-        self.tf_listener.waitForTransform(self.map_frame, self.robot_frame, rospy.Time(0), rospy.Duration(5.0))
-        self.rtabmap_ready = True
-
-
-    def _check_rtabmap_proj_map_ready(self):
-        self.rtabmap_proj_map_data = None
-        rospy.logdebug("Waiting for /rtabmap/proj_map to be READY...")
-        while self.rtabmap_proj_map_data is None and not rospy.is_shutdown():
-            try:
-                self.rtabmap_proj_map_data = rospy.wait_for_message("/rtabmap/proj_map", OccupancyGrid, timeout=5.0)
-                rospy.logdebug("Current /rtabmap/proj_map READY=>")
-
-            except:
-                rospy.logerr("Current /rtabmap/proj_map not ready yet, retrying for getting rtabmap proj_map")
-        return self.rtabmap_proj_map_data
-
-
-    def _check_rtabmap_odom_ready(self):
-        self.rtabmap_odom_data = None
-        rospy.logdebug("Waiting for /rtabmap/rtabmap_odom to be READY...")
-        while self.rtabmap_odom_data is None and not rospy.is_shutdown():
-            try:
-                self.rtabmap_odom_data = rospy.wait_for_message("/rtabmap/rtabmap_odom", Odometry, timeout=5.0)
-                rospy.logdebug("Current /rtabmap/rtabmap_odom READY=>")
-
-            except:
-                rospy.logerr("Current /rtabmap/rtabmap_odom not ready yet, retrying for getting rtabmap odom")
-        return self.rtabmap_odom_data
-
-
-    def _check_frame_ready(self):
-        self.frame_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.frame_topic))
-        while self.frame_data is None and not rospy.is_shutdown():
-            try:
-                self.frame_data = rospy.wait_for_message(self.frame_topic, Frame, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.frame_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for getting 2D openpose frames".format(self.frame_topic))
-        return self.frame_data
-
-
-    def _frame_callback(self, data):
-        self.frame_data = data
-        if self.rtabmap_ready and self.rtabmap_proj_map_data is not None:
-            self._publish_local_map()
-
-
-    def _rtabmap_odom_callback(self, data):
-        self.rtabmap_odom_data = data
-        if self.rtabmap_ready:
-            self._republish_rtabmap_odom()
-            self._publish_local_map()
-
-
-    def _proj_map_callback(self, data):
-        self.rtabmap_proj_map_data = data
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the RepublishFromRtabmap node")
-        # send empty goal message
-        rospy.loginfo("Killing the RepublishFromRtabmap node")
\ No newline at end of file
diff --git a/src/robot_behavior/src/robot_behavior/ros_4_hri_interface.py b/src/robot_behavior/src/robot_behavior/ros_4_hri_interface.py
new file mode 100644
index 0000000000000000000000000000000000000000..9aeb2dbf7566a688aea88201d28cddc7ba4cd710
--- /dev/null
+++ b/src/robot_behavior/src/robot_behavior/ros_4_hri_interface.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+
+from py_spring_hri import HRIListener
+
+class ROS4HRIInterface:
+    def __init__(self):
+        self.hri_listener = HRIListener(
+            subscribe_persons = True,
+            subscribe_bodies = True,
+            subscribe_groups = True,
+            default_target_frame='map')
+        self.hri_listener.start()
+            
+    
+    def get_data(self):
+        body_person_map = {}
+        person_body_map = {}
+        bodies_data = {}
+        groups_data = {}
+
+        with self.hri_listener.tracked_persons.lock:
+            for person_id, person in self.hri_listener.tracked_persons.items():
+                if person.body:
+                    body_person_map[person.body.id] = person_id
+                    person_body_map[person_id] = person.body.id
+
+        with self.hri_listener.bodies.lock:
+            for body_id, body in self.hri_listener.bodies.items():
+                person_id = body_person_map.get(body_id, None)                    
+                bodies_data[body_id] = (body.transform(), person_id)
+
+        with self.hri_listener.groups.lock:
+            for group_id, group in self.hri_listener.groups.items():
+                add_group = True
+                bodies_data_tmp = {}
+                # TODO: revisit once the faces/bodies/voices/persons/groups tracker publishes its members correctly
+                if group.member_ids:
+                    for member in group.member_ids:
+                        if member not in person_body_map or person_body_map[member] not in bodies_data:
+                            add_group = False
+                        else:
+                            bodies_data_tmp[person_body_map[member]] = (*bodies_data[person_body_map[member]], group_id)
+                    if add_group:
+                        groups_data[group_id] = (group.transform(), group.member_ids)
+                        bodies_data.update(bodies_data_tmp)
+                        
+        # bodies_data : for each body_id : transform, person_id, group_id if group
+        # groups_data : for each group_id : transform, group member_ids
+        # body_person_map : for each body_id gets person_id
+        # person_body_map : for each person_id gets body_id
+        return bodies_data, groups_data, person_body_map, body_person_map
+    
+
+    def close(self):
+        self.hri_listener.close()
+        
+
diff --git a/src/robot_behavior/src/robot_behavior/ros_mediapipe_node.py b/src/robot_behavior/src/robot_behavior/ros_mediapipe_node.py
deleted file mode 100755
index 7ffbcce582af7fc782d0a1112e25fee91ef816e8..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/ros_mediapipe_node.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import numpy as np
-import cv2
-from cv_bridge import CvBridge, CvBridgeError
-import mediapipe as mp
-mp_drawing = mp.solutions.drawing_utils
-mp_drawing_styles = mp.solutions.drawing_styles
-mp_pose = mp.solutions.pose
-
-from spring_msgs.msg import Frame, Person, BodyPart
-from sensor_msgs.msg import Image, CompressedImage
-from std_msgs.msg import String
-
-from robot_behavior.utils import constraint_angle, get_color
-
-
-class RosMediapipe:
-    def __init__(self, tracking_topic, image_topic):
-        self.tracking_topic = tracking_topic
-        self.image_topic = image_topic
-
-        self.img_data = None
-        self.track_data = None
-
-        self.current_time = None
-        self.bridge = CvBridge()
-        self.line_thickness = 4
-
-        self._check_all_sensors_ready()
-
-        rospy.Subscriber(self.image_topic, Image, callback=self._image_callback, queue_size=1)
-        rospy.Subscriber(self.tracking_topic, String, callback=self._tracking_callback, queue_size=1)
-
-        self._tracked_pose_2d_pub  = rospy.Publisher('/tracked_pose_2d/frame', Frame, queue_size=10)
-        self._tracked_pose_2d_img_pub = rospy.Publisher('/tracked_pose_2d/image_raw/compressed', CompressedImage, queue_size=10)
-
-        rospy.on_shutdown(self.shutdown)        
-
-        rospy.loginfo("RosMediapipe Initialization Ended")
-
-
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        self._check_image_ready()
-        self._check_tracking_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-
-    def _check_image_ready(self):
-        self.img_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.image_topic))
-        while self.img_data is None and not rospy.is_shutdown():
-            try:
-                self.img_data = rospy.wait_for_message(self.image_topic, Image, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.image_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for image".format(self.image_topic))
-        return self.img_data
-
-
-    def _check_tracking_ready(self):
-        self.track_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.tracking_topic))
-        while self.track_data is None and not rospy.is_shutdown():
-            try:
-                self.track_data = rospy.wait_for_message(self.tracking_topic, String, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.tracking_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for tracking results".format(self.tracking_topic))
-        return self.track_data
-
-
-    def _image_callback(self, data):
-        self.img_data = data
-
-    def _tracking_callback(self, data):
-        self.track_data = data
-        self._targets = []
-
-        if self.img_data is not None:
-
-
-            splited_msg=data.data.split(',')
-            self.current_time = splited_msg[0]
-            for i in range(2, len(splited_msg)):
-                self._targets.append(splited_msg[i].split())
-            num_persons = len(self._targets)
-
-            image = self.bridge.imgmsg_to_cv2(self.img_data, "rgb8")
-
-            fr = Frame()
-            fr.header = self.img_data.header
-            # Handle body points
-            fr.persons = [Person() for _ in range(num_persons)]
-            fr.ids = [int(target[0]) for target in self._targets]
-
-            if num_persons != 0:
-
-                with mp_pose.Pose(static_image_mode=True,
-                                  model_complexity=1,
-                                  min_detection_confidence=0.2) as pose:
-
-                    for i, target in enumerate(self._targets):
-                        id = int(target[0])
-                        x, y, width, height = int(float(target[1])), int(float(target[2])), int(float(target[3])), int(float(target[4]))
-                        roi = image[y:y + height, x:x + width]
-                        color = get_color(id)
-                        cv2.rectangle(image, (x, y), (x + width, y + height), color=color, thickness=self.line_thickness)
-                        cv2.putText(image, 'ID :' + str(id), org=(x, y - 20), fontFace=cv2.FONT_HERSHEY_SIMPLEX,  fontScale=0.8, color=color, thickness=self.line_thickness, lineType=cv2.LINE_AA)
-
-                        try:
-                            results = pose.process(roi)
-                            pose_landmarks = results.pose_landmarks
-                        except Exception as e:
-                            rospy.logwarn("Could not get pose landmarks: {}".format(e))
-                            continue
-
-
-                        keypoints = []
-                        if pose_landmarks is not None:
-                            for data_point in results.pose_landmarks.landmark:
-                                keypoints.append([data_point.x, data_point.y, data_point.z, data_point.visibility])
-                            body_part_count = len(keypoints)
-
-                            # Draw the pose annotation on the image.
-                            # image.flags.writeable = True
-                            # image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
-                            mp_drawing.draw_landmarks(
-                            roi,
-                            results.pose_landmarks,
-                            mp_pose.POSE_CONNECTIONS,
-                            landmark_drawing_spec=mp_drawing_styles.get_default_pose_landmarks_style())
-                            # Flip the image horizontally for a selfie-view display.
-                            # cv2.imshow('MediaPipe Pose', cv2.flip(image, 1))
-                            # if cv2.waitKey(5) & 0xFF == 27:
-                            #   break
-                            fr.persons[i].bodyParts = [BodyPart() for _ in range(body_part_count)]
-                            fr.ids[i] = id
-                            # no leftHandParts nor rightHandParts
-                            # Process the body
-                            for bp_idx, bp in enumerate(keypoints):
-                                u, v, s = bp[0]*width + x, bp[1]*height + y, bp[-1]
-                                bp_x, bp_y, bp_z = bp[0], bp[1], bp[2]  # warning: in the image plane, see medip documentation https://google.github.io/mediapipe/solutions/pose
-                                arr = fr.persons[i].bodyParts[bp_idx]
-                                arr.pixel.x = u
-                                arr.pixel.y = v
-                                arr.score = s
-                                arr.point.x = bp_x
-                                arr.point.y = bp_y
-                                arr.point.z = bp_z
-
-            #### Create CompressedIamge ####
-            image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
-            img_msg = CompressedImage()
-            img_msg.header = self.img_data.header
-            img_msg.format = "jpeg"
-            img_msg.data = np.array(cv2.imencode('.jpg', image)[1]).tostring()
-            # img_msg = self.bridge.cv2_to_imgmsg(image, "rgb8")
-            # img_msg.header = self.img_data.header
-            self._tracked_pose_2d_img_pub.publish(img_msg)
-            self._tracked_pose_2d_pub.publish(fr)
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the RepublishFromOpenpose node")
-        # send empty goal message
-        rospy.loginfo("Killing the RepublishFromOpenpose node")
diff --git a/src/robot_behavior/src/robot_behavior/sim_2d_node.py b/src/robot_behavior/src/robot_behavior/sim_2d_node.py
deleted file mode 100755
index 8be9157e04704b03ff2905cbb0404170482ed3cf..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/sim_2d_node.py
+++ /dev/null
@@ -1,345 +0,0 @@
-#!/usr/bin/env python3
-import gym
-import numpy as np
-import sim2d.gym_env
-import tf
-
-import rospy, rostopic
-from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3, PoseStamped
-from sensor_msgs.msg import JointState, RegionOfInterest
-from std_msgs.msg import Float64
-from nav_msgs.msg import Odometry, OccupancyGrid, MapMetaData
-from trajectory_msgs.msg import JointTrajectory
-
-from spring_msgs.msg import TrackedPerson2d, TrackedPersons2d
-
-
-class Simulator:
-    def __init__(self, gui=True, config_path=None, pub_hz=20, joint_names=['head_1_joint']):
-        self.gui = gui
-        self.config_path = config_path
-        self.hz = pub_hz
-        self.joint_names = joint_names
-
-        # To get indexes of the joints
-        self.get_indexes = False
-        self.pan_ind = None
-        self.tilt_ind = None
-        self.pan_ang = 0.
-
-        self.env = gym.make('HumanRobot2D-v0', gui=self.gui, config_path=self.config_path)
-
-        self.action = np.zeros(len(self.env.action))
-
-        self.init()
-
-
-    def init(self):
-        # Start all the ROS related Subscribers and publishers
-        rospy.Subscriber('/cmd_vel', Twist, callback=self._cmd_vel_callback, queue_size=1)
-        rospy.Subscriber('/pan_vel', JointTrajectory, callback=self._pan_vel_callback, queue_size=1)
-
-        rospy.Timer(rospy.Duration(1), callback=self._timer_callback)
-
-        self._odom_pub = rospy.Publisher('/odom', Odometry, queue_size=10)
-        self._joint_states_pub = rospy.Publisher('/joint_states', JointState, queue_size=10)
-        self._tracked_persons_2d_pub = rospy.Publisher('/tracked_persons_2d', TrackedPersons2d, queue_size=10)
-        self._local_map_pub = rospy.Publisher('/local_map', OccupancyGrid, queue_size=10)
-        self._global_map_pub = rospy.Publisher('/global_map', OccupancyGrid, queue_size=10)
-
-        self.broadcaster = tf.TransformBroadcaster()
-
-        rospy.loginfo("Initializing the Simulator")
-
-        # Register handler to be called when rospy process begins shutdown
-        rospy.on_shutdown(self.shutdown)
-
-        # self.last_time = rospy.Time.now()
-
-        self.odom_pub_rate = rospy.Rate(self.hz)
-        self.joint_states_pub_rate = rospy.Rate(self.hz)
-        self.tracked_persons_2d_pub_rate = rospy.Rate(self.hz)
-        self.local_map_pub_rate = rospy.Rate(self.hz)
-        self.global_map_pub_rate = rospy.Rate(self.hz)
-
-        self._check_publishers_connection()
-
-        rospy.loginfo("Arguements passed : ")
-        rospy.loginfo("Publish rate : {} Hz".format(self.hz))
-        rospy.loginfo("Gui : {}".format(self.gui))
-        rospy.loginfo("Config path : {}".format(self.config_path))
-
-        rospy.loginfo("Simulator Initialization Ended")
-
-
-    def run(self):
-        gym_running = True
-        init_t = rospy.Time.now()
-        while not rospy.is_shutdown() and gym_running:
-            loop_t = rospy.Time.now()
-            state, _, done, info = self.env.step(self.action.tolist())
-            self.republish(state)
-            # rospy.logdebug('{}, {}, {}'.format(state.joint_angles, state.humans_id, state.humans_visible))
-            time_now = rospy.Time.now()
-            print('Simulation time : {}, Simulation loop time : {}'.format((time_now - init_t).to_sec(), (time_now - loop_t).to_sec()))
-            gym_running = info['gym_running'] and not info['max_steps'] and not done
-
-
-    def _check_publishers_connection(self):
-        """
-        Checks that all the publishers are working
-        :return:
-        """
-        rate = rospy.Rate(10)  # 10hz
-
-        while self._odom_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _odom_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_odom_pub Publisher Connected")
-
-        while self._joint_states_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _joint_states_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_joint_states_pub Publisher Connected")
-
-        while self._tracked_persons_2d_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _tracked_persons_2d_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_tracked_persons_2d_pub Publisher Connected")
-
-        while self._local_map_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _local_map_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_local_map_pub Publisher Connected")
-
-        while self._global_map_pub.get_num_connections() == 0 and not rospy.is_shutdown():
-            rospy.logdebug("No susbribers to _global_map_pub yet so we wait and try again")
-            try:
-                rate.sleep()
-            except rospy.ROSInterruptException:
-                # This is to avoid error when world is rested, time when backwards.
-                pass
-        rospy.logdebug("_global_map_pub Publisher Connected")
-
-        rospy.logdebug("All Publishers READY")
-
-
-    def republish(self, state):
-        self.republish_odom(state.robot_pose, state.robot_velocity)
-        self.republish_joint_states(state.joint_angles, state.joint_velocities)
-        self.republish_tracked_persons_2d(humans_id=state.humans_id,
-                                          humans_pose=state.humans_pose,
-                                          humans_velocity=state.humans_velocity,
-                                          humans_visible=state.humans_visible)
-        self.republish_local_map(state.local_map, state.robot_pose, self.env.env_config)
-        self.republish_global_map(state.global_map, self.env.env_config)
-
-
-    def republish_global_map(self, global_map, config):
-        quat = tf.transformations.quaternion_from_euler(0, 0, 0)
-        current_time = rospy.Time.now()
-        x, y = config.global_map_size[0][0], config.global_map_size[1][0]  #robot_pose[0], robot_pose[1]
-        map = global_map[:, :, 0]* 100
-
-        occupancy_grid = OccupancyGrid()
-        occupancy_grid.info = MapMetaData()
-        occupancy_grid.info.map_load_time = current_time
-        occupancy_grid.info.resolution = 1/config.global_map_scale
-        occupancy_grid.info.width = int(config.global_map_width)
-        occupancy_grid.info.height = int(config.global_map_height)
-        occupancy_grid.info.origin = Pose(Point(x, y, 0.), Quaternion(*quat))
-
-        occupancy_grid.data = np.flip(map, axis=0).flatten().astype(np.int8)
-
-        occupancy_grid.header.stamp = current_time
-        occupancy_grid.header.frame_id = "global_map"
-
-        quat = tf.transformations.quaternion_from_euler(0, 0, 0)
-        self.broadcaster.sendTransform(
-         (0, 0,  0),
-         quat,
-         current_time,
-         "global_map",
-         "map"
-        )
-
-        self._global_map_pub.publish(occupancy_grid)
-        # self.global_map_pub_rate.sleep()
-
-
-    def republish_local_map(self, local_map, robot_pose, config):
-        quat = tf.transformations.quaternion_from_euler(0, 0, 0)
-        current_time = rospy.Time.now()
-        x, y = config.local_map_size[0][0], config.local_map_size[1][0]  #robot_pose[0], robot_pose[1]
-        map = np.maximum(local_map[:, :, 0], local_map[:, :, 1]) * 100
-
-        occupancy_grid = OccupancyGrid()
-        occupancy_grid.info = MapMetaData()
-        occupancy_grid.info.map_load_time = current_time
-        occupancy_grid.info.resolution = 1/config.local_map_scale
-        occupancy_grid.info.width = int(config.local_map_width)
-        occupancy_grid.info.height = int(config.local_map_height)
-        occupancy_grid.info.origin = Pose(Point(x, y, 0.), Quaternion(*quat))
-
-        occupancy_grid.data = np.flip(map, axis=0).flatten().astype(np.int8)
-
-        occupancy_grid.header.stamp = current_time
-        occupancy_grid.header.frame_id = "local_map"
-
-        quat = tf.transformations.quaternion_from_euler(0, 0, robot_pose[2])
-        self.broadcaster.sendTransform(
-         (robot_pose[0], robot_pose[1],  0),
-         quat,
-         current_time,
-         "local_map",
-         "map"
-        )
-
-        self._local_map_pub.publish(occupancy_grid)
-        # self.local_map_pub_rate.sleep()
-
-
-    def republish_tracked_persons_2d(self, humans_id, humans_pose, humans_velocity, humans_visible):
-        tracked_persons_2d = TrackedPersons2d()
-        for i, id in enumerate(humans_id):
-
-            x, y, th = humans_pose[i]
-            lin, vth = humans_velocity[i]
-            # since all odometry is 6DOF we'll need a quaternion created from yaw
-            quat = tf.transformations.quaternion_from_euler(0, 0, th)
-            visible = humans_visible[i]
-
-            tracked_person_2d = TrackedPerson2d()
-            tracked_person_2d.track_id = int(id)
-            tracked_person_2d.velocity3D = Twist(Vector3(lin, 0, 0), Vector3(0, 0, vth))
-            tracked_person_2d.bounding_box = RegionOfInterest(0, 0, 0, 0, False)
-            tracked_person_2d.confidence = float(visible)
-            tracked_person_2d.pose3D = PoseStamped()
-            tracked_person_2d.pose3D.pose = Pose(Point(x, y, 0.), Quaternion(*quat))
-
-            current_time = rospy.Time.now()
-            tracked_person_2d.pose3D.header.stamp = current_time
-
-            self.broadcaster.sendTransform(
-             (x, y, 0.),
-             quat,
-             current_time,
-             "humans_" + str(id),
-             "map"
-            )
-            tracked_persons_2d.detections.append(tracked_person_2d)
-        tracked_persons_2d.header.stamp = current_time
-
-        self._tracked_persons_2d_pub.publish(tracked_persons_2d)
-        # self.tracked_persons_2d_pub_rate.sleep()
-
-
-    def republish_joint_states(self, joint_angles, joint_velocities):
-        assert (len(self.joint_names) == len(joint_angles)) and (len(self.joint_names) == len(joint_velocities)), "Joint params should habe the same length"
-
-        joint_state = JointState()
-        for i, name in enumerate(self.joint_names):
-            joint_state.name.append(name)
-            joint_state.position.append(joint_angles[i])
-            joint_state.velocity.append(joint_velocities[i])
-        joint_state.header.stamp = rospy.Time.now()
-
-        self._joint_states_pub.publish(joint_state)
-        # self.joint_states_pub_rate.sleep()
-
-
-    def republish_odom(self, robot_pose, robot_velocity):
-        x, y, th = robot_pose
-        lin, vth = robot_velocity
-        # vx, vy = -lin*np.sin(th), lin*np.cos(th)
-        current_time = rospy.Time.now()
-
-        # compute odometry in a typical way given the velocities of the robot
-        # dt = (current_time - self.last_time).to_sec()
-        # delta_x = (vx * np.cos(th) - vy * np.sin(th)) * dt
-        # delta_y = (vx * np.sin(th) + vy * np.cos(th)) * dt
-        # delta_th = vth * dt
-
-        # x += delta_x
-        # y += delta_y
-        # th += delta_th
-
-        # since all odometry is 6DOF we'll need a quaternion created from yaw
-        odom_quat = tf.transformations.quaternion_from_euler(0, 0, th)
-
-        # first, we'll publish the transform over tf
-        self.broadcaster.sendTransform(
-         (x, y, 0.),
-         odom_quat,
-         current_time,
-         "odom",
-         "map"
-        )
-
-        # next, we'll publish the odometry message over ROS
-        odom = Odometry()
-        odom.header.stamp = current_time
-        odom.header.frame_id = "odom"
-
-        # set the position
-        odom.pose.pose = Pose(Point(x, y, 0.), Quaternion(*odom_quat))
-
-        # set the velocity
-        odom.child_frame_id = "map"
-        odom.twist.twist = Twist(Vector3(lin, 0, 0), Vector3(0, 0, vth))
-
-        # publish the message
-        self._odom_pub.publish(odom)
-
-        # self.last_time = current_time
-        # self.odom_pub_rate.sleep()
-
-
-    def _cmd_vel_callback(self, data):
-        self.action[2] = data.linear.x
-        self.action[1] = data.angular.z
-
-
-    def _pan_vel_callback(self, data):
-        self.action[0] = 0.
-        if not self.get_indexes:
-            # joint_states : head_1_joint, head_2_joint, wheel_left_joint, wheel_right_joint, caster_left_joint, caster_right_joint
-            for index, name in enumerate(data.joint_names):
-                if name == "head_1_joint":
-                    self.pan_ind = index
-                if name == "head_2_joint":
-                    self.tilt_ind = index
-            self.get_indexes = True
-
-        if self.get_indexes:
-            if data.points:
-                if data.points[0].positions:
-                    self.action[0] = (data.points[0].positions[self.pan_ind] - self.pan_ang)/data.points[0].time_from_start.to_sec()
-                    self.pan_ang = data.points[0].positions[self.pan_ind]
-
-
-
-    def _timer_callback(self, event):
-        self.action = np.zeros_like(self.action)
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the simulation")
-        self.env.close()
-        rospy.loginfo("Killing the Simulator node")
diff --git a/src/robot_behavior/src/robot_behavior/track_frame_node.py b/src/robot_behavior/src/robot_behavior/track_frame_node.py
deleted file mode 100755
index aab5cbeea620d309df9202da10b422c87003602b..0000000000000000000000000000000000000000
--- a/src/robot_behavior/src/robot_behavior/track_frame_node.py
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/usr/bin/env python3
-import rospy
-import numpy as np
-
-from spring_msgs.msg import Frame, TrackedPerson2d, TrackedPersons2d
-from hri_msgs.msg import IdsList
-from tf2_msgs.msg import TFMessage
-from sensor_msgs.msg import RegionOfInterest
-from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3, PoseStamped
-import tf
-import omnicam
-
-from robot_behavior.utils import constraint_angle
-from robot_behavior.position_tracker import PositionTracker, TrackedPosition
-
-
-CONFIDENCE_THRESHOLD = 0.5
-MAX_DISTANCE = 5 # discard points further from x m
-
-
-class TrackFrame:
-    def __init__(self, cam_optical_frame, frame_topic, map_frame, republish_from_openpose, robot_frame="base_footprint"):
-        self.robot_frame = robot_frame
-        self.map_frame = map_frame
-        self.frame_topic = frame_topic
-        self.republish_from_openpose = republish_from_openpose
-
-        self.tf_data = None
-        self.frame_data = None
-
-        self.robot_2_cam_mat = None
-        self.cam_optical_frame = cam_optical_frame
-
-        self.id_list = []
-        self.id_features = []
-
-        self.idx_l_heel = None
-        self.idx_r_heel = None
-        self.idx_l_big_toe = None
-        self.idx_r_big_toe = None
-
-        # TODO : READ FROM CALIBRATION FILE
-        self.homo_mat = np.array([[-3.2162706 , -0.07751149, -0.03978077],
-                                  [-0.00879659, -0.92802197, -3.15779137],
-                                  [-0.0107728 , -2.2602428 ,  1.        ]])
-        self.geom = omnicam.OmniCameraGeometry(2.79,-0.09,0.5,-0.003,-0.002,1729.,1729.,637.,482.,1280,960)
-
-
-        self.position_tracker = PositionTracker()
-        self.tracked_persons = None
-
-        self.current_time = None
-        
-        self._init()        
-    
-
-    def _init(self):
-        if self.republish_from_openpose:
-            self.idx_l_heel = 21  # 21/29 ## openpose/mediapipe
-            self.idx_r_heel = 24  # 24/30  ## openpose/mediapipe
-            self.idx_l_big_toe = 19  # 19/31  ## openpose/mediapipe
-            self.idx_r_big_toe = 22  # 22/32  ## openpose/mediapipe
-        else:
-            self.idx_l_heel = 29  # 21/29 ## openpose/mediapipe
-            self.idx_r_heel = 30  # 24/30  ## openpose/mediapipe
-            self.idx_l_big_toe = 31  # 19/31  ## openpose/mediapipe
-            self.idx_r_big_toe = 32  # 22/32  ## openpose/mediapipe
-
-        self._check_all_sensors_ready()
-        
-        rospy.Subscriber(self.frame_topic, Frame, callback=self._frame_callback, queue_size=1)
-        self._tracked_persons_2d_pub  = rospy.Publisher('/tracked_persons_2d', TrackedPersons2d, queue_size=10)
-        
-        self.tf_listener = tf.TransformListener()
-        self.tf_broadcaster = tf.TransformBroadcaster()
-
-        rospy.on_shutdown(self.shutdown)                
-
-        rospy.loginfo("Track Frames Initialization Ended")
-
-
-    def body_parts_to_local_position_and_orientation(self, body_parts):
-        """
-        Computes position and orientation in the local map, given body parts in the image plane.
-        Returns : x, y, theta (theta=0 means person parallel to the robot)
-        """
-        if not body_parts:
-            return None
-
-        body_indices = [self.idx_l_heel, self.idx_r_heel, self.idx_l_big_toe, self.idx_r_big_toe]
-        if np.any([body_parts[i].score < CONFIDENCE_THRESHOLD for i in body_indices]):
-            return None
-
-        x_l_heel, y_l_heel =  body_parts[self.idx_l_heel].pixel.x, body_parts[self.idx_l_heel].pixel.y
-        x_r_heel, y_r_heel =  body_parts[self.idx_r_heel].pixel.x, body_parts[self.idx_r_heel].pixel.y
-        x_l_big_toe, y_l_big_toe =  body_parts[self.idx_l_big_toe].pixel.x, body_parts[self.idx_l_big_toe].pixel.y
-        x_r_big_toe, y_r_big_toe =  body_parts[self.idx_r_big_toe].pixel.x, body_parts[self.idx_r_big_toe].pixel.y
-
-        points_image_plane = np.array([[x_l_heel, y_l_heel],[x_r_heel, y_r_heel], [x_l_big_toe, y_l_big_toe], [x_r_big_toe, y_r_big_toe]])
-        points_local_map = np.zeros((4, 2))
-
-        for idx, point_2d in enumerate(points_image_plane):
-            x, y, z = self.geom.lift_projective(point_2d[0], point_2d[1])
-            x, y, z = np.dot(self.homo_mat, [x,y,z])
-            points_local_map[idx, :] = [y/z, -x/z]
-
-        center_point = np.mean(points_local_map, axis=0)
-        if np.linalg.norm(center_point) > MAX_DISTANCE:
-            return None
-        angle = np.arctan2(((points_local_map[2, 1] - points_local_map[0, 1]) + (points_local_map[3, 1]-points_local_map[1, 1])),
-                            (points_local_map[2, 0] - points_local_map[0, 0]) + (points_local_map[3, 0]-points_local_map[1, 0]))
-
-        return center_point[0], center_point[1], angle
-
-    def local_to_global_map(self, x, y, angle):
-        """
-        Given x,y,angle in local robot map,
-        computes x,y,angle in global map (self.map_frame) frame.
-        Returns: x,y,angle
-        """
-        quat = tf.transformations.quaternion_from_euler(0, 0, angle)
-        try:
-            map_to_robot_tf = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, self.current_time)
-        except:
-            try:
-                map_to_robot_tf = self.tf_listener.lookupTransform(self.map_frame, self.robot_frame, rospy.Time(0))
-                rospy.logwarn("Could not get transformation between {} and {} at correct time. Using latest available.".format(self.map_frame, self.robot_frame))
-            except:
-                rospy.logerr("Could not get transformation between {} and {}.".format(self.map_frame, self.robot_frame))
-                return None
-
-        map_to_robot_tf_mat = self.tf_listener.fromTranslationRotation(map_to_robot_tf[0], map_to_robot_tf[1])
-        robot_to_human_tf_mat = self.tf_listener.fromTranslationRotation([x, y, 0.], quat)
-        map_to_human_tf_mat = np.dot(map_to_robot_tf_mat, robot_to_human_tf_mat)
-
-        map_to_human_tvec = tf.transformations.translation_from_matrix(map_to_human_tf_mat)
-        cos_angle, sin_angle = map_to_human_tf_mat[0, 0], map_to_human_tf_mat[1, 0]
-        angle = np.arctan2(sin_angle, cos_angle)
-        x_c, y_c = map_to_human_tvec[0], map_to_human_tvec[1]
-        return x_c, y_c, angle
-
-
-    def get_id_list(self):
-        self.current_time = self.frame_data.header.stamp
-        new_pts = []
-
-        for person_idx, person in enumerate(self.frame_data.persons):
-            p_local = self.body_parts_to_local_position_and_orientation(person.bodyParts)
-            if p_local is None:
-            #     new_pts.append(TrackedPosition(vec=None, visual_id=self.frame_data.ids[person_idx]))
-                continue
-            p_global = self.local_to_global_map(*p_local)
-            if p_global is None:
-            #     new_pts.append(TrackedPosition(vec=None, visual_id=self.frame_data.ids[person_idx]))
-                continue
-            x,y,angle = p_global
-
-            new_pts.append(TrackedPosition(vec=np.array([x, y, angle]), visual_id=self.frame_data.ids[person_idx]))
-        if new_pts:
-            self.position_tracker.push(new_pts)
-
-    def _publish_tracked_persons_2d(self):
-        tracked_persons = self.position_tracker.tracked_positions()
-        tracked_persons_2d = TrackedPersons2d()
-       
-        for id, person in tracked_persons.items():
-            angle = person.vec[2]
-            quat = tf.transformations.quaternion_from_euler(0, 0, angle)
-            x, y  = person.vec[:2]
-            visible = 0
-
-            map_to_human_mat = self.tf_listener.fromTranslationRotation([x, y, 0.], quat)
-            trans_map_2_human = tf.transformations.translation_from_matrix(map_to_human_mat)
-            quat_map_2_human = tf.transformations.quaternion_from_matrix(map_to_human_mat)
-
-
-            self.tf_broadcaster.sendTransform(
-             trans_map_2_human,
-             quat_map_2_human,
-             self.current_time,
-             "body_" + str(id),
-             self.map_frame
-            )
-
-            lin_vel = 0.
-            ang_vel = 0.
-
-            rospy.logdebug("id : {}, pos : {}, vel : {}".format(id, person[:2], person[-2:]))
-
-            tracked_person_2d = TrackedPerson2d()
-            tracked_person_2d.track_id = int(id)
-            tracked_person_2d.velocity3D = Twist(Vector3(lin_vel, 0, 0), Vector3(0, 0, ang_vel))
-            tracked_person_2d.bounding_box = RegionOfInterest(0, 0, 0, 0, False)
-            tracked_person_2d.confidence = float(visible)
-            tracked_person_2d.pose3D = PoseStamped()
-            tracked_person_2d.pose3D.pose = Pose(Point(x, y, 0.), Quaternion(*quat))
-            tracked_person_2d.pose3D.header.stamp = self.current_time
-
-            tracked_persons_2d.detections.append(tracked_person_2d)
-
-        tracked_persons_2d.header.stamp = self.current_time
-        self._tracked_persons_2d_pub.publish(tracked_persons_2d)
-
-        self._ids_list_pub = rospy.Publisher('/human/bodies/tracked', IdsList, queue_size=10)
-        id_msg = IdsList()
-        ids = []
-        id_msg.header.stamp = self.current_time
-        id_msg.ids = ids
-        for id, last_seen in self.position_tracker.last_seen.items():
-            if last_seen == 0.0:
-                ids.append("{0:0=4d}/".format(id))
-        self._ids_list_pub.publish(id_msg)
-
-
-
-    def _check_all_sensors_ready(self):
-        rospy.logdebug("START ALL SENSORS READY")
-        self._check_frame_ready()
-        rospy.logdebug("ALL SENSORS READY")
-
-
-    def _check_frame_ready(self):
-        self.frame_data = None
-        rospy.logdebug("Waiting for {} to be READY...".format(self.frame_topic))
-        while self.frame_data is None and not rospy.is_shutdown():
-            try:
-                self.frame_data = rospy.wait_for_message(self.frame_topic, Frame, timeout=5.0)
-                rospy.logdebug("Current {} READY=>".format(self.frame_topic))
-
-            except:
-                rospy.logerr("Current {} not ready yet, retrying for getting 2D openpose frames".format(self.frame_topic))
-        return self.frame_data
-
-
-    def _frame_callback(self, data):
-        self.frame_data = data
-        self.get_id_list()
-        self._publish_tracked_persons_2d()
-
-
-
-    def shutdown(self):
-        rospy.loginfo("Stopping the RepublishFromOpenpose node")
-        # send empty goal message
-        rospy.loginfo("Killing the RepublishFromOpenpose node")
diff --git a/src/robot_behavior/src/robot_behavior/utils.py b/src/robot_behavior/src/robot_behavior/utils.py
index 99fcc970c17e3ff560a6f8dd4d0bae11fa439587..55bf1dbe25a5bfea3bf9c419ce7b2ad6af061bb6 100755
--- a/src/robot_behavior/src/robot_behavior/utils.py
+++ b/src/robot_behavior/src/robot_behavior/utils.py
@@ -1,6 +1,10 @@
 #!/usr/bin/env python3
 import numpy as np
 import tf
+from scipy.interpolate import RectBivariateSpline
+import jax.numpy as jnp
+from jax import grad, jit, vmap, jacfwd, custom_jvp, partial
+
 
 def constraint_angle(angle, min_value=-np.pi, max_value=np.pi):
 
@@ -19,6 +23,38 @@ def constraint_angle(angle, min_value=-np.pi, max_value=np.pi):
     return new_angle
 
 
+def local_to_global_jax(robot_position, x):
+    angle = rotmat_2d_jax(robot_position[:, -1])
+    y = vmap(vmpa_dot_jax, in_axes=(0, None))(jnp.moveaxis(angle, -1, 0), x)
+    return jnp.array(y + robot_position[:, :2])
+
+
+def vmpa_dot_jax(a, b):
+    return a.dot(b)
+
+
+def rotmat_2d(angle):
+    return np.matrix([[np.cos(angle), -np.sin(angle)],
+                      [np.sin(angle), np.cos(angle)]])
+
+
+def rotmat_2d_jax(angle):
+    return jnp.array([[jnp.cos(angle), -jnp.sin(angle)],
+                      [jnp.sin(angle), jnp.cos(angle)]])
+
+
+def local_to_global(robot_position, x):
+    angle = robot_position[-1]
+    y = rotmat_2d(angle).dot(x) + robot_position[:2]
+    return np.array(y).reshape(x.shape)
+
+
+def global_to_local(robot_position, x):
+    angle = robot_position[-1]
+    y = rotmat_2d(-angle).dot(x - robot_position[:2])
+    return np.array(y).reshape(x.shape)
+
+
 def pose_stamped_2_mat(p):
     q = p.pose.orientation
     pos = p.pose.position
@@ -32,3 +68,95 @@ def get_color(idx):
     color = ((37 * idx) % 255, (17 * idx) % 255, (29 * idx) % 255)
 
     return color
+
+
+def meter_to_px(point, map_size, resolution):
+    if point[0] < map_size[0][0]:
+        point[0] = map_size[0][0]
+    if point[0] > map_size[0][1]:
+        point[0] = map_size[0][1]
+    if point[1] < map_size[1][0]:
+        point[1] = map_size[1][0]
+    if point[1] > map_size[1][1]:
+        point[1] = map_size[1][1]
+
+    point_px = [0., 0.]
+    point_px[0] = abs(map_size[0][0] - point[0])/resolution
+    point_px[1] = abs(map_size[1][1] - point[1])/resolution
+
+    return np.rint(point_px)
+
+def optimal_path_2d(travel_time, starting_point, dx, coords, goal=None, max_d_orientation=None, N=100):
+    """
+    Find the optimal path from starting_point to the zero contour
+    of travel_time. dx is the grid spacing
+    Solve the equation x_t = - grad t / | grad t |
+    """
+    
+    grad_t_y, grad_t_x = np.gradient(travel_time, dx)
+    if isinstance(travel_time, np.ma.MaskedArray):
+        grad_t_y[grad_t_y.mask] = 0.0
+        grad_t_y = grad_t_y.data
+        grad_t_x[grad_t_x.mask] = 0.0
+        grad_t_x = grad_t_x.data
+
+    d_interp = RectBivariateSpline(coords[1], coords[0],
+                                   travel_time)
+    gradx_interp = RectBivariateSpline(coords[1], coords[0],
+                                       grad_t_x)
+    grady_interp = RectBivariateSpline(coords[1], coords[0],
+                                       grad_t_y)
+
+    def get_velocity(position):
+        """ Return the unit-norm gradient velocity at *position*. """
+        x, y = position
+        vel = np.array([gradx_interp(y, x)[0][0],
+                        grady_interp(y, x)[0][0]])
+        return vel / np.linalg.norm(vel)
+
+    def get_distance(position):
+        x, y = position
+        return d_interp(y, x)
+
+    def euler_point_update(pos, ds):
+        return pos - get_velocity(pos) * ds
+
+    def runge_kutta(pos, ds):
+        """ Fourth-order Runge-Kutta point update. """
+        k1 = ds * get_velocity(pos)
+        k2 = ds * get_velocity(pos - k1/2.0)
+        k3 = ds * get_velocity(pos - k2/2.0)
+        k4 = ds * get_velocity(pos - k3)
+        v = (k1 + 2*k2 + 2*k3 + k4)/6.0
+        return pos - v, v
+
+    # def sym(x, v):
+    #     x = x[0], 0.5 * (coords[1][-1] + coords[1][0]) - x[1]
+    #     v = v[0], -v[1]
+    #     return x, v
+    #
+    # starting_point, _ = sym(starting_point, [0, 0])
+
+    x, v = runge_kutta(starting_point, dx)
+    xl, yl, vxl, vyl, dl = [], [], [], [], []
+    starting_distance = get_distance(starting_point)
+
+    for i in range(N):
+        # xp, vp = sym(x, v)
+        d = get_distance(x)[0][0]
+        # if negative value d < 0, waypoint in an obstacle
+        if d < 0:
+            return False
+                # the goal is implicitly defined in the travel_time map, but we test it explicitly because we cannot really trust d
+        if ((goal is not None and max_d_orientation is not None) and
+                (np.linalg.norm(goal - np.asarray(x)) < max_d_orientation)):
+            if dl:
+                break
+        xl.append(x[0])
+        yl.append(x[1])
+        vxl.append(v[0])
+        vyl.append(v[1])
+        dl.append(d)
+        x, v = runge_kutta(x, dx)
+
+    return xl, yl, vxl, vyl, dl
diff --git a/src/spring_msgs b/src/spring_msgs
new file mode 160000
index 0000000000000000000000000000000000000000..92f213883475cf028433e4d3725a2d4b7a4018f4
--- /dev/null
+++ b/src/spring_msgs
@@ -0,0 +1 @@
+Subproject commit 92f213883475cf028433e4d3725a2d4b7a4018f4