Mentions légales du service

Skip to content
Snippets Groups Projects
Commit 6318ebdf authored by Rakshith Madhavan's avatar Rakshith Madhavan
Browse files

Hloc as an initial guess for rtabmap

parent f2812888
Branches
No related tags found
No related merge requests found
...@@ -61,8 +61,8 @@ find_package(catkin REQUIRED COMPONENTS ...@@ -61,8 +61,8 @@ find_package(catkin REQUIRED COMPONENTS
## Generate services in the 'srv' folder ## Generate services in the 'srv' folder
add_service_files( add_service_files(
FILES FILES
hloc_pose.srv
map_capture.srv map_capture.srv
InitPose.srv
) )
## Generate actions in the 'action' folder ## Generate actions in the 'action' folder
......
...@@ -41,16 +41,22 @@ RUN apt-get update && apt-get install -y \ ...@@ -41,16 +41,22 @@ RUN apt-get update && apt-get install -y \
mkdir -p ~/catkin_ws/src mkdir -p ~/catkin_ws/src
# conda update -n base -c defaults conda && \ # conda update -n base -c defaults conda && \
COPY ./environment.yml /root/environment.yml
WORKDIR /root/
RUN wget --no-check-certificate https://cvg-data.inf.ethz.ch/hloc/netvlad/Pitts30K_struct.mat -O /root/VGG16-NetVLAD-Pitts30K.mat
RUN conda update -n base -c defaults conda && conda env create -f environment.yml
#INSTALL ROS
RUN source activate meshroom && conda install -c conda-forge -c robostack ros-noetic-desktop && pip install catkin_tools
COPY . /root/catkin_ws/src/ari_hloc/ COPY . /root/catkin_ws/src/ari_hloc/
WORKDIR /root/catkin_ws/src/ari_hloc WORKDIR /root/catkin_ws/src/ari_hloc/
RUN conda update -n base -c defaults conda && conda env create -f environment.yml RUN mkdir -p ./third_party/Hierarchical-Localization/third_party/netvlad && mv /root/VGG16-NetVLAD-Pitts30K.mat ./third_party/Hierarchical-Localization/third_party/netvlad/
RUN bash init.sh RUN bash init.sh
#INSTALL ROS # BUILD
RUN source activate meshroom && conda install -c conda-forge -c robostack ros-noetic-desktop && pip install catkin_tools
WORKDIR /root/catkin_ws WORKDIR /root/catkin_ws
RUN source activate meshroom && pip uninstall -y enum34 && catkin build RUN source activate meshroom && pip uninstall -y enum34 && catkin build
RUN chmod +x /root/catkin_ws/src/ari_hloc/scripts/entrypoint.sh RUN chmod +x /root/catkin_ws/src/ari_hloc/scripts/entrypoint.sh
ENTRYPOINT ["/root/catkin_ws/src/ari_hloc/scripts/entrypoint.sh","roslaunch","ari_hloc","hloc_map_loc.launch"] ENTRYPOINT ["/root/catkin_ws/src/ari_hloc/scripts/entrypoint.sh","roslaunch","ari_hloc","hloc_map_loc.launch"]
CMD ["hloc_localization_mode:=False", "pose_topic:=/robot_pose"] CMD ["hloc_localization_mode:=True", "pose_topic:=/robot_pose"]
\ No newline at end of file \ No newline at end of file
...@@ -27,7 +27,7 @@ For the mapping mode. ...@@ -27,7 +27,7 @@ For the mapping mode.
For the last `-v` replace `<cache in basestation>` with the path in basestation where you want the cache to be stored. This cache will be used by the localizer. For the last `-v` replace `<cache in basestation>` with the path in basestation where you want the cache to be stored. This cache will be used by the localizer.
Default `cache_folder` is `/home/hloc_mapping/cache`. If nothing is passed, it creates the cache folder in this location in your basestation. Default `cache_folder` is `/home/hloc_mapping/cache`. If nothing is passed, it creates the cache folder in this location in your basestation.
In this case, the command would look like: In this case, the command would look like:
`docker run -it --rm --env ROS_MASTER_URI=http://10.68.0.1:11311 --network=host --gpus all -v /var/run/docker.sock:/var/run/docker.sock -v /home/hloc_mapping/cache:/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache registry.gitlab.inria.fr/spring/dockers/wp2_hloc:latest pose_topic:=<relevant pose topic> ` `docker run -it --rm --env ROS_MASTER_URI=http://10.68.0.1:11311 --network=host --gpus all -v /var/run/docker.sock:/var/run/docker.sock -v /home/hloc_mapping/cache:/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache registry.gitlab.inria.fr/spring/dockers/wp2_hloc:latest pose_topic:=<relevant pose topic> `
**Note: IMPORTANT: Pass the SAME `<cache in basestation>` which you mount in the docker to the argument `cache_folder`** **Note: IMPORTANT: Pass the SAME `<cache in basestation>` which you mount in the docker to the argument `cache_folder`**
...@@ -88,4 +88,11 @@ Defaults are: `/front_camera/fisheye/image_raw/compressed` and `/rear_camera/fis ...@@ -88,4 +88,11 @@ Defaults are: `/front_camera/fisheye/image_raw/compressed` and `/rear_camera/fis
2. `python scripts/visualize_alignment.py <cache folder>/IOConverter/<inner_cache_folder>/images.txt <cache folder>/ModelsAligner/<inner_cache_folder>/images.txt` 2. `python scripts/visualize_alignment.py <cache folder>/IOConverter/<inner_cache_folder>/images.txt <cache folder>/ModelsAligner/<inner_cache_folder>/images.txt`
NOTE: Inner Cache folder is the folder which is created by the mapping process. It usually is a long alphanumeric string. NOTE: Inner Cache folder is the folder which is created by the mapping process. It usually is a long alphanumeric string.
The red points are the positions according to the local mapping (from the `pose_topic`), and the blue points are the positions from the Hloc mapping aligned to the local frame. The red points are the positions according to the local mapping (from the `pose_topic`), and the blue points are the positions from the Hloc mapping aligned to the local frame.
\ No newline at end of file
## TODO:
1. Change it back to a service, which is called when rtabmap loses localization
2. publish pose to /slam/initialpose when called.
\ No newline at end of file
...@@ -2,8 +2,6 @@ ...@@ -2,8 +2,6 @@
source activate meshroom source activate meshroom
pip install -e ./third_party/Hierarchical-Localization/ pip install -e ./third_party/Hierarchical-Localization/
# download large files # download large files
mkdir ./third_party/Hierarchical-Localization/third_party/netvlad
wget --no-check-certificate https://cvg-data.inf.ethz.ch/hloc/netvlad/Pitts30K_struct.mat -O ./third_party/Hierarchical-Localization/third_party/netvlad/VGG16-NetVLAD-Pitts30K.mat
echo "Wget done" echo "Wget done"
# create containers # create containers
# docker pull uodcvip/colmap:latest # docker pull uodcvip/colmap:latest
......
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
<param name="rear_fisheye_topic" type="string" value="$(arg mapping_rear_fisheye_topic)" /> <param name="rear_fisheye_topic" type="string" value="$(arg mapping_rear_fisheye_topic)" />
</node> </node>
<node if="$(arg hloc_localization_mode)" pkg="ari_hloc" type="hloc_localizer.py" name="hloc_loc"> <node if="$(arg hloc_localization_mode)" pkg="ari_hloc" type="hloc_initializer.py" name="hloc_loc">
<param name="front_fisheye_topic" type="string" value="$(arg localization_front_fisheye_topic)" /> <param name="front_fisheye_topic" type="string" value="$(arg localization_front_fisheye_topic)" />
<param name="rear_fisheye_topic" type="string" value="$(arg localization_rear_fisheye_topic)" /> <param name="rear_fisheye_topic" type="string" value="$(arg localization_rear_fisheye_topic)" />
<param name="num_pairs" type="int" value="$(arg num_pairs_matching)" /> <param name="num_pairs" type="int" value="$(arg num_pairs_matching)" />
......
{
"header": {
"pipelineVersion": "2.2",
"releaseVersion": "2021.1.0",
"fileVersion": "1.1",
"nodesVersions": {
"KeyframeSelector": "0.1",
"PoseFilter": "0.1",
"HlocMapCreator": "0.1",
"Matcher": "0.1",
"KeypointsDetector": "0.1",
"Mapper": "0.1"
}
},
"graph": {
"Matcher_1": {
"nodeType": "Matcher",
"position": [
267,
-291
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "fc2686c675158e5b0ff4fee21c3a219bf4e1f244"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"databaseFile": "{KeypointsDetector_1.output}",
"inputMatchesFormat": "no data",
"inputMatches": "",
"algorithm": "COLMAP",
"matchingType": "Sequential",
"vocabTree": "/root/catkin_ws/src/ari_hloc/vocab_tree_flickr100K_words32K.bin",
"matchingTreshold": 2,
"verboseLevel": "info"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/",
"databaseOutputFile": "{cache}/{nodeType}/{uid0}/database.db"
}
},
"KeypointsDetector_1": {
"nodeType": "KeypointsDetector",
"position": [
15,
-260
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "bbe4a27a14fb382ed4a052c0d9699b4bdfb7f957"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"imagesFolder": "{PoseFilter_1.output}",
"database": "",
"algorithm": "SIFT",
"cameraModel": "OPENCV_FISHEYE",
"removeImages": true,
"verboseLevel": "info"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/database.db"
}
},
"PoseFilter_1": {
"nodeType": "PoseFilter",
"position": [
-255,
-260
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "aaa372b517a3eb60d3c67506584f611af9baf127"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"inputDir": "{KeyframeSelector_1.output}",
"imageFolderNames": "{KeyframeSelector_1.imageFolderNames}",
"PoseFile": "{KeyframeSelector_1.PoseFile}",
"PoseFilter": true,
"distanceThreshold": 0.1,
"orientationThreshold": 30.0,
"maxCamerasinPosition": 1,
"imageType": "jpg",
"verboseLevel": "info"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/"
}
},
"KeyframeSelector_1": {
"nodeType": "KeyframeSelector",
"position": [
-505,
-262
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "45279c32f343f7cd7a2ed395d0ad152df26c737b"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"recordingDir": "/root/data_folder",
"imageFolderNames": "/root/data_folder/ImageFolders",
"PoseFile": "/root/data_folder/robot_pose.json",
"recordingSource": "BROCA",
"pvBlurThreshold": 10.0,
"pvMinFrameOffset": 0,
"vlcMinFrameOffset": 0,
"verboseLevel": "debug"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/"
}
},
"HlocMapCreator_2": {
"nodeType": "HlocMapCreator",
"position": [
805,
-297
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "1f02ddaf1aee25d2096c686be756624ba0d40027"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"inputSfM": "",
"imageDirectory": "{Mapper_1.imagesDirectory}",
"imageFolderNames": "{PoseFilter_1.imageFolderNames}",
"copyDensePts": false,
"verboseLevel": "debug"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/"
}
},
"Mapper_1": {
"nodeType": "Mapper",
"position": [
514,
-289
],
"parallelization": {
"blockSize": 0,
"size": 1,
"split": 1
},
"uids": {
"0": "aaf4ca5817dcafee13985dda902b2da1bbd733bc"
},
"internalFolder": "{cache}/{nodeType}/{uid0}/",
"inputs": {
"databaseFile": "{Matcher_1.databaseOutputFile}",
"imagesDirectory": "{KeypointsDetector_1.imagesFolder}",
"algorithm": "COLMAP",
"ignoreTwoView": true,
"verboseLevel": "info"
},
"outputs": {
"output": "{cache}/{nodeType}/{uid0}/"
}
}
}
}
\ No newline at end of file
from pathlib import Path
from hloc import matchers
from hloc import extractors
from hloc import extract_features, match_features, pairs_from_retrieval
from hloc import localize_sfm
from hloc.utils.read_write_model import read_images_binary
from hloc.utils.io import list_h5_names
from hloc.utils.base_model import dynamic_load
import h5py
import torch
class Hloc():
    """Holds the hloc models and database features needed to localize query
    images against a prebuilt SfM map.

    Loads the NetVLAD retrieval model, the SuperPoint feature extractor and
    the SuperGlue matcher once at construction so repeated localization calls
    do not pay model-loading cost again.
    """

    def __init__(self, map_path, num_pairs=25):
        """
        :param map_path: folder containing 'sfm_superpoint+superglue',
            'feats-superpoint-n4096-r1024.h5' and 'global-feats-netvlad.h5'
            produced by the hloc mapping pipeline.
        :param num_pairs: number of retrieval pairs used during matching.
        """
        map_path = Path(map_path)
        self.retrieval_conf = extract_features.confs['netvlad']
        self.feature_conf = extract_features.confs['superpoint_aachen']
        self.matcher_conf = match_features.confs['superglue']
        # database features and descriptors
        self.reference_sfm = map_path / 'sfm_superpoint+superglue'
        self.db_feature_path = map_path / 'feats-superpoint-n4096-r1024.h5'
        self.db_descriptors_path = map_path / 'global-feats-netvlad.h5'
        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
        self.db_desc = None
        self.db_data = self.get_db()
        self.feature_model = self.get_model(self.feature_conf)
        self.retrieval_model = self.get_model(self.retrieval_conf)
        self.matcher_model = self.get_model(self.matcher_conf, match=True)
        self.models = (self.feature_model, self.retrieval_model, self.matcher_model)
        self.num_matching_pairs = num_pairs

    def get_model(self, conf, match=False):
        """Instantiate an extractor (match=False) or matcher (match=True)
        model from its hloc config and move it to self.device in eval mode."""
        if not match:
            Model = dynamic_load(extractors, conf['model']['name'])
        else:
            Model = dynamic_load(matchers, conf['model']['name'])
        model = Model(conf['model']).eval().to(self.device)
        return model

    def get_db(self):
        """Load the global descriptors of all database images.

        :returns: tuple (db_desc, db_names) where db_desc is a float tensor of
            stacked global descriptors on self.device and db_names the
            corresponding image names from the reference SfM model.
        :raises ValueError: if the SfM model contains no images.
        """
        # Local import: numpy is not imported at module level, and get_descriptors
        # below needs np.stack — without this the call raised NameError.
        import numpy as np

        db_model = self.reference_sfm
        db_descriptors = self.db_descriptors_path
        if isinstance(db_descriptors, (Path, str)):
            db_descriptors = [db_descriptors]
        # Map every image name to the index of the h5 file that stores it,
        # so descriptors can be pulled from multiple files if needed.
        name2db = {n: i for i, p in enumerate(db_descriptors)
                   for n in list_h5_names(p)}
        images = read_images_binary(db_model / 'images.bin')
        db_names = [i.name for i in images.values()]
        if len(db_names) == 0:
            raise ValueError('Could not find any database image.')

        def get_descriptors(names, path, name2idx=None, key='global_descriptor'):
            # Reuse the device decided in __init__ instead of re-probing CUDA.
            if name2idx is None:
                with h5py.File(str(path), 'r') as fd:
                    desc = [fd[n][key].__array__() for n in names]
            else:
                desc = []
                for n in names:
                    with h5py.File(str(path[name2idx[n]]), 'r') as fd:
                        desc.append(fd[n][key].__array__())
            return torch.from_numpy(np.stack(desc, 0)).to(self.device).float()

        db_desc = get_descriptors(db_names, db_descriptors, name2db)
        return (db_desc, db_names)
#!/usr/bin/env python
import Hloc
import rospy
from geometry_msgs.msg import Pose,PoseStamped,PoseWithCovarianceStamped
from sensor_msgs.msg import CompressedImage
import cv2
import rospy
import tf
import numpy as np
import os
import sys
# Make the package root importable so the sibling 'src' and 'third_party'
# folders can be imported below (this script lives one level inside the package).
file_path = __file__
dir_path = os.path.dirname(file_path)
outer_dir_path = os.path.dirname(dir_path)
sys.path.append(outer_dir_path)
import src.colmap.ColmapIO as colmap
from third_party.meshroom.bin import meshroom_compute
from ari_hloc.srv import InitPose,InitPoseResponse
# Latest decoded frames per camera; filled by the Camera subscribers.
front_fisheye_image = None
rear_fisheye_image = None
CameraObjects = []
# Topics and retrieval-pair count come from the launch file's private params
# (node name 'hloc_loc' — see the launch file in this package).
front_fisheye_topic = rospy.get_param("/hloc_loc/front_fisheye_topic")
rear_fisheye_topic = rospy.get_param("/hloc_loc/rear_fisheye_topic")
num_pairs = rospy.get_param("/hloc_loc/num_pairs")
class Camera():
    """Subscribes to a compressed fisheye topic and keeps the latest decoded frame.

    Accepts either a single (name, topic) pair — the original interface — or
    the name and topic as two separate arguments, which is how the __main__
    block of this script constructs it (the two-argument call used to raise
    TypeError against the one-argument signature).
    """

    def __init__(self, camera, topic=None):
        """
        :param camera: (camera_name, topic) pair, or just the camera name
            when ``topic`` is given separately.
        :param topic: optional topic string; when provided, ``camera`` is
            treated as the camera name.
        """
        if topic is None:
            # Backward-compatible path: a single (name, topic) sequence.
            self.camera_name = camera[0]
            self.topic = camera[1]
        else:
            self.camera_name = camera
            self.topic = topic
        self.sub = rospy.Subscriber(self.topic, CompressedImage, self.fisheye_callback, queue_size=10)
        # Latest decoded BGR frame; None until the first message arrives.
        self.image = None

    def fisheye_callback(self, msg):
        """Decode the CompressedImage payload into a BGR numpy image."""
        np_arr = np.frombuffer(msg.data, np.uint8)
        self.image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
def initialize_pose(req, front_fisheye_image, rear_fisheye_image, pose_pub, hloc_object):
    """Service handler: localize the latest camera frame with hloc and publish
    the resulting pose (map frame) as a PoseWithCovarianceStamped.

    :param req: InitPose service request (unused beyond triggering the call).
    :param front_fisheye_image: latest front fisheye frame (may be None).
    :param rear_fisheye_image: latest rear fisheye frame (may be None).
    :param pose_pub: publisher for the initial-pose topic.
    :param hloc_object: preloaded Hloc instance (models + database features).
    :returns: InitPoseResponse with success flag.
    """
    # Write one query image for the localization pipeline; prefer the front
    # camera, fall back to the rear one. cv2.imwrite raises on a None image.
    try:
        rospy.loginfo("Writing query")
        cv2.imwrite("/root/data_folder/Query/front_fisheye_query_image.jpg", front_fisheye_image)
    except Exception:
        try:
            rospy.logwarn("FRONT FISHEYE TOPIC NOT PUBLISHING. TRYING REAR FISHEYE")
            cv2.imwrite("/root/data_folder/Query/rear_fisheye_query_image.jpg", rear_fisheye_image)
        except Exception:
            rospy.logwarn("REAR FISHEYE NOT PUBLISHING EITHER")
            # Return a proper service response instead of a bare bool so the
            # caller always receives an InitPoseResponse.
            return InitPoseResponse(False)

    hloc_pose = PoseWithCovarianceStamped()
    st = rospy.Time.now()
    succ = True
    try:
        meshroom_compute.execute("/root/catkin_ws/src/ari_hloc/pipelines/ARI_localizer.mg", additional_params=(hloc_object.models, hloc_object.db_data, hloc_object.num_matching_pairs))
    except Exception:
        succ = False
    hloc_pose.header.stamp = st
    if succ:
        loc_folder = "/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache/HlocLocalizer"
        cache_folder = os.path.join(loc_folder, os.listdir("/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache/HlocLocalizer")[0])
        loc_res_file = os.path.join(cache_folder, 'query_localization_results.txt')
        # Use a context manager so the results file is always closed.
        with open(loc_res_file, 'r') as file_reader:
            data_lines = file_reader.read().split("\n")
        line = data_lines[0]
        data = line.split(" ")
        _ = data[0]
        quat_hloc = data[1:5]
        quat_hloc = [float(d) for d in quat_hloc]
        w = quat_hloc[0]
        # hloc outputs in w,x,y,z format whereas ROS uses x,y,z,w
        quat = np.zeros_like(quat_hloc)
        quat[:3] = quat_hloc[1:4]
        quat[-1] = w
        t = data[5:]
        t = np.asarray([float(tr) for tr in t])
        hloc_pose.header.frame_id = "map"
        # INVERSE FOR MAP FRAME: hloc gives the world-to-camera rotation, so
        # negate the vector part to invert the quaternion for the map frame.
        hloc_pose.pose.pose.orientation.x = -quat[0]
        hloc_pose.pose.pose.orientation.y = -quat[1]
        hloc_pose.pose.pose.orientation.z = -quat[2]
        hloc_pose.pose.pose.orientation.w = quat[3]
        # BUGFIX: PoseWithCovarianceStamped nests the pose as .pose.pose —
        # the previous .pose.orientation access raised AttributeError here.
        R_inv = tf.transformations.quaternion_matrix([hloc_pose.pose.pose.orientation.x, hloc_pose.pose.pose.orientation.y, hloc_pose.pose.pose.orientation.z, hloc_pose.pose.pose.orientation.w])[:3, :3]
        # Camera center C = -R^-1 * t.
        C = -R_inv @ t.reshape((3, 1))
        hloc_pose.pose.pose.position.x = C[0]
        hloc_pose.pose.pose.position.y = C[1]
        hloc_pose.pose.pose.position.z = C[2]
        os.system("rm -rf /root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache/HlocLocalizer")
        rospy.loginfo("Deleted Cache")
        # Only publish when localization succeeded — publishing the untouched
        # (all-zero) pose on failure would reset the robot to the map origin.
        pose_pub.publish(hloc_pose)
    res = InitPoseResponse(succ)
    return res
if __name__ == '__main__':
    C = colmap.ColmapIO()
    cameras = {
        'front_fisheye': front_fisheye_topic,
        'rear_fisheye': rear_fisheye_topic,
    }
    # The mapping pipeline writes a single run folder under each node's cache;
    # pick the first (only) entry.
    map_outer_folder = '/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache/Mapper/'
    map_folder = os.path.join(map_outer_folder, os.listdir(map_outer_folder)[0])
    hloc_map_outer_folder = '/root/catkin_ws/src/ari_hloc/pipelines/MeshroomCache/HlocMapCreator/'
    hloc_map_folder = os.path.join(hloc_map_outer_folder, os.listdir(hloc_map_outer_folder)[0])
    # BUGFIX: 'Hloc' is imported as a module ('import Hloc'); the class lives
    # inside it, so calling the module directly raised TypeError.
    hloc_obj = Hloc.Hloc(hloc_map_folder, num_pairs=num_pairs)
    cameras_file = os.path.join(map_folder, 'cameras.txt')
    cameras_colmap = C.load_cameras(cameras_file)
    front_fisheye_cam = cameras_colmap[1]
    if len(cameras_colmap) == 2:
        rear_fisheye_cam = cameras_colmap[2]
    os.makedirs("/root/data_folder/Query", exist_ok=True)
    # COLMAP-style camera line: MODEL WIDTH HEIGHT f... pp... distortion...
    line = front_fisheye_cam['model'] + ' ' + str(front_fisheye_cam['width']) + ' ' + str(front_fisheye_cam['height']) + ' ' + ' '.join(str(f) for f in front_fisheye_cam['f']) + ' ' + ' '.join(str(pp) for pp in front_fisheye_cam['pp']) + ' ' + ' '.join(str(d) for d in front_fisheye_cam['rd'])
    with open('/root/data_folder/Query/params.txt', 'w') as f:
        f.write(f"{line}\n")
    cameras_list = list(cameras.keys())
    pose_init_topic = "/slam/initialpose"
    rospy.init_node('hloc_InitPose')
    pose_pub = rospy.Publisher(pose_init_topic, PoseWithCovarianceStamped, queue_size=10)
    # BUGFIX: Camera expects a single (name, topic) pair, not two positional
    # arguments — the old two-argument call raised TypeError.
    CameraObjects = [Camera((name, cameras[name])) for name in cameras_list]
    # Bind the current Camera images lazily: they are read when the service fires.
    pose_callback_lambda = lambda x: initialize_pose(x, CameraObjects[0].image, CameraObjects[1].image, pose_pub, hloc_obj)
    s = rospy.Service('stop_capture', InitPose, pose_callback_lambda)
    rospy.spin()
\ No newline at end of file
...@@ -137,6 +137,7 @@ def get_pose_from_image(): ...@@ -137,6 +137,7 @@ def get_pose_from_image():
quat = data[1:5] quat = data[1:5]
quat = [float(d) for d in quat] quat = [float(d) for d in quat]
w = quat[0] w = quat[0]
#hloc outputs in w,x,y,z format whereas ROS uses x,y,z,w
quat[:3] = quat[1:4] quat[:3] = quat[1:4]
quat[-1] = w quat[-1] = w
t = data[5:] t = data[5:]
...@@ -149,7 +150,7 @@ def get_pose_from_image(): ...@@ -149,7 +150,7 @@ def get_pose_from_image():
pose.pose.orientation.z = -quat[2] pose.pose.orientation.z = -quat[2]
pose.pose.orientation.w = quat[3] pose.pose.orientation.w = quat[3]
R_inv = tf.transformations.quaternion_matrix([pose.pose.orientation.x, pose.pose.orientation.y, pose.pose.orientation.z, pose.pose.orientation.w ])[:3,:3] R_inv = tf.transformations.quaternion_matrix([pose.pose.orientation.x, pose.pose.orientation.y, pose.pose.orientation.z, pose.pose.orientation.w ])[:3,:3]
C = -1 * R_inv @ t.reshape((3,1)) C = -1 * R_inv @ t.reshape((3,1))
pose.pose.position.x = C[0] pose.pose.position.x = C[0]
pose.pose.position.y = C[1] pose.pose.position.y = C[1]
pose.pose.position.z = C[2] pose.pose.position.z = C[2]
......
---
bool success
...@@ -121,7 +121,7 @@ This node COLMAP mapper on database which contains matches. ...@@ -121,7 +121,7 @@ This node COLMAP mapper on database which contains matches.
out_dir_docker = out_dir.replace(cache_folder, host_cache_folder) out_dir_docker = out_dir.replace(cache_folder, host_cache_folder)
colmap_container = UtilsContainers("docker", "uodcvip/colmap", out_dir_docker) colmap_container = UtilsContainers("docker", "uodcvip/colmap", out_dir_docker)
# colmap_container = UtilsContainers("singularity", dir_path + "/colmap.sif", out_dir)
colmap = Colmap(colmap_container) colmap = Colmap(colmap_container)
if chunk.node.ignoreTwoView.value: if chunk.node.ignoreTwoView.value:
colmap.mapper("/data/database.db", "/data", "/data") colmap.mapper("/data/database.db", "/data", "/data")
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment